From feed1808f25b98b45d7a9d87e6f44a312eb2c4a1 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Mon, 1 Sep 2025 02:49:39 +0000 Subject: [PATCH 01/77] Fix SPDX check --- dstack-util/tests/fixtures/luks_header_cipher_null.license | 3 +++ dstack-util/tests/fixtures/luks_header_good.license | 3 +++ 2 files changed, 6 insertions(+) create mode 100644 dstack-util/tests/fixtures/luks_header_cipher_null.license create mode 100644 dstack-util/tests/fixtures/luks_header_good.license diff --git a/dstack-util/tests/fixtures/luks_header_cipher_null.license b/dstack-util/tests/fixtures/luks_header_cipher_null.license new file mode 100644 index 00000000..8f861f09 --- /dev/null +++ b/dstack-util/tests/fixtures/luks_header_cipher_null.license @@ -0,0 +1,3 @@ +SPDX-FileCopyrightText: © 2025 Phala Network + +SPDX-License-Identifier: Apache-2.0 \ No newline at end of file diff --git a/dstack-util/tests/fixtures/luks_header_good.license b/dstack-util/tests/fixtures/luks_header_good.license new file mode 100644 index 00000000..8f861f09 --- /dev/null +++ b/dstack-util/tests/fixtures/luks_header_good.license @@ -0,0 +1,3 @@ +SPDX-FileCopyrightText: © 2025 Phala Network + +SPDX-License-Identifier: Apache-2.0 \ No newline at end of file From 3e4e462cac2a57c204698d2443d252d13e75cd29 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Mon, 1 Sep 2025 02:52:04 +0000 Subject: [PATCH 02/77] Update .gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 42567eed..8d90929a 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,4 @@ node_modules/ /.cargo .venv /tmp +.claude From ee04ffae247e65fe4a22b9a68aa12575db9af1af Mon Sep 17 00:00:00 2001 From: Hang Yin Date: Mon, 1 Sep 2025 08:15:54 +0000 Subject: [PATCH 03/77] fix: reuse lint --- REUSE.toml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/REUSE.toml b/REUSE.toml index e48336ed..abf4a08c 100644 --- a/REUSE.toml +++ b/REUSE.toml @@ -157,3 +157,8 @@ SPDX-License-Identifier = "CC0-1.0" path = "guest-api/src/generated/*" SPDX-FileCopyrightText = "NONE" SPDX-License-Identifier = "CC0-1.0" + +[[annotations]] +path = "dstack-util/tests/fixtures/*" +SPDX-FileCopyrightText = "NONE" +SPDX-License-Identifier = "CC0-1.0" From c769b2d3b7aae92e58c018979df8ffedc137cad4 Mon Sep 17 00:00:00 2001 From: Leechael Yim Date: Tue, 2 Sep 2025 19:41:12 +0800 Subject: [PATCH 04/77] imp(sdk/js): typing for TcbInfo --- sdk/js/src/index.ts | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/sdk/js/src/index.ts b/sdk/js/src/index.ts index 4448c066..d30d295f 100644 --- a/sdk/js/src/index.ts +++ b/sdk/js/src/index.ts @@ -44,14 +44,26 @@ export interface TcbInfo { rtmr1: string rtmr2: string rtmr3: string + app_compose: string event_log: EventLog[] } -export interface InfoResponse { +export type TcbInfoV03x = TcbInfo & { + rootfs_hash: string +} + +export type TcbInfoV05x = TcbInfo & { + mr_aggregated: string + os_image_hash: string + compose_hash: string + device_id: string +} + +export interface InfoResponse { app_id: string instance_id: string app_cert: string - tcb_info: TcbInfo + tcb_info: VersionTcbInfo app_name: string device_id: string os_image_hash?: string // Optional: empty if OS image is not measured by KMS @@ -132,7 +144,7 @@ export interface TlsKeyOptions { usageClientAuth?: boolean; } -export class DstackClient { +export class DstackClient { protected endpoint: string constructor(endpoint: string | undefined = undefined) { @@ -210,11 +222,11 @@ export class DstackClient { return 
Object.freeze(result) } - async info(): Promise { - const result = await send_rpc_request & { tcb_info: string }>(this.endpoint, '/Info', '{}') + async info(): Promise> { + const result = await send_rpc_request, 'tcb_info'> & { tcb_info: string }>(this.endpoint, '/Info', '{}') return Object.freeze({ ...result, - tcb_info: JSON.parse(result.tcb_info) as TcbInfo, + tcb_info: JSON.parse(result.tcb_info) as T, }) } @@ -283,7 +295,7 @@ export class DstackClient { } } -export class TappdClient extends DstackClient { +export class TappdClient extends DstackClient { constructor(endpoint: string | undefined = undefined) { if (endpoint === undefined) { if (process.env.TAPPD_SIMULATOR_ENDPOINT) { From caddb8a494b06a2171f1f46f7a94cce0dc724856 Mon Sep 17 00:00:00 2001 From: Leechael Yim Date: Tue, 2 Sep 2025 19:56:07 +0800 Subject: [PATCH 05/77] imp(sdk/python): Schema for TcbInfo --- sdk/python/src/dstack_sdk/dstack_client.py | 57 ++++++++++++++++++---- 1 file changed, 47 insertions(+), 10 deletions(-) diff --git a/sdk/python/src/dstack_sdk/dstack_client.py b/sdk/python/src/dstack_sdk/dstack_client.py index 78f34844..31411fe2 100644 --- a/sdk/python/src/dstack_sdk/dstack_client.py +++ b/sdk/python/src/dstack_sdk/dstack_client.py @@ -11,8 +11,10 @@ import os from typing import Any from typing import Dict +from typing import Generic from typing import List from typing import Optional +from typing import TypeVar from typing import cast import warnings @@ -157,23 +159,41 @@ class EventLog(BaseModel): class TcbInfo(BaseModel): + """Base TCB (Trusted Computing Base) information structure.""" + mrtd: str rtmr0: str rtmr1: str rtmr2: str rtmr3: str - os_image_hash: str = "" - compose_hash: str - device_id: str app_compose: str event_log: List[EventLog] -class InfoResponse(BaseModel): +class TcbInfoV03x(TcbInfo): + """TCB information for dstack OS version 0.3.x.""" + + rootfs_hash: str + + +class TcbInfoV05x(TcbInfo): + """TCB information for dstack OS version 0.5.x.""" + + mr_aggregated: str + os_image_hash: str + compose_hash: str + device_id: str + + +# Type variable for TCB info versions +T = TypeVar("T", bound=TcbInfo) + + +class InfoResponse(BaseModel, Generic[T]): app_id: str instance_id: str app_cert: str - tcb_info: TcbInfo + tcb_info: T app_name: str device_id: str os_image_hash: str = "" @@ -181,14 +201,21 @@ class InfoResponse(BaseModel): compose_hash: str @classmethod - def parse_response(cls, obj: Any) -> "InfoResponse": + def parse_response(cls, obj: Any, tcb_info_type: type[T]) -> "InfoResponse[T]": + """Parse response from service, automatically deserializing tcb_info. 
+ + Args: + obj: Raw response object from service + tcb_info_type: The specific TcbInfo subclass to use for parsing + + """ if ( isinstance(obj, dict) and "tcb_info" in obj and isinstance(obj["tcb_info"], str) ): obj = dict(obj) - obj["tcb_info"] = TcbInfo(**json.loads(obj["tcb_info"])) + obj["tcb_info"] = tcb_info_type(**json.loads(obj["tcb_info"])) return cls(**obj) @@ -311,10 +338,10 @@ async def get_quote( result = await self._send_rpc_request("GetQuote", {"report_data": hex}) return GetQuoteResponse(**result) - async def info(self) -> InfoResponse: + async def info(self) -> InfoResponse[TcbInfo]: """Fetch service information including parsed TCB info.""" result = await self._send_rpc_request("Info", {}) - return InfoResponse.parse_response(result) + return InfoResponse.parse_response(result, TcbInfoV05x) async def emit_event( self, @@ -391,7 +418,7 @@ def get_quote( raise NotImplementedError @call_async - def info(self) -> InfoResponse: + def info(self) -> InfoResponse[TcbInfo]: """Fetch service information including parsed TCB info.""" raise NotImplementedError @@ -503,6 +530,11 @@ async def tdx_quote( return GetQuoteResponse(**result) + async def info(self) -> InfoResponse[TcbInfo]: + """Fetch service information including parsed TCB info.""" + result = await self._send_rpc_request("Info", {}) + return InfoResponse.parse_response(result, TcbInfoV03x) + class TappdClient(DstackClient): """Deprecated client kept for backward compatibility. @@ -537,6 +569,11 @@ def tdx_quote( """Use ``get_quote`` instead (deprecated).""" raise NotImplementedError + @call_async + def info(self) -> InfoResponse[TcbInfo]: + """Fetch service information including parsed TCB info.""" + raise NotImplementedError + @call_async def __enter__(self): raise NotImplementedError From 9e2f0609b68bc8019672bdd80661347e66e2a11e Mon Sep 17 00:00:00 2001 From: Leechael Yim Date: Tue, 2 Sep 2025 21:14:50 +0800 Subject: [PATCH 06/77] chore(sdks): bump versions. 
--- sdk/js/package.json | 2 +- sdk/python/pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/js/package.json b/sdk/js/package.json index 7a25c9d8..ce7f6a2b 100644 --- a/sdk/js/package.json +++ b/sdk/js/package.json @@ -1,6 +1,6 @@ { "name": "@phala/dstack-sdk", - "version": "0.5.4", + "version": "0.5.5", "description": "dstack SDK", "main": "dist/node/index.js", "types": "dist/node/index.d.ts", diff --git a/sdk/python/pyproject.toml b/sdk/python/pyproject.toml index 1e1c9df5..266c1ac2 100644 --- a/sdk/python/pyproject.toml +++ b/sdk/python/pyproject.toml @@ -4,7 +4,7 @@ [project] name = "dstack-sdk" -version = "0.5.0" +version = "0.5.1" description = "dstack SDK for Python" authors = [ {name = "Leechael Yim", email = "yanleech@gmail.com"}, From 26609ec5b1ebd64ed3d6b1fe5f713cdef0af6b08 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 3 Sep 2025 00:18:28 +0000 Subject: [PATCH 07/77] Add dstack-sdk-type version in Cargo.toml --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index baa7012a..55f8cef2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -57,7 +57,7 @@ resolver = "2" # Internal dependencies ra-rpc = { path = "ra-rpc", default-features = false } ra-tls = { path = "ra-tls" } -dstack-sdk-types = { path = "sdk/rust/types", default-features = false } +dstack-sdk-types = { path = "sdk/rust/types", version = "0.1.0", default-features = false } dstack-gateway-rpc = { path = "gateway/rpc" } dstack-kms-rpc = { path = "kms/rpc" } dstack-guest-agent-rpc = { path = "guest-agent/rpc" } From 09e9c408d78fd08b969449257df557d548deb727 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Tue, 2 Sep 2025 15:24:57 +0000 Subject: [PATCH 08/77] ra-tls: Add KeyCertSign and CrlSign usages for CA cert --- ra-tls/src/cert.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ra-tls/src/cert.rs b/ra-tls/src/cert.rs index 30939ec0..02dd1dc1 100644 --- a/ra-tls/src/cert.rs +++ b/ra-tls/src/cert.rs @@ -258,6 +258,8 @@ impl CertRequest<'_, Key> { } if let Some(ca_level) = self.ca_level { params.is_ca = IsCa::Ca(BasicConstraints::Constrained(ca_level)); + params.key_usages.push(KeyUsagePurpose::KeyCertSign); + params.key_usages.push(KeyUsagePurpose::CrlSign); } if let Some(not_before) = self.not_before { params.not_before = not_before.into(); From 720f8529a5055f064b4eb75c7595601868aca1f9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Sep 2025 15:38:03 +0000 Subject: [PATCH 09/77] build(deps): bump hono from 4.8.5 to 4.9.6 in /kms/auth-eth-bun Bumps [hono](https://github.com/honojs/hono) from 4.8.5 to 4.9.6. - [Release notes](https://github.com/honojs/hono/releases) - [Commits](https://github.com/honojs/hono/compare/v4.8.5...v4.9.6) --- updated-dependencies: - dependency-name: hono dependency-version: 4.9.6 dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- kms/auth-eth-bun/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kms/auth-eth-bun/package.json b/kms/auth-eth-bun/package.json index 8b7ca5c3..e027a11c 100644 --- a/kms/auth-eth-bun/package.json +++ b/kms/auth-eth-bun/package.json @@ -15,7 +15,7 @@ "check": "bun run lint && bun run test:run" }, "dependencies": { - "hono": "4.8.5", + "hono": "4.9.6", "@hono/zod-validator": "0.2.2", "zod": "3.25.76", "viem": "2.31.7" From f6f98d2a50daddbbb66f1fb1199479d31be236a9 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Fri, 5 Sep 2025 01:25:46 +0000 Subject: [PATCH 10/77] Fix gateway dockerfile --- gateway/dstack-app/Dockerfile | 53 --------------------------- gateway/dstack-app/builder/Dockerfile | 3 +- 2 files changed, 2 insertions(+), 54 deletions(-) delete mode 100644 gateway/dstack-app/Dockerfile diff --git a/gateway/dstack-app/Dockerfile b/gateway/dstack-app/Dockerfile deleted file mode 100644 index 90aa8406..00000000 --- a/gateway/dstack-app/Dockerfile +++ /dev/null @@ -1,53 +0,0 @@ -# SPDX-FileCopyrightText: © 2025 Phala Network -# -# SPDX-License-Identifier: Apache-2.0 - -FROM rust:1.86.0@sha256:300ec56abce8cc9448ddea2172747d048ed902a3090e6b57babb2bf19f754081 AS gateway-builder -ARG DSTACK_REV -WORKDIR /src - -# Install build dependencies -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - git \ - build-essential \ - libssl-dev \ - protobuf-compiler \ - libprotobuf-dev \ - libclang-dev \ - && rm -rf /var/lib/apt/lists/* - -# Clone and checkout specific revision -RUN git clone https://github.com/Dstack-TEE/dstack.git && \ - cd dstack && \ - git checkout ${DSTACK_REV} - -# Build the gateway binary -WORKDIR /src/dstack -RUN cargo build --release -p dstack-gateway - -# Runtime stage -FROM debian:bookworm@sha256:ced9eb5eca0a3ba2e29d0045513863b3baaee71cd8c2eed403c9f7d3eaccfd2b -WORKDIR /app - -# Install runtime dependencies -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - wireguard-tools \ - iproute2 \ - jq \ - && rm -rf /var/lib/apt/lists/* - -# Copy the built binary -COPY --from=gateway-builder /src/dstack/target/release/dstack-gateway /usr/local/bin/dstack-gateway - -# Copy entrypoint script -COPY entrypoint.sh /app/entrypoint.sh -RUN chmod +x /app/entrypoint.sh - -# Store git revision for reproducibility -ARG DSTACK_REV -RUN echo "${DSTACK_REV}" > /etc/.GIT_REV - -ENTRYPOINT ["/app/entrypoint.sh"] -CMD ["dstack-gateway"] diff --git a/gateway/dstack-app/builder/Dockerfile b/gateway/dstack-app/builder/Dockerfile index 3e5b9c06..ba5bedb0 100644 --- a/gateway/dstack-app/builder/Dockerfile +++ b/gateway/dstack-app/builder/Dockerfile @@ -34,6 +34,7 @@ RUN ./pin-packages.sh ./pinned-packages.txt && \ wireguard-tools \ iproute2 \ jq \ + ca-certificates \ && rm -rf /var/lib/apt/lists/* /var/log/* /var/cache/ldconfig/aux-cache COPY --from=gateway-builder /build/dstack/target/x86_64-unknown-linux-musl/release/dstack-gateway /usr/local/bin/dstack-gateway COPY --from=gateway-builder /build/.GIT_REV /etc/ @@ -41,4 +42,4 @@ WORKDIR /app COPY entrypoint.sh /app/entrypoint.sh RUN chmod +x /app/entrypoint.sh ENTRYPOINT ["/app/entrypoint.sh"] -CMD ["dstack-gateway"] +CMD ["dstack-gateway", "-c", "/data/gateway/gateway.toml"] From f612be174dae53bd0efc051887e805db5b664d2a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 7 Sep 2025 13:18:29 +0000 Subject: [PATCH 11/77] build(deps): bump undici in /kms/auth-eth Bumps and 
[undici](https://github.com/nodejs/undici). These dependencies needed to be updated together. Updates `undici` from 5.28.4 to 5.29.0 - [Release notes](https://github.com/nodejs/undici/releases) - [Commits](https://github.com/nodejs/undici/compare/v5.28.4...v5.29.0) Updates `undici` from 6.21.1 to 6.21.3 - [Release notes](https://github.com/nodejs/undici/releases) - [Commits](https://github.com/nodejs/undici/compare/v5.28.4...v5.29.0) --- updated-dependencies: - dependency-name: undici dependency-version: 5.29.0 dependency-type: indirect - dependency-name: undici dependency-version: 6.21.3 dependency-type: indirect ... Signed-off-by: dependabot[bot] --- kms/auth-eth/package-lock.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/kms/auth-eth/package-lock.json b/kms/auth-eth/package-lock.json index 18003a43..6befa1d6 100644 --- a/kms/auth-eth/package-lock.json +++ b/kms/auth-eth/package-lock.json @@ -3218,9 +3218,9 @@ } }, "node_modules/@openzeppelin/hardhat-upgrades/node_modules/undici": { - "version": "6.21.1", - "resolved": "https://registry.npmjs.org/undici/-/undici-6.21.1.tgz", - "integrity": "sha512-q/1rj5D0/zayJB2FraXdaWxbhWiNKDvu8naDT2dl1yTlvJp4BLtOcp2a5BvgGNQpYYJzau7tf1WgKv3b+7mqpQ==", + "version": "6.21.3", + "resolved": "https://registry.npmjs.org/undici/-/undici-6.21.3.tgz", + "integrity": "sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw==", "dev": true, "license": "MIT", "engines": { @@ -13186,9 +13186,9 @@ } }, "node_modules/undici": { - "version": "5.28.4", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", - "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", + "version": "5.29.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", + "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", "dev": true, "license": "MIT", "dependencies": { From 58fa9d60ce40a874163c06876ebabb26a5b54bf2 Mon Sep 17 00:00:00 2001 From: Leechael Yim Date: Wed, 10 Sep 2025 23:32:35 +0800 Subject: [PATCH 12/77] imp: when formatting app_url, skip port if it's 443 --- vmm/src/app/qemu.rs | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/vmm/src/app/qemu.rs b/vmm/src/app/qemu.rs index b67bd8d0..b6924836 100644 --- a/vmm/src/app/qemu.rs +++ b/vmm/src/app/qemu.rs @@ -165,10 +165,14 @@ impl VmInfo { .then_some(self.instance_id.as_ref()) .flatten() .map(|id| { - format!( - "https://{id}-{}.{}:{}", - gw.agent_port, gw.base_domain, gw.port - ) + if gw.port == 443 { + format!("https://{id}-{}.{}", gw.agent_port, gw.base_domain) + } else { + format!( + "https://{id}-{}.{}:{}", + gw.agent_port, gw.base_domain, gw.port + ) + } }), app_id: self.manifest.app_id.clone(), instance_id: self.instance_id.as_deref().map(Into::into), From 3902b9ab6a2b33723ab9b447049c0671e35ab0e9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Sep 2025 15:35:13 +0000 Subject: [PATCH 13/77] build(deps): bump hono from 4.8.5 to 4.9.6 in /kms/auth-mock Bumps [hono](https://github.com/honojs/hono) from 4.8.5 to 4.9.6. - [Release notes](https://github.com/honojs/hono/releases) - [Commits](https://github.com/honojs/hono/compare/v4.8.5...v4.9.6) --- updated-dependencies: - dependency-name: hono dependency-version: 4.9.6 dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- kms/auth-mock/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kms/auth-mock/package.json b/kms/auth-mock/package.json index 6493ab83..a2c38999 100644 --- a/kms/auth-mock/package.json +++ b/kms/auth-mock/package.json @@ -15,7 +15,7 @@ "check": "bun run lint && bun run test:run" }, "dependencies": { - "hono": "4.8.5", + "hono": "4.9.6", "@hono/zod-validator": "0.2.2", "zod": "3.25.76" }, From 99361b4a6871766b8a64e528abffc82e2cc9a122 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 12 Sep 2025 21:45:22 +0000 Subject: [PATCH 14/77] build(deps): bump hono from 4.9.6 to 4.9.7 in /kms/auth-eth-bun Bumps [hono](https://github.com/honojs/hono) from 4.9.6 to 4.9.7. - [Release notes](https://github.com/honojs/hono/releases) - [Commits](https://github.com/honojs/hono/compare/v4.9.6...v4.9.7) --- updated-dependencies: - dependency-name: hono dependency-version: 4.9.7 dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- kms/auth-eth-bun/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kms/auth-eth-bun/package.json b/kms/auth-eth-bun/package.json index e027a11c..febd26eb 100644 --- a/kms/auth-eth-bun/package.json +++ b/kms/auth-eth-bun/package.json @@ -15,7 +15,7 @@ "check": "bun run lint && bun run test:run" }, "dependencies": { - "hono": "4.9.6", + "hono": "4.9.7", "@hono/zod-validator": "0.2.2", "zod": "3.25.76", "viem": "2.31.7" From 383596d703908ba8daf76914a0e378eab3228a6c Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Mon, 15 Sep 2025 06:01:29 +0000 Subject: [PATCH 15/77] Read qemu path from /etc/dstack/client.conf --- Cargo.lock | 24 ++++++++++++++++++++++++ Cargo.toml | 1 + vmm/Cargo.toml | 1 + vmm/src/config.rs | 41 +++++++++++++++++++++++++++++++++++++---- 4 files changed, 63 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b1c8f5da..32034056 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2473,6 +2473,7 @@ dependencies = [ "safe-write", "serde", "serde-human-bytes", + "serde_ini", "serde_json", "sha2 0.10.9", "shared_child", @@ -5693,6 +5694,12 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95325155c684b1c89f7765e30bc1c42e4a6da51ca513615660cb8a62ef9a88e3" +[[package]] +name = "result" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194d8e591e405d1eecf28819740abed6d719d1a2db87fc0bcdedee9a26d55560" + [[package]] name = "rfc6979" version = "0.4.0" @@ -6503,6 +6510,17 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "serde_ini" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb236687e2bb073a7521c021949be944641e671b8505a94069ca37b656c81139" +dependencies = [ + "result", + "serde", + "void", +] + [[package]] name = "serde_json" version = "1.0.142" @@ -7617,6 +7635,12 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "void" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" + [[package]] name = "vsock" version = "0.5.1" diff --git a/Cargo.toml b/Cargo.toml index 55f8cef2..ec871bee 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -115,6 +115,7 @@ scale = { version = "3.7.4", package = "parity-scale-codec", 
features = [ serde = { version = "1.0.219", features = ["derive"], default-features = false } serde-human-bytes = "0.1.0" serde_json = { version = "1.0.140", default-features = false } +serde_ini = "0.2.0" toml = "0.8.20" toml_edit = { version = "0.22.24", features = ["serde"] } yasna = "0.5.2" diff --git a/vmm/Cargo.toml b/vmm/Cargo.toml index ccdeff85..31e5851f 100644 --- a/vmm/Cargo.toml +++ b/vmm/Cargo.toml @@ -32,6 +32,7 @@ tailf.workspace = true tokio = { workspace = true, features = ["full"] } git-version.workspace = true rocket-apitoken.workspace = true +serde_ini.workspace = true supervisor-client.workspace = true ra-rpc = { workspace = true, features = ["client", "rocket"] } diff --git a/vmm/src/config.rs b/vmm/src/config.rs index 40b06d81..d49312a4 100644 --- a/vmm/src/config.rs +++ b/vmm/src/config.rs @@ -310,6 +310,32 @@ pub struct KeyProviderConfig { pub port: u16, } +const CLIENT_CONF_PATH: &str = "/etc/dstack/client.conf"; +fn read_qemu_path_from_client_conf() -> Option { + #[derive(Debug, Deserialize)] + struct ClientQemuSection { + path: Option, + } + #[derive(Debug, Deserialize)] + struct ClientIniConfig { + qemu: Option, + } + + let raw = fs_err::read_to_string(CLIENT_CONF_PATH).ok()?; + let parsed: ClientIniConfig = serde_ini::from_str(&raw).ok()?; + let path = parsed.qemu?.path?; + let path = path.trim().trim_matches('"').trim_matches('\''); + if path.is_empty() { + return None; + } + let path = PathBuf::from(path); + if path.exists() { + Some(path) + } else { + None + } +} + impl Config { pub fn extract_or_default(figment: &Figment) -> Result { let mut me: Self = figment.extract()?; @@ -323,11 +349,18 @@ impl Config { me.run_path = app_home.join("vm"); } if me.cvm.qemu_path == PathBuf::default() { - let cpu_arch = std::env::consts::ARCH; - let qemu_path = which::which(format!("qemu-system-{}", cpu_arch)) - .context("Failed to find qemu executable")?; - me.cvm.qemu_path = qemu_path; + // Prefer the path from dstack client config if present + if let Some(qemu_path) = read_qemu_path_from_client_conf() { + info!("Found QEMU path from client config: {CLIENT_CONF_PATH:?}"); + me.cvm.qemu_path = qemu_path; + } else { + let cpu_arch = std::env::consts::ARCH; + let qemu_path = which::which(format!("qemu-system-{}", cpu_arch)) + .context("Failed to find qemu executable")?; + me.cvm.qemu_path = qemu_path; + } } + info!("QEMU path: {}", me.cvm.qemu_path.display()); } Ok(me) } From ba4eb54b7e7d6eaf1a1f0d3338c57e2694112479 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Mon, 15 Sep 2025 15:43:27 +0800 Subject: [PATCH 16/77] attestation.md: no rootfs hash in RTMR3 --- attestation.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/attestation.md b/attestation.md index af7e7837..ddafc835 100644 --- a/attestation.md +++ b/attestation.md @@ -25,7 +25,7 @@ The MR register values indicate the following: - RTMR0: OVMF records CVM's virtual hardware setup, including CPU count, memory size, and device configuration. While dstack uses fixed devices, CPU and memory specifications can vary. RTMR0 can be computed from these specifications. - RTMR1: OVMF records the Linux kernel measurement. - RTMR2: Linux kernel records kernel cmdline (including rootfs hash) and initrd measurements. - - RTMR3: initrd records dstack App details, including compose hash, instance id, app id, rootfs hash, and key provider. + - RTMR3: initrd records dstack App details, including compose hash, instance id, app id, and key provider. 
MRTD, RTMR0, RTMR1, and RTMR2 can be pre-calculated from the built image (given CPU+RAM specifications). Compare these with the verified quote's MRs to confirm correct base image code execution. From f7e5259cfc5bf5f11c40e2ab7964e6d1f3387285 Mon Sep 17 00:00:00 2001 From: Daniel Sharifi Date: Tue, 16 Sep 2025 21:24:51 +0000 Subject: [PATCH 17/77] bump alloy version --- Cargo.lock | 96 +++++++++++++++++++++++++++++++++++------------------- Cargo.toml | 2 +- 2 files changed, 64 insertions(+), 34 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 32034056..983d5359 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -93,9 +93,9 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "alloy" -version = "0.15.11" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b064bd1cea105e70557a258cd2b317731896753ec08edf51da2d1fced587b05" +checksum = "36f63701831729cb154cf0b6945256af46c426074646c98b9d123148ba1d8bde" dependencies = [ "alloy-core", "alloy-signer", @@ -104,15 +104,16 @@ dependencies = [ [[package]] name = "alloy-consensus" -version = "0.15.11" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32c3f3bc4f2a6b725970cd354e78e9738ea1e8961a91898f57bf6317970b1915" +checksum = "64a3bd0305a44fb457cae77de1e82856eadd42ea3cdf0dae29df32eb3b592979" dependencies = [ "alloy-eips", "alloy-primitives", "alloy-rlp", "alloy-serde", "alloy-trie", + "alloy-tx-macros", "auto_impl", "c-kzg", "derive_more 2.0.1", @@ -128,9 +129,9 @@ dependencies = [ [[package]] name = "alloy-consensus-any" -version = "0.15.11" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dda014fb5591b8d8d24cab30f52690117d238e52254c6fb40658e91ea2ccd6c3" +checksum = "7a842b4023f571835e62ac39fb8d523d19fcdbacfa70bf796ff96e7e19586f50" dependencies = [ "alloy-consensus", "alloy-eips", @@ -187,9 +188,9 @@ dependencies = [ [[package]] name = "alloy-eips" -version = "0.15.11" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f7b2f7010581f29bcace81776cf2f0e022008d05a7d326884763f16f3044620" +checksum = "5cd749c57f38f8cbf433e651179fc5a676255e6b95044f467d49255d2b81725a" dependencies = [ "alloy-eip2124", "alloy-eip2930", @@ -202,7 +203,9 @@ dependencies = [ "derive_more 2.0.1", "either", "serde", + "serde_with", "sha2 0.10.9", + "thiserror 2.0.15", ] [[package]] @@ -219,12 +222,13 @@ dependencies = [ [[package]] name = "alloy-json-rpc" -version = "0.15.11" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca1e31b50f4ed9a83689ae97263d366b15b935a67c4acb5dd46d5b1c3b27e8e6" +checksum = "f614019a029c8fec14ae661aa7d4302e6e66bdbfb869dab40e78dcfba935fc97" dependencies = [ "alloy-primitives", "alloy-sol-types", + "http", "serde", "serde_json", "thiserror 2.0.15", @@ -233,9 +237,9 @@ dependencies = [ [[package]] name = "alloy-network" -version = "0.15.11" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879afc0f4a528908c8fe6935b2ab0bc07f77221a989186f71583f7592831689e" +checksum = "be8b6d58e98803017bbfea01dde96c4d270a29e7aed3beb65c8d28b5ab464e0e" dependencies = [ "alloy-consensus", "alloy-consensus-any", @@ -259,9 +263,9 @@ dependencies = [ [[package]] name = "alloy-network-primitives" -version = "0.15.11" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ec185bac9d32df79c1132558a450d48f6db0bfb5adef417dbb1a0258153f879b" +checksum = "db489617bffe14847bf89f175b1c183e5dd7563ef84713936e2c34255cfbd845" dependencies = [ "alloy-consensus", "alloy-eips", @@ -321,9 +325,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-any" -version = "0.15.11" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5a8f1efd77116915dad61092f9ef9295accd0b0b251062390d9c4e81599344" +checksum = "18f27c0c41a16cd0af4f5dbf791f7be2a60502ca8b0e840e0ad29803fac2d587" dependencies = [ "alloy-consensus-any", "alloy-rpc-types-eth", @@ -332,9 +336,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-eth" -version = "0.15.11" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc1323310d87f9d950fb3ff58d943fdf832f5e10e6f902f405c0eaa954ffbaf1" +checksum = "7f5812f81c3131abc2cd8953dc03c41999e180cff7252abbccaba68676e15027" dependencies = [ "alloy-consensus", "alloy-consensus-any", @@ -347,14 +351,15 @@ dependencies = [ "itertools 0.14.0", "serde", "serde_json", + "serde_with", "thiserror 2.0.15", ] [[package]] name = "alloy-serde" -version = "0.15.11" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d05ace2ef3da874544c3ffacfd73261cdb1405d8631765deb991436a53ec6069" +checksum = "04dfe41a47805a34b848c83448946ca96f3d36842e8c074bcf8fa0870e337d12" dependencies = [ "alloy-primitives", "serde", @@ -363,9 +368,9 @@ dependencies = [ [[package]] name = "alloy-signer" -version = "0.15.11" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67fdabad99ad3c71384867374c60bcd311fc1bb90ea87f5f9c779fd8c7ec36aa" +checksum = "f79237b4c1b0934d5869deea4a54e6f0a7425a8cd943a739d6293afdf893d847" dependencies = [ "alloy-primitives", "async-trait", @@ -378,9 +383,9 @@ dependencies = [ [[package]] name = "alloy-signer-local" -version = "0.15.11" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acb3f4e72378566b189624d54618c8adf07afbcf39d5f368f4486e35a66725b3" +checksum = "d6e90a3858da59d1941f496c17db8d505f643260f7e97cdcdd33823ddca48fc1" dependencies = [ "alloy-consensus", "alloy-network", @@ -464,9 +469,9 @@ dependencies = [ [[package]] name = "alloy-trie" -version = "0.8.1" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "983d99aa81f586cef9dae38443245e585840fcf0fc58b09aee0b1f27aed1d500" +checksum = "e3412d52bb97c6c6cc27ccc28d4e6e8cf605469101193b50b0bd5813b1f990b5" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -478,6 +483,19 @@ dependencies = [ "tracing", ] +[[package]] +name = "alloy-tx-macros" +version = "1.0.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e434e0917dce890f755ea774f59d6f12557bc8c7dd9fa06456af80cfe0f0181e" +dependencies = [ + "alloy-primitives", + "darling 0.21.2", + "proc-macro2", + "quote", + "syn 2.0.106", +] + [[package]] name = "android-tzdata" version = "0.1.1" @@ -1816,6 +1834,7 @@ dependencies = [ "ident_case", "proc-macro2", "quote", + "serde", "strsim", "syn 2.0.106", ] @@ -4539,13 +4558,14 @@ dependencies = [ [[package]] name = "nybbles" -version = "0.3.4" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8983bb634df7248924ee0c4c3a749609b5abcb082c28fffe3254b3eb3602b307" +checksum = "f0418987d1aaed324d95b4beffc93635e19be965ed5d63ec07a35980fe3b71a4" dependencies = [ "alloy-rlp", - "const-hex", + "cfg-if", 
"proptest", + "ruint", "serde", "smallvec", ] @@ -6457,10 +6477,11 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.219" +version = "1.0.225" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +checksum = "fd6c24dee235d0da097043389623fb913daddf92c76e9f5a1db88607a0bcbd1d" dependencies = [ + "serde_core", "serde_derive", ] @@ -6499,11 +6520,20 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_core" +version = "1.0.225" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "659356f9a0cb1e529b24c01e43ad2bdf520ec4ceaf83047b83ddcc2251f96383" +dependencies = [ + "serde_derive", +] + [[package]] name = "serde_derive" -version = "1.0.219" +version = "1.0.225" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +checksum = "0ea936adf78b1f766949a4977b91d2f5595825bd6ec079aa9543ad2685fc4516" dependencies = [ "proc-macro2", "quote", diff --git a/Cargo.toml b/Cargo.toml index ec871bee..08c1cfd0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -172,7 +172,7 @@ k256 = "0.13.4" xsalsa20poly1305 = "0.9.0" salsa20 = "0.10" rand_core = "0.6.4" -alloy = { version = "0.15", default-features = false } +alloy = { version = "1.0.32", default-features = false } # Certificate/DNS hickory-resolver = "0.24.4" From 5e9a551bc27d3b166f2e1c144d9914b04812d2e7 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 17 Sep 2025 07:14:24 +0000 Subject: [PATCH 18/77] rust-sdk v0.1.1 --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- sdk/rust/Cargo.toml | 2 +- sdk/rust/types/Cargo.toml | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 983d5359..df58f866 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2372,7 +2372,7 @@ dependencies = [ [[package]] name = "dstack-sdk" -version = "0.1.0" +version = "0.1.1" dependencies = [ "alloy", "anyhow", @@ -2392,7 +2392,7 @@ dependencies = [ [[package]] name = "dstack-sdk-types" -version = "0.1.0" +version = "0.1.1" dependencies = [ "anyhow", "bon", diff --git a/Cargo.toml b/Cargo.toml index 08c1cfd0..074bc454 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -57,7 +57,7 @@ resolver = "2" # Internal dependencies ra-rpc = { path = "ra-rpc", default-features = false } ra-tls = { path = "ra-tls" } -dstack-sdk-types = { path = "sdk/rust/types", version = "0.1.0", default-features = false } +dstack-sdk-types = { path = "sdk/rust/types", version = "0.1.1", default-features = false } dstack-gateway-rpc = { path = "gateway/rpc" } dstack-kms-rpc = { path = "kms/rpc" } dstack-guest-agent-rpc = { path = "guest-agent/rpc" } diff --git a/sdk/rust/Cargo.toml b/sdk/rust/Cargo.toml index 961af1e6..ba0671e2 100644 --- a/sdk/rust/Cargo.toml +++ b/sdk/rust/Cargo.toml @@ -7,7 +7,7 @@ [package] name = "dstack-sdk" -version = "0.1.0" +version = "0.1.1" edition = "2021" license = "MIT" description = "This crate provides a rust client for communicating with dstack" diff --git a/sdk/rust/types/Cargo.toml b/sdk/rust/types/Cargo.toml index 1fd355ed..a22713c2 100644 --- a/sdk/rust/types/Cargo.toml +++ b/sdk/rust/types/Cargo.toml @@ -5,7 +5,7 @@ [package] name = "dstack-sdk-types" -version = "0.1.0" +version = "0.1.1" edition = "2021" license = "MIT" description = "This crate provides rust types for communication with dstack" From 69785db1fe4cade2027baaad715740c9e7d98ea8 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 17 Sep 2025 15:08:26 +0000 Subject: [PATCH 19/77] 
Add init_script in app-compose.json

---
 basefiles/dstack-prepare.sh           | 8 ++++++++
 docs/security-guide/cvm-boundaries.md | 4 +++-
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/basefiles/dstack-prepare.sh b/basefiles/dstack-prepare.sh
index cd64f79e..dfa92b9b 100755
--- a/basefiles/dstack-prepare.sh
+++ b/basefiles/dstack-prepare.sh
@@ -45,3 +45,11 @@ mkdir -p $DATA_MNT/var/lib/docker
 mount --rbind $DATA_MNT/var/lib/docker /var/lib/docker
 mount --rbind $WORK_DIR /dstack
 mount_overlay /etc/users $OVERLAY_PERSIST
+
+cd /dstack
+
+if [ $(jq 'has("init_script")' app-compose.json) == true ]; then
+    echo "Running init script"
+    dstack-util notify-host -e "boot.progress" -d "init-script" || true
+    source <(jq -r '.init_script' app-compose.json)
+fi
diff --git a/docs/security-guide/cvm-boundaries.md b/docs/security-guide/cvm-boundaries.md
index 433dee40..1095e5da 100644
--- a/docs/security-guide/cvm-boundaries.md
+++ b/docs/security-guide/cvm-boundaries.md
@@ -39,7 +39,9 @@ This is the main configuration file for the application in JSON format:
 | allowed_envs | array of string | List of allowed environment variable names |
 | no_instance_id | boolean | Disable instance ID generation |
 | secure_time | boolean | Whether secure time is enabled |
-| pre_launch_script | string | Prelaunch bash script that runs before starting containers |
+| pre_launch_script | string | Prelaunch bash script that runs before executing `docker compose up` |
+| init_script | string | Bash script that is executed prior to dockerd startup |
+
 
 The hash of this file content is extended to RTMR3 as event name `compose-hash`. Remote verifier can extract the compose-hash during remote attestation.

From d380a0295384d5dcfb155e56b1c4608df5052f70 Mon Sep 17 00:00:00 2001
From: Kevin Wang
Date: Fri, 19 Sep 2025 10:48:05 +0000
Subject: [PATCH 20/77] Revert the cert subject changes

Part of fccd83de272ced9a8d5a4b9cead2d8ce8e9e7764
---
 kms/src/main_service.rs    | 4 ++--
 kms/src/onboard_service.rs | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/kms/src/main_service.rs b/kms/src/main_service.rs
index f7e50b1d..8d8a49b9 100644
--- a/kms/src/main_service.rs
+++ b/kms/src/main_service.rs
@@ -481,8 +481,8 @@ impl RpcHandler {
             .context("Failed to derive app disk key")?;
         let req = CertRequest::builder()
             .key(&app_key)
-            .org_name("dstack")
-            .subject("dstack App CA")
+            .org_name("Dstack")
+            .subject("Dstack App CA")
             .ca_level(0)
             .app_id(app_id)
             .special_usage("app:ca")
diff --git a/kms/src/onboard_service.rs b/kms/src/onboard_service.rs
index a92c4cee..ffe38f16 100644
--- a/kms/src/onboard_service.rs
+++ b/kms/src/onboard_service.rs
@@ -128,8 +128,8 @@ impl Keys {
         quote_enabled: bool,
     ) -> Result {
         let tmp_ca_cert = CertRequest::builder()
-            .org_name("dstack")
-            .subject("dstack Client Temp CA")
+            .org_name("Dstack")
+            .subject("Dstack Client Temp CA")
             .ca_level(0)
             .key(&tmp_ca_key)
             .build()
@@ -137,8 +137,8 @@ impl Keys {
 
         // Create self-signed KMS cert
         let ca_cert = CertRequest::builder()
-            .org_name("dstack")
-            .subject("dstack KMS CA")
+            .org_name("Dstack")
+            .subject("Dstack KMS CA")
             .ca_level(1)
             .key(&ca_key)
             .build()

From 0da9839910bd1b292d5372be0039ee8e60756cfd Mon Sep 17 00:00:00 2001
From: Leechael Yim
Date: Fri, 19 Sep 2025 22:59:48 +0800
Subject: [PATCH 21/77] imp(sdk/python): increase the default timeout to 3 secs for Python SDK.
--- sdk/python/src/dstack_sdk/dstack_client.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/sdk/python/src/dstack_sdk/dstack_client.py b/sdk/python/src/dstack_sdk/dstack_client.py index 31411fe2..7f23f11f 100644 --- a/sdk/python/src/dstack_sdk/dstack_client.py +++ b/sdk/python/src/dstack_sdk/dstack_client.py @@ -226,7 +226,7 @@ class BaseClient: class AsyncDstackClient(BaseClient): PATH_PREFIX = "/" - def __init__(self, endpoint: str | None = None, use_sync_http: bool = False): + def __init__(self, endpoint: str | None = None, *, use_sync_http: bool = False, timeout: float = 3): """Initialize async client with HTTP or Unix-socket transport. Args: @@ -239,6 +239,7 @@ def __init__(self, endpoint: str | None = None, use_sync_http: bool = False): self._client: Optional[httpx.AsyncClient] = None self._sync_client: Optional[httpx.Client] = None self._client_ref_count = 0 + self._timeout = timeout if endpoint.startswith("http://") or endpoint.startswith("https://"): self.async_transport = httpx.AsyncHTTPTransport() @@ -255,14 +256,14 @@ def __init__(self, endpoint: str | None = None, use_sync_http: bool = False): def _get_client(self) -> httpx.AsyncClient: if self._client is None: self._client = httpx.AsyncClient( - transport=self.async_transport, base_url=self.base_url, timeout=0.5 + transport=self.async_transport, base_url=self.base_url, timeout=self._timeout ) return self._client def _get_sync_client(self) -> httpx.Client: if self._sync_client is None: self._sync_client = httpx.Client( - transport=self.sync_transport, base_url=self.base_url, timeout=0.5 + transport=self.sync_transport, base_url=self.base_url, timeout=self._timeout ) return self._sync_client @@ -392,13 +393,13 @@ async def is_reachable(self) -> bool: class DstackClient(BaseClient): PATH_PREFIX = "/" - def __init__(self, endpoint: str | None = None): + def __init__(self, endpoint: str | None = None, *, timeout: float = 3): """Initialize client with HTTP or Unix-socket transport. If a non-HTTP(S) endpoint is provided, it is treated as a Unix socket path and validated for existence. """ - self.async_client = AsyncDstackClient(endpoint, use_sync_http=True) + self.async_client = AsyncDstackClient(endpoint, use_sync_http=True, timeout=timeout) @call_async def get_key( @@ -463,7 +464,7 @@ class AsyncTappdClient(AsyncDstackClient): DEPRECATED: Use ``AsyncDstackClient`` instead. """ - def __init__(self, endpoint: str | None = None, use_sync_http: bool = False): + def __init__(self, endpoint: str | None = None, *, use_sync_http: bool = False, timeout: float = 3): """Initialize deprecated async tappd client wrapper.""" if not use_sync_http: # Already warned in TappdClient.__init__ @@ -472,7 +473,7 @@ def __init__(self, endpoint: str | None = None, use_sync_http: bool = False): ) endpoint = get_tappd_endpoint(endpoint) - super().__init__(endpoint, use_sync_http=use_sync_http) + super().__init__(endpoint, use_sync_http=use_sync_http, timeout=timeout) # Set the correct path prefix for tappd self.PATH_PREFIX = "/prpc/Tappd." @@ -542,13 +543,13 @@ class TappdClient(DstackClient): DEPRECATED: Use ``DstackClient`` instead. 
""" - def __init__(self, endpoint: str | None = None): + def __init__(self, endpoint: str | None = None, timeout: float = 3): """Initialize deprecated tappd client wrapper.""" emit_deprecation_warning( "TappdClient is deprecated, please use DstackClient instead" ) endpoint = get_tappd_endpoint(endpoint) - self.async_client = AsyncTappdClient(endpoint, use_sync_http=True) + self.async_client = AsyncTappdClient(endpoint, use_sync_http=True, timeout=timeout) @call_async def derive_key( From 3f56bc3914753430904f32d9c1bdde69753cdd23 Mon Sep 17 00:00:00 2001 From: Leechael Yim Date: Fri, 19 Sep 2025 23:01:50 +0800 Subject: [PATCH 22/77] fix(sdk): marked roofs_hash optional since it not returns from API anymore. --- sdk/js/src/index.ts | 2 +- sdk/python/src/dstack_sdk/dstack_client.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/js/src/index.ts b/sdk/js/src/index.ts index d30d295f..d08357ea 100644 --- a/sdk/js/src/index.ts +++ b/sdk/js/src/index.ts @@ -49,7 +49,7 @@ export interface TcbInfo { } export type TcbInfoV03x = TcbInfo & { - rootfs_hash: string + rootfs_hash?: string } export type TcbInfoV05x = TcbInfo & { diff --git a/sdk/python/src/dstack_sdk/dstack_client.py b/sdk/python/src/dstack_sdk/dstack_client.py index 7f23f11f..ff9cd596 100644 --- a/sdk/python/src/dstack_sdk/dstack_client.py +++ b/sdk/python/src/dstack_sdk/dstack_client.py @@ -173,7 +173,7 @@ class TcbInfo(BaseModel): class TcbInfoV03x(TcbInfo): """TCB information for dstack OS version 0.3.x.""" - rootfs_hash: str + rootfs_hash: Optional[str] = None class TcbInfoV05x(TcbInfo): From 70ce94f86d087c3245a07bbd14ef0eaa096adf46 Mon Sep 17 00:00:00 2001 From: Leechael Yim Date: Fri, 19 Sep 2025 23:24:19 +0800 Subject: [PATCH 23/77] chore(sdk): bump versions --- sdk/js/package.json | 2 +- sdk/js/src/send-rpc-request.ts | 2 +- sdk/python/pyproject.toml | 2 +- sdk/python/src/dstack_sdk/dstack_client.py | 35 +++++++++++++++++----- 4 files changed, 31 insertions(+), 10 deletions(-) diff --git a/sdk/js/package.json b/sdk/js/package.json index ce7f6a2b..e2e057de 100644 --- a/sdk/js/package.json +++ b/sdk/js/package.json @@ -1,6 +1,6 @@ { "name": "@phala/dstack-sdk", - "version": "0.5.5", + "version": "0.5.6", "description": "dstack SDK", "main": "dist/node/index.js", "types": "dist/node/index.d.ts", diff --git a/sdk/js/src/send-rpc-request.ts b/sdk/js/src/send-rpc-request.ts index 6c17c344..fa6837a1 100644 --- a/sdk/js/src/send-rpc-request.ts +++ b/sdk/js/src/send-rpc-request.ts @@ -6,7 +6,7 @@ import http from 'http' import https from 'https' import net from 'net' -export const __version__ = "0.5.0" +export const __version__ = "0.5.6" export function send_rpc_request(endpoint: string, path: string, payload: string, timeoutMs?: number): Promise { diff --git a/sdk/python/pyproject.toml b/sdk/python/pyproject.toml index 266c1ac2..96d1a0e8 100644 --- a/sdk/python/pyproject.toml +++ b/sdk/python/pyproject.toml @@ -4,7 +4,7 @@ [project] name = "dstack-sdk" -version = "0.5.1" +version = "0.5.2" description = "dstack SDK for Python" authors = [ {name = "Leechael Yim", email = "yanleech@gmail.com"}, diff --git a/sdk/python/src/dstack_sdk/dstack_client.py b/sdk/python/src/dstack_sdk/dstack_client.py index ff9cd596..463aab47 100644 --- a/sdk/python/src/dstack_sdk/dstack_client.py +++ b/sdk/python/src/dstack_sdk/dstack_client.py @@ -23,7 +23,7 @@ logger = logging.getLogger("dstack_sdk") -__version__ = "0.2.0" +__version__ = "0.5.2" INIT_MR = 
"000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" @@ -226,12 +226,19 @@ class BaseClient: class AsyncDstackClient(BaseClient): PATH_PREFIX = "/" - def __init__(self, endpoint: str | None = None, *, use_sync_http: bool = False, timeout: float = 3): + def __init__( + self, + endpoint: str | None = None, + *, + use_sync_http: bool = False, + timeout: float = 3, + ): """Initialize async client with HTTP or Unix-socket transport. Args: endpoint: HTTP/HTTPS URL or Unix socket path use_sync_http: If True, use sync HTTP client internally + timeout: Timeout in seconds """ endpoint = get_endpoint(endpoint) @@ -256,14 +263,18 @@ def __init__(self, endpoint: str | None = None, *, use_sync_http: bool = False, def _get_client(self) -> httpx.AsyncClient: if self._client is None: self._client = httpx.AsyncClient( - transport=self.async_transport, base_url=self.base_url, timeout=self._timeout + transport=self.async_transport, + base_url=self.base_url, + timeout=self._timeout, ) return self._client def _get_sync_client(self) -> httpx.Client: if self._sync_client is None: self._sync_client = httpx.Client( - transport=self.sync_transport, base_url=self.base_url, timeout=self._timeout + transport=self.sync_transport, + base_url=self.base_url, + timeout=self._timeout, ) return self._sync_client @@ -399,7 +410,9 @@ def __init__(self, endpoint: str | None = None, *, timeout: float = 3): If a non-HTTP(S) endpoint is provided, it is treated as a Unix socket path and validated for existence. """ - self.async_client = AsyncDstackClient(endpoint, use_sync_http=True, timeout=timeout) + self.async_client = AsyncDstackClient( + endpoint, use_sync_http=True, timeout=timeout + ) @call_async def get_key( @@ -464,7 +477,13 @@ class AsyncTappdClient(AsyncDstackClient): DEPRECATED: Use ``AsyncDstackClient`` instead. 
""" - def __init__(self, endpoint: str | None = None, *, use_sync_http: bool = False, timeout: float = 3): + def __init__( + self, + endpoint: str | None = None, + *, + use_sync_http: bool = False, + timeout: float = 3, + ): """Initialize deprecated async tappd client wrapper.""" if not use_sync_http: # Already warned in TappdClient.__init__ @@ -549,7 +568,9 @@ def __init__(self, endpoint: str | None = None, timeout: float = 3): "TappdClient is deprecated, please use DstackClient instead" ) endpoint = get_tappd_endpoint(endpoint) - self.async_client = AsyncTappdClient(endpoint, use_sync_http=True, timeout=timeout) + self.async_client = AsyncTappdClient( + endpoint, use_sync_http=True, timeout=timeout + ) @call_async def derive_key( From 8d3d7145755b3fc08330281c41218344eb629ebb Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Mon, 22 Sep 2025 03:33:56 +0000 Subject: [PATCH 24/77] dstack-mr: Add qemu_version in VmConfig --- dstack-mr/src/acpi.rs | 12 +++- dstack-mr/src/machine.rs | 54 +++++++++++++++- dstack-mr/src/tdvf.rs | 5 +- dstack-types/src/lib.rs | 7 +-- kms/src/main_service.rs | 5 +- vmm/src/app.rs | 1 + vmm/src/config.rs | 132 +++++++++++++++++++++++++++++++++++++-- vmm/src/one_shot.rs | 1 + vmm/vmm.toml | 5 +- 9 files changed, 205 insertions(+), 17 deletions(-) diff --git a/dstack-mr/src/acpi.rs b/dstack-mr/src/acpi.rs index b79a6301..b61337a1 100644 --- a/dstack-mr/src/acpi.rs +++ b/dstack-mr/src/acpi.rs @@ -85,7 +85,12 @@ impl Machine<'_> { } else { machine.push_str(",smm=off"); } - if self.pic { + + let vopt = self + .versioned_options() + .context("Failed to get versioned options")?; + + if vopt.pic { machine.push_str(",pic=on"); } else { machine.push_str(",pic=off"); @@ -148,8 +153,13 @@ impl Machine<'_> { debug!("qemu command: {cmd:?}"); + let ver = vopt.version; // Execute the command and capture output let output = cmd + .env( + "QEMU_ACPI_COMPAT_VER", + format!("{}.{}.{}", ver.0, ver.1, ver.2), + ) .output() .context("failed to execute dstack-acpi-tables")?; diff --git a/dstack-mr/src/machine.rs b/dstack-mr/src/machine.rs index 27a63a0f..2fe0f5c2 100644 --- a/dstack-mr/src/machine.rs +++ b/dstack-mr/src/machine.rs @@ -6,7 +6,7 @@ use crate::tdvf::Tdvf; use crate::util::debug_print_log; use crate::{kernel, TdxMeasurements}; use crate::{measure_log, measure_sha384}; -use anyhow::{Context, Result}; +use anyhow::{bail, Context, Result}; use fs_err as fs; use log::debug; @@ -18,8 +18,9 @@ pub struct Machine<'a> { pub kernel: &'a str, pub initrd: &'a str, pub kernel_cmdline: &'a str, - pub two_pass_add_pages: bool, - pub pic: bool, + pub two_pass_add_pages: Option, + pub pic: Option, + pub qemu_version: Option, #[builder(default = false)] pub smm: bool, pub pci_hole64_size: Option, @@ -30,6 +31,53 @@ pub struct Machine<'a> { pub root_verity: bool, } +fn parse_version_tuple(v: &str) -> Result<(u32, u32, u32)> { + let parts: Vec = v + .split('.') + .map(|p| p.parse::().context("Invalid version number")) + .collect::, _>>()?; + if parts.len() != 3 { + bail!( + "Version string must have exactly 3 parts (major.minor.patch), got {}", + parts.len() + ); + } + Ok((parts[0], parts[1], parts[2])) +} + +impl Machine<'_> { + pub fn versioned_options(&self) -> Result { + let version = match &self.qemu_version { + Some(v) => Some(parse_version_tuple(v).context("Failed to parse QEMU version")?), + None => None, + }; + let default_pic; + let default_two_pass; + let version = version.unwrap_or((9, 1, 0)); + if version < (8, 0, 0) { + bail!("Unsupported QEMU version: {version:?}"); + } + if ((8, 
0, 0)..(9, 0, 0)).contains(&version) { + default_pic = true; + default_two_pass = true; + } else { + default_pic = false; + default_two_pass = false; + }; + Ok(VersionedOptions { + version, + pic: self.pic.unwrap_or(default_pic), + two_pass_add_pages: self.two_pass_add_pages.unwrap_or(default_two_pass), + }) + } +} + +pub struct VersionedOptions { + pub version: (u32, u32, u32), + pub pic: bool, + pub two_pass_add_pages: bool, +} + impl Machine<'_> { pub fn measure(&self) -> Result { debug!("measuring machine: {self:#?}"); diff --git a/dstack-mr/src/tdvf.rs b/dstack-mr/src/tdvf.rs index e04edcb3..8f02487b 100644 --- a/dstack-mr/src/tdvf.rs +++ b/dstack-mr/src/tdvf.rs @@ -223,7 +223,10 @@ impl<'a> Tdvf<'a> { } pub fn mrtd(&self, machine: &Machine) -> Result> { - self.compute_mrtd(if machine.two_pass_add_pages { + let opts = machine + .versioned_options() + .context("Failed to get versioned options")?; + self.compute_mrtd(if opts.two_pass_add_pages { PageAddOrder::TwoPass } else { PageAddOrder::SinglePass diff --git a/dstack-types/src/lib.rs b/dstack-types/src/lib.rs index 15183248..145d7ba6 100644 --- a/dstack-types/src/lib.rs +++ b/dstack-types/src/lib.rs @@ -138,10 +138,9 @@ pub struct VmConfig { pub cpu_count: u32, pub memory_size: u64, // https://github.com/intel-staging/qemu-tdx/issues/1 - #[serde(default)] - pub qemu_single_pass_add_pages: bool, - #[serde(default)] - pub pic: bool, + pub qemu_single_pass_add_pages: Option, + pub pic: Option, + pub qemu_version: Option, #[serde(default)] pub pci_hole64_size: u64, #[serde(default)] diff --git a/kms/src/main_service.rs b/kms/src/main_service.rs index f7e50b1d..ff9881e8 100644 --- a/kms/src/main_service.rs +++ b/kms/src/main_service.rs @@ -268,8 +268,9 @@ impl RpcHandler { .kernel_cmdline(&kernel_cmdline) .root_verity(true) .hotplug_off(vm_config.hotplug_off) - .two_pass_add_pages(vm_config.qemu_single_pass_add_pages) - .pic(vm_config.pic) + .maybe_two_pass_add_pages(vm_config.qemu_single_pass_add_pages) + .maybe_pic(vm_config.pic) + .maybe_qemu_version(vm_config.qemu_version.clone()) .maybe_pci_hole64_size(if vm_config.pci_hole64_size > 0 { Some(vm_config.pci_hole64_size) } else { diff --git a/vmm/src/app.rs b/vmm/src/app.rs index 133f823a..a9b6077a 100644 --- a/vmm/src/app.rs +++ b/vmm/src/app.rs @@ -513,6 +513,7 @@ impl App { memory_size: manifest.memory as u64 * 1024 * 1024, qemu_single_pass_add_pages: cfg.cvm.qemu_single_pass_add_pages, pic: cfg.cvm.qemu_pic, + qemu_version: cfg.cvm.qemu_version.clone(), pci_hole64_size: cfg.cvm.qemu_pci_hole64_size, hugepages: manifest.hugepages, num_gpus: gpus.gpus.len() as u32, diff --git a/vmm/src/config.rs b/vmm/src/config.rs index d49312a4..863e30f9 100644 --- a/vmm/src/config.rs +++ b/vmm/src/config.rs @@ -2,7 +2,7 @@ // // SPDX-License-Identifier: Apache-2.0 -use std::{net::IpAddr, path::PathBuf, str::FromStr}; +use std::{net::IpAddr, path::PathBuf, process::Command, str::FromStr}; use anyhow::{bail, Context, Result}; use load_config::load_config; @@ -11,9 +11,69 @@ use rocket::figment::Figment; use serde::{Deserialize, Serialize}; use lspci::{lspci_filtered, Device}; -use tracing::info; +use tracing::{info, warn}; pub const DEFAULT_CONFIG: &str = include_str!("../vmm.toml"); + +fn detect_qemu_version(qemu_path: &PathBuf) -> Result { + let output = Command::new(qemu_path) + .arg("--version") + .output() + .context("Failed to execute qemu --version")?; + + if !output.status.success() { + bail!("QEMU version command failed with status: {}", output.status); + } + + let version_output = + 
String::from_utf8(output.stdout).context("QEMU version output is not valid UTF-8")?; + + parse_qemu_version_from_output(&version_output) + .context("Could not parse QEMU version from output") +} + +fn parse_qemu_version_from_output(output: &str) -> Result { + // Parse version from output like: + // "QEMU emulator version 8.2.2 (Debian 2:8.2.2+ds-0ubuntu1.4+tdx1.0)" + // "QEMU emulator version 9.1.0" + let version = output + .lines() + .next() + .and_then(|line| { + let words: Vec<&str> = line.split_whitespace().collect(); + + // First try: Look for "version" keyword and get the next word (only if it looks like a version) + if let Some(version_idx) = words.iter().position(|&word| word == "version") { + if let Some(next_word) = words.get(version_idx + 1) { + // Only use the word after "version" if it looks like a version number + if next_word.chars().next().is_some_and(|c| c.is_ascii_digit()) + && (next_word.contains('.') + || next_word.chars().all(|c| c.is_ascii_digit() || c == '-')) + { + return Some(*next_word); + } + } + } + + // Fallback: find first word that looks like a version number + words + .iter() + .find(|word| { + // Check if word starts with digit and contains dots (version-like) + word.chars().next().is_some_and(|c| c.is_ascii_digit()) + && (word.contains('.') + || word.chars().all(|c| c.is_ascii_digit() || c == '-')) + }) + .copied() + }) + .context("Could not parse QEMU version from output")?; + + // Extract just the version number (e.g., "8.2.2" from "8.2.2+ds-0ubuntu1.4+tdx1.0") + let clean_version = version.split('+').next().unwrap_or(version).to_string(); + + Ok(clean_version) +} + pub fn load_config_figment(config_file: Option<&str>) -> Figment { load_config("vmm", DEFAULT_CONFIG, config_file, false) } @@ -127,9 +187,11 @@ pub struct CvmConfig { pub use_mrconfigid: bool, /// QEMU single pass add page - pub qemu_single_pass_add_pages: bool, + pub qemu_single_pass_add_pages: Option, /// QEMU pic - pub qemu_pic: bool, + pub qemu_pic: Option, + /// QEMU qemu_version + pub qemu_version: Option, /// QEMU pci_hole64_size pub qemu_pci_hole64_size: u64, /// QEMU hotplug_off @@ -361,7 +423,69 @@ impl Config { } } info!("QEMU path: {}", me.cvm.qemu_path.display()); + + // Detect QEMU version if not already set + match &me.cvm.qemu_version { + None => match detect_qemu_version(&me.cvm.qemu_path) { + Ok(version) => { + info!("Detected QEMU version: {version}"); + me.cvm.qemu_version = Some(version); + } + Err(e) => { + warn!("Failed to detect QEMU version: {e}"); + // Continue without version - the system will use defaults + } + }, + Some(version) => info!("Configured QEMU version: {version}"), + } } Ok(me) } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_qemu_version_debian_format() { + let output = "QEMU emulator version 8.2.2 (Debian 2:8.2.2+ds-0ubuntu1.4+tdx1.0)\nCopyright (c) 2003-2023 Fabrice Bellard and the QEMU Project developers"; + let version = parse_qemu_version_from_output(output).unwrap(); + assert_eq!(version, "8.2.2"); + } + + #[test] + fn test_parse_qemu_version_simple_format() { + let output = "QEMU emulator version 9.1.0\nCopyright (c) 2003-2024 Fabrice Bellard and the QEMU Project developers"; + let version = parse_qemu_version_from_output(output).unwrap(); + assert_eq!(version, "9.1.0"); + } + + #[test] + fn test_parse_qemu_version_old_debian_format() { + let output = "QEMU emulator version 8.2.2 (Debian 1:8.2.2+ds-0ubuntu1.2)\nCopyright (c) 2003-2023 Fabrice Bellard and the QEMU Project developers"; + let version = 
parse_qemu_version_from_output(output).unwrap(); + assert_eq!(version, "8.2.2"); + } + + #[test] + fn test_parse_qemu_version_with_rc() { + let output = "QEMU emulator version 9.0.0-rc1\nCopyright (c) 2003-2024 Fabrice Bellard and the QEMU Project developers"; + let version = parse_qemu_version_from_output(output).unwrap(); + assert_eq!(version, "9.0.0-rc1"); + } + + #[test] + fn test_parse_qemu_version_fallback() { + let output = "Some unusual format 8.1.5 with version info"; + let version = parse_qemu_version_from_output(output).unwrap(); + assert_eq!(version, "8.1.5"); + } + + #[test] + fn test_parse_qemu_version_invalid() { + let output = "No version information here"; + let result = parse_qemu_version_from_output(output); + assert!(result.is_err()); + } +} diff --git a/vmm/src/one_shot.rs b/vmm/src/one_shot.rs index b51378e6..296f5df3 100644 --- a/vmm/src/one_shot.rs +++ b/vmm/src/one_shot.rs @@ -261,6 +261,7 @@ Compose file content (first 200 chars): memory_size: manifest.memory as u64 * 1024 * 1024, qemu_single_pass_add_pages: config.cvm.qemu_single_pass_add_pages, pic: config.cvm.qemu_pic, + qemu_version: config.cvm.qemu_version.clone(), pci_hole64_size: config.cvm.qemu_pci_hole64_size, hugepages: manifest.hugepages, num_gpus: manifest.gpus.as_ref().map_or(0, |g| g.gpus.len() as u32), diff --git a/vmm/vmm.toml b/vmm/vmm.toml index 4058ed74..568c2b2a 100644 --- a/vmm/vmm.toml +++ b/vmm/vmm.toml @@ -30,8 +30,9 @@ user = "" use_mrconfigid = true # QEMU flags -qemu_single_pass_add_pages = false -qemu_pic = true +#qemu_single_pass_add_pages = false +#qemu_pic = true +#qemu_version = "" qemu_pci_hole64_size = 0 qemu_hotplug_off = false From 4b4f55b968bdf096a9cc40438553a64a0f33c6a0 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Mon, 22 Sep 2025 08:50:34 +0000 Subject: [PATCH 25/77] Add image name in VmConfig --- dstack-types/src/lib.rs | 1 + vmm/src/app.rs | 1 + vmm/src/one_shot.rs | 1 + 3 files changed, 3 insertions(+) diff --git a/dstack-types/src/lib.rs b/dstack-types/src/lib.rs index 145d7ba6..439d9905 100644 --- a/dstack-types/src/lib.rs +++ b/dstack-types/src/lib.rs @@ -151,6 +151,7 @@ pub struct VmConfig { pub num_nvswitches: u32, #[serde(default)] pub hotplug_off: bool, + pub image: Option, } #[derive(Serialize, Deserialize, Debug, Clone)] diff --git a/vmm/src/app.rs b/vmm/src/app.rs index a9b6077a..9a344e48 100644 --- a/vmm/src/app.rs +++ b/vmm/src/app.rs @@ -519,6 +519,7 @@ impl App { num_gpus: gpus.gpus.len() as u32, num_nvswitches: gpus.bridges.len() as u32, hotplug_off: cfg.cvm.qemu_hotplug_off, + image: Some(manifest.image.clone()), })?; json!({ "kms_urls": kms_urls, diff --git a/vmm/src/one_shot.rs b/vmm/src/one_shot.rs index 296f5df3..a377b307 100644 --- a/vmm/src/one_shot.rs +++ b/vmm/src/one_shot.rs @@ -267,6 +267,7 @@ Compose file content (first 200 chars): num_gpus: manifest.gpus.as_ref().map_or(0, |g| g.gpus.len() as u32), num_nvswitches: manifest.gpus.as_ref().map_or(0, |g| g.bridges.len() as u32), hotplug_off: config.cvm.qemu_hotplug_off, + image: Some(manifest.image.clone()), })? 
}); let sys_config_path = vm_work_dir.shared_dir().join(".sys-config.json"); From d3499412d721b83c25baba5e440785b3f0df3c8e Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Mon, 22 Sep 2025 08:49:45 +0000 Subject: [PATCH 26/77] Returns event log in dstack-mr --- dstack-mr/src/acpi.rs | 1 + dstack-mr/src/kernel.rs | 12 +++++------ dstack-mr/src/lib.rs | 5 ++++- dstack-mr/src/machine.rs | 41 +++++++++++++++++++++++++++++------- dstack-mr/src/tdvf.rs | 45 +++++++++++++++++++++++----------------- 5 files changed, 69 insertions(+), 35 deletions(-) diff --git a/dstack-mr/src/acpi.rs b/dstack-mr/src/acpi.rs index b61337a1..a93f30e1 100644 --- a/dstack-mr/src/acpi.rs +++ b/dstack-mr/src/acpi.rs @@ -13,6 +13,7 @@ use crate::Machine; const LDR_LENGTH: usize = 4096; const FIXED_STRING_LEN: usize = 56; +#[derive(Debug, Clone)] pub struct Tables { pub tables: Vec, pub rsdp: Vec, diff --git a/dstack-mr/src/kernel.rs b/dstack-mr/src/kernel.rs index 1f714ee0..9fd465e6 100644 --- a/dstack-mr/src/kernel.rs +++ b/dstack-mr/src/kernel.rs @@ -2,7 +2,7 @@ // // SPDX-License-Identifier: Apache-2.0 -use crate::{measure_log, measure_sha384, num::read_le, utf16_encode, util::debug_print_log}; +use crate::{measure_sha384, num::read_le, utf16_encode}; use anyhow::{bail, Context, Result}; use object::pe; use sha2::{Digest, Sha384}; @@ -201,24 +201,22 @@ fn patch_kernel( } /// Measures a QEMU-patched TDX kernel image. -pub(crate) fn measure_kernel( +pub(crate) fn rtmr1_log( kernel_data: &[u8], initrd_size: u32, mem_size: u64, acpi_data_size: u32, -) -> Result> { +) -> Result>> { let kd = patch_kernel(kernel_data, initrd_size, mem_size, acpi_data_size) .context("Failed to patch kernel")?; let kernel_hash = authenticode_sha384_hash(&kd).context("Failed to compute kernel hash")?; - let rtmr1_log = vec![ + Ok(vec![ kernel_hash, measure_sha384(b"Calling EFI Application from Boot Option"), measure_sha384(&[0x00, 0x00, 0x00, 0x00]), // Separator measure_sha384(b"Exit Boot Services Invocation"), measure_sha384(b"Exit Boot Services Returned with Success"), - ]; - debug_print_log("RTMR1", &rtmr1_log); - Ok(measure_log(&rtmr1_log)) + ]) } /// Measures the kernel command line by converting to UTF-16LE and hashing. 
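The refactor above has `rtmr1_log` return the per-event digest list instead of the folded register value; the folding is left to the shared `measure_log` helper in dstack-mr's util module, which this patch does not show. As a minimal sketch of that replay, assuming the standard TDX extend rule (SHA-384 over the previous register value concatenated with each event digest, starting from 48 zero bytes) and using an illustrative function name rather than the real helper:

```rust
use sha2::{Digest, Sha384};

// Replay per-event SHA-384 digests into an RTMR value:
// mr' = SHA384(mr || event_digest), starting from 48 zero bytes.
fn replay_rtmr(event_digests: &[Vec<u8>]) -> [u8; 48] {
    let mut mr = [0u8; 48];
    for digest in event_digests {
        let mut hasher = Sha384::new();
        hasher.update(mr);
        hasher.update(digest);
        mr.copy_from_slice(hasher.finalize().as_slice());
    }
    mr
}
```

Returning the digest lists rather than only the folded values is what lets the rest of this patch expose `rtmr_logs` in `TdxMeasurementDetails`, so a verifier can replay individual events instead of trusting a single opaque measurement.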
diff --git a/dstack-mr/src/lib.rs b/dstack-mr/src/lib.rs index 936e283f..a8d5825e 100644 --- a/dstack-mr/src/lib.rs +++ b/dstack-mr/src/lib.rs @@ -5,10 +5,13 @@ use serde::{Deserialize, Serialize}; use serde_human_bytes as hex_bytes; -pub use machine::Machine; +pub use machine::{Machine, TdxMeasurementDetails}; use util::{measure_log, measure_sha384, utf16_encode}; +pub type RtmrLog = Vec>; +pub type RtmrLogs = [RtmrLog; 3]; + mod acpi; mod kernel; mod machine; diff --git a/dstack-mr/src/machine.rs b/dstack-mr/src/machine.rs index 2fe0f5c2..c08e6cdf 100644 --- a/dstack-mr/src/machine.rs +++ b/dstack-mr/src/machine.rs @@ -2,9 +2,10 @@ // // SPDX-License-Identifier: Apache-2.0 +use crate::acpi::Tables; use crate::tdvf::Tdvf; use crate::util::debug_print_log; -use crate::{kernel, TdxMeasurements}; +use crate::{kernel, RtmrLogs, TdxMeasurements}; use crate::{measure_log, measure_sha384}; use anyhow::{bail, Context, Result}; use fs_err as fs; @@ -78,21 +79,41 @@ pub struct VersionedOptions { pub two_pass_add_pages: bool, } +#[derive(Debug, Clone)] +pub struct TdxMeasurementDetails { + pub measurements: TdxMeasurements, + pub rtmr_logs: RtmrLogs, + pub acpi_tables: Tables, +} + impl Machine<'_> { pub fn measure(&self) -> Result { + self.measure_with_logs().map(|details| details.measurements) + } + + pub fn measure_with_logs(&self) -> Result { debug!("measuring machine: {self:#?}"); let fw_data = fs::read(self.firmware)?; let kernel_data = fs::read(self.kernel)?; let initrd_data = fs::read(self.initrd)?; let tdvf = Tdvf::parse(&fw_data).context("Failed to parse TDVF metadata")?; + let mrtd = tdvf.mrtd(self).context("Failed to compute MR TD")?; - let rtmr0 = tdvf.rtmr0(self).context("Failed to compute RTMR0")?; - let rtmr1 = kernel::measure_kernel( + + let (rtmr0_log, acpi_tables) = tdvf + .rtmr0_log(self) + .context("Failed to compute RTMR0 log")?; + debug_print_log("RTMR0", &rtmr0_log); + let rtmr0 = measure_log(&rtmr0_log); + + let rtmr1_log = kernel::rtmr1_log( &kernel_data, initrd_data.len() as u32, self.memory_size, 0x28000, )?; + debug_print_log("RTMR1", &rtmr1_log); + let rtmr1 = measure_log(&rtmr1_log); let rtmr2_log = vec![ kernel::measure_cmdline(self.kernel_cmdline), @@ -101,11 +122,15 @@ impl Machine<'_> { debug_print_log("RTMR2", &rtmr2_log); let rtmr2 = measure_log(&rtmr2_log); - Ok(TdxMeasurements { - mrtd, - rtmr0, - rtmr1, - rtmr2, + Ok(TdxMeasurementDetails { + measurements: TdxMeasurements { + mrtd, + rtmr0, + rtmr1, + rtmr2, + }, + rtmr_logs: [rtmr0_log, rtmr1_log, rtmr2_log], + acpi_tables, }) } } diff --git a/dstack-mr/src/tdvf.rs b/dstack-mr/src/tdvf.rs index 8f02487b..a5d577a8 100644 --- a/dstack-mr/src/tdvf.rs +++ b/dstack-mr/src/tdvf.rs @@ -6,9 +6,9 @@ use anyhow::{anyhow, bail, Context, Result}; use hex_literal::hex; use sha2::{Digest, Sha384}; +use crate::acpi::Tables; use crate::num::read_le; -use crate::util::debug_print_log; -use crate::{measure_log, measure_sha384, utf16_encode, Machine}; +use crate::{measure_log, measure_sha384, utf16_encode, Machine, RtmrLog}; const PAGE_SIZE: u64 = 0x1000; const MR_EXTEND_GRANULARITY: usize = 0x100; @@ -233,7 +233,13 @@ impl<'a> Tdvf<'a> { }) } + #[allow(dead_code)] pub fn rtmr0(&self, machine: &Machine) -> Result> { + let (rtmr0_log, _) = self.rtmr0_log(machine)?; + Ok(measure_log(&rtmr0_log)) + } + + pub fn rtmr0_log(&self, machine: &Machine) -> Result<(RtmrLog, Tables)> { let td_hob_hash = self.measure_td_hob(machine.memory_size)?; let cfv_image_hash = 
hex!("344BC51C980BA621AAA00DA3ED7436F7D6E549197DFE699515DFA2C6583D95E6412AF21C097D473155875FFD561D6790"); let boot000_hash = hex!("23ADA07F5261F12F34A0BD8E46760962D6B4D576A416F1FEA1C64BC656B1D28EACF7047AE6E967C58FD2A98BFA74C298"); @@ -245,23 +251,24 @@ impl<'a> Tdvf<'a> { // RTMR0 calculation - let rtmr0_log = vec![ - td_hob_hash, - cfv_image_hash.to_vec(), - measure_tdx_efi_variable("8BE4DF61-93CA-11D2-AA0D-00E098032B8C", "SecureBoot")?, - measure_tdx_efi_variable("8BE4DF61-93CA-11D2-AA0D-00E098032B8C", "PK")?, - measure_tdx_efi_variable("8BE4DF61-93CA-11D2-AA0D-00E098032B8C", "KEK")?, - measure_tdx_efi_variable("D719B2CB-3D3A-4596-A3BC-DAD00E67656F", "db")?, - measure_tdx_efi_variable("D719B2CB-3D3A-4596-A3BC-DAD00E67656F", "dbx")?, - measure_sha384(&[0x00, 0x00, 0x00, 0x00]), // Separator - acpi_loader_hash, - acpi_rsdp_hash, - acpi_tables_hash, - measure_sha384(&[0x00, 0x00]), // BootOrder - boot000_hash.to_vec(), - ]; - debug_print_log("RTMR0", &rtmr0_log); - Ok(measure_log(&rtmr0_log)) + Ok(( + vec![ + td_hob_hash, + cfv_image_hash.to_vec(), + measure_tdx_efi_variable("8BE4DF61-93CA-11D2-AA0D-00E098032B8C", "SecureBoot")?, + measure_tdx_efi_variable("8BE4DF61-93CA-11D2-AA0D-00E098032B8C", "PK")?, + measure_tdx_efi_variable("8BE4DF61-93CA-11D2-AA0D-00E098032B8C", "KEK")?, + measure_tdx_efi_variable("D719B2CB-3D3A-4596-A3BC-DAD00E67656F", "db")?, + measure_tdx_efi_variable("D719B2CB-3D3A-4596-A3BC-DAD00E67656F", "dbx")?, + measure_sha384(&[0x00, 0x00, 0x00, 0x00]), // Separator + acpi_loader_hash, + acpi_rsdp_hash, + acpi_tables_hash, + measure_sha384(&[0x00, 0x00]), // BootOrder + boot000_hash.to_vec(), + ], + tables, + )) } fn measure_td_hob(&self, memory_size: u64) -> Result> { From 846637c65982fe57fb07beaa6c34460a80c64b19 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Mon, 22 Sep 2025 10:14:19 +0000 Subject: [PATCH 27/77] Add dstack-verifier --- .github/workflows/verifier-release.yml | 80 +++ Cargo.lock | 25 + Cargo.toml | 1 + REUSE.toml | 10 + kms/dstack-app/builder/Dockerfile | 4 +- verifier/Cargo.toml | 37 ++ verifier/README.md | 163 +++++ verifier/builder/Dockerfile | 86 +++ verifier/builder/build-image.sh | 87 +++ .../shared/builder-pinned-packages.txt | 435 ++++++++++++ verifier/builder/shared/config-qemu.sh | 28 + verifier/builder/shared/pin-packages.sh | 21 + verifier/builder/shared/pinned-packages.txt | 108 +++ .../builder/shared/qemu-pinned-packages.txt | 236 +++++++ verifier/dstack-verifier.toml | 19 + verifier/fixtures/quote-report.json | 1 + verifier/src/main.rs | 241 +++++++ verifier/src/types.rs | 80 +++ verifier/src/verification.rs | 620 ++++++++++++++++++ verifier/test.sh | 127 ++++ 20 files changed, 2407 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/verifier-release.yml create mode 100644 verifier/Cargo.toml create mode 100644 verifier/README.md create mode 100644 verifier/builder/Dockerfile create mode 100755 verifier/builder/build-image.sh create mode 100644 verifier/builder/shared/builder-pinned-packages.txt create mode 100755 verifier/builder/shared/config-qemu.sh create mode 100755 verifier/builder/shared/pin-packages.sh create mode 100644 verifier/builder/shared/pinned-packages.txt create mode 100644 verifier/builder/shared/qemu-pinned-packages.txt create mode 100644 verifier/dstack-verifier.toml create mode 100644 verifier/fixtures/quote-report.json create mode 100644 verifier/src/main.rs create mode 100644 verifier/src/types.rs create mode 100644 verifier/src/verification.rs create mode 100755 verifier/test.sh diff --git 
a/.github/workflows/verifier-release.yml b/.github/workflows/verifier-release.yml new file mode 100644 index 00000000..2c8d25f2 --- /dev/null +++ b/.github/workflows/verifier-release.yml @@ -0,0 +1,80 @@ +# SPDX-FileCopyrightText: © 2025 Phala Network +# +# SPDX-License-Identifier: Apache-2.0 + +name: Verifier Release + +on: + workflow_dispatch: + push: + tags: + - 'verifier-v*' +permissions: + attestations: write + id-token: write + contents: write + packages: write + +jobs: + build-and-release: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Parse version from tag + run: | + VERSION=${GITHUB_REF#refs/tags/verifier-v} + echo "VERSION=$VERSION" >> $GITHUB_ENV + echo "Parsed version: $VERSION" + + - name: Log in to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ vars.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Get Git commit timestamps + run: | + echo "TIMESTAMP=$(git log -1 --pretty=%ct)" >> $GITHUB_ENV + echo "GIT_REV=$(git rev-parse HEAD)" >> $GITHUB_ENV + + - name: Build and push Docker image + id: build-and-push + uses: docker/build-push-action@v5 + env: + SOURCE_DATE_EPOCH: ${{ env.TIMESTAMP }} + with: + context: verifier + file: verifier/builder/Dockerfile + push: true + tags: ${{ vars.DOCKERHUB_USERNAME }}/dstack-verifier:${{ env.VERSION }} + platforms: linux/amd64 + provenance: false + build-args: | + DSTACK_REV=${{ env.GIT_REV }} + DSTACK_SRC_URL=${{ github.server_url }}/${{ github.repository }}.git + SOURCE_DATE_EPOCH=${{ env.TIMESTAMP }} + + - name: Generate artifact attestation + uses: actions/attest-build-provenance@v1 + with: + subject-name: "docker.io/${{ vars.DOCKERHUB_USERNAME }}/dstack-verifier" + subject-digest: ${{ steps.build-and-push.outputs.digest }} + push-to-registry: true + + - name: GitHub Release + uses: softprops/action-gh-release@v1 + with: + name: "Verifier Release v${{ env.VERSION }}" + body: | + ## Docker Image Information + + **Image**: `docker.io/${{ vars.DOCKERHUB_USERNAME }}/dstack-verifier:${{ env.VERSION }}` + + **Digest (SHA256)**: `${{ steps.build-and-push.outputs.digest }}` + + **Verification**: [Verify on Sigstore](https://search.sigstore.dev/?hash=${{ steps.build-and-push.outputs.digest }}) diff --git a/Cargo.lock b/Cargo.lock index df58f866..d17f8282 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2461,6 +2461,31 @@ dependencies = [ "x509-parser", ] +[[package]] +name = "dstack-verifier" +version = "0.5.4" +dependencies = [ + "anyhow", + "cc-eventlog", + "clap", + "dcap-qvl", + "dstack-mr", + "dstack-types", + "figment", + "fs-err", + "hex", + "ra-tls", + "reqwest", + "rocket", + "serde", + "serde_json", + "sha2 0.10.9", + "tempfile", + "tokio", + "tracing", + "tracing-subscriber", +] + [[package]] name = "dstack-vmm" version = "0.5.4" diff --git a/Cargo.toml b/Cargo.toml index 074bc454..b26e068d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -49,6 +49,7 @@ members = [ "serde-duration", "dstack-mr", "dstack-mr/cli", + "verifier", "no_std_check", ] resolver = "2" diff --git a/REUSE.toml b/REUSE.toml index abf4a08c..bf435ec4 100644 --- a/REUSE.toml +++ b/REUSE.toml @@ -162,3 +162,13 @@ SPDX-License-Identifier = "CC0-1.0" path = "dstack-util/tests/fixtures/*" SPDX-FileCopyrightText = "NONE" SPDX-License-Identifier = "CC0-1.0" + +[[annotations]] +path = "verifier/fixtures/*" +SPDX-FileCopyrightText = "NONE" +SPDX-License-Identifier = "CC0-1.0" + +[[annotations]] +path = 
"verifier/builder/shared/*.txt" +SPDX-FileCopyrightText = "NONE" +SPDX-License-Identifier = "CC0-1.0" diff --git a/kms/dstack-app/builder/Dockerfile b/kms/dstack-app/builder/Dockerfile index 5e954755..e9c9448b 100644 --- a/kms/dstack-app/builder/Dockerfile +++ b/kms/dstack-app/builder/Dockerfile @@ -27,7 +27,7 @@ RUN cd dstack && cargo build --release -p dstack-kms --target x86_64-unknown-lin FROM debian:bookworm@sha256:0d8498a0e9e6a60011df39aab78534cfe940785e7c59d19dfae1eb53ea59babe COPY ./shared /build WORKDIR /build -ARG QEMU_REV=d98440811192c08eafc07c7af110593c6b3758ff +ARG QEMU_REV=dbcec07c0854bf873d346a09e87e4c993ccf2633 RUN ./pin-packages.sh ./qemu-pinned-packages.txt && \ apt-get update && \ apt-get install -y --no-install-recommends \ @@ -43,7 +43,7 @@ RUN ./pin-packages.sh ./qemu-pinned-packages.txt && \ flex \ bison && \ rm -rf /var/lib/apt/lists/* /var/log/* /var/cache/ldconfig/aux-cache -RUN git clone https://github.com/kvinwang/qemu-tdx.git --depth 1 --branch passthrough-dump-acpi --single-branch && \ +RUN git clone https://github.com/kvinwang/qemu-tdx.git --depth 1 --branch dstack-qemu-9.2.1 --single-branch && \ cd qemu-tdx && git fetch --depth 1 origin ${QEMU_REV} && \ git checkout ${QEMU_REV} && \ ../config-qemu.sh ./build /usr/local && \ diff --git a/verifier/Cargo.toml b/verifier/Cargo.toml new file mode 100644 index 00000000..78706da9 --- /dev/null +++ b/verifier/Cargo.toml @@ -0,0 +1,37 @@ +# SPDX-FileCopyrightText: © 2024-2025 Phala Network +# +# SPDX-License-Identifier: Apache-2.0 + +[package] +name = "dstack-verifier" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +anyhow.workspace = true +clap = { workspace = true, features = ["derive"] } +figment.workspace = true +fs-err.workspace = true +hex.workspace = true +rocket = { workspace = true, features = ["json"] } +serde = { workspace = true, features = ["derive"] } +serde_json.workspace = true +tokio = { workspace = true, features = ["full"] } +tracing.workspace = true +tracing-subscriber.workspace = true +reqwest.workspace = true +tempfile.workspace = true + +# Internal dependencies +ra-tls.workspace = true +dstack-types.workspace = true +dstack-mr.workspace = true + +# Crypto/verification dependencies +dcap-qvl.workspace = true +cc-eventlog.workspace = true +sha2.workspace = true diff --git a/verifier/README.md b/verifier/README.md new file mode 100644 index 00000000..51cf3dbc --- /dev/null +++ b/verifier/README.md @@ -0,0 +1,163 @@ +# dstack-verifier + +A HTTP server that provides CVM (Confidential Virtual Machine) verification services using the same verification process as the dstack KMS. + +## Features + +- **TDX Quote Verification**: Uses dcap-qvl to verify TDX quotes +- **Event Log Verification**: Validates event logs and extracts app information +- **OS Image Hash Verification**: Uses dstack-mr to ensure OS image hash matches expected measurements +- **Automatic Image Download**: Downloads and caches OS images automatically when not found locally +- **RESTful API**: Simple HTTP endpoints for verification requests + +## API Endpoints + +### POST /verify + +Verifies a CVM attestation with the provided quote, event log, and VM configuration. 
+ +**Request Body:** +```json +{ + "quote": "hex-encoded-quote", + "event_log": "hex-encoded-event-log", + "vm_config": "json-vm-config-string", + "pccs_url": "optional-pccs-url" +} +``` + +**Response:** +```json +{ + "is_valid": true, + "details": { + "quote_verified": true, + "event_log_verified": true, + "os_image_hash_verified": true, + "report_data": "hex-encoded-64-byte-report-data", + "tcb_status": "OK", + "advisory_ids": [], + "app_info": { + "app_id": "hex-string", + "compose_hash": "hex-string", + "instance_id": "hex-string", + "device_id": "hex-string", + "mrtd": "hex-string", + "rtmr0": "hex-string", + "rtmr1": "hex-string", + "rtmr2": "hex-string", + "rtmr3": "hex-string", + "mr_system": "hex-string", + "mr_aggregated": "hex-string", + "os_image_hash": "hex-string", + "key_provider_info": "hex-string" + } + }, + "reason": null +} +``` + +### GET /health + +Health check endpoint that returns service status. + +**Response:** +```json +{ + "status": "ok", + "service": "dstack-verifier" +} +``` + +## Configuration + +Configuration can be provided via: +1. TOML file (default: `dstack-verifier.toml`) +2. Environment variables with prefix `DSTACK_VERIFIER_` +3. Command line arguments + +### Configuration Options + +- `host`: Server bind address (default: "0.0.0.0") +- `port`: Server port (default: 8080) +- `image_cache_dir`: Directory for cached OS images (default: "/tmp/dstack-verifier/cache") +- `image_download_url`: URL template for downloading OS images (default: GitHub releases URL) +- `image_download_timeout_secs`: Download timeout in seconds (default: 300) +- `pccs_url`: Optional PCCS URL for quote verification + +### Example Configuration File + +```toml +host = "0.0.0.0" +port = 8080 +image_cache_dir = "/var/cache/dstack-verifier" +image_download_url = "http://0.0.0.0:8000/mr_{OS_IMAGE_HASH}.tar.gz" +image_download_timeout_secs = 300 +pccs_url = "https://pccs.example.com" +``` + +## Usage + +```bash +# Run with default config +cargo run --bin dstack-verifier + +# Run with custom config file +cargo run --bin dstack-verifier -- --config /path/to/config.toml + +# Set via environment variables +DSTACK_VERIFIER_PORT=9000 cargo run --bin dstack-verifier +``` + +## Testing + +Two test scripts are provided for easy testing: + +### Full Test (with server management) +```bash +./test.sh +``` +This script will: +- Build the project +- Start the server +- Run the verification test +- Display detailed results +- Clean up automatically + +### Quick Test (assumes server is running) +```bash +./quick-test.sh +``` +This script assumes the server is already running and just sends a test request. + +## Verification Process + +The verifier performs three main verification steps: + +1. **Quote Verification**: Validates the TDX quote using dcap-qvl, checking the quote signature and TCB status +2. **Event Log Verification**: Replays event logs to ensure RTMR values match and extracts app information +3. **OS Image Hash Verification**: + - Automatically downloads OS images if not cached locally + - Uses dstack-mr to compute expected measurements + - Compares against the verified measurements from the quote + +All three steps must pass for the verification to be considered valid. + +### Automatic Image Download + +When an OS image is not found in the local cache, the verifier will: + +1. **Download**: Fetch the image tarball from the configured URL +2. **Extract**: Extract the tarball contents to a temporary directory +3. **Verify**: Check SHA256 checksums to ensure file integrity +4. 
**Validate**: Confirm the OS image hash matches the computed hash +5. **Cache**: Move the validated files to the cache directory for future use + +The download URL template uses `{OS_IMAGE_HASH}` as a placeholder that gets replaced with the actual OS image hash from the verification request. + +## Dependencies + +- dcap-qvl: TDX quote verification +- dstack-mr: OS image measurement computation +- ra-tls: Attestation handling and verification +- rocket: HTTP server framework \ No newline at end of file diff --git a/verifier/builder/Dockerfile b/verifier/builder/Dockerfile new file mode 100644 index 00000000..cef0d128 --- /dev/null +++ b/verifier/builder/Dockerfile @@ -0,0 +1,86 @@ +# SPDX-FileCopyrightText: © 2025 Phala Network +# +# SPDX-License-Identifier: Apache-2.0 + +FROM rust:1.86.0@sha256:300ec56abce8cc9448ddea2172747d048ed902a3090e6b57babb2bf19f754081 AS verifier-builder +COPY builder/shared /build/shared +ARG DSTACK_REV +ARG DSTACK_SRC_URL=https://github.com/Dstack-TEE/dstack.git +WORKDIR /build +RUN ./shared/pin-packages.sh ./shared/builder-pinned-packages.txt +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + git \ + build-essential \ + musl-tools \ + libssl-dev \ + protobuf-compiler \ + libprotobuf-dev \ + clang \ + libclang-dev \ + pkg-config \ + ca-certificates \ + curl && \ + rm -rf /var/lib/apt/lists/* /var/log/* /var/cache/ldconfig/aux-cache +RUN git clone ${DSTACK_SRC_URL} && \ + cd dstack && \ + git checkout ${DSTACK_REV} +RUN rustup target add x86_64-unknown-linux-musl +RUN cd dstack && cargo build --release -p dstack-verifier --target x86_64-unknown-linux-musl +RUN echo "${DSTACK_REV}" > /build/.GIT_REV + +FROM debian:bookworm@sha256:0d8498a0e9e6a60011df39aab78534cfe940785e7c59d19dfae1eb53ea59babe AS acpi-builder +COPY builder/shared /build +WORKDIR /build +ARG QEMU_REV=dbcec07c0854bf873d346a09e87e4c993ccf2633 +RUN ./pin-packages.sh ./qemu-pinned-packages.txt && \ + apt-get update && \ + apt-get install -y --no-install-recommends \ + git \ + libslirp-dev \ + python3-pip \ + ninja-build \ + pkg-config \ + libglib2.0-dev \ + python3-sphinx \ + python3-sphinx-rtd-theme \ + build-essential \ + flex \ + bison && \ + rm -rf /var/lib/apt/lists/* /var/log/* /var/cache/ldconfig/aux-cache +RUN git clone https://github.com/kvinwang/qemu-tdx.git --depth 1 --branch dstack-qemu-9.2.1 --single-branch && \ + cd qemu-tdx && git fetch --depth 1 origin ${QEMU_REV} && \ + git checkout ${QEMU_REV} && \ + ../config-qemu.sh ./build /usr/local && \ + cd build && \ + ninja && \ + strip qemu-system-x86_64 && \ + install -m 755 qemu-system-x86_64 /usr/local/bin/dstack-acpi-tables && \ + cd ../ && \ + install -d /usr/local/share/qemu && \ + install -m 644 pc-bios/efi-virtio.rom /usr/local/share/qemu/ && \ + install -m 644 pc-bios/kvmvapic.bin /usr/local/share/qemu/ && \ + install -m 644 pc-bios/linuxboot_dma.bin /usr/local/share/qemu/ && \ + cd .. 
&& rm -rf qemu-tdx + +FROM debian:bookworm@sha256:0d8498a0e9e6a60011df39aab78534cfe940785e7c59d19dfae1eb53ea59babe +COPY builder/shared /build +WORKDIR /build +RUN ./pin-packages.sh ./pinned-packages.txt && \ + apt-get update && \ + apt-get install -y --no-install-recommends \ + ca-certificates \ + curl \ + libglib2.0-0 \ + libslirp0 \ + && rm -rf /var/lib/apt/lists/* /var/log/* /var/cache/ldconfig/aux-cache +COPY --from=verifier-builder /build/dstack/target/x86_64-unknown-linux-musl/release/dstack-verifier /usr/local/bin/dstack-verifier +COPY --from=verifier-builder /build/.GIT_REV /etc/ +COPY --from=acpi-builder /usr/local/bin/dstack-acpi-tables /usr/local/bin/dstack-acpi-tables +COPY --from=acpi-builder /usr/local/share/qemu /usr/local/share/qemu +RUN mkdir -p /etc/dstack +COPY dstack-verifier.toml /etc/dstack/dstack-verifier.toml +WORKDIR /var/lib/dstack-verifier +EXPOSE 8080 +ENTRYPOINT ["/usr/local/bin/dstack-verifier"] +CMD ["--config", "/etc/dstack/dstack-verifier.toml"] diff --git a/verifier/builder/build-image.sh b/verifier/builder/build-image.sh new file mode 100755 index 00000000..75bcca79 --- /dev/null +++ b/verifier/builder/build-image.sh @@ -0,0 +1,87 @@ +#!/bin/bash + +# SPDX-FileCopyrightText: © 2025 Phala Network +# +# SPDX-License-Identifier: Apache-2.0 + +set -euo pipefail + +SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) +CONTEXT_DIR=$(dirname "$SCRIPT_DIR") +REPO_ROOT=$(git -C "$SCRIPT_DIR" rev-parse --show-toplevel) +SHARED_DIR="$SCRIPT_DIR/shared" +SHARED_GIT_PATH=$(realpath --relative-to="$REPO_ROOT" "$SHARED_DIR") +DOCKERFILE="$SCRIPT_DIR/Dockerfile" + +NO_CACHE=${NO_CACHE:-} +NAME=${1:-} +if [ -z "$NAME" ]; then + echo "Usage: $0 [:]" >&2 + exit 1 +fi + +extract_packages() { + local image_name=$1 + local pkg_list_file=$2 + if [ -z "$pkg_list_file" ]; then + return + fi + docker run --rm --entrypoint bash "$image_name" \ + -c "dpkg -l | grep '^ii' | awk '{print \$2\"=\"\$3}' | sort" \ + >"$pkg_list_file" +} + +docker_build() { + local image_name=$1 + local target=$2 + local pkg_list_file=$3 + + local commit_timestamp + commit_timestamp=$(git -C "$REPO_ROOT" show -s --format=%ct "$GIT_REV") + + local args=( + --builder buildkit_20 + --progress=plain + --output type=docker,name="$image_name",rewrite-timestamp=true + --build-arg SOURCE_DATE_EPOCH="$commit_timestamp" + --build-arg DSTACK_REV="$GIT_REV" + --build-arg DSTACK_SRC_URL="$DSTACK_SRC_URL" + ) + + if [ -n "$NO_CACHE" ]; then + args+=(--no-cache) + fi + + if [ -n "$target" ]; then + args+=(--target "$target") + fi + + docker buildx build "${args[@]}" \ + --file "$DOCKERFILE" \ + "$CONTEXT_DIR" + + extract_packages "$image_name" "$pkg_list_file" +} + +if ! 
docker buildx inspect buildkit_20 &>/dev/null; then + docker buildx create --use --driver-opt image=moby/buildkit:v0.20.2 --name buildkit_20 +fi + +mkdir -p "$SHARED_DIR" +touch "$SHARED_DIR/builder-pinned-packages.txt" +touch "$SHARED_DIR/qemu-pinned-packages.txt" +touch "$SHARED_DIR/pinned-packages.txt" + +GIT_REV=${GIT_REV:-HEAD} +GIT_REV=$(git -C "$REPO_ROOT" rev-parse "$GIT_REV") +DSTACK_SRC_URL=${DSTACK_SRC_URL:-https://github.com/Dstack-TEE/dstack.git} + +docker_build "$NAME" "" "$SHARED_DIR/pinned-packages.txt" +docker_build "verifier-builder-temp" "verifier-builder" "$SHARED_DIR/builder-pinned-packages.txt" +docker_build "verifier-acpi-builder-temp" "acpi-builder" "$SHARED_DIR/qemu-pinned-packages.txt" + +git_status=$(git -C "$REPO_ROOT" status --porcelain -- "$SHARED_GIT_PATH") +if [ -n "$git_status" ]; then + echo "The working tree has updates in $SHARED_GIT_PATH. Commit or stash before re-running." >&2 + exit 1 +fi diff --git a/verifier/builder/shared/builder-pinned-packages.txt b/verifier/builder/shared/builder-pinned-packages.txt new file mode 100644 index 00000000..69c95e45 --- /dev/null +++ b/verifier/builder/shared/builder-pinned-packages.txt @@ -0,0 +1,435 @@ +adduser=3.134 +apt=2.6.1 +autoconf=2.71-3 +automake=1:1.16.5-1.3 +autotools-dev=20220109.1 +base-files=12.4+deb12u10 +base-passwd=3.6.1 +bash=5.2.15-2+b7 +binutils-common:amd64=2.40-2 +binutils-x86-64-linux-gnu=2.40-2 +binutils=2.40-2 +bsdutils=1:2.38.1-5+deb12u3 +build-essential=12.9 +bzip2=1.0.8-5+b1 +ca-certificates=20230311 +clang-14=1:14.0.6-12 +clang=1:14.0-55.7~deb12u1 +comerr-dev:amd64=2.1-1.47.0-2 +coreutils=9.1-1 +cpp-12=12.2.0-14+deb12u1 +cpp=4:12.2.0-3 +curl=7.88.1-10+deb12u12 +dash=0.5.12-2 +debconf=1.5.82 +debian-archive-keyring=2023.3+deb12u1 +debianutils=5.7-0.5~deb12u1 +default-libmysqlclient-dev:amd64=1.1.0 +diffutils=1:3.8-4 +dirmngr=2.2.40-1.1 +dpkg-dev=1.21.22 +dpkg=1.21.22 +e2fsprogs=1.47.0-2 +file=1:5.44-3 +findutils=4.9.0-4 +fontconfig-config=2.14.1-4 +fontconfig=2.14.1-4 +fonts-dejavu-core=2.37-6 +g++-12=12.2.0-14+deb12u1 +g++=4:12.2.0-3 +gcc-12-base:amd64=12.2.0-14+deb12u1 +gcc-12=12.2.0-14+deb12u1 +gcc=4:12.2.0-3 +gir1.2-freedesktop:amd64=1.74.0-3 +gir1.2-gdkpixbuf-2.0:amd64=2.42.10+dfsg-1+deb12u1 +gir1.2-glib-2.0:amd64=1.74.0-3 +gir1.2-rsvg-2.0:amd64=2.54.7+dfsg-1~deb12u1 +git-man=1:2.39.5-0+deb12u2 +git=1:2.39.5-0+deb12u2 +gnupg-l10n=2.2.40-1.1 +gnupg-utils=2.2.40-1.1 +gnupg=2.2.40-1.1 +gpg-agent=2.2.40-1.1 +gpg-wks-client=2.2.40-1.1 +gpg-wks-server=2.2.40-1.1 +gpg=2.2.40-1.1 +gpgconf=2.2.40-1.1 +gpgsm=2.2.40-1.1 +gpgv=2.2.40-1.1 +grep=3.8-5 +gzip=1.12-1 +hicolor-icon-theme=0.17-2 +hostname=3.23+nmu1 +icu-devtools=72.1-3 +imagemagick-6-common=8:6.9.11.60+dfsg-1.6+deb12u2 +imagemagick-6.q16=8:6.9.11.60+dfsg-1.6+deb12u2 +imagemagick=8:6.9.11.60+dfsg-1.6+deb12u2 +init-system-helpers=1.65.2 +krb5-multidev:amd64=1.20.1-2+deb12u2 +libacl1:amd64=2.3.1-3 +libaom3:amd64=3.6.0-1+deb12u1 +libapr1:amd64=1.7.2-3+deb12u1 +libaprutil1:amd64=1.6.3-1 +libapt-pkg6.0:amd64=2.6.1 +libasan8:amd64=12.2.0-14+deb12u1 +libassuan0:amd64=2.5.5-5 +libatomic1:amd64=12.2.0-14+deb12u1 +libattr1:amd64=1:2.5.1-4 +libaudit-common=1:3.0.9-1 +libaudit1:amd64=1:3.0.9-1 +libbinutils:amd64=2.40-2 +libblkid-dev:amd64=2.38.1-5+deb12u3 +libblkid1:amd64=2.38.1-5+deb12u3 +libbrotli-dev:amd64=1.0.9-2+b6 +libbrotli1:amd64=1.0.9-2+b6 +libbsd0:amd64=0.11.7-2 +libbz2-1.0:amd64=1.0.8-5+b1 +libbz2-dev:amd64=1.0.8-5+b1 +libc-bin=2.36-9+deb12u10 +libc-dev-bin=2.36-9+deb12u10 +libc6-dev:amd64=2.36-9+deb12u10 +libc6:amd64=2.36-9+deb12u10 
+libcairo-gobject2:amd64=1.16.0-7 +libcairo-script-interpreter2:amd64=1.16.0-7 +libcairo2-dev:amd64=1.16.0-7 +libcairo2:amd64=1.16.0-7 +libcap-ng0:amd64=0.8.3-1+b3 +libcap2:amd64=1:2.66-4 +libcbor0.8:amd64=0.8.0-2+b1 +libcc1-0:amd64=12.2.0-14+deb12u1 +libclang-14-dev=1:14.0.6-12 +libclang-common-14-dev=1:14.0.6-12 +libclang-cpp14=1:14.0.6-12 +libclang-dev=1:14.0-55.7~deb12u1 +libclang1-14=1:14.0.6-12 +libcom-err2:amd64=1.47.0-2 +libcrypt-dev:amd64=1:4.4.33-2 +libcrypt1:amd64=1:4.4.33-2 +libctf-nobfd0:amd64=2.40-2 +libctf0:amd64=2.40-2 +libcurl3-gnutls:amd64=7.88.1-10+deb12u12 +libcurl4-openssl-dev:amd64=7.88.1-10+deb12u12 +libcurl4:amd64=7.88.1-10+deb12u12 +libdatrie1:amd64=0.2.13-2+b1 +libdav1d6:amd64=1.0.0-2+deb12u1 +libdb-dev:amd64=5.3.2 +libdb5.3-dev=5.3.28+dfsg2-1 +libdb5.3:amd64=5.3.28+dfsg2-1 +libde265-0:amd64=1.0.11-1+deb12u2 +libdebconfclient0:amd64=0.270 +libdeflate-dev:amd64=1.14-1 +libdeflate0:amd64=1.14-1 +libdjvulibre-dev:amd64=3.5.28-2+b1 +libdjvulibre-text=3.5.28-2 +libdjvulibre21:amd64=3.5.28-2+b1 +libdpkg-perl=1.21.22 +libedit2:amd64=3.1-20221030-2 +libelf1:amd64=0.188-2.1 +liberror-perl=0.17029-2 +libevent-2.1-7:amd64=2.1.12-stable-8 +libevent-core-2.1-7:amd64=2.1.12-stable-8 +libevent-dev=2.1.12-stable-8 +libevent-extra-2.1-7:amd64=2.1.12-stable-8 +libevent-openssl-2.1-7:amd64=2.1.12-stable-8 +libevent-pthreads-2.1-7:amd64=2.1.12-stable-8 +libexif-dev:amd64=0.6.24-1+b1 +libexif12:amd64=0.6.24-1+b1 +libexpat1-dev:amd64=2.5.0-1+deb12u1 +libexpat1:amd64=2.5.0-1+deb12u1 +libext2fs2:amd64=1.47.0-2 +libffi-dev:amd64=3.4.4-1 +libffi8:amd64=3.4.4-1 +libfftw3-double3:amd64=3.3.10-1 +libfido2-1:amd64=1.12.0-2+b1 +libfontconfig-dev:amd64=2.14.1-4 +libfontconfig1:amd64=2.14.1-4 +libfreetype-dev:amd64=2.12.1+dfsg-5+deb12u4 +libfreetype6-dev:amd64=2.12.1+dfsg-5+deb12u4 +libfreetype6:amd64=2.12.1+dfsg-5+deb12u4 +libfribidi0:amd64=1.0.8-2.1 +libgc1:amd64=1:8.2.2-3 +libgcc-12-dev:amd64=12.2.0-14+deb12u1 +libgcc-s1:amd64=12.2.0-14+deb12u1 +libgcrypt20:amd64=1.10.1-3 +libgdbm-compat4:amd64=1.23-3 +libgdbm-dev:amd64=1.23-3 +libgdbm6:amd64=1.23-3 +libgdk-pixbuf-2.0-0:amd64=2.42.10+dfsg-1+deb12u1 +libgdk-pixbuf-2.0-dev:amd64=2.42.10+dfsg-1+deb12u1 +libgdk-pixbuf2.0-bin=2.42.10+dfsg-1+deb12u1 +libgdk-pixbuf2.0-common=2.42.10+dfsg-1+deb12u1 +libgirepository-1.0-1:amd64=1.74.0-3 +libglib2.0-0:amd64=2.74.6-2+deb12u5 +libglib2.0-bin=2.74.6-2+deb12u5 +libglib2.0-data=2.74.6-2+deb12u5 +libglib2.0-dev-bin=2.74.6-2+deb12u5 +libglib2.0-dev:amd64=2.74.6-2+deb12u5 +libgmp-dev:amd64=2:6.2.1+dfsg1-1.1 +libgmp10:amd64=2:6.2.1+dfsg1-1.1 +libgmpxx4ldbl:amd64=2:6.2.1+dfsg1-1.1 +libgnutls30:amd64=3.7.9-2+deb12u4 +libgomp1:amd64=12.2.0-14+deb12u1 +libgpg-error0:amd64=1.46-1 +libgprofng0:amd64=2.40-2 +libgraphite2-3:amd64=1.3.14-1 +libgssapi-krb5-2:amd64=1.20.1-2+deb12u2 +libgssrpc4:amd64=1.20.1-2+deb12u2 +libharfbuzz0b:amd64=6.0.0+dfsg-3 +libheif1:amd64=1.15.1-1+deb12u1 +libhogweed6:amd64=3.8.1-2 +libice-dev:amd64=2:1.0.10-1 +libice6:amd64=2:1.0.10-1 +libicu-dev:amd64=72.1-3 +libicu72:amd64=72.1-3 +libidn2-0:amd64=2.3.3-1+b1 +libimath-3-1-29:amd64=3.1.6-1 +libimath-dev:amd64=3.1.6-1 +libisl23:amd64=0.25-1.1 +libitm1:amd64=12.2.0-14+deb12u1 +libjansson4:amd64=2.14-2 +libjbig-dev:amd64=2.1-6.1 +libjbig0:amd64=2.1-6.1 +libjpeg-dev:amd64=1:2.1.5-2 +libjpeg62-turbo-dev:amd64=1:2.1.5-2 +libjpeg62-turbo:amd64=1:2.1.5-2 +libk5crypto3:amd64=1.20.1-2+deb12u2 +libkadm5clnt-mit12:amd64=1.20.1-2+deb12u2 +libkadm5srv-mit12:amd64=1.20.1-2+deb12u2 +libkdb5-10:amd64=1.20.1-2+deb12u2 +libkeyutils1:amd64=1.6.3-2 
+libkrb5-3:amd64=1.20.1-2+deb12u2 +libkrb5-dev:amd64=1.20.1-2+deb12u2 +libkrb5support0:amd64=1.20.1-2+deb12u2 +libksba8:amd64=1.6.3-2 +liblcms2-2:amd64=2.14-2 +liblcms2-dev:amd64=2.14-2 +libldap-2.5-0:amd64=2.5.13+dfsg-5 +liblerc-dev:amd64=4.0.0+ds-2 +liblerc4:amd64=4.0.0+ds-2 +libllvm14:amd64=1:14.0.6-12 +liblqr-1-0-dev:amd64=0.4.2-2.1 +liblqr-1-0:amd64=0.4.2-2.1 +liblsan0:amd64=12.2.0-14+deb12u1 +libltdl-dev:amd64=2.4.7-7~deb12u1 +libltdl7:amd64=2.4.7-7~deb12u1 +liblz4-1:amd64=1.9.4-1 +liblzma-dev:amd64=5.4.1-1 +liblzma5:amd64=5.4.1-1 +liblzo2-2:amd64=2.10-2 +libmagic-mgc=1:5.44-3 +libmagic1:amd64=1:5.44-3 +libmagickcore-6-arch-config:amd64=8:6.9.11.60+dfsg-1.6+deb12u2 +libmagickcore-6-headers=8:6.9.11.60+dfsg-1.6+deb12u2 +libmagickcore-6.q16-6-extra:amd64=8:6.9.11.60+dfsg-1.6+deb12u2 +libmagickcore-6.q16-6:amd64=8:6.9.11.60+dfsg-1.6+deb12u2 +libmagickcore-6.q16-dev:amd64=8:6.9.11.60+dfsg-1.6+deb12u2 +libmagickcore-dev=8:6.9.11.60+dfsg-1.6+deb12u2 +libmagickwand-6-headers=8:6.9.11.60+dfsg-1.6+deb12u2 +libmagickwand-6.q16-6:amd64=8:6.9.11.60+dfsg-1.6+deb12u2 +libmagickwand-6.q16-dev:amd64=8:6.9.11.60+dfsg-1.6+deb12u2 +libmagickwand-dev=8:6.9.11.60+dfsg-1.6+deb12u2 +libmariadb-dev-compat=1:10.11.11-0+deb12u1 +libmariadb-dev=1:10.11.11-0+deb12u1 +libmariadb3:amd64=1:10.11.11-0+deb12u1 +libmaxminddb-dev:amd64=1.7.1-1 +libmaxminddb0:amd64=1.7.1-1 +libmd0:amd64=1.0.4-2 +libmount-dev:amd64=2.38.1-5+deb12u3 +libmount1:amd64=2.38.1-5+deb12u3 +libmpc3:amd64=1.3.1-1 +libmpfr6:amd64=4.2.0-1 +libncurses-dev:amd64=6.4-4 +libncurses5-dev:amd64=6.4-4 +libncurses6:amd64=6.4-4 +libncursesw5-dev:amd64=6.4-4 +libncursesw6:amd64=6.4-4 +libnettle8:amd64=3.8.1-2 +libnghttp2-14:amd64=1.52.0-1+deb12u2 +libnpth0:amd64=1.6-3 +libnsl-dev:amd64=1.3.0-2 +libnsl2:amd64=1.3.0-2 +libnuma1:amd64=2.0.16-1 +libobjc-12-dev:amd64=12.2.0-14+deb12u1 +libobjc4:amd64=12.2.0-14+deb12u1 +libopenexr-3-1-30:amd64=3.1.5-5 +libopenexr-dev=3.1.5-5 +libopenjp2-7-dev:amd64=2.5.0-2+deb12u1 +libopenjp2-7:amd64=2.5.0-2+deb12u1 +libp11-kit0:amd64=0.24.1-2 +libpam-modules-bin=1.5.2-6+deb12u1 +libpam-modules:amd64=1.5.2-6+deb12u1 +libpam-runtime=1.5.2-6+deb12u1 +libpam0g:amd64=1.5.2-6+deb12u1 +libpango-1.0-0:amd64=1.50.12+ds-1 +libpangocairo-1.0-0:amd64=1.50.12+ds-1 +libpangoft2-1.0-0:amd64=1.50.12+ds-1 +libpcre2-16-0:amd64=10.42-1 +libpcre2-32-0:amd64=10.42-1 +libpcre2-8-0:amd64=10.42-1 +libpcre2-dev:amd64=10.42-1 +libpcre2-posix3:amd64=10.42-1 +libperl5.36:amd64=5.36.0-7+deb12u2 +libpixman-1-0:amd64=0.42.2-1 +libpixman-1-dev:amd64=0.42.2-1 +libpkgconf3:amd64=1.8.1-1 +libpng-dev:amd64=1.6.39-2 +libpng16-16:amd64=1.6.39-2 +libpq-dev=15.12-0+deb12u2 +libpq5:amd64=15.12-0+deb12u2 +libproc2-0:amd64=2:4.0.2-3 +libprotobuf-dev:amd64=3.21.12-3 +libprotobuf-lite32:amd64=3.21.12-3 +libprotobuf32:amd64=3.21.12-3 +libprotoc32:amd64=3.21.12-3 +libpsl5:amd64=0.21.2-1 +libpthread-stubs0-dev:amd64=0.4-1 +libpython3-stdlib:amd64=3.11.2-1+b1 +libpython3.11-minimal:amd64=3.11.2-6+deb12u5 +libpython3.11-stdlib:amd64=3.11.2-6+deb12u5 +libquadmath0:amd64=12.2.0-14+deb12u1 +libreadline-dev:amd64=8.2-1.3 +libreadline8:amd64=8.2-1.3 +librsvg2-2:amd64=2.54.7+dfsg-1~deb12u1 +librsvg2-common:amd64=2.54.7+dfsg-1~deb12u1 +librsvg2-dev:amd64=2.54.7+dfsg-1~deb12u1 +librtmp1:amd64=2.4+20151223.gitfa8646d.1-2+b2 +libsasl2-2:amd64=2.1.28+dfsg-10 +libsasl2-modules-db:amd64=2.1.28+dfsg-10 +libseccomp2:amd64=2.5.4-1+deb12u1 +libselinux1-dev:amd64=3.4-1+b6 +libselinux1:amd64=3.4-1+b6 +libsemanage-common=3.4-1 +libsemanage2:amd64=3.4-1+b5 +libsepol-dev:amd64=3.4-2.1 
+libsepol2:amd64=3.4-2.1 +libserf-1-1:amd64=1.3.9-11 +libsm-dev:amd64=2:1.2.3-1 +libsm6:amd64=2:1.2.3-1 +libsmartcols1:amd64=2.38.1-5+deb12u3 +libsqlite3-0:amd64=3.40.1-2+deb12u1 +libsqlite3-dev:amd64=3.40.1-2+deb12u1 +libss2:amd64=1.47.0-2 +libssh2-1:amd64=1.10.0-3+b1 +libssl-dev:amd64=3.0.16-1~deb12u1 +libssl3:amd64=3.0.16-1~deb12u1 +libstdc++-12-dev:amd64=12.2.0-14+deb12u1 +libstdc++6:amd64=12.2.0-14+deb12u1 +libsvn1:amd64=1.14.2-4+deb12u1 +libsystemd0:amd64=252.36-1~deb12u1 +libtasn1-6:amd64=4.19.0-2+deb12u1 +libthai-data=0.1.29-1 +libthai0:amd64=0.1.29-1 +libtiff-dev:amd64=4.5.0-6+deb12u2 +libtiff6:amd64=4.5.0-6+deb12u2 +libtiffxx6:amd64=4.5.0-6+deb12u2 +libtinfo6:amd64=6.4-4 +libtirpc-common=1.3.3+ds-1 +libtirpc-dev:amd64=1.3.3+ds-1 +libtirpc3:amd64=1.3.3+ds-1 +libtool=2.4.7-7~deb12u1 +libtsan2:amd64=12.2.0-14+deb12u1 +libubsan1:amd64=12.2.0-14+deb12u1 +libudev1:amd64=252.36-1~deb12u1 +libunistring2:amd64=1.0-2 +libutf8proc2:amd64=2.8.0-1 +libuuid1:amd64=2.38.1-5+deb12u3 +libwebp-dev:amd64=1.2.4-0.2+deb12u1 +libwebp7:amd64=1.2.4-0.2+deb12u1 +libwebpdemux2:amd64=1.2.4-0.2+deb12u1 +libwebpmux3:amd64=1.2.4-0.2+deb12u1 +libwmf-0.2-7:amd64=0.2.12-5.1 +libwmf-dev=0.2.12-5.1 +libwmflite-0.2-7:amd64=0.2.12-5.1 +libx11-6:amd64=2:1.8.4-2+deb12u2 +libx11-data=2:1.8.4-2+deb12u2 +libx11-dev:amd64=2:1.8.4-2+deb12u2 +libx265-199:amd64=3.5-2+b1 +libxau-dev:amd64=1:1.0.9-1 +libxau6:amd64=1:1.0.9-1 +libxcb-render0-dev:amd64=1.15-1 +libxcb-render0:amd64=1.15-1 +libxcb-shm0-dev:amd64=1.15-1 +libxcb-shm0:amd64=1.15-1 +libxcb1-dev:amd64=1.15-1 +libxcb1:amd64=1.15-1 +libxdmcp-dev:amd64=1:1.1.2-3 +libxdmcp6:amd64=1:1.1.2-3 +libxext-dev:amd64=2:1.3.4-1+b1 +libxext6:amd64=2:1.3.4-1+b1 +libxml2-dev:amd64=2.9.14+dfsg-1.3~deb12u1 +libxml2:amd64=2.9.14+dfsg-1.3~deb12u1 +libxrender-dev:amd64=1:0.9.10-1.1 +libxrender1:amd64=1:0.9.10-1.1 +libxslt1-dev:amd64=1.1.35-1+deb12u1 +libxslt1.1:amd64=1.1.35-1+deb12u1 +libxt-dev:amd64=1:1.2.1-1.1 +libxt6:amd64=1:1.2.1-1.1 +libxxhash0:amd64=0.8.1-1 +libyaml-0-2:amd64=0.2.5-1 +libyaml-dev:amd64=0.2.5-1 +libz3-4:amd64=4.8.12-3.1 +libzstd-dev:amd64=1.5.4+dfsg2-5 +libzstd1:amd64=1.5.4+dfsg2-5 +linux-libc-dev:amd64=6.1.135-1 +llvm-14-linker-tools=1:14.0.6-12 +login=1:4.13+dfsg1-1+b1 +logsave=1.47.0-2 +m4=1.4.19-3 +make=4.3-4.1 +mariadb-common=1:10.11.11-0+deb12u1 +mawk=1.3.4.20200120-3.1 +media-types=10.0.0 +mercurial-common=6.3.2-1+deb12u1 +mercurial=6.3.2-1+deb12u1 +mount=2.38.1-5+deb12u3 +musl-dev:amd64=1.2.3-1 +musl-tools=1.2.3-1 +musl:amd64=1.2.3-1 +mysql-common=5.8+1.1.0 +ncurses-base=6.4-4 +ncurses-bin=6.4-4 +netbase=6.4 +openssh-client=1:9.2p1-2+deb12u5 +openssl=3.0.15-1~deb12u1 +passwd=1:4.13+dfsg1-1+b1 +patch=2.7.6-7 +perl-base=5.36.0-7+deb12u2 +perl-modules-5.36=5.36.0-7+deb12u2 +perl=5.36.0-7+deb12u2 +pinentry-curses=1.2.1-1 +pkg-config:amd64=1.8.1-1 +pkgconf-bin=1.8.1-1 +pkgconf:amd64=1.8.1-1 +procps=2:4.0.2-3 +protobuf-compiler=3.21.12-3 +python3-distutils=3.11.2-3 +python3-lib2to3=3.11.2-3 +python3-minimal=3.11.2-1+b1 +python3.11-minimal=3.11.2-6+deb12u5 +python3.11=3.11.2-6+deb12u5 +python3=3.11.2-1+b1 +readline-common=8.2-1.3 +rpcsvc-proto=1.4.3-1 +sed=4.9-1 +sensible-utils=0.0.17+nmu1 +shared-mime-info=2.2-1 +sq=0.27.0-2+b1 +subversion=1.14.2-4+deb12u1 +sysvinit-utils=3.06-4 +tar=1.34+dfsg-1.2+deb12u1 +tzdata=2025b-0+deb12u1 +ucf=3.0043+nmu1+deb12u1 +unzip=6.0-28 +usr-is-merged=37~deb12u1 +util-linux-extra=2.38.1-5+deb12u3 +util-linux=2.38.1-5+deb12u3 +uuid-dev:amd64=2.38.1-5+deb12u3 +wget=1.21.3-1+deb12u1 +x11-common=1:7.7+23 +x11proto-core-dev=2022.1-1 
+x11proto-dev=2022.1-1 +xorg-sgml-doctools=1:1.11-1.1 +xtrans-dev=1.4.0-1 +xz-utils=5.4.1-1 +zlib1g-dev:amd64=1:1.2.13.dfsg-1 +zlib1g:amd64=1:1.2.13.dfsg-1 diff --git a/verifier/builder/shared/config-qemu.sh b/verifier/builder/shared/config-qemu.sh new file mode 100755 index 00000000..94174a58 --- /dev/null +++ b/verifier/builder/shared/config-qemu.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +# SPDX-FileCopyrightText: © 2025 Phala Network +# +# SPDX-License-Identifier: Apache-2.0 + +BUILD_DIR="$1" +PREFIX="$2" +if [ -z "$BUILD_DIR" ]; then + echo "Usage: $0 " + exit 1 +fi + +mkdir -p "$BUILD_DIR" +cd "$BUILD_DIR" + +export SOURCE_DATE_EPOCH=$(git -C .. log -1 --pretty=%ct) +export CFLAGS="-DDUMP_ACPI_TABLES -Wno-builtin-macro-redefined -D__DATE__=\"\" -D__TIME__=\"\" -D__TIMESTAMP__=\"\"" +export LDFLAGS="-Wl,--build-id=none" + +../configure \ + --prefix="$PREFIX" \ + --target-list=x86_64-softmmu \ + --disable-werror + +echo "" +echo "Build configured for reproducibility in $BUILD_DIR" +echo "To build, run: cd $BUILD_DIR && make" diff --git a/verifier/builder/shared/pin-packages.sh b/verifier/builder/shared/pin-packages.sh new file mode 100755 index 00000000..5aa8ba4a --- /dev/null +++ b/verifier/builder/shared/pin-packages.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +# SPDX-FileCopyrightText: © 2025 Phala Network +# +# SPDX-License-Identifier: Apache-2.0 + +set -e +PKG_LIST=$1 + +echo 'deb [check-valid-until=no] https://snapshot.debian.org/archive/debian/20250626T204007Z bookworm main' > /etc/apt/sources.list +echo 'deb [check-valid-until=no] https://snapshot.debian.org/archive/debian-security/20250626T204007Z bookworm-security main' >> /etc/apt/sources.list +echo 'Acquire::Check-Valid-Until "false";' > /etc/apt/apt.conf.d/10no-check-valid-until + +mkdir -p /etc/apt/preferences.d +while IFS= read -r line; do + pkg=$(echo "$line" | cut -d= -f1) + ver=$(echo "$line" | cut -d= -f2) + if [ -n "$pkg" ] && [ -n "$ver" ]; then + printf 'Package: %s\nPin: version %s\nPin-Priority: 1001\n\n' "$pkg" "$ver" >> /etc/apt/preferences.d/pinned-packages + fi +done < "$PKG_LIST" diff --git a/verifier/builder/shared/pinned-packages.txt b/verifier/builder/shared/pinned-packages.txt new file mode 100644 index 00000000..409c097c --- /dev/null +++ b/verifier/builder/shared/pinned-packages.txt @@ -0,0 +1,108 @@ +adduser=3.134 +apt=2.6.1 +base-files=12.4+deb12u11 +base-passwd=3.6.1 +bash=5.2.15-2+b8 +bsdutils=1:2.38.1-5+deb12u3 +ca-certificates=20230311+deb12u1 +coreutils=9.1-1 +curl=7.88.1-10+deb12u14 +dash=0.5.12-2 +debconf=1.5.82 +debian-archive-keyring=2023.3+deb12u2 +debianutils=5.7-0.5~deb12u1 +diffutils=1:3.8-4 +dpkg=1.21.22 +e2fsprogs=1.47.0-2 +findutils=4.9.0-4 +gcc-12-base:amd64=12.2.0-14+deb12u1 +gpgv=2.2.40-1.1 +grep=3.8-5 +gzip=1.12-1 +hostname=3.23+nmu1 +init-system-helpers=1.65.2 +libacl1:amd64=2.3.1-3 +libapt-pkg6.0:amd64=2.6.1 +libattr1:amd64=1:2.5.1-4 +libaudit-common=1:3.0.9-1 +libaudit1:amd64=1:3.0.9-1 +libblkid1:amd64=2.38.1-5+deb12u3 +libbrotli1:amd64=1.0.9-2+b6 +libbz2-1.0:amd64=1.0.8-5+b1 +libc-bin=2.36-9+deb12u10 +libc6:amd64=2.36-9+deb12u10 +libcap-ng0:amd64=0.8.3-1+b3 +libcap2:amd64=1:2.66-4+deb12u1 +libcom-err2:amd64=1.47.0-2 +libcrypt1:amd64=1:4.4.33-2 +libcurl4:amd64=7.88.1-10+deb12u14 +libdb5.3:amd64=5.3.28+dfsg2-1 +libdebconfclient0:amd64=0.270 +libext2fs2:amd64=1.47.0-2 +libffi8:amd64=3.4.4-1 +libgcc-s1:amd64=12.2.0-14+deb12u1 +libgcrypt20:amd64=1.10.1-3 +libglib2.0-0:amd64=2.74.6-2+deb12u7 +libgmp10:amd64=2:6.2.1+dfsg1-1.1 +libgnutls30:amd64=3.7.9-2+deb12u4 +libgpg-error0:amd64=1.46-1 
+libgssapi-krb5-2:amd64=1.20.1-2+deb12u4 +libhogweed6:amd64=3.8.1-2 +libidn2-0:amd64=2.3.3-1+b1 +libk5crypto3:amd64=1.20.1-2+deb12u4 +libkeyutils1:amd64=1.6.3-2 +libkrb5-3:amd64=1.20.1-2+deb12u4 +libkrb5support0:amd64=1.20.1-2+deb12u4 +libldap-2.5-0:amd64=2.5.13+dfsg-5 +liblz4-1:amd64=1.9.4-1 +liblzma5:amd64=5.4.1-1 +libmd0:amd64=1.0.4-2 +libmount1:amd64=2.38.1-5+deb12u3 +libnettle8:amd64=3.8.1-2 +libnghttp2-14:amd64=1.52.0-1+deb12u2 +libp11-kit0:amd64=0.24.1-2 +libpam-modules-bin=1.5.2-6+deb12u1 +libpam-modules:amd64=1.5.2-6+deb12u1 +libpam-runtime=1.5.2-6+deb12u1 +libpam0g:amd64=1.5.2-6+deb12u1 +libpcre2-8-0:amd64=10.42-1 +libpsl5:amd64=0.21.2-1 +librtmp1:amd64=2.4+20151223.gitfa8646d.1-2+b2 +libsasl2-2:amd64=2.1.28+dfsg-10 +libsasl2-modules-db:amd64=2.1.28+dfsg-10 +libseccomp2:amd64=2.5.4-1+deb12u1 +libselinux1:amd64=3.4-1+b6 +libsemanage-common=3.4-1 +libsemanage2:amd64=3.4-1+b5 +libsepol2:amd64=3.4-2.1 +libslirp0:amd64=4.7.0-1 +libsmartcols1:amd64=2.38.1-5+deb12u3 +libss2:amd64=1.47.0-2 +libssh2-1:amd64=1.10.0-3+b1 +libssl3:amd64=3.0.17-1~deb12u2 +libstdc++6:amd64=12.2.0-14+deb12u1 +libsystemd0:amd64=252.38-1~deb12u1 +libtasn1-6:amd64=4.19.0-2+deb12u1 +libtinfo6:amd64=6.4-4 +libudev1:amd64=252.38-1~deb12u1 +libunistring2:amd64=1.0-2 +libuuid1:amd64=2.38.1-5+deb12u3 +libxxhash0:amd64=0.8.1-1 +libzstd1:amd64=1.5.4+dfsg2-5 +login=1:4.13+dfsg1-1+deb12u1 +logsave=1.47.0-2 +mawk=1.3.4.20200120-3.1 +mount=2.38.1-5+deb12u3 +ncurses-base=6.4-4 +ncurses-bin=6.4-4 +openssl=3.0.17-1~deb12u2 +passwd=1:4.13+dfsg1-1+deb12u1 +perl-base=5.36.0-7+deb12u2 +sed=4.9-1 +sysvinit-utils=3.06-4 +tar=1.34+dfsg-1.2+deb12u1 +tzdata=2025b-0+deb12u1 +usr-is-merged=37~deb12u1 +util-linux-extra=2.38.1-5+deb12u3 +util-linux=2.38.1-5+deb12u3 +zlib1g:amd64=1:1.2.13.dfsg-1 diff --git a/verifier/builder/shared/qemu-pinned-packages.txt b/verifier/builder/shared/qemu-pinned-packages.txt new file mode 100644 index 00000000..1ae0d6b9 --- /dev/null +++ b/verifier/builder/shared/qemu-pinned-packages.txt @@ -0,0 +1,236 @@ +adduser=3.134 +apt=2.6.1 +base-files=12.4+deb12u11 +base-passwd=3.6.1 +bash=5.2.15-2+b8 +binutils-common:amd64=2.40-2 +binutils-x86-64-linux-gnu=2.40-2 +binutils=2.40-2 +bison=2:3.8.2+dfsg-1+b1 +bsdutils=1:2.38.1-5+deb12u3 +build-essential=12.9 +bzip2=1.0.8-5+b1 +ca-certificates=20230311+deb12u1 +coreutils=9.1-1 +cpp-12=12.2.0-14+deb12u1 +cpp=4:12.2.0-3 +dash=0.5.12-2 +debconf=1.5.82 +debian-archive-keyring=2023.3+deb12u2 +debianutils=5.7-0.5~deb12u1 +diffutils=1:3.8-4 +docutils-common=0.19+dfsg-6 +dpkg-dev=1.21.22 +dpkg=1.21.22 +e2fsprogs=1.47.0-2 +findutils=4.9.0-4 +flex=2.6.4-8.2 +fonts-font-awesome=5.0.10+really4.7.0~dfsg-4.1 +fonts-lato=2.0-2.1 +g++-12=12.2.0-14+deb12u1 +g++=4:12.2.0-3 +gcc-12-base:amd64=12.2.0-14+deb12u1 +gcc-12=12.2.0-14+deb12u1 +gcc=4:12.2.0-3 +git-man=1:2.39.5-0+deb12u2 +git=1:2.39.5-0+deb12u2 +gpgv=2.2.40-1.1 +grep=3.8-5 +gzip=1.12-1 +hostname=3.23+nmu1 +init-system-helpers=1.65.2 +libacl1:amd64=2.3.1-3 +libapt-pkg6.0:amd64=2.6.1 +libasan8:amd64=12.2.0-14+deb12u1 +libatomic1:amd64=12.2.0-14+deb12u1 +libattr1:amd64=1:2.5.1-4 +libaudit-common=1:3.0.9-1 +libaudit1:amd64=1:3.0.9-1 +libbinutils:amd64=2.40-2 +libblkid-dev:amd64=2.38.1-5+deb12u3 +libblkid1:amd64=2.38.1-5+deb12u3 +libbrotli1:amd64=1.0.9-2+b6 +libbz2-1.0:amd64=1.0.8-5+b1 +libc-bin=2.36-9+deb12u10 +libc-dev-bin=2.36-9+deb12u13 +libc6-dev:amd64=2.36-9+deb12u13 +libc6:amd64=2.36-9+deb12u13 +libcap-ng0:amd64=0.8.3-1+b3 +libcap2:amd64=1:2.66-4+deb12u1 +libcc1-0:amd64=12.2.0-14+deb12u1 +libcom-err2:amd64=1.47.0-2 
+libcrypt-dev:amd64=1:4.4.33-2 +libcrypt1:amd64=1:4.4.33-2 +libctf-nobfd0:amd64=2.40-2 +libctf0:amd64=2.40-2 +libcurl3-gnutls:amd64=7.88.1-10+deb12u14 +libdb5.3:amd64=5.3.28+dfsg2-1 +libdebconfclient0:amd64=0.270 +libdpkg-perl=1.21.22 +libelf1:amd64=0.188-2.1 +liberror-perl=0.17029-2 +libexpat1:amd64=2.5.0-1+deb12u1 +libext2fs2:amd64=1.47.0-2 +libffi-dev:amd64=3.4.4-1 +libffi8:amd64=3.4.4-1 +libgcc-12-dev:amd64=12.2.0-14+deb12u1 +libgcc-s1:amd64=12.2.0-14+deb12u1 +libgcrypt20:amd64=1.10.1-3 +libgdbm-compat4:amd64=1.23-3 +libgdbm6:amd64=1.23-3 +libglib2.0-0:amd64=2.74.6-2+deb12u7 +libglib2.0-bin=2.74.6-2+deb12u7 +libglib2.0-data=2.74.6-2+deb12u7 +libglib2.0-dev-bin=2.74.6-2+deb12u7 +libglib2.0-dev:amd64=2.74.6-2+deb12u7 +libgmp10:amd64=2:6.2.1+dfsg1-1.1 +libgnutls30:amd64=3.7.9-2+deb12u4 +libgomp1:amd64=12.2.0-14+deb12u1 +libgpg-error0:amd64=1.46-1 +libgprofng0:amd64=2.40-2 +libgssapi-krb5-2:amd64=1.20.1-2+deb12u4 +libhogweed6:amd64=3.8.1-2 +libidn2-0:amd64=2.3.3-1+b1 +libisl23:amd64=0.25-1.1 +libitm1:amd64=12.2.0-14+deb12u1 +libjansson4:amd64=2.14-2 +libjs-jquery=3.6.1+dfsg+~3.5.14-1 +libjs-sphinxdoc=5.3.0-4 +libjs-underscore=1.13.4~dfsg+~1.11.4-3 +libjson-perl=4.10000-1 +libk5crypto3:amd64=1.20.1-2+deb12u4 +libkeyutils1:amd64=1.6.3-2 +libkrb5-3:amd64=1.20.1-2+deb12u4 +libkrb5support0:amd64=1.20.1-2+deb12u4 +libldap-2.5-0:amd64=2.5.13+dfsg-5 +liblsan0:amd64=12.2.0-14+deb12u1 +liblz4-1:amd64=1.9.4-1 +liblzma5:amd64=5.4.1-1 +libmd0:amd64=1.0.4-2 +libmount-dev:amd64=2.38.1-5+deb12u3 +libmount1:amd64=2.38.1-5+deb12u3 +libmpc3:amd64=1.3.1-1 +libmpfr6:amd64=4.2.0-1 +libncursesw6:amd64=6.4-4 +libnettle8:amd64=3.8.1-2 +libnghttp2-14:amd64=1.52.0-1+deb12u2 +libnsl-dev:amd64=1.3.0-2 +libnsl2:amd64=1.3.0-2 +libp11-kit0:amd64=0.24.1-2 +libpam-modules-bin=1.5.2-6+deb12u1 +libpam-modules:amd64=1.5.2-6+deb12u1 +libpam-runtime=1.5.2-6+deb12u1 +libpam0g:amd64=1.5.2-6+deb12u1 +libpcre2-16-0:amd64=10.42-1 +libpcre2-32-0:amd64=10.42-1 +libpcre2-8-0:amd64=10.42-1 +libpcre2-dev:amd64=10.42-1 +libpcre2-posix3:amd64=10.42-1 +libperl5.36:amd64=5.36.0-7+deb12u2 +libpkgconf3:amd64=1.8.1-1 +libpsl5:amd64=0.21.2-1 +libpython3-stdlib:amd64=3.11.2-1+b1 +libpython3.11-minimal:amd64=3.11.2-6+deb12u6 +libpython3.11-stdlib:amd64=3.11.2-6+deb12u6 +libquadmath0:amd64=12.2.0-14+deb12u1 +libreadline8:amd64=8.2-1.3 +librtmp1:amd64=2.4+20151223.gitfa8646d.1-2+b2 +libsasl2-2:amd64=2.1.28+dfsg-10 +libsasl2-modules-db:amd64=2.1.28+dfsg-10 +libseccomp2:amd64=2.5.4-1+deb12u1 +libselinux1-dev:amd64=3.4-1+b6 +libselinux1:amd64=3.4-1+b6 +libsemanage-common=3.4-1 +libsemanage2:amd64=3.4-1+b5 +libsepol-dev:amd64=3.4-2.1 +libsepol2:amd64=3.4-2.1 +libslirp-dev:amd64=4.7.0-1 +libslirp0:amd64=4.7.0-1 +libsmartcols1:amd64=2.38.1-5+deb12u3 +libsqlite3-0:amd64=3.40.1-2+deb12u2 +libss2:amd64=1.47.0-2 +libssh2-1:amd64=1.10.0-3+b1 +libssl3:amd64=3.0.17-1~deb12u2 +libstdc++-12-dev:amd64=12.2.0-14+deb12u1 +libstdc++6:amd64=12.2.0-14+deb12u1 +libsystemd0:amd64=252.38-1~deb12u1 +libtasn1-6:amd64=4.19.0-2+deb12u1 +libtinfo6:amd64=6.4-4 +libtirpc-common=1.3.3+ds-1 +libtirpc-dev:amd64=1.3.3+ds-1 +libtirpc3:amd64=1.3.3+ds-1 +libtsan2:amd64=12.2.0-14+deb12u1 +libubsan1:amd64=12.2.0-14+deb12u1 +libudev1:amd64=252.38-1~deb12u1 +libunistring2:amd64=1.0-2 +libuuid1:amd64=2.38.1-5+deb12u3 +libxxhash0:amd64=0.8.1-1 +libzstd1:amd64=1.5.4+dfsg2-5 +linux-libc-dev:amd64=6.1.148-1 +login=1:4.13+dfsg1-1+deb12u1 +logsave=1.47.0-2 +m4=1.4.19-3 +make=4.3-4.1 +mawk=1.3.4.20200120-3.1 +media-types=10.0.0 +mount=2.38.1-5+deb12u3 +ncurses-base=6.4-4 +ncurses-bin=6.4-4 
+ninja-build=1.11.1-2~deb12u1 +openssl=3.0.17-1~deb12u2 +passwd=1:4.13+dfsg1-1+deb12u1 +patch=2.7.6-7 +perl-base=5.36.0-7+deb12u2 +perl-modules-5.36=5.36.0-7+deb12u2 +perl=5.36.0-7+deb12u2 +pkg-config:amd64=1.8.1-1 +pkgconf-bin=1.8.1-1 +pkgconf:amd64=1.8.1-1 +python-babel-localedata=2.10.3-1 +python3-alabaster=0.7.12-1 +python3-babel=2.10.3-1 +python3-certifi=2022.9.24-1 +python3-chardet=5.1.0+dfsg-2 +python3-charset-normalizer=3.0.1-2 +python3-distutils=3.11.2-3 +python3-docutils=0.19+dfsg-6 +python3-idna=3.3-1+deb12u1 +python3-imagesize=1.4.1-1 +python3-jinja2=3.1.2-1+deb12u3 +python3-lib2to3=3.11.2-3 +python3-markupsafe=2.1.2-1+b1 +python3-minimal=3.11.2-1+b1 +python3-packaging=23.0-1 +python3-pip=23.0.1+dfsg-1 +python3-pkg-resources=66.1.1-1+deb12u2 +python3-pygments=2.14.0+dfsg-1 +python3-requests=2.28.1+dfsg-1 +python3-roman=3.3-3 +python3-setuptools=66.1.1-1+deb12u2 +python3-six=1.16.0-4 +python3-snowballstemmer=2.2.0-2 +python3-sphinx-rtd-theme=1.2.0+dfsg-1 +python3-sphinx=5.3.0-4 +python3-tz=2022.7.1-4 +python3-urllib3=1.26.12-1+deb12u1 +python3-wheel=0.38.4-2 +python3.11-minimal=3.11.2-6+deb12u6 +python3.11=3.11.2-6+deb12u6 +python3=3.11.2-1+b1 +readline-common=8.2-1.3 +rpcsvc-proto=1.4.3-1 +sed=4.9-1 +sgml-base=1.31 +sphinx-common=5.3.0-4 +sphinx-rtd-theme-common=1.2.0+dfsg-1 +sysvinit-utils=3.06-4 +tar=1.34+dfsg-1.2+deb12u1 +tzdata=2025b-0+deb12u1 +usr-is-merged=37~deb12u1 +util-linux-extra=2.38.1-5+deb12u3 +util-linux=2.38.1-5+deb12u3 +uuid-dev:amd64=2.38.1-5+deb12u3 +xml-core=0.18+nmu1 +xz-utils=5.4.1-1 +zlib1g-dev:amd64=1:1.2.13.dfsg-1 +zlib1g:amd64=1:1.2.13.dfsg-1 diff --git a/verifier/dstack-verifier.toml b/verifier/dstack-verifier.toml new file mode 100644 index 00000000..c53b5351 --- /dev/null +++ b/verifier/dstack-verifier.toml @@ -0,0 +1,19 @@ +# SPDX-FileCopyrightText: © 2024-2025 Phala Network +# +# SPDX-License-Identifier: Apache-2.0 + +# Server configuration +address = "0.0.0.0" +port = 8080 + +# Image cache directory for OS image verification +image_cache_dir = "/tmp/dstack-verifier/cache" + +# Image download URL template (replace {OS_IMAGE_HASH} with actual hash) +image_download_url = "https://dstack-images.phala.network/mr_{OS_IMAGE_HASH}.tar.gz" + +# Image download timeout in seconds +image_download_timeout_secs = 300 + +# Optional PCCS URL for quote verification +# pccs_url = "https://pccs.phala.network" \ No newline at end of file diff --git a/verifier/fixtures/quote-report.json b/verifier/fixtures/quote-report.json new file mode 100644 index 00000000..93624477 --- /dev/null +++ b/verifier/fixtures/quote-report.json @@ -0,0 +1 @@ 
+{"quote":"040002008100000000000000939a7233f79c4ca9940a0db3957f06071eadadc7f30fb7f911d24aa522afc590000000000b0104000000000000000000000000007bf063280e94fb051f5dd7b1fc59ce9aac42bb961df8d44b709c9b0ff87a7b4df648657ba6d1189589feab1d5a3c9a9d00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000e702060000000000b24d3b24e9e3c16012376b52362ca09856c4adecb709d5fac33addf1c47e193da075b125b6c364115771390a5461e2170000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002e3843265f8ecdd4e2282694747f6f2f111605c33f2a8882f5734ee6f3a6ce63d8f34aeef06093dcda76fa5f9d33d8d6a1b79d76021970f57c45c4a7c395f780bab37011a4df27fe44e8559bd1abb4d6e52f12f866d1d08405448eb797a5970f1e31b59d605df7ee8160cf7966be9bafa6d0e1905de7e09695a24cd9748e71a603a51fae1297619fa0c30517addbcd070f787c3877f3e95095d5a4d13dd0fe0233803b30120d8469866719dc28f519ce021fe1e53459121e7a5a4443147185a812340000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000cc100000aa59a2ab97a78a0401ffc862efb76aa25eb7921915c96f1e284737ac287c30e9a2eeddd04176329c840ab282c0347659cf19681d8c205cec2185e9fa40b673002982655d89dbd3867e7370e8b1b27bbae5eb5f24dfaceea8a2ff9ad71161930c379cef3c7360ef97468031741483798585c1befb2f1d9827d2eb7a22299a01270600461000000404191b04ff0006000000000000000000000000000000000000000000000000000000000000000000000000000000001500000000000000e700000000000000e5a3a7b5d830c2953b98534c6c59a3a34fdc34e933f7f5898f0a85cf08846bca0000000000000000000000000000000000000000000000000000000000000000dc9e2a7c6f948f17474e34a7fc43ed030f7c1563f1babddf6340c82e0e54a8c5000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020006000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005be61ea67e69e2411dd59d258969727c5cd13f082b59b3dcc4721e2f7c3b7a4e0000000000000000000000000000000000000000000000000000000000000000a7d05873f18690a9dfa580c695e1c0bd4dde53423bbbed02ed798b8d8b0a727dc92cf12582189873197ef784fd97cd44feb2fee19d388bc5be0abf4231586ba72000000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f05005e0e00002d2d2d2d2d424547494e2043455254494649434154452d2d2d2d2d0a4d49494538544343424a65674177494241674956414c7142567a73712f787369354e387578426f59356c3641515a31444d416f4743437147534d343942414d430a4d484178496a416742674e5642414d4d47556c756447567349464e4857434251513073675547786864475a76636d306751304578476a415942674e5642416f4d0a45556c756447567349454e76636e4276636d4630615739754d5251774567594456515148444174545957353059534244624746795954454c4d416b47413155450a4341774351304578437a414a42674e5642415954416c56544d423458445449314d446b784e6a41794d6a67784e566f5844544d794d446b784e6a41794d6a67780a4e566f77634445694d434147413155454177775a535735305a5777675530645949464244537942445a584a3061575a70593246305a5445614d426747413155450a43677752535735305a577767513239796347397959585270623234784644415342674e564241634d43314e68626e526849454e7359584a684d517377435159440a5651514944414a445154454c4d416b474131554542684d4356564d775754415442676371686b6a4f5051494242676771686b6a4f50514d4242774e43414151380a39455a4b755278457952677a5a5873542b3079304346342b31683453582f6a
54554c644f6771637275466b5033354750346562383634517361797779345877440a42755a65434b664569484e57356f3431353959646f3449444444434341776777487759445652306a42426777466f41556c5739647a62306234656c4153636e550a3944504f4156634c336c5177617759445652306642475177596a42676f46366758495a616148523063484d364c79396863476b7564484a316333526c5a484e6c0a636e5a705932567a4c6d6c75644756734c6d4e766253397a5a3367765932567964476c6d61574e6864476c76626939324e4339775932746a636d772f593245390a6347786864475a76636d306d5a57356a62325270626d63395a4756794d42304741315564446751574242525a63752f70597452454655736837726857544952750a2b446974786a414f42674e56485138424166384542414d434273417744415944565230544151482f4241497741444343416a6b4743537147534962345451454e0a4151534341696f776767496d4d42344743697147534962345451454e4151454545496c395072647571496533672f6d7450516478413073776767466a42676f710a686b69472b453042445145434d494942557a415142677371686b69472b45304244514543415149424244415142677371686b69472b45304244514543416749420a4244415142677371686b69472b4530424451454341774942416a415142677371686b69472b4530424451454342414942416a415142677371686b69472b4530420a44514543425149424244415142677371686b69472b45304244514543426749424154415142677371686b69472b453042445145434277494241444151426773710a686b69472b45304244514543434149424254415142677371686b69472b45304244514543435149424144415142677371686b69472b45304244514543436749420a4144415142677371686b69472b45304244514543437749424144415142677371686b69472b45304244514543444149424144415142677371686b69472b4530420a44514543445149424144415142677371686b69472b45304244514543446749424144415142677371686b69472b453042445145434477494241444151426773710a686b69472b45304244514543454149424144415142677371686b69472b45304244514543455149424454416642677371686b69472b45304244514543456751510a42415143416751424141554141414141414141414144415142676f71686b69472b45304244514544424149414144415542676f71686b69472b453042445145450a4241615177473841414141774477594b4b6f5a496876684e4151304242516f424154416542676f71686b69472b45304244514547424243764f3535314c75626a0a2b49363352564e713558734e4d45514743697147534962345451454e415163774e6a415142677371686b69472b45304244514548415145422f7a4151426773710a686b69472b45304244514548416745422f7a415142677371686b69472b45304244514548417745422f7a414b42676771686b6a4f5051514441674e49414442460a41694541366748457a39306f4c30362b4c6f414442307261326f587943333453504c4d6e35434e473569783862303043494131304d37796f79537a6a755178440a75617a6f505048722f745862432f64762b6b384362314175656b4a650a2d2d2d2d2d454e442043455254494649434154452d2d2d2d2d0a2d2d2d2d2d424547494e2043455254494649434154452d2d2d2d2d0a4d4949436c6a4343416a32674177494241674956414a567658633239472b487051456e4a3150517a7a674658433935554d416f4743437147534d343942414d430a4d476778476a415942674e5642414d4d45556c756447567349464e48574342536232393049454e424d526f77474159445651514b4442464a626e526c624342440a62334a7762334a6864476c76626a45554d424947413155454277774c553246756447456751327868636d4578437a414a42674e564241674d416b4e424d5173770a435159445651514745774a56557a4165467730784f4441314d6a45784d4455774d5442614677307a4d7a41314d6a45784d4455774d5442614d484178496a41670a42674e5642414d4d47556c756447567349464e4857434251513073675547786864475a76636d306751304578476a415942674e5642416f4d45556c75644756730a49454e76636e4276636d4630615739754d5251774567594456515148444174545957353059534244624746795954454c4d416b474131554543417743513045780a437a414a42674e5642415954416c56544d466b77457759484b6f5a497a6a3043415159494b6f5a497a6a304441516344516741454e53422f377432316c58534f0a3243757a7078773734654a423732457944476757357258437478327456544c7136684b6
b367a2b5569525a436e71523770734f766771466553786c6d546c4a6c0a65546d693257597a33714f42757a43427544416642674e5648534d4547444157674251695a517a575770303069664f44744a5653763141624f536347724442530a42674e5648523845537a424a4d45656752614244686b466f64485277637a6f764c324e6c636e52705a6d6c6a5958526c63793530636e567a6447566b633256790a646d6c6a5a584d75615735305a577775593239744c306c756447567355306459556d397664454e424c6d526c636a416442674e5648513445466751556c5739640a7a62306234656c4153636e553944504f4156634c336c517744675944565230504151482f42415144416745474d42494741315564457745422f7751494d4159420a4166384341514177436759494b6f5a497a6a30454177494452774177524149675873566b6930772b6936565947573355462f32327561586530594a446a3155650a6e412b546a44316169356343494359623153416d4435786b66545670766f34556f79695359787244574c6d5552344349394e4b7966504e2b0a2d2d2d2d2d454e442043455254494649434154452d2d2d2d2d0a2d2d2d2d2d424547494e2043455254494649434154452d2d2d2d2d0a4d4949436a7a4343416a53674177494241674955496d554d316c71644e496e7a6737535655723951477a6b6e42717777436759494b6f5a497a6a3045417749770a614445614d4267474131554541777752535735305a5777675530645949464a766233516751304578476a415942674e5642416f4d45556c756447567349454e760a636e4276636d4630615739754d5251774567594456515148444174545957353059534244624746795954454c4d416b47413155454341774351304578437a414a0a42674e5642415954416c56544d423458445445344d4455794d5445774e4455784d466f58445451354d54497a4d54497a4e546b314f566f77614445614d4267470a4131554541777752535735305a5777675530645949464a766233516751304578476a415942674e5642416f4d45556c756447567349454e76636e4276636d46300a615739754d5251774567594456515148444174545957353059534244624746795954454c4d416b47413155454341774351304578437a414a42674e56424159540a416c56544d466b77457759484b6f5a497a6a3043415159494b6f5a497a6a3044415163445167414543366e45774d4449595a4f6a2f69505773437a61454b69370a314f694f534c52466857476a626e42564a66566e6b59347533496a6b4459594c304d784f346d717379596a6c42616c54565978465032734a424b357a6c4b4f420a757a43427544416642674e5648534d4547444157674251695a517a575770303069664f44744a5653763141624f5363477244425342674e5648523845537a424a0a4d45656752614244686b466f64485277637a6f764c324e6c636e52705a6d6c6a5958526c63793530636e567a6447566b63325679646d6c6a5a584d75615735300a5a577775593239744c306c756447567355306459556d397664454e424c6d526c636a416442674e564851344546675155496d554d316c71644e496e7a673753560a55723951477a6b6e4271777744675944565230504151482f42415144416745474d42494741315564457745422f7751494d4159424166384341514577436759490a4b6f5a497a6a3045417749445351417752674968414f572f35516b522b533943695344634e6f6f774c7550524c735747662f59693747535839344267775477670a41694541344a306c72486f4d732b586f356f2f7358364f39515778485241765a55474f6452513763767152586171493d0a2d2d2d2d2d454e442043455254494649434154452d2d2d2d2d0a0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","event_log":"[{\"imr\":0,\"event_type\":2147483659,\"digest\":\"8ae1e425351df7992c444586eff99d35af3b779aa2b0e981cb4b73bc5b279f2ade19b6a62a203fc3c3bbdaae80af596d\",\"event\":\"\",\"event_payload\":\"095464785461626c65000100000000000000af96bb93f2b9b84e9462e0ba745642360090800000000000\"},{\"imr\":0,\"event_type\":2147483658,\"digest\":\"344bc51c980ba621aaa00da3ed7436f7d6e549197dfe699515dfa2c6583d95e6412af21c097d473155875ffd561d6790\",\"event\":\"\",\"event_payload\":\"2946762858585858585858582d585858582d585858582d585858582d58585858585858585858585829000000c0ff000000000040080000000000\"},{\"imr\":0,\"event_type\":2147483649,\"digest\":\"
9dc3a1f80bcec915391dcda5ffbb15e7419f77eab462bbf72b42166fb70d50325e37b36f93537a863769bcf9bedae6fb\",\"event\":\"\",\"event_payload\":\"61dfe48bca93d211aa0d00e098032b8c0a00000000000000000000000000000053006500630075007200650042006f006f007400\"},{\"imr\":0,\"event_type\":2147483649,\"digest\":\"6f2e3cbc14f9def86980f5f66fd85e99d63e69a73014ed8a5633ce56eca5b64b692108c56110e22acadcef58c3250f1b\",\"event\":\"\",\"event_payload\":\"61dfe48bca93d211aa0d00e098032b8c0200000000000000000000000000000050004b00\"},{\"imr\":0,\"event_type\":2147483649,\"digest\":\"d607c0efb41c0d757d69bca0615c3a9ac0b1db06c557d992e906c6b7dee40e0e031640c7bfd7bcd35844ef9edeadc6f9\",\"event\":\"\",\"event_payload\":\"61dfe48bca93d211aa0d00e098032b8c030000000000000000000000000000004b0045004b00\"},{\"imr\":0,\"event_type\":2147483649,\"digest\":\"08a74f8963b337acb6c93682f934496373679dd26af1089cb4eaf0c30cf260a12e814856385ab8843e56a9acea19e127\",\"event\":\"\",\"event_payload\":\"cbb219d73a3d9645a3bcdad00e67656f0200000000000000000000000000000064006200\"},{\"imr\":0,\"event_type\":2147483649,\"digest\":\"18cc6e01f0c6ea99aa23f8a280423e94ad81d96d0aeb5180504fc0f7a40cb3619dd39bd6a95ec1680a86ed6ab0f9828d\",\"event\":\"\",\"event_payload\":\"cbb219d73a3d9645a3bcdad00e67656f03000000000000000000000000000000640062007800\"},{\"imr\":0,\"event_type\":4,\"digest\":\"394341b7182cd227c5c6b07ef8000cdfd86136c4292b8e576573ad7ed9ae41019f5818b4b971c9effc60e1ad9f1289f0\",\"event\":\"\",\"event_payload\":\"00000000\"},{\"imr\":0,\"event_type\":10,\"digest\":\"2065dd48d647e4377db277ba203526901a17845e93e0df4c2dfc3ce136e0910324ead1e1c86b8d90c2acdf9c85ffac53\",\"event\":\"\",\"event_payload\":\"414350492044415441\"},{\"imr\":0,\"event_type\":10,\"digest\":\"772b0169c66b52e4453fff9e3c6257635ea950ebcc8edd7ef2e2f8241cf6a155f39df01a7c7a194b6bc0abe5de11861d\",\"event\":\"\",\"event_payload\":\"414350492044415441\"},{\"imr\":0,\"event_type\":10,\"digest\":\"abfb2256644b5786eefdcb92303d2008c36cb9500d98997e215ef5080745d4bf2e5b3629090918e193e7f05b173d48c5\",\"event\":\"\",\"event_payload\":\"414350492044415441\"},{\"imr\":1,\"event_type\":2147483651,\"digest\":\"0761fbfa317a42d8edbe9e404178d102adc059cface98c5e07d1d535371c145c3497fd2a19b8398568b8c8a6f95e0a86\",\"event\":\"\",\"event_payload\":\"18400d7b0000000000d47d000000000000000000000000002a000000000000000403140072f728144ab61e44b8c39ebdd7f893c7040412006b00650072006e0065006c0000007fff0400\"},{\"imr\":0,\"event_type\":2147483650,\"digest\":\"1dd6f7b457ad880d840d41c961283bab688e94e4b59359ea45686581e90feccea3c624b1226113f824f315eb60ae0a7c\",\"event\":\"\",\"event_payload\":\"61dfe48bca93d211aa0d00e098032b8c0900000000000000020000000000000042006f006f0074004f0072006400650072000000\"},{\"imr\":0,\"event_type\":2147483650,\"digest\":\"23ada07f5261f12f34a0bd8e46760962d6b4d576a416f1fea1c64bc656b1d28eacf7047ae6e967c58fd2a98bfa74c298\",\"event\":\"\",\"event_payload\":\"61dfe48bca93d211aa0d00e098032b8c08000000000000003e0000000000000042006f006f0074003000300030003000090100002c0055006900410070007000000004071400c9bdb87cebf8344faaea3ee4af6516a10406140021aa2c4614760345836e8ab6f46623317fff0400\"},{\"imr\":1,\"event_type\":2147483655,\"digest\":\"77a0dab2312b4e1e57a84d865a21e5b2ee8d677a21012ada819d0a98988078d3d740f6346bfe0abaa938ca20439a8d71\",\"event\":\"\",\"event_payload\":\"43616c6c696e6720454649204170706c69636174696f6e2066726f6d20426f6f74204f7074696f6e\"},{\"imr\":1,\"event_type\":4,\"digest\":\"394341b7182cd227c5c6b07ef8000cdfd86136c4292b8e576573ad7ed9ae41019f5818b4b971c9effc60e1ad9f1289f0\",\"event\":\"\",\"event_payload\":\"00000
000\"},{\"imr\":2,\"event_type\":6,\"digest\":\"4027cb4ec64dbc24b6d98d9470daeefc749bbb6a9b011762d215f6ed3eb833d58fd72d9ad850958f72878182e6f61924\",\"event\":\"\",\"event_payload\":\"ed223b8f1a0000004c4f414445445f494d4147453a3a4c6f61644f7074696f6e7300\"},{\"imr\":2,\"event_type\":6,\"digest\":\"63e06e29cf98f2fce71abd3a9629dff48457b47c010b64e11f7a2b42dd99bfa14ee35660b3f5d3fc376261d6ba9a6d6b\",\"event\":\"\",\"event_payload\":\"ec223b8f0d0000004c696e757820696e6974726400\"},{\"imr\":1,\"event_type\":2147483655,\"digest\":\"214b0bef1379756011344877743fdc2a5382bac6e70362d624ccf3f654407c1b4badf7d8f9295dd3dabdef65b27677e0\",\"event\":\"\",\"event_payload\":\"4578697420426f6f7420536572766963657320496e766f636174696f6e\"},{\"imr\":1,\"event_type\":2147483655,\"digest\":\"0a2e01c85deae718a530ad8c6d20a84009babe6c8989269e950d8cf440c6e997695e64d455c4174a652cd080f6230b74\",\"event\":\"\",\"event_payload\":\"4578697420426f6f742053657276696365732052657475726e656420776974682053756363657373\"},{\"imr\":3,\"event_type\":134217729,\"digest\":\"f9974020ef507068183313d0ca808e0d1ca9b2d1ad0c61f5784e7157c362c06536f5ddacdad4451693f48fcc72fff624\",\"event\":\"system-preparing\",\"event_payload\":\"\"},{\"imr\":3,\"event_type\":134217729,\"digest\":\"837c2dd72f8a4c740159e5e042ed79b7eaaa5ab3a151a45e27bc366bb8b27e6c3faec87aab1e95197d3e6d23308d448c\",\"event\":\"app-id\",\"event_payload\":\"3763bc34552cf3a27ff71ad5f7a90471562a1a2d\"},{\"imr\":3,\"event_type\":134217729,\"digest\":\"b883bee0b216618b1ce0e7a1bb4a9379b486cef8aadf0c682cb6e80c083f7982dbf104183c24a74693d860f4ffc8b72f\",\"event\":\"compose-hash\",\"event_payload\":\"3763bc34552cf3a27ff71ad5f7a90471562a1a2df552dfc1998cba2d60da27e7\"},{\"imr\":3,\"event_type\":134217729,\"digest\":\"9af8567194629f6798aafa76d95427bb7e84864145ee79fdf4ca29f5c743c159379c1c805934decfa513821edaa77fb7\",\"event\":\"instance-id\",\"event_payload\":\"c3714eb66990eace777b4e664c16e09375dec4c9\"},{\"imr\":3,\"event_type\":134217729,\"digest\":\"98bd7e6bd3952720b65027fd494834045d06b4a714bf737a06b874638b3ea00ff402f7f583e3e3b05e921c8570433ac6\",\"event\":\"boot-mr-done\",\"event_payload\":\"\"},{\"imr\":3,\"event_type\":134217729,\"digest\":\"74ca939b8c3c74aab3c30966a788f7743951d54a936a711dd01422f003ff9df6666f3cc54975d2e4f35c829865583f0f\",\"event\":\"key-provider\",\"event_payload\":\"7b226e616d65223a226c6f63616c2d736778222c226964223a2231623761343933373834303332343962363938366139303738343463616230393231656361333264643437653635376633633130333131636361656363663862227d\"},{\"imr\":3,\"event_type\":134217729,\"digest\":\"1a76b2a80a0be71eae59f80945d876351a7a3fb8e9fd1ff1cede5734aa84ea11fd72b4edfbb6f04e5a85edd114c751bd\",\"event\":\"system-ready\",\"event_payload\":\"\"},{\"imr\":3,\"event_type\":134217729,\"digest\":\"64c2c025c0e916a1802e8beee830954fe5693f3fb0f2ffb077d7d3f149c5525e2c1bfb0a15046b84f4038ba6f152588f\",\"event\":\"LIUM_MINER_HOTKEY\",\"event_payload\":\"35443333507467666b475951734d4c434d724b426a56454d54455371525944466666543672396a4264614833654c7434\"}]","report_data":"12340000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","vm_config":"{\"spec_version\": 1, \"os_image_hash\": \"14ad42d0270b444eaeb53918a5a94d9b17eec7a817cd336173b17c5327541c67\", \"cpu_count\": 16, \"memory_size\": 68719476736, \"qemu_single_pass_add_pages\": false, \"pic\": false, \"pci_hole64_size\": 17592186044416, \"num_gpus\": 1, \"num_nvswitches\": 0, \"hugepages\": false, \"hotplug_off\": true, \"qemu_version\": \"9.2.1\"}"} diff --git 
a/verifier/src/main.rs b/verifier/src/main.rs new file mode 100644 index 00000000..f21594e4 --- /dev/null +++ b/verifier/src/main.rs @@ -0,0 +1,241 @@ +// SPDX-FileCopyrightText: © 2024-2025 Phala Network +// +// SPDX-License-Identifier: Apache-2.0 + +use std::sync::Arc; + +use clap::Parser; +use figment::{ + providers::{Env, Format, Toml}, + Figment, +}; +use rocket::{fairing::AdHoc, get, post, serde::json::Json, State}; +use serde::{Deserialize, Serialize}; +use tracing::{error, info}; + +mod types; +mod verification; + +use types::{VerificationRequest, VerificationResponse}; +use verification::CvmVerifier; + +#[derive(Parser)] +#[command(name = "dstack-verifier")] +#[command(about = "HTTP server providing CVM verification services")] +struct Cli { + #[arg(short, long, default_value = "dstack-verifier.toml")] + config: String, + + /// Oneshot mode: verify a single report JSON file and exit + #[arg(long, value_name = "FILE")] + verify: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct Config { + pub address: String, + pub port: u16, + pub image_cache_dir: String, + pub pccs_url: Option, + pub image_download_url: String, + pub image_download_timeout_secs: u64, +} + +#[post("/verify", data = "")] +async fn verify_cvm( + verifier: &State>, + request: Json, +) -> Json { + match verifier.verify(&request.into_inner()).await { + Ok(response) => Json(response), + Err(e) => { + error!("Verification failed: {:?}", e); + Json(VerificationResponse { + is_valid: false, + details: types::VerificationDetails { + quote_verified: false, + event_log_verified: false, + os_image_hash_verified: false, + report_data: None, + tcb_status: None, + advisory_ids: vec![], + app_info: None, + acpi_tables: None, + rtmr_debug: None, + }, + reason: Some(format!("Internal error: {}", e)), + }) + } + } +} + +#[get("/health")] +fn health() -> Json { + Json(serde_json::json!({ + "status": "ok", + "service": "dstack-verifier" + })) +} + +async fn run_oneshot(file_path: &str, config: &Config) -> anyhow::Result<()> { + use std::fs; + + info!("Running in oneshot mode for file: {}", file_path); + + // Read the JSON file + let content = fs::read_to_string(file_path) + .map_err(|e| anyhow::anyhow!("Failed to read file {}: {}", file_path, e))?; + + // Parse as VerificationRequest + let mut request: VerificationRequest = serde_json::from_str(&content) + .map_err(|e| anyhow::anyhow!("Failed to parse JSON: {}", e))?; + + // Ensure PCCS URL is populated from config when the report omits it + request.pccs_url = request.pccs_url.or_else(|| config.pccs_url.clone()); + + // Create verifier + let verifier = CvmVerifier::new( + config.image_cache_dir.clone(), + config.image_download_url.clone(), + std::time::Duration::from_secs(config.image_download_timeout_secs), + ); + + // Run verification + info!("Starting verification..."); + let response = verifier.verify(&request).await?; + + // Persist response next to the input file for convenience + let output_path = format!("{file_path}.verification.json"); + let serialized = serde_json::to_string_pretty(&response) + .map_err(|e| anyhow::anyhow!("Failed to encode verification result: {}", e))?; + fs::write(&output_path, serialized).map_err(|e| { + anyhow::anyhow!( + "Failed to write verification result to {}: {}", + output_path, + e + ) + })?; + info!("Stored verification result at {}", output_path); + + // Output results + println!("\n=== Verification Results ==="); + println!("Valid: {}", response.is_valid); + println!("Quote verified: {}", 
response.details.quote_verified); + println!( + "Event log verified: {}", + response.details.event_log_verified + ); + println!( + "OS image hash verified: {}", + response.details.os_image_hash_verified + ); + + if let Some(tcb_status) = &response.details.tcb_status { + println!("TCB status: {}", tcb_status); + } + + if !response.details.advisory_ids.is_empty() { + println!("Advisory IDs: {:?}", response.details.advisory_ids); + } + + if let Some(reason) = &response.reason { + println!("Reason: {}", reason); + } + + if let Some(report_data) = &response.details.report_data { + println!("Report data: {}", report_data); + } + + if let Some(app_info) = &response.details.app_info { + println!("\n=== App Info ==="); + println!("App ID: {}", hex::encode(&app_info.app_id)); + println!("Instance ID: {}", hex::encode(&app_info.instance_id)); + println!("Compose hash: {}", hex::encode(&app_info.compose_hash)); + println!("MRTD: {}", hex::encode(app_info.mrtd)); + println!("RTMR0: {}", hex::encode(app_info.rtmr0)); + println!("RTMR1: {}", hex::encode(app_info.rtmr1)); + println!("RTMR2: {}", hex::encode(app_info.rtmr2)); + } + + // Exit with appropriate code + if !response.is_valid { + std::process::exit(1); + } + + Ok(()) +} + +#[rocket::launch] +fn rocket() -> _ { + tracing_subscriber::fmt::init(); + + let cli = Cli::parse(); + + let default_config_str = include_str!("../dstack-verifier.toml"); + + let figment = Figment::from(rocket::Config::default()) + .merge(Toml::string(default_config_str)) + .merge(Toml::file(&cli.config)) + .merge(Env::prefixed("DSTACK_VERIFIER_")); + + let config: Config = figment.extract().expect("Failed to load configuration"); + + // Check for oneshot mode + if let Some(file_path) = cli.verify { + // Run oneshot verification and exit + let rt = tokio::runtime::Runtime::new().expect("Failed to create runtime"); + rt.block_on(async { + if let Err(e) = run_oneshot(&file_path, &config).await { + error!("Oneshot verification failed: {:#}", e); + std::process::exit(1); + } + }); + std::process::exit(0); + } + + let verifier = Arc::new(CvmVerifier::new( + config.image_cache_dir.clone(), + config.image_download_url.clone(), + std::time::Duration::from_secs(config.image_download_timeout_secs), + )); + + rocket::custom(figment) + .mount("/", rocket::routes![verify_cvm, health]) + .manage(verifier) + .attach(AdHoc::on_liftoff("Startup", |_| { + Box::pin(async { + info!("dstack-verifier started successfully"); + }) + })) +} + +#[cfg(test)] +mod tests { + use super::*; + use rocket::http::{ContentType, Status}; + use rocket::local::asynchronous::Client; + + #[tokio::test] + async fn test_health_endpoint() { + let client = Client::tracked(rocket()) + .await + .expect("valid rocket instance"); + let response = client.get("/health").dispatch().await; + assert_eq!(response.status(), Status::Ok); + } + + #[tokio::test] + async fn test_verify_endpoint_invalid_request() { + let client = Client::tracked(rocket()) + .await + .expect("valid rocket instance"); + let response = client + .post("/verify") + .header(ContentType::JSON) + .body(r#"{"invalid": "request"}"#) + .dispatch() + .await; + + assert_eq!(response.status(), Status::UnprocessableEntity); + } +} diff --git a/verifier/src/types.rs b/verifier/src/types.rs new file mode 100644 index 00000000..28bdfe20 --- /dev/null +++ b/verifier/src/types.rs @@ -0,0 +1,80 @@ +// SPDX-FileCopyrightText: © 2024-2025 Phala Network +// +// SPDX-License-Identifier: Apache-2.0 + +use ra_tls::attestation::AppInfo; +use serde::{Deserialize, Serialize}; + 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct VerificationRequest { + pub quote: String, + pub event_log: String, + pub vm_config: String, + pub pccs_url: Option, +} + +#[derive(Debug, Clone, Serialize)] +pub struct VerificationResponse { + pub is_valid: bool, + pub details: VerificationDetails, + pub reason: Option, +} + +#[derive(Debug, Clone, Serialize)] +pub struct VerificationDetails { + pub quote_verified: bool, + pub event_log_verified: bool, + pub os_image_hash_verified: bool, + pub report_data: Option, + pub tcb_status: Option, + pub advisory_ids: Vec, + pub app_info: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub acpi_tables: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub rtmr_debug: Option>, +} + +#[derive(Debug, Clone, Serialize)] +pub struct AcpiTables { + pub tables: String, + pub rsdp: String, + pub loader: String, +} + +#[derive(Debug, Clone, Serialize)] +pub struct RtmrMismatch { + pub rtmr: String, + pub expected: String, + pub actual: String, + pub events: Vec, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub missing_expected_digests: Vec, +} + +#[derive(Debug, Clone, Serialize)] +pub struct RtmrEventEntry { + pub index: usize, + pub event_type: u32, + pub event_name: String, + pub actual_digest: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub expected_digest: Option, + pub payload_len: usize, + pub status: RtmrEventStatus, +} + +#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "snake_case")] +pub enum RtmrEventStatus { + Match, + Mismatch, + Extra, + Missing, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ErrorResponse { + pub error: String, + pub details: Option, +} diff --git a/verifier/src/verification.rs b/verifier/src/verification.rs new file mode 100644 index 00000000..e36f4b07 --- /dev/null +++ b/verifier/src/verification.rs @@ -0,0 +1,620 @@ +// SPDX-FileCopyrightText: © 2024-2025 Phala Network +// +// SPDX-License-Identifier: Apache-2.0 + +use std::{ffi::OsStr, path::Path, time::Duration}; + +use anyhow::{bail, Context, Result}; +use cc_eventlog::TdxEventLog as EventLog; +use dstack_mr::RtmrLog; +use dstack_types::VmConfig; +use ra_tls::attestation::{Attestation, VerifiedAttestation}; +use sha2::{Digest as _, Sha256, Sha384}; +use tokio::{io::AsyncWriteExt, process::Command}; +use tracing::{debug, info}; + +use crate::types::{ + AcpiTables, RtmrEventEntry, RtmrEventStatus, RtmrMismatch, VerificationDetails, + VerificationRequest, VerificationResponse, +}; + +#[derive(Debug, Clone)] +struct RtmrComputationResult { + event_indices: [Vec; 4], + rtmrs: [[u8; 48]; 4], +} + +fn replay_event_logs(eventlog: &[EventLog]) -> Result { + let mut event_indices: [Vec; 4] = Default::default(); + let mut rtmrs: [[u8; 48]; 4] = [[0u8; 48]; 4]; + + for idx in 0..4 { + for (event_idx, event) in eventlog.iter().enumerate() { + event + .validate() + .context("Failed to validate event digest")?; + + if event.imr == idx { + event_indices[idx as usize].push(event_idx); + let mut hasher = Sha384::new(); + hasher.update(rtmrs[idx as usize]); + hasher.update(event.digest); + rtmrs[idx as usize] = hasher.finalize().into(); + } + } + } + + Ok(RtmrComputationResult { + event_indices, + rtmrs, + }) +} + +fn collect_rtmr_mismatch( + rtmr_label: &str, + expected_hex: &str, + actual_hex: &str, + expected_sequence: &RtmrLog, + actual_indices: &[usize], + event_log: &[EventLog], +) -> RtmrMismatch { + let mut events = Vec::new(); + + for (&idx, expected_digest) in 
actual_indices.iter().zip(expected_sequence.iter()) { + match event_log.get(idx) { + Some(event) => { + let event_name = if event.event.is_empty() { + "(unnamed)".to_string() + } else { + event.event.clone() + }; + let status = if event.digest == expected_digest.as_slice() { + RtmrEventStatus::Match + } else { + RtmrEventStatus::Mismatch + }; + events.push(RtmrEventEntry { + index: idx, + event_type: event.event_type, + event_name, + actual_digest: hex::encode(event.digest), + expected_digest: Some(hex::encode(expected_digest)), + payload_len: event.event_payload.len(), + status, + }); + } + None => { + events.push(RtmrEventEntry { + index: idx, + event_type: 0, + event_name: "(missing)".to_string(), + actual_digest: String::new(), + expected_digest: Some(hex::encode(expected_digest)), + payload_len: 0, + status: RtmrEventStatus::Missing, + }); + } + } + } + + for &idx in actual_indices.iter().skip(expected_sequence.len()) { + let (event_type, event_name, actual_digest, payload_len) = match event_log.get(idx) { + Some(event) => ( + event.event_type, + if event.event.is_empty() { + "(unnamed)".to_string() + } else { + event.event.clone() + }, + hex::encode(event.digest), + event.event_payload.len(), + ), + None => (0, "(missing)".to_string(), String::new(), 0), + }; + events.push(RtmrEventEntry { + index: idx, + event_type, + event_name, + actual_digest, + expected_digest: None, + payload_len, + status: RtmrEventStatus::Extra, + }); + } + + let missing_expected_digests = if expected_sequence.len() > actual_indices.len() { + expected_sequence[actual_indices.len()..] + .iter() + .map(hex::encode) + .collect() + } else { + Vec::new() + }; + + RtmrMismatch { + rtmr: rtmr_label.to_string(), + expected: expected_hex.to_string(), + actual: actual_hex.to_string(), + events, + missing_expected_digests, + } +} + +pub struct CvmVerifier { + pub image_cache_dir: String, + pub download_url: String, + pub download_timeout: Duration, +} + +impl CvmVerifier { + pub fn new(image_cache_dir: String, download_url: String, download_timeout: Duration) -> Self { + Self { + image_cache_dir, + download_url, + download_timeout, + } + } + + pub async fn verify(&self, request: &VerificationRequest) -> Result { + let quote = hex::decode(&request.quote).context("Failed to decode quote hex")?; + + // Event log is always JSON string + let event_log = request.event_log.as_bytes().to_vec(); + + let attestation = Attestation::new(quote, event_log) + .context("Failed to create attestation from quote and event log")?; + + let mut details = VerificationDetails { + quote_verified: false, + event_log_verified: false, + os_image_hash_verified: false, + report_data: None, + tcb_status: None, + advisory_ids: vec![], + app_info: None, + acpi_tables: None, + rtmr_debug: None, + }; + + let vm_config: VmConfig = + serde_json::from_str(&request.vm_config).context("Failed to decode VM config JSON")?; + + // Step 1: Verify the TDX quote using dcap-qvl + let verified_attestation = match self.verify_quote(attestation, &request.pccs_url).await { + Ok(att) => { + details.quote_verified = true; + details.tcb_status = Some(att.report.status.clone()); + details.advisory_ids = att.report.advisory_ids.clone(); + // Extract and store report_data + if let Ok(report_data) = att.decode_report_data() { + details.report_data = Some(hex::encode(report_data)); + } + att + } + Err(e) => { + return Ok(VerificationResponse { + is_valid: false, + details, + reason: Some(format!("Quote verification failed: {}", e)), + }); + } + }; + + match 
verified_attestation.decode_app_info(false) { + Ok(info) => { + details.event_log_verified = true; + details.app_info = Some(info); + } + Err(e) => { + return Ok(VerificationResponse { + is_valid: false, + details, + reason: Some(format!("Event log verification failed: {}", e)), + }); + } + }; + + // Step 3: Verify os-image-hash matches using dstack-mr + if let Err(e) = self + .verify_os_image_hash(&vm_config, &verified_attestation, &mut details) + .await + { + return Ok(VerificationResponse { + is_valid: false, + details, + reason: Some(format!("OS image hash verification failed: {e:#}")), + }); + } + details.os_image_hash_verified = true; + + Ok(VerificationResponse { + is_valid: true, + details, + reason: None, + }) + } + + async fn verify_quote( + &self, + attestation: Attestation, + pccs_url: &Option, + ) -> Result { + // Extract report data from quote + let report_data = attestation.decode_report_data()?; + + attestation + .verify(&report_data, pccs_url.as_deref()) + .await + .context("Quote verification failed") + } + + async fn verify_os_image_hash( + &self, + vm_config: &VmConfig, + attestation: &VerifiedAttestation, + details: &mut VerificationDetails, + ) -> Result<()> { + let hex_os_image_hash = hex::encode(&vm_config.os_image_hash); + + // Get boot info from attestation + let report = attestation + .report + .report + .as_td10() + .context("Failed to decode TD report")?; + + let app_info = attestation.decode_app_info(false)?; + + let boot_info = upgrade_authority::BootInfo { + mrtd: report.mr_td.to_vec(), + rtmr0: report.rt_mr0.to_vec(), + rtmr1: report.rt_mr1.to_vec(), + rtmr2: report.rt_mr2.to_vec(), + rtmr3: report.rt_mr3.to_vec(), + mr_aggregated: app_info.mr_aggregated.to_vec(), + os_image_hash: vm_config.os_image_hash.clone(), + mr_system: app_info.mr_system.to_vec(), + app_id: app_info.app_id, + compose_hash: app_info.compose_hash, + instance_id: app_info.instance_id, + device_id: app_info.device_id, + key_provider_info: app_info.key_provider_info, + event_log: String::from_utf8(attestation.raw_event_log.clone()) + .context("Failed to serialize event log")?, + tcb_status: attestation.report.status.clone(), + advisory_ids: attestation.report.advisory_ids.clone(), + }; + + // Extract the verified MRs from the boot info + let verified_mrs = Mrs::from(&boot_info); + + // Get image directory + let image_dir = Path::new(&self.image_cache_dir) + .join("images") + .join(&hex_os_image_hash); + + let metadata_path = image_dir.join("metadata.json"); + if !metadata_path.exists() { + info!("Image {} not found, downloading", hex_os_image_hash); + tokio::time::timeout( + self.download_timeout, + self.download_image(&hex_os_image_hash, &image_dir), + ) + .await + .context("Download image timeout")? 
+ .with_context(|| format!("Failed to download image {hex_os_image_hash}"))?; + } + + let image_info = + fs_err::read_to_string(metadata_path).context("Failed to read image metadata")?; + let image_info: dstack_types::ImageInfo = + serde_json::from_str(&image_info).context("Failed to parse image metadata")?; + + let fw_path = image_dir.join(&image_info.bios); + let kernel_path = image_dir.join(&image_info.kernel); + let initrd_path = image_dir.join(&image_info.initrd); + let kernel_cmdline = image_info.cmdline + " initrd=initrd"; + + // Use dstack-mr to compute expected MRs + let measurement_details = dstack_mr::Machine::builder() + .cpu_count(vm_config.cpu_count) + .memory_size(vm_config.memory_size) + .firmware(&fw_path.display().to_string()) + .kernel(&kernel_path.display().to_string()) + .initrd(&initrd_path.display().to_string()) + .kernel_cmdline(&kernel_cmdline) + .root_verity(true) + .hotplug_off(vm_config.hotplug_off) + .maybe_two_pass_add_pages(vm_config.qemu_single_pass_add_pages) + .maybe_pic(vm_config.pic) + .maybe_qemu_version(vm_config.qemu_version.clone()) + .maybe_pci_hole64_size(if vm_config.pci_hole64_size > 0 { + Some(vm_config.pci_hole64_size) + } else { + None + }) + .hugepages(vm_config.hugepages) + .num_gpus(vm_config.num_gpus) + .num_nvswitches(vm_config.num_nvswitches) + .build() + .measure_with_logs() + .context("Failed to compute expected MRs")?; + + let mrs = measurement_details.measurements; + let expected_logs = measurement_details.rtmr_logs; + details.acpi_tables = Some(AcpiTables { + tables: hex::encode(&measurement_details.acpi_tables.tables), + rsdp: hex::encode(&measurement_details.acpi_tables.rsdp), + loader: hex::encode(&measurement_details.acpi_tables.loader), + }); + + let expected_mrs = Mrs { + mrtd: hex::encode(&mrs.mrtd), + rtmr0: hex::encode(&mrs.rtmr0), + rtmr1: hex::encode(&mrs.rtmr1), + rtmr2: hex::encode(&mrs.rtmr2), + }; + + debug!( + "Expected MRs from dstack-mr: MRTD={}, RTMR0={}, RTMR1={}, RTMR2={}", + expected_mrs.mrtd, expected_mrs.rtmr0, expected_mrs.rtmr1, expected_mrs.rtmr2 + ); + debug!( + "Verified MRs from attestation: MRTD={}, RTMR0={}, RTMR1={}, RTMR2={}", + verified_mrs.mrtd, verified_mrs.rtmr0, verified_mrs.rtmr1, verified_mrs.rtmr2 + ); + let event_log: Vec = serde_json::from_slice(&attestation.raw_event_log) + .context("Failed to parse event log for mismatch analysis")?; + + let computation_result = replay_event_logs(&event_log) + .context("Failed to replay event logs for mismatch analysis")?; + + if computation_result.rtmrs[3] != *boot_info.rtmr3 { + bail!("RTMR3 mismatch"); + } + + match expected_mrs.assert_eq(&verified_mrs) { + Ok(()) => Ok(()), + Err(e) => { + let mut rtmr_debug = Vec::new(); + + if expected_mrs.rtmr0 != verified_mrs.rtmr0 { + rtmr_debug.push(collect_rtmr_mismatch( + "RTMR0", + &expected_mrs.rtmr0, + &verified_mrs.rtmr0, + &expected_logs[0], + &computation_result.event_indices[0], + &event_log, + )); + } + + if expected_mrs.rtmr1 != verified_mrs.rtmr1 { + rtmr_debug.push(collect_rtmr_mismatch( + "RTMR1", + &expected_mrs.rtmr1, + &verified_mrs.rtmr1, + &expected_logs[1], + &computation_result.event_indices[1], + &event_log, + )); + } + + if expected_mrs.rtmr2 != verified_mrs.rtmr2 { + rtmr_debug.push(collect_rtmr_mismatch( + "RTMR2", + &expected_mrs.rtmr2, + &verified_mrs.rtmr2, + &expected_logs[2], + &computation_result.event_indices[2], + &event_log, + )); + } + + if !rtmr_debug.is_empty() { + details.rtmr_debug = Some(rtmr_debug); + } + + Err(e.context("MRs do not match")) + } + } + } + + async fn 
download_image(&self, hex_os_image_hash: &str, dst_dir: &Path) -> Result<()> { + let url = self + .download_url + .replace("{OS_IMAGE_HASH}", hex_os_image_hash); + + // Create a temporary directory for extraction within the cache directory + let cache_dir = Path::new(&self.image_cache_dir).join("images").join("tmp"); + fs_err::create_dir_all(&cache_dir).context("Failed to create cache directory")?; + let auto_delete_temp_dir = tempfile::Builder::new() + .prefix("tmp-download-") + .tempdir_in(&cache_dir) + .context("Failed to create temporary directory")?; + let tmp_dir = auto_delete_temp_dir.path(); + + info!("Downloading image from {}", url); + let client = reqwest::Client::new(); + let response = client + .get(&url) + .send() + .await + .context("Failed to download image")?; + + if !response.status().is_success() { + bail!( + "Failed to download image: HTTP status {}, url: {url}", + response.status(), + ); + } + + // Save the tarball to a temporary file using streaming + let tarball_path = tmp_dir.join("image.tar.gz"); + let mut file = tokio::fs::File::create(&tarball_path) + .await + .context("Failed to create tarball file")?; + let mut response = response; + while let Some(chunk) = response.chunk().await? { + file.write_all(&chunk) + .await + .context("Failed to write chunk to file")?; + } + + let extracted_dir = tmp_dir.join("extracted"); + fs_err::create_dir_all(&extracted_dir).context("Failed to create extraction directory")?; + + // Extract the tarball + let output = Command::new("tar") + .arg("xzf") + .arg(&tarball_path) + .current_dir(&extracted_dir) + .output() + .await + .context("Failed to extract tarball")?; + + if !output.status.success() { + bail!( + "Failed to extract tarball: {}", + String::from_utf8_lossy(&output.stderr) + ); + } + + // Verify checksum + let output = Command::new("sha256sum") + .arg("-c") + .arg("sha256sum.txt") + .current_dir(&extracted_dir) + .output() + .await + .context("Failed to verify checksum")?; + + if !output.status.success() { + bail!( + "Checksum verification failed: {}", + String::from_utf8_lossy(&output.stderr) + ); + } + + // Remove the files that are not listed in sha256sum.txt + let sha256sum_path = extracted_dir.join("sha256sum.txt"); + let files_doc = + fs_err::read_to_string(&sha256sum_path).context("Failed to read sha256sum.txt")?; + let listed_files: Vec<&OsStr> = files_doc + .lines() + .flat_map(|line| line.split_whitespace().nth(1)) + .map(|s| s.as_ref()) + .collect(); + let files = fs_err::read_dir(&extracted_dir).context("Failed to read directory")?; + for file in files { + let file = file.context("Failed to read directory entry")?; + let filename = file.file_name(); + if !listed_files.contains(&filename.as_os_str()) { + if file.path().is_dir() { + fs_err::remove_dir_all(file.path()).context("Failed to remove directory")?; + } else { + fs_err::remove_file(file.path()).context("Failed to remove file")?; + } + } + } + + // os_image_hash should eq to sha256sum of the sha256sum.txt + let os_image_hash = Sha256::new_with_prefix(files_doc.as_bytes()).finalize(); + if hex::encode(os_image_hash) != hex_os_image_hash { + bail!("os_image_hash does not match sha256sum of the sha256sum.txt"); + } + + // Move the extracted files to the destination directory + let metadata_path = extracted_dir.join("metadata.json"); + if !metadata_path.exists() { + bail!("metadata.json not found in the extracted archive"); + } + + if dst_dir.exists() { + fs_err::remove_dir_all(dst_dir).context("Failed to remove destination directory")?; + } + let 
dst_dir_parent = dst_dir.parent().context("Failed to get parent directory")?; + fs_err::create_dir_all(dst_dir_parent).context("Failed to create parent directory")?; + // Move the extracted files to the destination directory + fs_err::rename(extracted_dir, dst_dir) + .context("Failed to move extracted files to destination directory")?; + Ok(()) + } +} + +#[derive(Debug, Clone)] +struct Mrs { + mrtd: String, + rtmr0: String, + rtmr1: String, + rtmr2: String, +} + +impl Mrs { + fn assert_eq(&self, other: &Self) -> Result<()> { + if self.mrtd != other.mrtd { + bail!( + "MRTD does not match: expected={}, actual={}", + self.mrtd, + other.mrtd + ); + } + if self.rtmr0 != other.rtmr0 { + bail!( + "RTMR0 does not match: expected={}, actual={}", + self.rtmr0, + other.rtmr0 + ); + } + if self.rtmr1 != other.rtmr1 { + bail!( + "RTMR1 does not match: expected={}, actual={}", + self.rtmr1, + other.rtmr1 + ); + } + if self.rtmr2 != other.rtmr2 { + bail!( + "RTMR2 does not match: expected={}, actual={}", + self.rtmr2, + other.rtmr2 + ); + } + Ok(()) + } +} + +impl From<&upgrade_authority::BootInfo> for Mrs { + fn from(report: &upgrade_authority::BootInfo) -> Self { + Self { + mrtd: hex::encode(&report.mrtd), + rtmr0: hex::encode(&report.rtmr0), + rtmr1: hex::encode(&report.rtmr1), + rtmr2: hex::encode(&report.rtmr2), + } + } +} + +mod upgrade_authority { + use serde::{Deserialize, Serialize}; + + #[derive(Debug, Serialize, Deserialize, Clone, Eq, PartialEq)] + pub struct BootInfo { + pub mrtd: Vec, + pub rtmr0: Vec, + pub rtmr1: Vec, + pub rtmr2: Vec, + pub rtmr3: Vec, + pub mr_aggregated: Vec, + pub os_image_hash: Vec, + pub mr_system: Vec, + pub app_id: Vec, + pub compose_hash: Vec, + pub instance_id: Vec, + pub device_id: Vec, + pub key_provider_info: Vec, + pub event_log: String, + pub tcb_status: String, + pub advisory_ids: Vec, + } +} diff --git a/verifier/test.sh b/verifier/test.sh new file mode 100755 index 00000000..11c2ad1e --- /dev/null +++ b/verifier/test.sh @@ -0,0 +1,127 @@ +#!/bin/bash + +# SPDX-FileCopyrightText: © 2024-2025 Phala Network +# +# SPDX-License-Identifier: Apache-2.0 + +set -e + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +# Configuration +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(dirname "$SCRIPT_DIR")" +BINARY="$PROJECT_ROOT/target/debug/dstack-verifier" +LOG_FILE="/tmp/verifier-test.log" +FIXTURE_FILE="$SCRIPT_DIR/fixtures/quote-report.json" + +echo -e "${YELLOW}dstack-verifier Test Script${NC}" +echo "==================================" + +# Function to cleanup on exit +cleanup() { + echo -e "\n${YELLOW}Cleaning up...${NC}" + pkill -f dstack-verifier 2>/dev/null || true + sleep 1 +} +trap cleanup EXIT + +# Build the project +echo -e "${YELLOW}Building dstack-verifier...${NC}" +cd "$PROJECT_ROOT" +cargo build --bin dstack-verifier --quiet + +if [ ! -f "$BINARY" ]; then + echo -e "${RED}Error: Binary not found at $BINARY${NC}" + exit 1 +fi + +# Start the server +echo -e "${YELLOW}Starting dstack-verifier server...${NC}" +"$BINARY" >"$LOG_FILE" 2>&1 & +SERVER_PID=$! + +# Wait for server to start +echo -e "${YELLOW}Waiting for server to start...${NC}" +for i in {1..10}; do + if curl -s http://localhost:8080/health >/dev/null 2>&1; then + echo -e "${GREEN}Server started successfully${NC}" + break + fi + if [ $i -eq 10 ]; then + echo -e "${RED}Server failed to start${NC}" + echo "Server logs:" + cat "$LOG_FILE" + exit 1 + fi + sleep 1 +done + +# Check if fixture file exists +if [ ! 
-f "$FIXTURE_FILE" ]; then + echo -e "${RED}Error: Fixture file not found at $FIXTURE_FILE${NC}" + exit 1 +fi + +# Run the verification test +echo -e "${YELLOW}Running verification test...${NC}" +echo "Using fixture: $FIXTURE_FILE" + +RESPONSE=$(curl -s -X POST http://localhost:8080/verify \ + -H "Content-Type: application/json" \ + -d @"$FIXTURE_FILE") + +# Parse and display results +echo -e "\n${YELLOW}Test Results:${NC}" +echo "=============" + +IS_VALID=$(echo "$RESPONSE" | jq -r '.is_valid') +QUOTE_VERIFIED=$(echo "$RESPONSE" | jq -r '.details.quote_verified') +EVENT_LOG_VERIFIED=$(echo "$RESPONSE" | jq -r '.details.event_log_verified') +OS_IMAGE_VERIFIED=$(echo "$RESPONSE" | jq -r '.details.os_image_hash_verified') +TCB_STATUS=$(echo "$RESPONSE" | jq -r '.details.tcb_status') +REASON=$(echo "$RESPONSE" | jq -r '.reason // "null"') + +echo -e "Overall Valid: $([ "$IS_VALID" = "true" ] && echo -e "${GREEN}✓${NC}" || echo -e "${RED}✗${NC}") $IS_VALID" +echo -e "Quote Verified: $([ "$QUOTE_VERIFIED" = "true" ] && echo -e "${GREEN}✓${NC}" || echo -e "${RED}✗${NC}") $QUOTE_VERIFIED" +echo -e "Event Log Verified: $([ "$EVENT_LOG_VERIFIED" = "true" ] && echo -e "${GREEN}✓${NC}" || echo -e "${RED}✗${NC}") $EVENT_LOG_VERIFIED" +echo -e "OS Image Verified: $([ "$OS_IMAGE_VERIFIED" = "true" ] && echo -e "${GREEN}✓${NC}" || echo -e "${RED}✗${NC}") $OS_IMAGE_VERIFIED" +echo -e "TCB Status: ${GREEN}$TCB_STATUS${NC}" + +if [ "$REASON" != "null" ]; then + echo -e "${RED}Failure Reason:${NC}" + echo "$REASON" +fi + +# Show app info if available +APP_ID=$(echo "$RESPONSE" | jq -r '.details.app_info.app_id // "null"') +if [ "$APP_ID" != "null" ]; then + echo -e "\n${YELLOW}App Information:${NC}" + echo "App ID: $APP_ID" + echo "Instance ID: $(echo "$RESPONSE" | jq -r '.details.app_info.instance_id')" + echo "Compose Hash: $(echo "$RESPONSE" | jq -r '.details.app_info.compose_hash')" +fi + +# Show report data +REPORT_DATA=$(echo "$RESPONSE" | jq -r '.details.report_data // "null"') +if [ "$REPORT_DATA" != "null" ]; then + echo -e "\n${YELLOW}Report Data:${NC}" + echo "$REPORT_DATA" +fi + +echo -e "\n${YELLOW}Server Logs:${NC}" +echo "============" +tail -10 "$LOG_FILE" + +echo -e "\n${YELLOW}Test completed!${NC}" +if [ "$IS_VALID" = "true" ]; then + echo -e "${GREEN}✓ Verification PASSED${NC}" + exit 0 +else + echo -e "${RED}✗ Verification FAILED${NC}" + exit 1 +fi From a1591646b676c73b204f87207fbe76a61d4a9273 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Tue, 23 Sep 2025 09:15:35 +0000 Subject: [PATCH 28/77] Add os_image_hash in verifier report --- verifier/src/verification.rs | 119 +++++++++++++---------------------- verifier/test.sh | 3 +- 2 files changed, 45 insertions(+), 77 deletions(-) diff --git a/verifier/src/verification.rs b/verifier/src/verification.rs index e36f4b07..2525d92a 100644 --- a/verifier/src/verification.rs +++ b/verifier/src/verification.rs @@ -11,7 +11,7 @@ use dstack_types::VmConfig; use ra_tls::attestation::{Attestation, VerifiedAttestation}; use sha2::{Digest as _, Sha256, Sha384}; use tokio::{io::AsyncWriteExt, process::Command}; -use tracing::{debug, info}; +use tracing::info; use crate::types::{ AcpiTables, RtmrEventEntry, RtmrEventStatus, RtmrMismatch, VerificationDetails, @@ -52,12 +52,15 @@ fn replay_event_logs(eventlog: &[EventLog]) -> Result { fn collect_rtmr_mismatch( rtmr_label: &str, - expected_hex: &str, - actual_hex: &str, + expected: &[u8], + actual: &[u8], expected_sequence: &RtmrLog, actual_indices: &[usize], event_log: &[EventLog], ) -> RtmrMismatch { + let 
expected_hex = hex::encode(expected); + let actual_hex = hex::encode(actual); + let mut events = Vec::new(); for (&idx, expected_digest) in actual_indices.iter().zip(expected_sequence.iter()) { @@ -200,20 +203,6 @@ impl CvmVerifier { } }; - match verified_attestation.decode_app_info(false) { - Ok(info) => { - details.event_log_verified = true; - details.app_info = Some(info); - } - Err(e) => { - return Ok(VerificationResponse { - is_valid: false, - details, - reason: Some(format!("Event log verification failed: {}", e)), - }); - } - }; - // Step 3: Verify os-image-hash matches using dstack-mr if let Err(e) = self .verify_os_image_hash(&vm_config, &verified_attestation, &mut details) @@ -226,6 +215,20 @@ impl CvmVerifier { }); } details.os_image_hash_verified = true; + match verified_attestation.decode_app_info(false) { + Ok(mut info) => { + info.os_image_hash = vm_config.os_image_hash; + details.event_log_verified = true; + details.app_info = Some(info); + } + Err(e) => { + return Ok(VerificationResponse { + is_valid: false, + details, + reason: Some(format!("Event log verification failed: {}", e)), + }); + } + }; Ok(VerificationResponse { is_valid: true, @@ -263,31 +266,14 @@ impl CvmVerifier { .as_td10() .context("Failed to decode TD report")?; - let app_info = attestation.decode_app_info(false)?; - - let boot_info = upgrade_authority::BootInfo { + // Extract the verified MRs from the report + let verified_mrs = Mrs { mrtd: report.mr_td.to_vec(), rtmr0: report.rt_mr0.to_vec(), rtmr1: report.rt_mr1.to_vec(), rtmr2: report.rt_mr2.to_vec(), - rtmr3: report.rt_mr3.to_vec(), - mr_aggregated: app_info.mr_aggregated.to_vec(), - os_image_hash: vm_config.os_image_hash.clone(), - mr_system: app_info.mr_system.to_vec(), - app_id: app_info.app_id, - compose_hash: app_info.compose_hash, - instance_id: app_info.instance_id, - device_id: app_info.device_id, - key_provider_info: app_info.key_provider_info, - event_log: String::from_utf8(attestation.raw_event_log.clone()) - .context("Failed to serialize event log")?, - tcb_status: attestation.report.status.clone(), - advisory_ids: attestation.report.advisory_ids.clone(), }; - // Extract the verified MRs from the boot info - let verified_mrs = Mrs::from(&boot_info); - // Get image directory let image_dir = Path::new(&self.image_cache_dir) .join("images") @@ -349,27 +335,19 @@ impl CvmVerifier { }); let expected_mrs = Mrs { - mrtd: hex::encode(&mrs.mrtd), - rtmr0: hex::encode(&mrs.rtmr0), - rtmr1: hex::encode(&mrs.rtmr1), - rtmr2: hex::encode(&mrs.rtmr2), + mrtd: mrs.mrtd.clone(), + rtmr0: mrs.rtmr0.clone(), + rtmr1: mrs.rtmr1.clone(), + rtmr2: mrs.rtmr2.clone(), }; - debug!( - "Expected MRs from dstack-mr: MRTD={}, RTMR0={}, RTMR1={}, RTMR2={}", - expected_mrs.mrtd, expected_mrs.rtmr0, expected_mrs.rtmr1, expected_mrs.rtmr2 - ); - debug!( - "Verified MRs from attestation: MRTD={}, RTMR0={}, RTMR1={}, RTMR2={}", - verified_mrs.mrtd, verified_mrs.rtmr0, verified_mrs.rtmr1, verified_mrs.rtmr2 - ); let event_log: Vec = serde_json::from_slice(&attestation.raw_event_log) .context("Failed to parse event log for mismatch analysis")?; let computation_result = replay_event_logs(&event_log) .context("Failed to replay event logs for mismatch analysis")?; - if computation_result.rtmrs[3] != *boot_info.rtmr3 { + if computation_result.rtmrs[3] != report.rt_mr3 { bail!("RTMR3 mismatch"); } @@ -544,57 +522,46 @@ impl CvmVerifier { #[derive(Debug, Clone)] struct Mrs { - mrtd: String, - rtmr0: String, - rtmr1: String, - rtmr2: String, + mrtd: Vec, + rtmr0: Vec, + rtmr1: 
Vec, + rtmr2: Vec, } impl Mrs { fn assert_eq(&self, other: &Self) -> Result<()> { if self.mrtd != other.mrtd { bail!( - "MRTD does not match: expected={}, actual={}", - self.mrtd, - other.mrtd + "MRTD mismatch: expected={}, actual={}", + hex::encode(&self.mrtd), + hex::encode(&other.mrtd) ); } if self.rtmr0 != other.rtmr0 { bail!( - "RTMR0 does not match: expected={}, actual={}", - self.rtmr0, - other.rtmr0 + "RTMR0 mismatch: expected={}, actual={}", + hex::encode(&self.rtmr0), + hex::encode(&other.rtmr0) ); } if self.rtmr1 != other.rtmr1 { bail!( - "RTMR1 does not match: expected={}, actual={}", - self.rtmr1, - other.rtmr1 + "RTMR1 mismatch: expected={}, actual={}", + hex::encode(&self.rtmr1), + hex::encode(&other.rtmr1) ); } if self.rtmr2 != other.rtmr2 { bail!( - "RTMR2 does not match: expected={}, actual={}", - self.rtmr2, - other.rtmr2 + "RTMR2 mismatch: expected={}, actual={}", + hex::encode(&self.rtmr2), + hex::encode(&other.rtmr2) ); } Ok(()) } } -impl From<&upgrade_authority::BootInfo> for Mrs { - fn from(report: &upgrade_authority::BootInfo) -> Self { - Self { - mrtd: hex::encode(&report.mrtd), - rtmr0: hex::encode(&report.rtmr0), - rtmr1: hex::encode(&report.rtmr1), - rtmr2: hex::encode(&report.rtmr2), - } - } -} - mod upgrade_authority { use serde::{Deserialize, Serialize}; diff --git a/verifier/test.sh b/verifier/test.sh index 11c2ad1e..4f9554cf 100755 --- a/verifier/test.sh +++ b/verifier/test.sh @@ -99,11 +99,12 @@ fi # Show app info if available APP_ID=$(echo "$RESPONSE" | jq -r '.details.app_info.app_id // "null"') +OS_IMAGE_HASH=$(echo "$RESPONSE" | jq -r '.details.app_info.os_image_hash // "null"') if [ "$APP_ID" != "null" ]; then echo -e "\n${YELLOW}App Information:${NC}" echo "App ID: $APP_ID" - echo "Instance ID: $(echo "$RESPONSE" | jq -r '.details.app_info.instance_id')" echo "Compose Hash: $(echo "$RESPONSE" | jq -r '.details.app_info.compose_hash')" + echo "OS Image Hash: $OS_IMAGE_HASH" fi # Show report data From cf5cc614b053ebe5922f514f40bbf4faa026d373 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Tue, 23 Sep 2025 09:55:46 +0000 Subject: [PATCH 29/77] Fix testing failures --- verifier/src/main.rs | 33 +-------------------------------- 1 file changed, 1 insertion(+), 32 deletions(-) diff --git a/verifier/src/main.rs b/verifier/src/main.rs index f21594e4..d5ad4cc3 100644 --- a/verifier/src/main.rs +++ b/verifier/src/main.rs @@ -167,7 +167,7 @@ async fn run_oneshot(file_path: &str, config: &Config) -> anyhow::Result<()> { #[rocket::launch] fn rocket() -> _ { - tracing_subscriber::fmt::init(); + tracing_subscriber::fmt::try_init().ok(); let cli = Cli::parse(); @@ -208,34 +208,3 @@ fn rocket() -> _ { }) })) } - -#[cfg(test)] -mod tests { - use super::*; - use rocket::http::{ContentType, Status}; - use rocket::local::asynchronous::Client; - - #[tokio::test] - async fn test_health_endpoint() { - let client = Client::tracked(rocket()) - .await - .expect("valid rocket instance"); - let response = client.get("/health").dispatch().await; - assert_eq!(response.status(), Status::Ok); - } - - #[tokio::test] - async fn test_verify_endpoint_invalid_request() { - let client = Client::tracked(rocket()) - .await - .expect("valid rocket instance"); - let response = client - .post("/verify") - .header(ContentType::JSON) - .body(r#"{"invalid": "request"}"#) - .dispatch() - .await; - - assert_eq!(response.status(), Status::UnprocessableEntity); - } -} From a24476f2cd43929b0608ae0f5f40a687f5bd44de Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Tue, 23 Sep 2025 13:33:45 +0000 
Subject: [PATCH 30/77] Update doc for verifier --- sdk/curl/api.md | 3 +- verifier/README.md | 108 ++++++++++++++++++----------------- verifier/src/types.rs | 1 + verifier/src/verification.rs | 23 +++++--- 4 files changed, 75 insertions(+), 60 deletions(-) diff --git a/sdk/curl/api.md b/sdk/curl/api.md index 73400d38..f2f4c518 100644 --- a/sdk/curl/api.md +++ b/sdk/curl/api.md @@ -131,7 +131,8 @@ curl --unix-socket /var/run/dstack.sock http://dstack/GetQuote?report_data=00000 { "quote": "", "event_log": "quote generation log", - "report_data": "" + "report_data": "", + "vm_config": "" } ``` diff --git a/verifier/README.md b/verifier/README.md index 51cf3dbc..50c37e32 100644 --- a/verifier/README.md +++ b/verifier/README.md @@ -1,20 +1,12 @@ # dstack-verifier -A HTTP server that provides CVM (Confidential Virtual Machine) verification services using the same verification process as the dstack KMS. - -## Features - -- **TDX Quote Verification**: Uses dcap-qvl to verify TDX quotes -- **Event Log Verification**: Validates event logs and extracts app information -- **OS Image Hash Verification**: Uses dstack-mr to ensure OS image hash matches expected measurements -- **Automatic Image Download**: Downloads and caches OS images automatically when not found locally -- **RESTful API**: Simple HTTP endpoints for verification requests +A HTTP server that provides dstack quote verification services using the same verification process as the dstack KMS. ## API Endpoints ### POST /verify -Verifies a CVM attestation with the provided quote, event log, and VM configuration. +Verifies a dstack quote with the provided quote and VM configuration. The body can be grabbed via [getQuote](https://github.com/Dstack-TEE/dstack/blob/master/sdk/curl/api.md#3-get-quote). **Request Body:** ```json @@ -22,7 +14,6 @@ Verifies a CVM attestation with the provided quote, event log, and VM configurat "quote": "hex-encoded-quote", "event_log": "hex-encoded-event-log", "vm_config": "json-vm-config-string", - "pccs_url": "optional-pccs-url" } ``` @@ -71,11 +62,6 @@ Health check endpoint that returns service status. ## Configuration -Configuration can be provided via: -1. TOML file (default: `dstack-verifier.toml`) -2. Environment variables with prefix `DSTACK_VERIFIER_` -3. Command line arguments - ### Configuration Options - `host`: Server bind address (default: "0.0.0.0") @@ -90,14 +76,16 @@ Configuration can be provided via: ```toml host = "0.0.0.0" port = 8080 -image_cache_dir = "/var/cache/dstack-verifier" +image_cache_dir = "/tmp/dstack-verifier/cache" image_download_url = "http://0.0.0.0:8000/mr_{OS_IMAGE_HASH}.tar.gz" image_download_timeout_secs = 300 -pccs_url = "https://pccs.example.com" +pccs_url = "https://pccs.phala.network" ``` ## Usage +### Running with Cargo + ```bash # Run with default config cargo run --bin dstack-verifier @@ -106,29 +94,64 @@ cargo run --bin dstack-verifier cargo run --bin dstack-verifier -- --config /path/to/config.toml # Set via environment variables -DSTACK_VERIFIER_PORT=9000 cargo run --bin dstack-verifier +DSTACK_VERIFIER_PORT=8080 cargo run --bin dstack-verifier ``` -## Testing +### Running with Docker Compose + +```yaml +services: + dstack-verifier: + image: kvin/dstack-verifier:latest + ports: + - "8080:8080" + restart: unless-stopped +``` + +Save the docker compose file as `docker-compose.yml` and run `docker compose up -d`. + +### Request verification -Two test scripts are provided for easy testing: +Grab a quote from your app. It's depends on your app how to grab a quote. 
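For reference, here is a minimal sketch of wiring the two endpoints together when the app runs inside a dstack CVM and exposes the guest agent socket (assumptions: the socket lives at `/var/run/dstack.sock` as in the curl SDK example above, the verifier listens on `localhost:8080`, `jq` is installed, and the random `report_data` nonce is purely illustrative):

```bash
# Fetch a quote from the guest agent over its Unix socket.
# report_data is an arbitrary 64-byte hex nonce here (illustrative only).
REPORT_DATA=$(head -c 64 /dev/urandom | xxd -p -c 64)
curl -s --unix-socket /var/run/dstack.sock \
  "http://dstack/GetQuote?report_data=${REPORT_DATA}" -o quote.json

# The GetQuote response already carries quote, event_log and vm_config,
# so it can be posted to the verifier unchanged.
curl -s -X POST http://localhost:8080/verify \
  -H "Content-Type: application/json" \
  -d @quote.json | jq '.is_valid'
```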
-### Full Test (with server management) ```bash -./test.sh +# Grab a quote from the demo app +curl https://712eab2f507b963e11144ae67218177e93ac2a24-3000.app.kvin.wang:12004/GetQuote?report_data=0x1234 -o quote.json + ``` -This script will: -- Build the project -- Start the server -- Run the verification test -- Display detailed results -- Clean up automatically - -### Quick Test (assumes server is running) + +Send the quote to the verifier. + ```bash -./quick-test.sh +$ curl -s -d @quote.json localhost:8080/verify | jq +{ + "is_valid": true, + "details": { + "quote_verified": true, + "event_log_verified": true, + "os_image_hash_verified": true, + "report_data": "12340000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "tcb_status": "UpToDate", + "advisory_ids": [], + "app_info": { + "app_id": "e631a04a5d068c0e5ffd8ca60d6574ac99a18bda", + "compose_hash": "e631a04a5d068c0e5ffd8ca60d6574ac99a18bdaf0417d129d0c4ac52244d40f", + "instance_id": "712eab2f507b963e11144ae67218177e93ac2a24", + "device_id": "ee218f44a5f0a9c3233f9cc09f0cd41518f376478127feb989d5cf1292c56a01", + "mrtd": "f06dfda6dce1cf904d4e2bab1dc370634cf95cefa2ceb2de2eee127c9382698090d7a4a13e14c536ec6c9c3c8fa87077", + "rtmr0": "68102e7b524af310f7b7d426ce75481e36c40f5d513a9009c046e9d37e31551f0134d954b496a3357fd61d03f07ffe96", + "rtmr1": "a7b523278d4f914ee8df0ec80cd1c3d498cbf1152b0c5eaf65bad9425072874a3fcf891e8b01713d3d9937e3e0d26c15", + "rtmr2": "dbf4924c07f5066f3dc6859844184344306aa3263817153dcaee85af97d23e0c0b96efe0731d8865a8747e51b9e351ac", + "rtmr3": "5e7d8d84317343d28d73031d0be3c75f25facb1b20c9835a44582b8b0115de1acfe2d19350437dbd63846bcc5d7bf328", + "mr_system": "145010fa227e6c2537ad957c64e4a8486fcbfd8265ddfb359168b59afcff1d05", + "mr_aggregated": "52f6d7ccbee1bfa870709e8ff489e016e2e5c25a157b7e22ef1ea68fce763694", + "os_image_hash": "b6420818b356b198bdd70f076079aa0299a20279b87ab33ada7b2770ef432a5a", + "key_provider_info": "7b226e616d65223a226b6d73222c226964223a223330353933303133303630373261383634386365336430323031303630383261383634386365336430333031303730333432303030343139623234353764643962386161363434366439383066313336666666373831326563643663373737343065656230653238623130643536633063303030323861356236653539646365613330376435383362643166373037363965396331313664663262636662313735386139356438363133653764653163383438326330227d" + } + }, + "reason": null +} ``` -This script assumes the server is already running and just sends a test request. ## Verification Process @@ -142,22 +165,3 @@ The verifier performs three main verification steps: - Compares against the verified measurements from the quote All three steps must pass for the verification to be considered valid. - -### Automatic Image Download - -When an OS image is not found in the local cache, the verifier will: - -1. **Download**: Fetch the image tarball from the configured URL -2. **Extract**: Extract the tarball contents to a temporary directory -3. **Verify**: Check SHA256 checksums to ensure file integrity -4. **Validate**: Confirm the OS image hash matches the computed hash -5. **Cache**: Move the validated files to the cache directory for future use - -The download URL template uses `{OS_IMAGE_HASH}` as a placeholder that gets replaced with the actual OS image hash from the verification request. 
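The os_image_hash check described above can also be reproduced by hand. A rough sketch, assuming the image bundle follows the layout the verifier expects (a `sha256sum.txt` listing every shipped file plus `metadata.json`, packaged as `mr_{OS_IMAGE_HASH}.tar.gz` per the download URL template; the host/port below comes from the sample config and is illustrative):

```bash
# OS_IMAGE_HASH is the hex hash reported in the vm_config / verification response.
curl -fsSLO "http://localhost:8000/mr_${OS_IMAGE_HASH}.tar.gz"
mkdir -p image && tar xzf "mr_${OS_IMAGE_HASH}.tar.gz" -C image
cd image

# Per-file integrity, mirroring the check the verifier runs after extraction.
sha256sum -c sha256sum.txt

# The digest of sha256sum.txt itself must equal os_image_hash.
sha256sum sha256sum.txt
```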
- -## Dependencies - -- dcap-qvl: TDX quote verification -- dstack-mr: OS image measurement computation -- ra-tls: Attestation handling and verification -- rocket: HTTP server framework \ No newline at end of file diff --git a/verifier/src/types.rs b/verifier/src/types.rs index 28bdfe20..e4e5d2c5 100644 --- a/verifier/src/types.rs +++ b/verifier/src/types.rs @@ -11,6 +11,7 @@ pub struct VerificationRequest { pub event_log: String, pub vm_config: String, pub pccs_url: Option, + pub debug: Option, } #[derive(Debug, Clone, Serialize)] diff --git a/verifier/src/verification.rs b/verifier/src/verification.rs index 2525d92a..a5d2a492 100644 --- a/verifier/src/verification.rs +++ b/verifier/src/verification.rs @@ -167,6 +167,8 @@ impl CvmVerifier { let attestation = Attestation::new(quote, event_log) .context("Failed to create attestation from quote and event log")?; + let debug = request.debug.unwrap_or(false); + let mut details = VerificationDetails { quote_verified: false, event_log_verified: false, @@ -205,7 +207,7 @@ impl CvmVerifier { // Step 3: Verify os-image-hash matches using dstack-mr if let Err(e) = self - .verify_os_image_hash(&vm_config, &verified_attestation, &mut details) + .verify_os_image_hash(&vm_config, &verified_attestation, debug, &mut details) .await { return Ok(VerificationResponse { @@ -255,6 +257,7 @@ impl CvmVerifier { &self, vm_config: &VmConfig, attestation: &VerifiedAttestation, + debug: bool, details: &mut VerificationDetails, ) -> Result<()> { let hex_os_image_hash = hex::encode(&vm_config.os_image_hash); @@ -328,11 +331,13 @@ impl CvmVerifier { let mrs = measurement_details.measurements; let expected_logs = measurement_details.rtmr_logs; - details.acpi_tables = Some(AcpiTables { - tables: hex::encode(&measurement_details.acpi_tables.tables), - rsdp: hex::encode(&measurement_details.acpi_tables.rsdp), - loader: hex::encode(&measurement_details.acpi_tables.loader), - }); + if debug { + details.acpi_tables = Some(AcpiTables { + tables: hex::encode(&measurement_details.acpi_tables.tables), + rsdp: hex::encode(&measurement_details.acpi_tables.rsdp), + loader: hex::encode(&measurement_details.acpi_tables.loader), + }); + } let expected_mrs = Mrs { mrtd: mrs.mrtd.clone(), @@ -354,6 +359,10 @@ impl CvmVerifier { match expected_mrs.assert_eq(&verified_mrs) { Ok(()) => Ok(()), Err(e) => { + let result = Err(e).context("MRs do not match"); + if !debug { + return result; + } let mut rtmr_debug = Vec::new(); if expected_mrs.rtmr0 != verified_mrs.rtmr0 { @@ -393,7 +402,7 @@ impl CvmVerifier { details.rtmr_debug = Some(rtmr_debug); } - Err(e.context("MRs do not match")) + result } } } From 96ac391dbcc80b53f99f014601021bff97501c5d Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Tue, 23 Sep 2025 13:34:09 +0000 Subject: [PATCH 31/77] Add cache for verifer --- verifier/src/verification.rs | 258 ++++++++++++++++++++++++++++++----- 1 file changed, 224 insertions(+), 34 deletions(-) diff --git a/verifier/src/verification.rs b/verifier/src/verification.rs index a5d2a492..a53da571 100644 --- a/verifier/src/verification.rs +++ b/verifier/src/verification.rs @@ -2,16 +2,21 @@ // // SPDX-License-Identifier: Apache-2.0 -use std::{ffi::OsStr, path::Path, time::Duration}; +use std::{ + ffi::OsStr, + path::{Path, PathBuf}, + time::Duration, +}; -use anyhow::{bail, Context, Result}; +use anyhow::{anyhow, bail, Context, Result}; use cc_eventlog::TdxEventLog as EventLog; -use dstack_mr::RtmrLog; +use dstack_mr::{RtmrLog, TdxMeasurementDetails, TdxMeasurements}; use dstack_types::VmConfig; use 
ra_tls::attestation::{Attestation, VerifiedAttestation}; +use serde::{Deserialize, Serialize}; use sha2::{Digest as _, Sha256, Sha384}; use tokio::{io::AsyncWriteExt, process::Command}; -use tracing::info; +use tracing::{debug, info, warn}; use crate::types::{ AcpiTables, RtmrEventEntry, RtmrEventStatus, RtmrMismatch, VerificationDetails, @@ -143,6 +148,14 @@ fn collect_rtmr_mismatch( } } +const MEASUREMENT_CACHE_VERSION: u32 = 1; + +#[derive(Clone, Serialize, Deserialize)] +struct CachedMeasurement { + version: u32, + measurements: TdxMeasurements, +} + pub struct CvmVerifier { pub image_cache_dir: String, pub download_url: String, @@ -158,6 +171,178 @@ impl CvmVerifier { } } + fn measurement_cache_dir(&self) -> PathBuf { + Path::new(&self.image_cache_dir).join("measurements") + } + + fn measurement_cache_path(&self, cache_key: &str) -> PathBuf { + self.measurement_cache_dir() + .join(format!("{cache_key}.json")) + } + + fn vm_config_cache_key(vm_config: &VmConfig) -> Result { + let serialized = serde_json::to_vec(vm_config) + .context("Failed to serialize VM config for cache key computation")?; + Ok(hex::encode(Sha256::digest(&serialized))) + } + + fn load_measurements_from_cache(&self, cache_key: &str) -> Result> { + let path = self.measurement_cache_path(cache_key); + if !path.exists() { + return Ok(None); + } + + let path_display = path.display().to_string(); + let contents = match fs_err::read(&path) { + Ok(data) => data, + Err(e) => { + warn!("Failed to read measurement cache {}: {e:?}", path_display); + return Ok(None); + } + }; + + let cached: CachedMeasurement = match serde_json::from_slice(&contents) { + Ok(entry) => entry, + Err(e) => { + warn!("Failed to parse measurement cache {}: {e:?}", path_display); + return Ok(None); + } + }; + + if cached.version != MEASUREMENT_CACHE_VERSION { + debug!( + "Ignoring measurement cache {} due to version mismatch (found {}, expected {})", + path_display, cached.version, MEASUREMENT_CACHE_VERSION + ); + return Ok(None); + } + + debug!("Loaded measurement cache entry {}", cache_key); + Ok(Some(cached.measurements)) + } + + fn store_measurements_in_cache( + &self, + cache_key: &str, + measurements: &TdxMeasurements, + ) -> Result<()> { + let cache_dir = self.measurement_cache_dir(); + fs_err::create_dir_all(&cache_dir) + .context("Failed to create measurement cache directory")?; + + let path = self.measurement_cache_path(cache_key); + let mut tmp = tempfile::NamedTempFile::new_in(&cache_dir) + .context("Failed to create temporary cache file")?; + + let entry = CachedMeasurement { + version: MEASUREMENT_CACHE_VERSION, + measurements: measurements.clone(), + }; + serde_json::to_writer(tmp.as_file_mut(), &entry) + .context("Failed to serialize measurement cache entry")?; + tmp.as_file_mut() + .sync_all() + .context("Failed to flush measurement cache entry to disk")?; + + tmp.persist(&path).map_err(|e| { + anyhow!( + "Failed to persist measurement cache to {}: {e}", + path.display() + ) + })?; + debug!("Stored measurement cache entry {}", cache_key); + Ok(()) + } + + fn compute_measurement_details( + &self, + vm_config: &VmConfig, + fw_path: &Path, + kernel_path: &Path, + initrd_path: &Path, + kernel_cmdline: &str, + ) -> Result { + let firmware = fw_path.display().to_string(); + let kernel = kernel_path.display().to_string(); + let initrd = initrd_path.display().to_string(); + + let details = dstack_mr::Machine::builder() + .cpu_count(vm_config.cpu_count) + .memory_size(vm_config.memory_size) + .firmware(&firmware) + .kernel(&kernel) + 
.initrd(&initrd) + .kernel_cmdline(kernel_cmdline) + .root_verity(true) + .hotplug_off(vm_config.hotplug_off) + .maybe_two_pass_add_pages(vm_config.qemu_single_pass_add_pages) + .maybe_pic(vm_config.pic) + .maybe_qemu_version(vm_config.qemu_version.clone()) + .maybe_pci_hole64_size(if vm_config.pci_hole64_size > 0 { + Some(vm_config.pci_hole64_size) + } else { + None + }) + .hugepages(vm_config.hugepages) + .num_gpus(vm_config.num_gpus) + .num_nvswitches(vm_config.num_nvswitches) + .build() + .measure_with_logs() + .context("Failed to compute expected MRs")?; + + Ok(details) + } + + fn compute_measurements( + &self, + vm_config: &VmConfig, + fw_path: &Path, + kernel_path: &Path, + initrd_path: &Path, + kernel_cmdline: &str, + ) -> Result { + self.compute_measurement_details( + vm_config, + fw_path, + kernel_path, + initrd_path, + kernel_cmdline, + ) + .map(|details| details.measurements) + } + + fn load_or_compute_measurements( + &self, + vm_config: &VmConfig, + fw_path: &Path, + kernel_path: &Path, + initrd_path: &Path, + kernel_cmdline: &str, + ) -> Result { + let cache_key = Self::vm_config_cache_key(vm_config)?; + + if let Some(measurements) = self.load_measurements_from_cache(&cache_key)? { + return Ok(measurements); + } + + let measurements = self.compute_measurements( + vm_config, + fw_path, + kernel_path, + initrd_path, + kernel_cmdline, + )?; + + if let Err(e) = self.store_measurements_in_cache(&cache_key, &measurements) { + warn!( + "Failed to write measurement cache entry for {}: {e:?}", + cache_key + ); + } + + Ok(measurements) + } + pub async fn verify(&self, request: &VerificationRequest) -> Result { let quote = hex::decode(&request.quote).context("Failed to decode quote hex")?; @@ -305,39 +490,41 @@ impl CvmVerifier { let kernel_cmdline = image_info.cmdline + " initrd=initrd"; // Use dstack-mr to compute expected MRs - let measurement_details = dstack_mr::Machine::builder() - .cpu_count(vm_config.cpu_count) - .memory_size(vm_config.memory_size) - .firmware(&fw_path.display().to_string()) - .kernel(&kernel_path.display().to_string()) - .initrd(&initrd_path.display().to_string()) - .kernel_cmdline(&kernel_cmdline) - .root_verity(true) - .hotplug_off(vm_config.hotplug_off) - .maybe_two_pass_add_pages(vm_config.qemu_single_pass_add_pages) - .maybe_pic(vm_config.pic) - .maybe_qemu_version(vm_config.qemu_version.clone()) - .maybe_pci_hole64_size(if vm_config.pci_hole64_size > 0 { - Some(vm_config.pci_hole64_size) - } else { - None - }) - .hugepages(vm_config.hugepages) - .num_gpus(vm_config.num_gpus) - .num_nvswitches(vm_config.num_nvswitches) - .build() - .measure_with_logs() - .context("Failed to compute expected MRs")?; + let (mrs, expected_logs) = if debug { + let TdxMeasurementDetails { + measurements, + rtmr_logs, + acpi_tables, + } = self + .compute_measurement_details( + vm_config, + &fw_path, + &kernel_path, + &initrd_path, + &kernel_cmdline, + ) + .context("Failed to compute expected measurements")?; - let mrs = measurement_details.measurements; - let expected_logs = measurement_details.rtmr_logs; - if debug { details.acpi_tables = Some(AcpiTables { - tables: hex::encode(&measurement_details.acpi_tables.tables), - rsdp: hex::encode(&measurement_details.acpi_tables.rsdp), - loader: hex::encode(&measurement_details.acpi_tables.loader), + tables: hex::encode(&acpi_tables.tables), + rsdp: hex::encode(&acpi_tables.rsdp), + loader: hex::encode(&acpi_tables.loader), }); - } + + (measurements, Some(rtmr_logs)) + } else { + ( + self.load_or_compute_measurements( + vm_config, + 
&fw_path, + &kernel_path, + &initrd_path, + &kernel_cmdline, + ) + .context("Failed to obtain expected measurements")?, + None, + ) + }; let expected_mrs = Mrs { mrtd: mrs.mrtd.clone(), @@ -363,6 +550,9 @@ impl CvmVerifier { if !debug { return result; } + let Some(expected_logs) = expected_logs.as_ref() else { + return result; + }; let mut rtmr_debug = Vec::new(); if expected_mrs.rtmr0 != verified_mrs.rtmr0 { From 30acf0b05659a96f09a63616965941f658973465 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 24 Sep 2025 03:37:44 +0000 Subject: [PATCH 32/77] Update GH workflow to push images to org --- .github/workflows/gateway-release.yml | 6 +++--- .github/workflows/kms-release.yml | 6 +++--- .github/workflows/verifier-release.yml | 6 +++--- verifier/README.md | 2 +- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/gateway-release.yml b/.github/workflows/gateway-release.yml index 493a7a41..e983b89a 100644 --- a/.github/workflows/gateway-release.yml +++ b/.github/workflows/gateway-release.yml @@ -51,7 +51,7 @@ jobs: with: context: gateway/dstack-app/builder push: true - tags: ${{ vars.DOCKERHUB_USERNAME }}/gateway:${{ env.VERSION }} + tags: ${{ vars.DOCKERHUB_ORG }}/dstack-gateway:${{ env.VERSION }} platforms: linux/amd64 provenance: false build-args: | @@ -61,7 +61,7 @@ jobs: - name: Generate artifact attestation uses: actions/attest-build-provenance@v1 with: - subject-name: "docker.io/${{ vars.DOCKERHUB_USERNAME }}/gateway" + subject-name: "docker.io/${{ vars.DOCKERHUB_ORG }}/dstack-gateway" subject-digest: ${{ steps.build-and-push.outputs.digest }} push-to-registry: true @@ -72,7 +72,7 @@ jobs: body: | ## Docker Image Information - **Image**: `docker.io/${{ vars.DOCKERHUB_USERNAME }}/gateway:${{ env.VERSION }}` + **Image**: `docker.io/${{ vars.DOCKERHUB_ORG }}/dstack-gateway:${{ env.VERSION }}` **Digest (SHA256)**: `${{ steps.build-and-push.outputs.digest }}` diff --git a/.github/workflows/kms-release.yml b/.github/workflows/kms-release.yml index f3f45e4f..b5384372 100644 --- a/.github/workflows/kms-release.yml +++ b/.github/workflows/kms-release.yml @@ -54,7 +54,7 @@ jobs: with: context: kms/dstack-app/builder push: true - tags: ${{ vars.DOCKERHUB_USERNAME }}/kms:${{ env.VERSION }} + tags: ${{ vars.DOCKERHUB_ORG }}/dstack-kms:${{ env.VERSION }} platforms: linux/amd64 provenance: false build-args: | @@ -65,7 +65,7 @@ jobs: - name: Generate artifact attestation uses: actions/attest-build-provenance@v1 with: - subject-name: "docker.io/${{ vars.DOCKERHUB_USERNAME }}/kms" + subject-name: "docker.io/${{ vars.DOCKERHUB_ORG }}/dstack-kms" subject-digest: ${{ steps.build-and-push.outputs.digest }} push-to-registry: true @@ -92,7 +92,7 @@ jobs: body: | ## Docker Image Information - **Image**: `docker.io/${{ vars.DOCKERHUB_USERNAME }}/kms:${{ env.VERSION }}` + **Image**: `docker.io/${{ vars.DOCKERHUB_ORG }}/dstack-kms:${{ env.VERSION }}` **Digest (SHA256)**: `${{ steps.build-and-push.outputs.digest }}` diff --git a/.github/workflows/verifier-release.yml b/.github/workflows/verifier-release.yml index 2c8d25f2..a7a4d28d 100644 --- a/.github/workflows/verifier-release.yml +++ b/.github/workflows/verifier-release.yml @@ -51,7 +51,7 @@ jobs: context: verifier file: verifier/builder/Dockerfile push: true - tags: ${{ vars.DOCKERHUB_USERNAME }}/dstack-verifier:${{ env.VERSION }} + tags: ${{ vars.DOCKERHUB_ORG }}/dstack-verifier:${{ env.VERSION }} platforms: linux/amd64 provenance: false build-args: | @@ -62,7 +62,7 @@ jobs: - name: Generate artifact attestation uses: 
actions/attest-build-provenance@v1 with: - subject-name: "docker.io/${{ vars.DOCKERHUB_USERNAME }}/dstack-verifier" + subject-name: "docker.io/${{ vars.DOCKERHUB_ORG }}/dstack-verifier" subject-digest: ${{ steps.build-and-push.outputs.digest }} push-to-registry: true @@ -73,7 +73,7 @@ jobs: body: | ## Docker Image Information - **Image**: `docker.io/${{ vars.DOCKERHUB_USERNAME }}/dstack-verifier:${{ env.VERSION }}` + **Image**: `docker.io/${{ vars.DOCKERHUB_ORG }}/dstack-verifier:${{ env.VERSION }}` **Digest (SHA256)**: `${{ steps.build-and-push.outputs.digest }}` diff --git a/verifier/README.md b/verifier/README.md index 50c37e32..0ae36f7d 100644 --- a/verifier/README.md +++ b/verifier/README.md @@ -102,7 +102,7 @@ DSTACK_VERIFIER_PORT=8080 cargo run --bin dstack-verifier ```yaml services: dstack-verifier: - image: kvin/dstack-verifier:latest + image: dstacktee/dstack-verifier:latest ports: - "8080:8080" restart: unless-stopped From 1029b7a54beec22f4174915499134879735d490e Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 24 Sep 2025 08:26:04 +0000 Subject: [PATCH 33/77] Replace kvin.wang with dstack.org --- README.md | 35 +++++++--------------------- docs/deployment.md | 2 +- gateway/dstack-app/builder/README.md | 2 +- gateway/src/proxy/tls_passthough.rs | 2 +- kms/dstack-app/builder/README.md | 2 +- kms/dstack-app/deploy-to-vmm.sh | 2 +- sdk/simulator/sys-config.json | 4 ++-- verifier/README.md | 2 +- verifier/dstack-verifier.toml | 2 +- 9 files changed, 18 insertions(+), 35 deletions(-) diff --git a/README.md b/README.md index a68edb4e..a03c9131 100644 --- a/README.md +++ b/README.md @@ -212,9 +212,9 @@ Once your app is deployed and listening on an HTTP port, you can access it throu **Examples**: -- `3327603e03f5bd1f830812ca4a789277fc31f577-8080.app.kvin.wang` → port `8080` (TLS termination to any TCP) -- `3327603e03f5bd1f830812ca4a789277fc31f577-8080g.app.kvin.wang` → port `8080` (TLS termination with HTTP/2 negotiation) -- `3327603e03f5bd1f830812ca4a789277fc31f577-8080s.app.kvin.wang` → port `8080` (TLS passthrough to any TCP) +- `3327603e03f5bd1f830812ca4a789277fc31f577-8080.test0.dstack.org` → port `8080` (TLS termination to any TCP) +- `3327603e03f5bd1f830812ca4a789277fc31f577-8080g.test0.dstack.org` → port `8080` (TLS termination with HTTP/2 negotiation) +- `3327603e03f5bd1f830812ca4a789277fc31f577-8080s.test0.dstack.org` → port `8080` (TLS passthrough to any TCP) The `` can be either the app ID or instance ID. When using the app ID, the load balancer will select one of the available instances. Adding an `s` suffix enables TLS passthrough to the app instead of terminating at dstack-gateway. Adding a `g` suffix enables HTTPS/2 with TLS termination for gRPC applications. @@ -258,7 +258,7 @@ curl --unix-socket /var/run/dstack.sock http://localhost/GetQuote?report_data=0x Container logs can be obtained from the CVM's `dashboard` page or by curl: ```bash -curl 'http://.app.kvin.wang:9090/logs/?since=0&until=0&follow=true&text=true×tamps=true&bare=true' +curl 'http://.:9090/logs/?since=0&until=0&follow=true&text=true×tamps=true&bare=true' ``` Replace `` and `` with actual values. 
Available parameters: @@ -334,24 +334,7 @@ Then run the certbot in the `build/` and you will see the following log: $ RUST_LOG=info,certbot=debug ./certbot renew -c certbot.toml 2024-10-25T07:41:00.682990Z INFO certbot::bot: creating new ACME account 2024-10-25T07:41:00.869246Z INFO certbot::bot: created new ACME account: https://acme-staging-v02.api.letsencrypt.org/acme/acct/168601853 -2024-10-25T07:41:00.869270Z INFO certbot::bot: setting CAA records -2024-10-25T07:41:00.869276Z DEBUG certbot::acme_client: setting guard CAA records for app.kvin.wang -2024-10-25T07:41:01.740767Z DEBUG certbot::acme_client: removing existing CAA record app.kvin.wang 0 issuewild "letsencrypt.org;validationmethods=dns-01;accounturi=https://acme-staging-v02.api.letsencrypt.org/acme/acct/168578683" -2024-10-25T07:41:01.991298Z DEBUG certbot::acme_client: removing existing CAA record app.kvin.wang 0 issue "letsencrypt.org;validationmethods=dns-01;accounturi=https://acme-staging-v02.api.letsencrypt.org/acme/acct/168578683" -2024-10-25T07:41:02.216751Z DEBUG certbot::acme_client: setting CAA records for app.kvin.wang, 0 issue "letsencrypt.org;validationmethods=dns-01;accounturi=https://acme-staging-v02.api.letsencrypt.org/acme/acct/168601853" -2024-10-25T07:41:02.424217Z DEBUG certbot::acme_client: setting CAA records for app.kvin.wang, 0 issuewild "letsencrypt.org;validationmethods=dns-01;accounturi=https://acme-staging-v02.api.letsencrypt.org/acme/acct/168601853" -2024-10-25T07:41:02.663824Z DEBUG certbot::acme_client: removing guard CAA records for app.kvin.wang -2024-10-25T07:41:03.095564Z DEBUG certbot::acme_client: generating new cert key pair -2024-10-25T07:41:03.095678Z DEBUG certbot::acme_client: requesting new certificates for *.app.kvin.wang -2024-10-25T07:41:03.095699Z DEBUG certbot::acme_client: creating new order -2024-10-25T07:41:03.250382Z DEBUG certbot::acme_client: order is pending, waiting for authorization -2024-10-25T07:41:03.283600Z DEBUG certbot::acme_client: creating dns record for app.kvin.wang -2024-10-25T07:41:04.027882Z DEBUG certbot::acme_client: challenge not found, waiting 500ms tries=2 domain="_acme-challenge.app.kvin.wang" -2024-10-25T07:41:04.600711Z DEBUG certbot::acme_client: challenge not found, waiting 1s tries=3 domain="_acme-challenge.app.kvin.wang" -2024-10-25T07:41:05.642300Z DEBUG certbot::acme_client: challenge not found, waiting 2s tries=4 domain="_acme-challenge.app.kvin.wang" -2024-10-25T07:41:07.715947Z DEBUG certbot::acme_client: challenge not found, waiting 4s tries=5 domain="_acme-challenge.app.kvin.wang" -2024-10-25T07:41:11.724831Z DEBUG certbot::acme_client: challenge not found, waiting 8s tries=6 domain="_acme-challenge.app.kvin.wang" -2024-10-25T07:41:19.815990Z DEBUG certbot::acme_client: challenge not found, waiting 16s tries=7 domain="_acme-challenge.app.kvin.wang" +... 2024-10-25T07:41:35.852790Z DEBUG certbot::acme_client: setting challenge ready for https://acme-staging-v02.api.letsencrypt.org/acme/chall-v3/14584884443/mQ-I2A 2024-10-25T07:41:35.934425Z DEBUG certbot::acme_client: challenges are ready, waiting for order to be ready 2024-10-25T07:41:37.972434Z DEBUG certbot::acme_client: order is ready, uploading csr @@ -391,16 +374,16 @@ Execute dstack-gateway with `sudo ./dstack-gateway -c gateway.toml`, then access To enhance security, we've limited TLS certificate issuance to dstack-gateway via CAA records. 
However, since these records can be modified through Cloudflare's domain management, we need to implement global CA certificate monitoring to maintain security oversight. -`ct_monitor` tracks Certificate Transparency logs via [https://crt.sh](https://crt.sh/?q=app.kvin.wang), comparing their public key with the ones got from dstack-gateway RPC. It immediately alerts when detecting unauthorized certificates not issued through dstack-gateway: +`ct_monitor` tracks Certificate Transparency logs via https://crt.sh, comparing their public key with the ones got from dstack-gateway RPC. It immediately alerts when detecting unauthorized certificates not issued through dstack-gateway: ```text -$ ./ct_monitor -t https://localhost:9010/prpc -d app.kvin.wang -2024-10-25T08:12:11.366463Z INFO ct_monitor: monitoring app.kvin.wang... +$ ./ct_monitor -t https://localhost:9010/prpc -d +2024-10-25T08:12:11.366463Z INFO ct_monitor: monitoring ... 2024-10-25T08:12:11.366488Z INFO ct_monitor: fetching known public keys from https://localhost:9010/prpc 2024-10-25T08:12:11.566222Z INFO ct_monitor: got 2 known public keys 2024-10-25T08:12:13.142122Z INFO ct_monitor: ✅ checked log id=14705660685 2024-10-25T08:12:13.802573Z INFO ct_monitor: ✅ checked log id=14705656674 -2024-10-25T08:12:14.494944Z ERROR ct_monitor: ❌ error in CTLog { id: 14666084839, issuer_ca_id: 295815, issuer_name: "C=US, O=Let's Encrypt, CN=R11", common_name: "kvin.wang", name_value: "*.app.kvin.wang", not_before: "2024-09-24T02:23:15", not_after: "2024-12-23T02:23:14", serial_number: "03ae796f56a933c8ff7e32c7c0d662a253d4", result_count: 1, entry_timestamp: "2024-09-24T03:21:45.825" } +2024-10-25T08:12:14.494944Z ERROR ct_monitor: ❌ error in CTLog { id: 14666084839, issuer_ca_id: 295815, issuer_name: "C=US, O=Let's Encrypt, CN=R11", common_name: "", name_value: "*.", not_before: "2024-09-24T02:23:15", not_after: "2024-12-23T02:23:14", serial_number: "03ae796f56a933c8ff7e32c7c0d662a253d4", result_count: 1, entry_timestamp: "2024-09-24T03:21:45.825" } 2024-10-25T08:12:14.494998Z ERROR ct_monitor: error: certificate has issued to unknown pubkey: 30820122300d06092a864886f70d01010105000382010f003082010a02820101009de65c767caf117880626d1acc1ee78f3c6a992e3fe458f34066f92812ac550190a67e49ebf4f537003c393c000a8ec3e114da088c0cb02ffd0881fd39a2b32cc60d2e9989f0efab3345bee418262e0179d307d8d361fd0837f85d17eab92ec6f4126247e614aa01f4efcc05bc6303a8be68230f04326c9e85406fc4d234e9ce92089253b11d002cdf325582df45d5da42981cd546cbd2e9e49f0fa6636e747a345aaf8cefa02556aa258e1f7f90906be8fe51567ac9626f35bc46837e4f3203387fee59c71cea400000007c24e7537debc1941b36ff1612990233e4c219632e35858b1771f17a71944adf6c657dd7303583e3aeed199bd36a3152f49980f4f30203010001 ``` diff --git a/docs/deployment.md b/docs/deployment.md index f2b7d017..26b2858f 100644 --- a/docs/deployment.md +++ b/docs/deployment.md @@ -104,7 +104,7 @@ GUEST_AGENT_ADDR=127.0.0.1:9205 ETH_RPC_URL=https://rpc.phala.network GIT_REV=HEAD OS_IMAGE=dstack-0.5.2 -IMAGE_DOWNLOAD_URL=https://files.kvin.wang/images/mr_{OS_IMAGE_HASH}.tar.gz +IMAGE_DOWNLOAD_URL=https://download.dstack.org/os-images/mr_{OS_IMAGE_HASH}.tar.gz ``` Then run the script again. 
diff --git a/gateway/dstack-app/builder/README.md b/gateway/dstack-app/builder/README.md index 40f376a3..b5387fa9 100644 --- a/gateway/dstack-app/builder/README.md +++ b/gateway/dstack-app/builder/README.md @@ -44,7 +44,7 @@ services: environment: - IMAGE_DOWNLOAD_URL=${IMAGE_DOWNLOAD_URL:-http://localhost:8001/mr_{OS_IMAGE_HASH}.tar.gz} - AUTH_TYPE=dev - - DEV_DOMAIN=kms.1022.kvin.wang + - DEV_DOMAIN=kms.1022.dstack.org - QUOTE_ENABLED=false ``` diff --git a/gateway/src/proxy/tls_passthough.rs b/gateway/src/proxy/tls_passthough.rs index 1131eb01..e2cea9d0 100644 --- a/gateway/src/proxy/tls_passthough.rs +++ b/gateway/src/proxy/tls_passthough.rs @@ -150,7 +150,7 @@ mod tests { async fn test_resolve_app_address() { let app_addr = resolve_app_address( "_dstack-app-address", - "3327603e03f5bd1f830812ca4a789277fc31f577.app.kvin.wang", + "3327603e03f5bd1f830812ca4a789277fc31f577.app.dstack.org", false, ) .await diff --git a/kms/dstack-app/builder/README.md b/kms/dstack-app/builder/README.md index 40f376a3..b5387fa9 100644 --- a/kms/dstack-app/builder/README.md +++ b/kms/dstack-app/builder/README.md @@ -44,7 +44,7 @@ services: environment: - IMAGE_DOWNLOAD_URL=${IMAGE_DOWNLOAD_URL:-http://localhost:8001/mr_{OS_IMAGE_HASH}.tar.gz} - AUTH_TYPE=dev - - DEV_DOMAIN=kms.1022.kvin.wang + - DEV_DOMAIN=kms.1022.dstack.org - QUOTE_ENABLED=false ``` diff --git a/kms/dstack-app/deploy-to-vmm.sh b/kms/dstack-app/deploy-to-vmm.sh index d2d6ce5b..48a8fd19 100755 --- a/kms/dstack-app/deploy-to-vmm.sh +++ b/kms/dstack-app/deploy-to-vmm.sh @@ -35,7 +35,7 @@ else # GUEST_AGENT_ADDR=127.0.0.1:9205 # The URL of the dstack app image download URL -# IMAGE_DOWNLOAD_URL=https://files.kvin.wang/images/mr_{OS_IMAGE_HASH}.tar.gz +# IMAGE_DOWNLOAD_URL=https://download.dstack.org/os-images/mr_{OS_IMAGE_HASH}.tar.gz # Image hash verification feature flag VERIFY_IMAGE=true diff --git a/sdk/simulator/sys-config.json b/sdk/simulator/sys-config.json index 02911d19..1b2d5b48 100644 --- a/sdk/simulator/sys-config.json +++ b/sdk/simulator/sys-config.json @@ -1,9 +1,9 @@ { "kms_urls": [ - "https://kms.1022.kvin.wang:12001" + "https://kms.1022.dstack.org:12001" ], "gateway_urls": [ - "https://tproxy.1022.kvin.wang:12002" + "https://tproxy.1022.dstack.org:12002" ], "pccs_url": "", "docker_registry": "", diff --git a/verifier/README.md b/verifier/README.md index 0ae36f7d..e8343ed2 100644 --- a/verifier/README.md +++ b/verifier/README.md @@ -116,7 +116,7 @@ Grab a quote from your app. It's depends on your app how to grab a quote. 
```bash # Grab a quote from the demo app -curl https://712eab2f507b963e11144ae67218177e93ac2a24-3000.app.kvin.wang:12004/GetQuote?report_data=0x1234 -o quote.json +curl https://712eab2f507b963e11144ae67218177e93ac2a24-3000.test0.dstack.org:12004/GetQuote?report_data=0x1234 -o quote.json ``` diff --git a/verifier/dstack-verifier.toml b/verifier/dstack-verifier.toml index c53b5351..8c8a9b89 100644 --- a/verifier/dstack-verifier.toml +++ b/verifier/dstack-verifier.toml @@ -10,7 +10,7 @@ port = 8080 image_cache_dir = "/tmp/dstack-verifier/cache" # Image download URL template (replace {OS_IMAGE_HASH} with actual hash) -image_download_url = "https://dstack-images.phala.network/mr_{OS_IMAGE_HASH}.tar.gz" +image_download_url = "https://download.dstack.org/os-images/mr_{OS_IMAGE_HASH}.tar.gz" # Image download timeout in seconds image_download_timeout_secs = 300 From 1594ba260a2c489689c438d9dee005a24731dd5b Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 24 Sep 2025 09:44:54 +0000 Subject: [PATCH 34/77] Update attestation.md use latest dstack-mr --- attestation.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/attestation.md b/attestation.md index ddafc835..c10232cd 100644 --- a/attestation.md +++ b/attestation.md @@ -33,26 +33,26 @@ RTMR3 differs as it contains runtime information like compose hash and instance ### 2.2. Determining expected MRs MRTD, RTMR0, RTMR1, and RTMR2 correspond to the image. dstack OS builds all related software from source. -Build version v0.4.0 using these commands: +Build version v0.5.4 using these commands: ```bash git clone https://github.com/Dstack-TEE/meta-dstack.git cd meta-dstack/ -git checkout 15189bcb5397083b5c650a438243ce3f29e705f4 +git checkout f7c795b76faa693f218e1c255007e3a68c541d79 git submodule update --init --recursive cd repro-build && ./repro-build.sh -n ``` -The resulting dstack-v0.4.0.tar.gz contains: +The resulting dstack-0.5.4.tar.gz contains: - ovmf.fd: virtual firmware - bzImage: kernel image - initramfs.cpio.gz: initrd -- rootfs.cpio: root filesystem +- rootfs.img.verity: root filesystem - metadata.json: image metadata, including kernel boot cmdline -Calculate image MRs using [dstack-mr](https://github.com/kvinwang/dstack-mr): +Calculate image MRs using [dstack-mr](dstack-mr/): ```bash -dstack-mr -cpu 4 -ram 4096 -metadata dstack-v0.4.0/metadata.json +cargo run --manifest-path ../dstack/Cargo.toml --bin dstack-mr measure -c 4 -m 4G dstack-0.5.4/metadata.json ``` Once these verification steps are completed successfully, the report_data contained in the verified quote can be considered authentic and trustworthy. 
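As a quick cross-check (a sketch only — the verifier address, the `quote.json` file, and the manual comparison step are assumptions borrowed from the verifier/README.md examples earlier in this series; the output format of `dstack-mr measure` is not specified here), the freshly computed MRs can be placed next to the MRs the verifier extracted from a live quote:

```bash
# Sketch: compute the expected MRs for the image built above, then print the
# MRs the verifier derived from a quote for side-by-side comparison.
# Assumes a local dstack-verifier on :8080 and a quote saved as quote.json.
cargo run --manifest-path ../dstack/Cargo.toml --bin dstack-mr \
  measure -c 4 -m 4G dstack-0.5.4/metadata.json

curl -s -d @quote.json localhost:8080/verify \
  | jq '.details.app_info | {mrtd, rtmr0, rtmr1, rtmr2}'
```

If MRTD and RTMR0–RTMR2 agree, the image measurements line up; RTMR3 carries runtime values (compose hash, instance id) and is checked separately.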
From dd91bff339a4364c7d271d04d246cb6fc892851a Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 24 Sep 2025 09:49:34 +0000 Subject: [PATCH 35/77] dstack-mr: Fix potential panic due to int overflow --- dstack-mr/src/kernel.rs | 21 ++++++++++++----- dstack-mr/src/tdvf.rs | 50 ++++++++++++++++++++++++++++++++--------- 2 files changed, 54 insertions(+), 17 deletions(-) diff --git a/dstack-mr/src/kernel.rs b/dstack-mr/src/kernel.rs index 9fd465e6..51c7bb46 100644 --- a/dstack-mr/src/kernel.rs +++ b/dstack-mr/src/kernel.rs @@ -99,7 +99,7 @@ fn authenticode_sha384_hash(data: &[u8]) -> Result> { let trailing_data_len = file_size - sum_of_bytes_hashed; if trailing_data_len > cert_table_size { - let hashed_trailing_len = trailing_data_len - cert_table_size; + let hashed_trailing_len = trailing_data_len.saturating_sub(cert_table_size); let trailing_start = sum_of_bytes_hashed; if trailing_start + hashed_trailing_len <= data.len() { @@ -142,14 +142,14 @@ fn patch_kernel( } if protocol >= 0x201 { kd[0x211] |= 0x80; // loadflags |= CAN_USE_HEAP - let heap_end_ptr = cmdline_addr - real_addr - 0x200; + let heap_end_ptr = cmdline_addr.saturating_sub(real_addr).saturating_sub(0x200); kd[0x224..0x228].copy_from_slice(&heap_end_ptr.to_le_bytes()); } if protocol >= 0x202 { kd[0x228..0x22C].copy_from_slice(&cmdline_addr.to_le_bytes()); } else { kd[0x20..0x22].copy_from_slice(&0xa33f_u16.to_le_bytes()); - let offset = (cmdline_addr - real_addr) as u16; + let offset = cmdline_addr.saturating_sub(real_addr) as u16; kd[0x22..0x24].copy_from_slice(&offset.to_le_bytes()); } @@ -186,14 +186,23 @@ fn patch_kernel( mem_size as u32 }; - if initrd_max >= below_4g_mem_size - acpi_data_size { - initrd_max = below_4g_mem_size - acpi_data_size - 1; + if let Some(available_mem) = below_4g_mem_size.checked_sub(acpi_data_size) { + if initrd_max >= available_mem { + initrd_max = available_mem.saturating_sub(1); + } + } else { + // If acpi_data_size >= below_4g_mem_size, we have no memory available + bail!( + "ACPI data size ({}) exceeds available memory ({})", + acpi_data_size, + below_4g_mem_size + ); } if initrd_size >= initrd_max { bail!("initrd is too large"); } - let initrd_addr = (initrd_max - initrd_size) & !4095; + let initrd_addr = initrd_max.saturating_sub(initrd_size) & !4095; kd[0x218..0x21C].copy_from_slice(&initrd_addr.to_le_bytes()); kd[0x21C..0x220].copy_from_slice(&initrd_size.to_le_bytes()); } diff --git a/dstack-mr/src/tdvf.rs b/dstack-mr/src/tdvf.rs index a5d577a8..246cced6 100644 --- a/dstack-mr/src/tdvf.rs +++ b/dstack-mr/src/tdvf.rs @@ -82,20 +82,30 @@ impl<'a> Tdvf<'a> { const TABLE_FOOTER_GUID: &str = "96b582de-1fb2-45f7-baea-a366c55a082d"; const BYTES_AFTER_TABLE_FOOTER: usize = 32; + if fw.len() < BYTES_AFTER_TABLE_FOOTER { + bail!("TDVF firmware too small"); + } let offset = fw.len() - BYTES_AFTER_TABLE_FOOTER; let encoded_footer_guid = encode_guid(TABLE_FOOTER_GUID)?; + if offset < 16 { + bail!("TDVF firmware offset too small for GUID"); + } let guid = &fw[offset - 16..offset]; if guid != encoded_footer_guid { bail!("Failed to parse TDVF metadata: Invalid footer GUID"); } + if offset < 18 { + bail!("TDVF firmware offset too small for tables length"); + } let tables_len = u16::from_le_bytes(fw[offset - 18..offset - 16].try_into().unwrap()) as usize; - if tables_len == 0 || tables_len > offset - 18 { + if tables_len == 0 || tables_len > offset.saturating_sub(18) { bail!("Failed to parse TDVF metadata: Invalid tables length"); } - let tables = &fw[offset - 18 - tables_len..offset - 18]; + let 
table_start = offset.saturating_sub(18).saturating_sub(tables_len); + let tables = &fw[table_start..offset - 18]; let mut offset = tables.len(); let mut data: Option<&[u8]> = None; @@ -106,21 +116,28 @@ impl<'a> Tdvf<'a> { } let guid = &tables[offset - 16..offset]; let entry_len = read_le::(tables, offset - 18, "entry length")? as usize; - if entry_len > offset - 18 { + if entry_len > offset.saturating_sub(18) { bail!("Failed to parse TDVF metadata: Invalid entry length"); } if guid == encoded_guid { - data = Some(&tables[offset - 18 - entry_len..offset - 18]); + let entry_start = offset.saturating_sub(18).saturating_sub(entry_len); + data = Some(&tables[entry_start..offset - 18]); break; } - offset -= entry_len; + offset = offset.saturating_sub(entry_len); } let data = data.context("Failed to parse TDVF metadata: Missing TDVF metadata")?; - let tdvf_meta_offset = + if data.len() < 4 { + bail!("TDVF metadata data too small"); + } + let tdvf_meta_offset_raw = u32::from_le_bytes(data[data.len() - 4..].try_into().unwrap()) as usize; - let tdvf_meta_offset = fw.len() - tdvf_meta_offset; + if tdvf_meta_offset_raw > fw.len() { + bail!("TDVF metadata offset exceeds firmware size"); + } + let tdvf_meta_offset = fw.len() - tdvf_meta_offset_raw; let tdvf_meta_desc = &fw[tdvf_meta_offset..tdvf_meta_offset + 16]; if &tdvf_meta_desc[..4] != b"TDVF" { @@ -311,16 +328,27 @@ impl<'a> Tdvf<'a> { let (_, last_start, last_end) = memory_acceptor.ranges.pop().expect("No ranges"); for (accepted, start, end) in memory_acceptor.ranges { + if end < start { + bail!("Invalid memory range: end < start"); + } + let size = end - start; if accepted { - add_memory_resource_hob(0x00, start, end - start); + add_memory_resource_hob(0x00, start, size); } else { - add_memory_resource_hob(0x07, start, end - start); + add_memory_resource_hob(0x07, start, size); } } + if last_end < last_start { + bail!("Invalid last memory range: end < start"); + } if memory_size >= 0xB0000000 { - add_memory_resource_hob(0x07, last_start, 0x80000000u64 - last_start); - add_memory_resource_hob(0x07, 0x100000000, last_end - 0x80000000u64); + if last_start < 0x80000000u64 { + add_memory_resource_hob(0x07, last_start, 0x80000000u64 - last_start); + } + if last_end > 0x80000000u64 { + add_memory_resource_hob(0x07, 0x100000000, last_end - 0x80000000u64); + } } else { add_memory_resource_hob(0x07, last_start, last_end - last_start); } From 536cbc0e94ffc4e5dbd6493fca0bba8faa118608 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 24 Sep 2025 10:18:00 +0000 Subject: [PATCH 36/77] Fix deployment.md --- docs/deployment.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/deployment.md b/docs/deployment.md index f2b7d017..0dc61612 100644 --- a/docs/deployment.md +++ b/docs/deployment.md @@ -165,7 +165,7 @@ After you get the `os_image_hash`, you can register it to the KMS whitelist by r ```bash cd dstack/kms/auth-eth -npx hardhat kms:add-image --network phala --mr +npx hardhat kms:add-image --network phala 0x ``` ### Register dstack-gateway in KMS From c50f7f37ea0711b51f9015cee33609b7178c028d Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Thu, 25 Sep 2025 00:47:50 +0000 Subject: [PATCH 37/77] Fix VmConfig decode error --- dstack-types/src/lib.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dstack-types/src/lib.rs b/dstack-types/src/lib.rs index 439d9905..5ecf70db 100644 --- a/dstack-types/src/lib.rs +++ b/dstack-types/src/lib.rs @@ -138,8 +138,11 @@ pub struct VmConfig { pub cpu_count: u32, pub memory_size: u64, // 
https://github.com/intel-staging/qemu-tdx/issues/1 + #[serde(default, skip_serializing_if = "Option::is_none")] pub qemu_single_pass_add_pages: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] pub pic: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] pub qemu_version: Option, #[serde(default)] pub pci_hole64_size: u64, @@ -151,6 +154,7 @@ pub struct VmConfig { pub num_nvswitches: u32, #[serde(default)] pub hotplug_off: bool, + #[serde(default, skip_serializing_if = "Option::is_none")] pub image: Option, } From 6b80ce10a372c100c260260c48bca86c67e942dd Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Fri, 26 Sep 2025 00:56:38 +0000 Subject: [PATCH 38/77] cvm: Support for configuration of storage fs type --- docs/security-guide/cvm-boundaries.md | 1 + dstack-types/src/lib.rs | 2 + dstack-util/src/system_setup.rs | 150 ++++++++++++++++++++++---- 3 files changed, 135 insertions(+), 18 deletions(-) diff --git a/docs/security-guide/cvm-boundaries.md b/docs/security-guide/cvm-boundaries.md index 1095e5da..0c99720e 100644 --- a/docs/security-guide/cvm-boundaries.md +++ b/docs/security-guide/cvm-boundaries.md @@ -41,6 +41,7 @@ This is the main configuration file for the application in JSON format: | secure_time | boolean | Whether secure time is enabled | | pre_launch_script | string | Prelaunch bash script that runs before execute `docker compose up` | | init_script | string | Bash script that executed prior to dockerd startup | +| storage_fs | string | Filesystem type for the data disk of the CVM. Supported values: "zfs", "ext4". default to "zfs". **ZFS:** Ensures filesystem integrity with built-in data protection features. **ext4:** Provides better performance for database applications with lower overhead and faster I/O operations, but no strong integrity protection. | The hash of this file content is extended to RTMR3 as event name `compose-hash`. Remote verifier can extract the compose-hash during remote attestation. 
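For illustration only (a hedged sketch — the `app-compose.json` file name and the use of `jq` here are assumptions, not part of this patch), an existing app compose file can opt into ext4 for the data disk by adding the new field:

```bash
# Sketch: add the new storage_fs field to an app compose file (name assumed).
# Valid values in this patch are "zfs" (the default) and "ext4"; any other
# value makes dstack-util bail while setting up the data disk.
jq '. + {storage_fs: "ext4"}' app-compose.json > app-compose.ext4.json
```

Because app_compose is the file whose hash is extended to RTMR3 as `compose-hash`, switching `storage_fs` changes the measured compose hash just like any other compose edit.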
diff --git a/dstack-types/src/lib.rs b/dstack-types/src/lib.rs index 5ecf70db..ba11f1ff 100644 --- a/dstack-types/src/lib.rs +++ b/dstack-types/src/lib.rs @@ -39,6 +39,8 @@ pub struct AppCompose { pub no_instance_id: bool, #[serde(default = "default_true")] pub secure_time: bool, + #[serde(default)] + pub storage_fs: Option, } fn default_true() -> bool { diff --git a/dstack-util/src/system_setup.rs b/dstack-util/src/system_setup.rs index 0470b1de..15cef6f1 100644 --- a/dstack-util/src/system_setup.rs +++ b/dstack-util/src/system_setup.rs @@ -6,6 +6,7 @@ use std::{ collections::{BTreeMap, BTreeSet}, ops::Deref, path::{Path, PathBuf}, + str::FromStr, }; use anyhow::{anyhow, bail, Context, Result}; @@ -77,6 +78,58 @@ struct InstanceInfo { app_id: Vec, } +#[derive(Debug, Clone, Copy, PartialEq, Default)] +enum FsType { + #[default] + Zfs, + Ext4, +} + +impl FromStr for FsType { + type Err = anyhow::Error; + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "zfs" => Ok(FsType::Zfs), + "ext4" => Ok(FsType::Ext4), + _ => bail!("Invalid filesystem type: {s}, supported types: zfs, ext4"), + } + } +} + +#[derive(Debug, Clone, Default)] +struct DstackOptions { + storage_encrypted: bool, + storage_fs: FsType, +} + +fn parse_dstack_options(shared: &HostShared) -> Result { + let cmdline = fs::read_to_string("/proc/cmdline").context("Failed to read /proc/cmdline")?; + + let mut options = DstackOptions { + storage_encrypted: true, // Default to encryption enabled + storage_fs: FsType::Zfs, // Default to ZFS + }; + + for param in cmdline.split_whitespace() { + if let Some(value) = param.strip_prefix("dstack.storage_encrypted=") { + match value { + "0" | "false" | "no" | "off" => options.storage_encrypted = false, + "1" | "true" | "yes" | "on" => options.storage_encrypted = true, + _ => { + bail!("Invalid value for dstack.storage_encrypted: {value}"); + } + } + } else if let Some(value) = param.strip_prefix("dstack.storage_fs=") { + options.storage_fs = value.parse().context("Failed to parse dstack.storage_fs")?; + } + } + + if let Some(fs) = &shared.app_compose.storage_fs { + options.storage_fs = fs.parse().context("Failed to parse storage_fs")?; + } + Ok(options) +} + impl InstanceInfo { fn is_initialized(&self) -> bool { !self.instance_id_seed.is_empty() @@ -433,36 +486,86 @@ impl<'a> Stage0<'a> { } } - async fn mount_data_disk(&self, initialized: bool, disk_crypt_key: &str) -> Result<()> { + async fn mount_data_disk( + &self, + initialized: bool, + disk_crypt_key: &str, + opts: &DstackOptions, + ) -> Result<()> { let name = "dstack_data_disk"; - let fs_dev = "/dev/mapper/".to_string() + name; let mount_point = &self.args.mount_point; + + // Determine the device to use based on encryption settings + let fs_dev = if opts.storage_encrypted { + format!("/dev/mapper/{name}") + } else { + self.args.device.to_string_lossy().to_string() + }; + if !initialized { self.vmm .notify_q("boot.progress", "initializing data disk") .await; - info!("Setting up disk encryption"); - self.luks_setup(disk_crypt_key, name)?; + + if opts.storage_encrypted { + info!("Setting up disk encryption"); + self.luks_setup(disk_crypt_key, name)?; + } else { + info!("Skipping disk encryption as requested by kernel cmdline"); + } + cmd! { mkdir -p $mount_point; - zpool create -o autoexpand=on dstack $fs_dev; - zfs create -o mountpoint=$mount_point -o atime=off -o checksum=blake3 dstack/data; + }?; + + match opts.storage_fs { + FsType::Zfs => { + info!("Creating ZFS filesystem"); + cmd! 
{ + zpool create -o autoexpand=on dstack $fs_dev; + zfs create -o mountpoint=$mount_point -o atime=off -o checksum=blake3 dstack/data; + } + .context("Failed to create zpool")?; + } + FsType::Ext4 => { + info!("Creating ext4 filesystem"); + cmd! { + mkfs.ext4 -F $fs_dev; + mount $fs_dev $mount_point; + } + .context("Failed to create ext4 filesystem")?; + } } - .context("Failed to create zpool")?; } else { self.vmm .notify_q("boot.progress", "mounting data disk") .await; - info!("Mounting encrypted data disk"); - self.open_encrypted_volume(disk_crypt_key, name)?; - cmd! { - zpool import dstack; - zpool status dstack; - zpool online -e dstack $fs_dev; // triggers autoexpand + + if opts.storage_encrypted { + info!("Mounting encrypted data disk"); + self.open_encrypted_volume(disk_crypt_key, name)?; + } else { + info!("Mounting unencrypted data disk"); } - .context("Failed to import zpool")?; - if cmd!(mountpoint -q $mount_point).is_err() { - cmd!(zfs mount dstack/data).context("Failed to mount zpool")?; + + match opts.storage_fs { + FsType::Zfs => { + cmd! { + zpool import dstack; + zpool status dstack; + zpool online -e dstack $fs_dev; // triggers autoexpand + } + .context("Failed to import zpool")?; + if cmd!(mountpoint -q $mount_point).is_err() { + cmd!(zfs mount dstack/data).context("Failed to mount zpool")?; + } + } + FsType::Ext4 => { + if cmd!(mountpoint -q $mount_point).is_err() { + cmd!(mount $fs_dev $mount_point) + .context("Failed to mount ext4 filesystem")?; + } + } } } Ok(()) @@ -614,9 +717,20 @@ impl<'a> Stage0<'a> { let keys_json = serde_json::to_string(&app_keys).context("Failed to serialize app keys")?; fs::write(self.app_keys_file(), keys_json).context("Failed to write app keys")?; + // Parse kernel command line options + let opts = parse_dstack_options(&self.shared).context("Failed to parse kernel cmdline")?; + info!( + "Filesystem options: encryption={}, filesystem={:?}", + opts.storage_encrypted, opts.storage_fs + ); + self.vmm.notify_q("boot.progress", "unsealing env").await; - self.mount_data_disk(is_initialized, &hex::encode(&app_keys.disk_crypt_key)) - .await?; + self.mount_data_disk( + is_initialized, + &hex::encode(&app_keys.disk_crypt_key), + &opts, + ) + .await?; self.vmm .notify_q( "instance.info", From c0279f4306e7220f376b250635779ecffc08f3ca Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Fri, 26 Sep 2025 01:18:51 +0000 Subject: [PATCH 39/77] vmm: UI for storage fs selection --- vmm/src/console.html | 85 ++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 82 insertions(+), 3 deletions(-) diff --git a/vmm/src/console.html b/vmm/src/console.html index 3ab894fa..e2cfab5b 100644 --- a/vmm/src/console.html +++ b/vmm/src/console.html @@ -492,6 +492,56 @@ font-family: monospace; } +