From 1f06759572de5d20e897d2392660b13d6e51ec39 Mon Sep 17 00:00:00 2001
From: Peter Dave Hello
Date: Sat, 24 Jan 2026 17:43:09 +0800
Subject: [PATCH] fix(core-schemas): add openai-responses provider schema

The openai-responses provider exists in the CLI provider list, but
core-schemas and the CLI config schema were never synced with it,
leaving config validation and model-ID mapping incomplete for that
provider.

- packages/core-schemas: add openAIResponsesProviderSchema
- cli/src/config/types.ts: re-export the schema and its inferred type
- cli/src/config/mapper.ts: map the model ID from openAiModelId
- cli/src/config/schema.json: add the provider enum entry and validation blocks

Verified with:

pnpm check-types
cd cli && pnpm test
---
 .changeset/openai-responses-config.md        |  6 ++
 cli/src/__tests__/cli-provider-model.test.ts |  6 ++
 cli/src/config/mapper.ts                     |  2 +
 cli/src/config/schema.json                   | 77 ++++++++++++++++++++
 cli/src/config/types.ts                      |  2 +
 cli/src/constants/providers/models.ts        |  2 +
 packages/core-schemas/src/config/provider.ts | 18 +++++
 7 files changed, 113 insertions(+)
 create mode 100644 .changeset/openai-responses-config.md

diff --git a/.changeset/openai-responses-config.md b/.changeset/openai-responses-config.md
new file mode 100644
index 00000000000..a0a067ba670
--- /dev/null
+++ b/.changeset/openai-responses-config.md
@@ -0,0 +1,6 @@
+---
+"@kilocode/cli": patch
+"@kilocode/core-schemas": patch
+---
+
+Add openai-responses provider support in CLI config validation.
diff --git a/cli/src/__tests__/cli-provider-model.test.ts b/cli/src/__tests__/cli-provider-model.test.ts
index 927d451ddcf..58d2b234a45 100644
--- a/cli/src/__tests__/cli-provider-model.test.ts
+++ b/cli/src/__tests__/cli-provider-model.test.ts
@@ -20,6 +20,11 @@ describe("Provider and Model CLI Options", () => {
 		expect(field).toBe("openAiModelId")
 	})
 
+	it("should return correct model field for openai-responses provider", () => {
+		const field = getModelIdKey("openai-responses")
+		expect(field).toBe("openAiModelId")
+	})
+
 	it("should return correct model field for openai-native provider", () => {
 		const field = getModelIdKey("openai-native")
 		expect(field).toBe("apiModelId")
@@ -176,6 +181,7 @@ describe("Provider and Model CLI Options", () => {
 			{ name: "kilocode", expectedField: "kilocodeModel" },
 			{ name: "anthropic", expectedField: "apiModelId" },
 			{ name: "openai", expectedField: "openAiModelId" },
+			{ name: "openai-responses", expectedField: "openAiModelId" },
 			{ name: "openai-native", expectedField: "apiModelId" },
 			{ name: "openrouter", expectedField: "openRouterModelId" },
 			{ name: "ollama", expectedField: "ollamaModelId" },
diff --git a/cli/src/config/mapper.ts b/cli/src/config/mapper.ts
index 3e1a3766bdf..6680861417b 100644
--- a/cli/src/config/mapper.ts
+++ b/cli/src/config/mapper.ts
@@ -112,6 +112,8 @@ export function getModelIdForProvider(provider: ProviderConfig): string {
 			return provider.lmStudioModelId || ""
 		case "openai":
 			return provider.openAiModelId || ""
+		case "openai-responses":
+			return provider.openAiModelId || ""
 		case "glama":
 			return provider.glamaModelId || ""
 		case "litellm":
diff --git a/cli/src/config/schema.json b/cli/src/config/schema.json
index 1c29931b897..678a4e945c0 100644
--- a/cli/src/config/schema.json
+++ b/cli/src/config/schema.json
@@ -236,6 +236,7 @@
 				"anthropic",
 				"openai-native",
 				"openai-codex",
+				"openai-responses",
 				"openrouter",
 				"bedrock",
 				"gemini",
@@ -457,6 +458,82 @@
 				}
 			}
 		},
+		{
+			"if": {
+				"properties": { "provider": { "const": "openai-responses" } }
+			},
+			"then": {
+				"properties": {
+					"openAiApiKey": {
+						"type": "string",
+						"description": "OpenAI API key"
+					},
+					"openAiBaseUrl": {
+						"type": "string",
"string", + "description": "Custom base URL for OpenAI API requests" + }, + "openAiModelId": { + "type": "string", + "description": "OpenAI model ID" + }, + "openAiLegacyFormat": { + "type": "boolean", + "description": "Use legacy API format for compatibility with older OpenAI API versions" + }, + "openAiR1FormatEnabled": { + "type": "boolean", + "description": "Enable R1 format for reasoning models that support extended thinking capabilities" + }, + "openAiUseAzure": { + "type": "boolean", + "description": "Use Azure OpenAI Service instead of standard OpenAI API" + }, + "azureApiVersion": { + "type": "string", + "description": "Azure OpenAI API version (e.g., '2024-02-15-preview'). Required when openAiUseAzure is true" + }, + "openAiStreamingEnabled": { + "type": "boolean", + "description": "Enable streaming responses for real-time token generation" + }, + "openAiHeaders": { + "type": "object", + "description": "Custom HTTP headers to include in OpenAI API requests", + "additionalProperties": { + "type": "string" + } + } + } + } + }, + { + "if": { + "properties": { + "provider": { "const": "openai-responses" }, + "openAiApiKey": { "type": "string", "minLength": 1 } + }, + "required": ["openAiApiKey"] + }, + "then": { + "properties": { + "openAiApiKey": { "minLength": 10 } + } + } + }, + { + "if": { + "properties": { + "provider": { "const": "openai-responses" }, + "openAiModelId": { "type": "string", "minLength": 1 } + }, + "required": ["openAiModelId"] + }, + "then": { + "properties": { + "openAiModelId": { "minLength": 1 } + } + } + }, { "if": { "properties": { "provider": { "const": "openrouter" } } diff --git a/cli/src/config/types.ts b/cli/src/config/types.ts index 849caa1f63d..b0aeac4a649 100644 --- a/cli/src/config/types.ts +++ b/cli/src/config/types.ts @@ -23,6 +23,7 @@ export { anthropicProviderSchema, openAINativeProviderSchema, openAIProviderSchema, + openAIResponsesProviderSchema, openRouterProviderSchema, ollamaProviderSchema, lmStudioProviderSchema, @@ -66,6 +67,7 @@ export { type AnthropicProviderConfig, type OpenAINativeProviderConfig, type OpenAIProviderConfig, + type OpenAIResponsesProviderConfig, type OpenRouterProviderConfig, type OllamaProviderConfig, type LMStudioProviderConfig, diff --git a/cli/src/constants/providers/models.ts b/cli/src/constants/providers/models.ts index 4c703ed958c..fa9292f9289 100644 --- a/cli/src/constants/providers/models.ts +++ b/cli/src/constants/providers/models.ts @@ -450,6 +450,8 @@ export function getModelIdKey(provider: ProviderName): string { return "litellmModelId" case "openai": return "openAiModelId" + case "openai-responses": + return "openAiModelId" case "ollama": return "ollamaModelId" case "lmstudio": diff --git a/packages/core-schemas/src/config/provider.ts b/packages/core-schemas/src/config/provider.ts index 48e77e30d53..00a07887500 100644 --- a/packages/core-schemas/src/config/provider.ts +++ b/packages/core-schemas/src/config/provider.ts @@ -59,6 +59,22 @@ export const openAIProviderSchema = baseProviderSchema.extend({ openAiHeaders: z.record(z.string(), z.string()).optional(), }) +// kilocode_change start +// OpenAI Responses provider +export const openAIResponsesProviderSchema = baseProviderSchema.extend({ + provider: z.literal("openai-responses"), + openAiModelId: z.string().optional(), + openAiBaseUrl: z.string().optional(), + openAiApiKey: z.string().optional(), + openAiLegacyFormat: z.boolean().optional(), + openAiR1FormatEnabled: z.boolean().optional(), + openAiUseAzure: z.boolean().optional(), + azureApiVersion: 
+	openAiStreamingEnabled: z.boolean().optional(),
+	openAiHeaders: z.record(z.string(), z.string()).optional(),
+})
+// kilocode_change end
+
 // OpenRouter provider
 export const openRouterProviderSchema = baseProviderSchema.extend({
 	provider: z.literal("openrouter"),
@@ -397,6 +413,7 @@ export const providerConfigSchema = z.discriminatedUnion("provider", [
 	openAINativeProviderSchema,
 	openAICodexProviderSchema, // kilocode_change
 	openAIProviderSchema,
+	openAIResponsesProviderSchema, // kilocode_change
 	openRouterProviderSchema,
 	ollamaProviderSchema,
 	lmStudioProviderSchema,
@@ -443,6 +460,7 @@ export type AnthropicProviderConfig = z.infer<typeof anthropicProviderSchema>
 export type OpenAINativeProviderConfig = z.infer<typeof openAINativeProviderSchema>
 export type OpenAICodexProviderConfig = z.infer<typeof openAICodexProviderSchema> // kilocode_change
 export type OpenAIProviderConfig = z.infer<typeof openAIProviderSchema>
+export type OpenAIResponsesProviderConfig = z.infer<typeof openAIResponsesProviderSchema> // kilocode_change
 export type OpenRouterProviderConfig = z.infer<typeof openRouterProviderSchema>
 export type OllamaProviderConfig = z.infer<typeof ollamaProviderSchema>
 export type LMStudioProviderConfig = z.infer<typeof lmStudioProviderSchema>
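
For reviewers, a minimal sketch of what the wired-up discriminated union accepts once this patch lands. The config values are hypothetical, and it assumes `providerConfigSchema` is re-exported from the package root and that `baseProviderSchema` adds no other required fields:

```ts
import { providerConfigSchema } from "@kilocode/core-schemas"

// Hypothetical config; field names come from openAIResponsesProviderSchema above.
const result = providerConfigSchema.safeParse({
	provider: "openai-responses",
	openAiApiKey: "sk-example-1234567890", // hypothetical key
	openAiModelId: "gpt-4o",               // hypothetical model ID
	openAiStreamingEnabled: true,
})

if (result.success && result.data.provider === "openai-responses") {
	// The "provider" discriminator narrows result.data to OpenAIResponsesProviderConfig.
	console.log(result.data.openAiModelId)
} else if (!result.success) {
	console.error(result.error.issues)
}
```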
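The two new `if`/`then` blocks in schema.json only bite when the key or model ID is present and non-empty, so partial configs still validate. A sketch of the intended behavior, assuming Ajv (or any comparable JSON Schema validator) is used to compile the CLI schema and that JSON module resolution is enabled:

```ts
import Ajv from "ajv"
import schema from "../config/schema.json" // path assumed relative to cli/src

const validate = new Ajv({ allErrors: true, strict: false }).compile(schema)

// A present-but-short key should fail: the if-block matches (non-empty string),
// so the then-block's minLength of 10 applies.
validate({ provider: "openai-responses", openAiApiKey: "short" })

// Omitting the key entirely skips the conditional, so this should pass
// (assuming no other part of the schema requires openAiApiKey).
validate({ provider: "openai-responses" })
```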