6 changes: 6 additions & 0 deletions .changeset/openai-responses-config.md
@@ -0,0 +1,6 @@
---
"@kilocode/cli": patch
"@kilocode/core-schemas": patch
---

Add openai-responses provider support in CLI config validation.
6 changes: 6 additions & 0 deletions cli/src/__tests__/cli-provider-model.test.ts
@@ -20,6 +20,11 @@ describe("Provider and Model CLI Options", () => {
expect(field).toBe("openAiModelId")
})

it("should return correct model field for openai-responses provider", () => {
const field = getModelIdKey("openai-responses")
expect(field).toBe("openAiModelId")
})

it("should return correct model field for openai-native provider", () => {
const field = getModelIdKey("openai-native")
expect(field).toBe("apiModelId")
@@ -176,6 +181,7 @@ describe("Provider and Model CLI Options", () => {
{ name: "kilocode", expectedField: "kilocodeModel" },
{ name: "anthropic", expectedField: "apiModelId" },
{ name: "openai", expectedField: "openAiModelId" },
{ name: "openai-responses", expectedField: "openAiModelId" },
{ name: "openai-native", expectedField: "apiModelId" },
{ name: "openrouter", expectedField: "openRouterModelId" },
{ name: "ollama", expectedField: "ollamaModelId" },
2 changes: 2 additions & 0 deletions cli/src/config/mapper.ts
@@ -112,6 +112,8 @@ export function getModelIdForProvider(provider: ProviderConfig): string {
return provider.lmStudioModelId || ""
case "openai":
return provider.openAiModelId || ""
case "openai-responses":
return provider.openAiModelId || ""
case "glama":
return provider.glamaModelId || ""
case "litellm":
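For context, the new `openai-responses` case deliberately reuses the same `openAiModelId` field as the plain `openai` provider. A minimal usage sketch (the import paths and config literal are illustrative, not from this PR):

```ts
import { getModelIdForProvider } from "./mapper"
import type { ProviderConfig } from "./types"

// Hypothetical config; "openai-responses" resolves through the same
// openAiModelId field as the "openai" case above.
const provider = {
  provider: "openai-responses",
  openAiApiKey: "sk-example",
  openAiModelId: "gpt-4o",
} as ProviderConfig

getModelIdForProvider(provider) // => "gpt-4o"
```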
77 changes: 77 additions & 0 deletions cli/src/config/schema.json
@@ -236,6 +236,7 @@
"anthropic",
"openai-native",
"openai-codex",
"openai-responses",
"openrouter",
"bedrock",
"gemini",
@@ -457,6 +458,82 @@
}
}
},
{
"if": {
"properties": { "provider": { "const": "openai-responses" } }
},
"then": {
"properties": {
"openAiApiKey": {
"type": "string",
"description": "OpenAI API key"
},
"openAiBaseUrl": {
"type": "string",
"description": "Custom base URL for OpenAI API requests"
},
"openAiModelId": {
"type": "string",
"description": "OpenAI model ID"
},
"openAiLegacyFormat": {
"type": "boolean",
"description": "Use legacy API format for compatibility with older OpenAI API versions"
},
"openAiR1FormatEnabled": {
"type": "boolean",
"description": "Enable R1 format for reasoning models that support extended thinking capabilities"
},
"openAiUseAzure": {
"type": "boolean",
"description": "Use Azure OpenAI Service instead of standard OpenAI API"
},
"azureApiVersion": {
"type": "string",
"description": "Azure OpenAI API version (e.g., '2024-02-15-preview'). Required when openAiUseAzure is true"
},
"openAiStreamingEnabled": {
"type": "boolean",
"description": "Enable streaming responses for real-time token generation"
},
"openAiHeaders": {
"type": "object",
"description": "Custom HTTP headers to include in OpenAI API requests",
"additionalProperties": {
"type": "string"
}
}
}
}
},
{
"if": {
"properties": {
"provider": { "const": "openai-responses" },
"openAiApiKey": { "type": "string", "minLength": 1 }
},
"required": ["openAiApiKey"]
},
"then": {
"properties": {
"openAiApiKey": { "minLength": 10 }
}
}
},
{
"if": {
"properties": {
"provider": { "const": "openai-responses" },
"openAiModelId": { "type": "string", "minLength": 1 }
},
"required": ["openAiModelId"]
},
"then": {
"properties": {
"openAiModelId": { "minLength": 1 }
}
}
},
{
"if": {
"properties": { "provider": { "const": "openrouter" } }
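The three `openai-responses` blocks above mirror the validation already applied to the plain `openai` provider: the first declares the accepted properties, the second enforces a minimum API-key length of 10 once a non-empty key is present, and the third requires a non-empty model ID when one is given. A sketch of the resulting behavior, using Ajv purely for illustration (the CLI's actual validator is not shown in this diff):

```ts
import Ajv from "ajv"
import schema from "./schema.json"

const validate = new Ajv().compile(schema)

// Satisfies the conditional rules above: key >= 10 chars, non-empty model ID.
const ok = validate({
  provider: "openai-responses",
  openAiApiKey: "sk-0123456789",
  openAiModelId: "gpt-4o",
})

if (!ok) console.error(validate.errors)
```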
2 changes: 2 additions & 0 deletions cli/src/config/types.ts
@@ -23,6 +23,7 @@ export {
anthropicProviderSchema,
openAINativeProviderSchema,
openAIProviderSchema,
openAIResponsesProviderSchema,
openRouterProviderSchema,
ollamaProviderSchema,
lmStudioProviderSchema,
@@ -66,6 +67,7 @@ export {
type AnthropicProviderConfig,
type OpenAINativeProviderConfig,
type OpenAIProviderConfig,
type OpenAIResponsesProviderConfig,
type OpenRouterProviderConfig,
type OllamaProviderConfig,
type LMStudioProviderConfig,
2 changes: 2 additions & 0 deletions cli/src/constants/providers/models.ts
@@ -450,6 +450,8 @@ export function getModelIdKey(provider: ProviderName): string {
return "litellmModelId"
case "openai":
return "openAiModelId"
case "openai-responses":
return "openAiModelId"
case "ollama":
return "ollamaModelId"
case "lmstudio":
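The `getModelIdKey` change keeps the CLI's field lookup in sync with the mapper: both `openai` and `openai-responses` store their model under `openAiModelId`, as the tests earlier in this diff assert. A sketch of how the returned key might be consumed (the `readModelId` helper and import paths are hypothetical):

```ts
import { getModelIdKey } from "./models"
import type { ProviderName } from "../../config/types"

// Hypothetical helper: look up a provider's model ID by its config key.
function readModelId(config: Record<string, unknown>, provider: ProviderName): string {
  const key = getModelIdKey(provider) // "openAiModelId" for "openai-responses"
  return (config[key] as string | undefined) ?? ""
}

readModelId({ openAiModelId: "gpt-4o" }, "openai-responses") // => "gpt-4o"
```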
18 changes: 18 additions & 0 deletions packages/core-schemas/src/config/provider.ts
@@ -59,6 +59,22 @@ export const openAIProviderSchema = baseProviderSchema.extend({
openAiHeaders: z.record(z.string(), z.string()).optional(),
})

// kilocode_change start
// OpenAI Responses provider
export const openAIResponsesProviderSchema = baseProviderSchema.extend({
provider: z.literal("openai-responses"),
openAiModelId: z.string().optional(),
openAiBaseUrl: z.string().optional(),
openAiApiKey: z.string().optional(),
openAiLegacyFormat: z.boolean().optional(),
openAiR1FormatEnabled: z.boolean().optional(),
openAiUseAzure: z.boolean().optional(),
azureApiVersion: z.string().optional(),
openAiStreamingEnabled: z.boolean().optional(),
openAiHeaders: z.record(z.string(), z.string()).optional(),
})
// kilocode_change end

// OpenRouter provider
export const openRouterProviderSchema = baseProviderSchema.extend({
provider: z.literal("openrouter"),
@@ -397,6 +413,7 @@ export const providerConfigSchema = z.discriminatedUnion("provider", [
openAINativeProviderSchema,
openAICodexProviderSchema, // kilocode_change
openAIProviderSchema,
openAIResponsesProviderSchema, // kilocode_change
openRouterProviderSchema,
ollamaProviderSchema,
lmStudioProviderSchema,
@@ -443,6 +460,7 @@ export type AnthropicProviderConfig = z.infer<typeof anthropicProviderSchema>
export type OpenAINativeProviderConfig = z.infer<typeof openAINativeProviderSchema>
export type OpenAICodexProviderConfig = z.infer<typeof openAICodexProviderSchema> // kilocode_change
export type OpenAIProviderConfig = z.infer<typeof openAIProviderSchema>
export type OpenAIResponsesProviderConfig = z.infer<typeof openAIResponsesProviderSchema> // kilocode_change
export type OpenRouterProviderConfig = z.infer<typeof openRouterProviderSchema>
export type OllamaProviderConfig = z.infer<typeof ollamaProviderSchema>
export type LMStudioProviderConfig = z.infer<typeof lmStudioProviderSchema>
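Because `providerConfigSchema` is a zod discriminated union on the `provider` literal, adding `openAIResponsesProviderSchema` to the array is all that is needed for parsing to dispatch to the new shape. A minimal sketch, assuming the schema is re-exported from the package root as the types.ts changes suggest (the input object is hypothetical):

```ts
import { providerConfigSchema } from "@kilocode/core-schemas"

const result = providerConfigSchema.safeParse({
  provider: "openai-responses",
  openAiApiKey: "sk-example",
  openAiModelId: "gpt-4o",
  openAiStreamingEnabled: true,
})

if (result.success && result.data.provider === "openai-responses") {
  // Narrowed to OpenAIResponsesProviderConfig by the literal check.
  console.log(result.data.openAiModelId) // "gpt-4o"
}
```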