diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantChatClient.cs
index 3547483da10..0b6c5f5122f 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIAssistantChatClient.cs
@@ -237,14 +237,16 @@ void IDisposable.Dispose()
}
/// <summary>Converts an Extensions function to an OpenAI assistants function tool.</summary>
- internal static FunctionToolDefinition ToOpenAIAssistantsFunctionToolDefinition(AIFunction aiFunction)
+ internal static FunctionToolDefinition ToOpenAIAssistantsFunctionToolDefinition(AIFunction aiFunction, ChatOptions? options = null)
{
- (BinaryData parameters, bool? strict) = OpenAIClientExtensions.ToOpenAIFunctionParameters(aiFunction);
+ bool? strict =
+ OpenAIClientExtensions.HasStrict(aiFunction.AdditionalProperties) ??
+ OpenAIClientExtensions.HasStrict(options?.AdditionalProperties);
return new FunctionToolDefinition(aiFunction.Name)
{
Description = aiFunction.Description,
- Parameters = parameters,
+ Parameters = OpenAIClientExtensions.ToOpenAIFunctionParameters(aiFunction, strict),
StrictParameterSchemaEnabled = strict,
};
}
@@ -296,7 +298,7 @@ internal static FunctionToolDefinition ToOpenAIAssistantsFunctionToolDefinition(
switch (tool)
{
case AIFunction aiFunction:
- runOptions.ToolsOverride.Add(ToOpenAIAssistantsFunctionToolDefinition(aiFunction));
+ runOptions.ToolsOverride.Add(ToOpenAIAssistantsFunctionToolDefinition(aiFunction, options));
break;
case HostedCodeInterpreterTool:
@@ -342,7 +344,8 @@ internal static FunctionToolDefinition ToOpenAIAssistantsFunctionToolDefinition(
runOptions.ResponseFormat = AssistantResponseFormat.CreateJsonSchemaFormat(
jsonFormat.SchemaName,
BinaryData.FromBytes(JsonSerializer.SerializeToUtf8Bytes(jsonSchema, OpenAIJsonContext.Default.JsonElement)),
- jsonFormat.SchemaDescription);
+ jsonFormat.SchemaDescription,
+ OpenAIClientExtensions.HasStrict(options.AdditionalProperties));
break;
case ChatResponseFormatJson jsonFormat:
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
index abbcb0ed0ae..c051550d493 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
@@ -101,11 +101,17 @@ void IDisposable.Dispose()
}
/// <summary>Converts an Extensions function to an OpenAI chat tool.</summary>
- internal static ChatTool ToOpenAIChatTool(AIFunction aiFunction)
+ internal static ChatTool ToOpenAIChatTool(AIFunction aiFunction, ChatOptions? options = null)
{
- (BinaryData parameters, bool? strict) = OpenAIClientExtensions.ToOpenAIFunctionParameters(aiFunction);
-
- return ChatTool.CreateFunctionTool(aiFunction.Name, aiFunction.Description, parameters, strict);
+ bool? strict =
+ OpenAIClientExtensions.HasStrict(aiFunction.AdditionalProperties) ??
+ OpenAIClientExtensions.HasStrict(options?.AdditionalProperties);
+
+ return ChatTool.CreateFunctionTool(
+ aiFunction.Name,
+ aiFunction.Description,
+ OpenAIClientExtensions.ToOpenAIFunctionParameters(aiFunction, strict),
+ strict);
}
/// <summary>Converts an Extensions chat message enumerable to an OpenAI chat message enumerable.</summary>
@@ -517,7 +523,7 @@ private ChatCompletionOptions ToOpenAIOptions(ChatOptions? options)
{
if (tool is AIFunction af)
{
- result.Tools.Add(ToOpenAIChatTool(af));
+ result.Tools.Add(ToOpenAIChatTool(af, options));
}
}
@@ -555,7 +561,8 @@ private ChatCompletionOptions ToOpenAIOptions(ChatOptions? options)
OpenAI.Chat.ChatResponseFormat.CreateJsonSchemaFormat(
jsonFormat.SchemaName ?? "json_schema",
BinaryData.FromBytes(JsonSerializer.SerializeToUtf8Bytes(jsonSchema, OpenAIJsonContext.Default.JsonElement)),
- jsonFormat.SchemaDescription) :
+ jsonFormat.SchemaDescription,
+ OpenAIClientExtensions.HasStrict(options.AdditionalProperties)) :
OpenAI.Chat.ChatResponseFormat.CreateJsonObjectFormat();
}
}
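For callers of the chat client, the practical effect of this change is that a single ChatOptions-level flag can opt every function tool into strict schema handling. A minimal consumer-side sketch, assuming the public Microsoft.Extensions.AI surface (IChatClient, ChatOptions, AIFunctionFactory) and the same "strictJsonSchema" key the clients read; the client construction is illustrative and not part of this diff:

using System;
using Microsoft.Extensions.AI;
using OpenAI.Chat;

// Illustrative setup; AsIChatClient() is the Microsoft.Extensions.AI.OpenAI bridge extension.
IChatClient client =
    new ChatClient("gpt-4o-mini", Environment.GetEnvironmentVariable("OPENAI_API_KEY")!)
        .AsIChatClient();

ChatResponse response = await client.GetResponseAsync("How old is Alice?", new ChatOptions
{
    Tools = [AIFunctionFactory.Create(() => 42, "GetPersonAge", "Gets the age of the specified person.")],
    // With this change, the options-level value applies to any function tool (or JSON-schema
    // response format) that doesn't carry its own "strictJsonSchema" entry.
    AdditionalProperties = new() { ["strictJsonSchema"] = true },
});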
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs
index 24fd93ccb65..b20769c0dc4 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIClientExtensions.cs
@@ -21,6 +21,7 @@
#pragma warning disable S1067 // Expressions should not be too complex
#pragma warning disable SA1515 // Single-line comment should be preceded by blank line
#pragma warning disable CA1305 // Specify IFormatProvider
+#pragma warning disable S1135 // Track uses of "TODO" tags
namespace Microsoft.Extensions.AI;
@@ -182,15 +183,17 @@ public static ResponseTool AsOpenAIResponseTool(this AIFunction function) =>
public static ConversationFunctionTool AsOpenAIConversationFunctionTool(this AIFunction function) =>
OpenAIRealtimeConversationClient.ToOpenAIConversationFunctionTool(Throw.IfNull(function));
+ // TODO: Once we're ready to rely on C# 14 features, add an extension property ChatOptions.Strict.
+
+ /// <summary>Gets whether the properties specify that strict schema handling is desired.</summary>
+ internal static bool? HasStrict(IReadOnlyDictionary<string, object?>? additionalProperties) =>
+ additionalProperties?.TryGetValue(StrictKey, out object? strictObj) is true &&
+ strictObj is bool strictValue ?
+ strictValue : null;
+
/// <summary>Extracts from an <see cref="AIFunction"/> the parameters and strictness setting for use with OpenAI's APIs.</summary>
- internal static (BinaryData Parameters, bool? Strict) ToOpenAIFunctionParameters(AIFunction aiFunction)
+ internal static BinaryData ToOpenAIFunctionParameters(AIFunction aiFunction, bool? strict)
{
- // Extract any strict setting from AdditionalProperties.
- bool? strict =
- aiFunction.AdditionalProperties.TryGetValue(OpenAIClientExtensions.StrictKey, out object? strictObj) &&
- strictObj is bool strictValue ?
- strictValue : null;
-
// Perform any desirable transformations on the function's JSON schema, if it'll be used in a strict setting.
JsonElement jsonSchema = strict is true ?
StrictSchemaTransformCache.GetOrCreateTransformedSchema(aiFunction) :
@@ -201,7 +204,7 @@ strictObj is bool strictValue ?
var tool = JsonSerializer.Deserialize(jsonSchema, OpenAIJsonContext.Default.ToolJson)!;
var functionParameters = BinaryData.FromBytes(JsonSerializer.SerializeToUtf8Bytes(tool, OpenAIJsonContext.Default.ToolJson));
- return (functionParameters, strict);
+ return functionParameters;
}
/// <summary>Used to create the JSON payload for an OpenAI tool description.</summary>
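The refactoring splits what was a single tuple-returning helper into two pieces: HasStrict resolves the flag, and ToOpenAIFunctionParameters consumes it. A sketch of the call pattern the clients in this diff now share, with aiFunction and options standing in for the parameters in scope at each call site:

// Per-function "strictJsonSchema" wins; the ChatOptions-level value is only a fallback.
bool? strict =
    OpenAIClientExtensions.HasStrict(aiFunction.AdditionalProperties) ??
    OpenAIClientExtensions.HasStrict(options?.AdditionalProperties);

// The resolved value selects between the strict-transformed schema and the function's raw
// JsonSchema inside ToOpenAIFunctionParameters, and is also surfaced on the provider-specific
// tool definition (e.g. StrictParameterSchemaEnabled, ChatTool.CreateFunctionTool's strict argument).
BinaryData parameters = OpenAIClientExtensions.ToOpenAIFunctionParameters(aiFunction, strict);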
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIRealtimeConversationClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIRealtimeConversationClient.cs
index 892a9e9aa2a..abfebd99f34 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIRealtimeConversationClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIRealtimeConversationClient.cs
@@ -1,7 +1,6 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
-using System;
using OpenAI.RealtimeConversation;
namespace Microsoft.Extensions.AI;
@@ -9,14 +8,16 @@ namespace Microsoft.Extensions.AI;
/// <summary>Provides helpers for interacting with OpenAI Realtime.</summary>
internal sealed class OpenAIRealtimeConversationClient
{
- public static ConversationFunctionTool ToOpenAIConversationFunctionTool(AIFunction aiFunction)
+ public static ConversationFunctionTool ToOpenAIConversationFunctionTool(AIFunction aiFunction, ChatOptions? options = null)
{
- (BinaryData parameters, _) = OpenAIClientExtensions.ToOpenAIFunctionParameters(aiFunction);
+ bool? strict =
+ OpenAIClientExtensions.HasStrict(aiFunction.AdditionalProperties) ??
+ OpenAIClientExtensions.HasStrict(options?.AdditionalProperties);
return new ConversationFunctionTool(aiFunction.Name)
{
Description = aiFunction.Description,
- Parameters = parameters,
+ Parameters = OpenAIClientExtensions.ToOpenAIFunctionParameters(aiFunction, strict),
};
}
}
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponseChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponseChatClient.cs
index a5f68e10365..46019166719 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponseChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponseChatClient.cs
@@ -323,11 +323,17 @@ void IDisposable.Dispose()
// Nothing to dispose. Implementation required for the IChatClient interface.
}
- internal static ResponseTool ToResponseTool(AIFunction aiFunction)
+ internal static ResponseTool ToResponseTool(AIFunction aiFunction, ChatOptions? options = null)
{
- (BinaryData parameters, bool? strict) = OpenAIClientExtensions.ToOpenAIFunctionParameters(aiFunction);
-
- return ResponseTool.CreateFunctionTool(aiFunction.Name, aiFunction.Description, parameters, strict ?? false);
+ bool? strict =
+ OpenAIClientExtensions.HasStrict(aiFunction.AdditionalProperties) ??
+ OpenAIClientExtensions.HasStrict(options?.AdditionalProperties);
+
+ return ResponseTool.CreateFunctionTool(
+ aiFunction.Name,
+ aiFunction.Description,
+ OpenAIClientExtensions.ToOpenAIFunctionParameters(aiFunction, strict),
+ strict ?? false);
}
/// Creates a from a .
@@ -380,7 +386,7 @@ private ResponseCreationOptions ToOpenAIResponseCreationOptions(ChatOptions? opt
switch (tool)
{
case AIFunction aiFunction:
- ResponseTool rtool = ToResponseTool(aiFunction);
+ ResponseTool rtool = ToResponseTool(aiFunction, options);
result.Tools.Add(rtool);
break;
@@ -442,7 +448,8 @@ private ResponseCreationOptions ToOpenAIResponseCreationOptions(ChatOptions? opt
ResponseTextFormat.CreateJsonSchemaFormat(
jsonFormat.SchemaName ?? "json_schema",
BinaryData.FromBytes(JsonSerializer.SerializeToUtf8Bytes(jsonSchema, OpenAIJsonContext.Default.JsonElement)),
- jsonFormat.SchemaDescription) :
+ jsonFormat.SchemaDescription,
+ OpenAIClientExtensions.HasStrict(options.AdditionalProperties)) :
ResponseTextFormat.CreateJsonObjectFormat(),
};
}
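The response-format changes mirror the tool changes: when ChatOptions.ResponseFormat is a JSON-schema format, the options-level "strictJsonSchema" value is now forwarded to the OpenAI schema-format factories. A consumer-side sketch, assuming ChatResponseFormat.ForJsonSchema from Microsoft.Extensions.AI.Abstractions; the schema literal is illustrative:

using System.Text.Json;
using Microsoft.Extensions.AI;

JsonElement schema = JsonDocument.Parse("""
    {
      "type": "object",
      "properties": { "age": { "type": "integer" } },
      "required": ["age"],
      "additionalProperties": false
    }
    """).RootElement;

ChatOptions options = new()
{
    ResponseFormat = ChatResponseFormat.ForJsonSchema(schema, "person_age", "Age of the requested person."),
    // Now flows into the strict parameter of ResponseTextFormat.CreateJsonSchemaFormat /
    // ChatResponseFormat.CreateJsonSchemaFormat shown above.
    AdditionalProperties = new() { ["strictJsonSchema"] = true },
};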
diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs
index 30d03b6eee3..edb5d9fab07 100644
--- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs
@@ -276,6 +276,74 @@ public async Task BasicRequestResponse_Streaming()
}, usage.Details.AdditionalCounts);
}
+ [Fact]
+ public async Task ChatOptions_StrictRespected()
+ {
+ const string Input = """
+ {
+ "tools": [
+ {
+ "function": {
+ "description": "Gets the age of the specified person.",
+ "name": "GetPersonAge",
+ "strict": true,
+ "parameters": {
+ "type": "object",
+ "required": [],
+ "properties": {},
+ "additionalProperties": false
+ }
+ },
+ "type": "function"
+ }
+ ],
+ "messages": [
+ {
+ "role": "user",
+ "content": "hello"
+ }
+ ],
+ "model": "gpt-4o-mini",
+ "tool_choice": "auto"
+ }
+ """;
+
+ const string Output = """
+ {
+ "id": "chatcmpl-ADx3PvAnCwJg0woha4pYsBTi3ZpOI",
+ "object": "chat.completion",
+ "created": 1727888631,
+ "model": "gpt-4o-mini-2024-07-18",
+ "choices": [
+ {
+ "index": 0,
+ "message": {
+ "role": "assistant",
+ "content": "Hello! How can I assist you today?",
+ "refusal": null
+ },
+ "logprobs": null,
+ "finish_reason": "stop"
+ }
+ ]
+ }
+ """;
+
+ using VerbatimHttpHandler handler = new(Input, Output);
+ using HttpClient httpClient = new(handler);
+ using IChatClient client = CreateChatClient(httpClient, "gpt-4o-mini");
+
+ var response = await client.GetResponseAsync("hello", new()
+ {
+ Tools = [AIFunctionFactory.Create(() => 42, "GetPersonAge", "Gets the age of the specified person.")],
+ AdditionalProperties = new()
+ {
+ ["strictJsonSchema"] = true,
+ },
+ });
+ Assert.NotNull(response);
+ }
+
[Fact]
public async Task ChatOptions_DoNotOverwrite_NotNullPropertiesInRawRepresentation_NonStreaming()
{
@@ -337,7 +405,7 @@ public async Task ChatOptions_DoNotOverwrite_NotNullPropertiesInRawRepresentatio
ResponseFormat = OpenAI.Chat.ChatResponseFormat.CreateTextFormat()
};
openAIOptions.StopSequences.Add("hello");
- openAIOptions.Tools.Add(ToOpenAIChatTool(tool));
+ openAIOptions.Tools.Add(OpenAIClientExtensions.AsOpenAIChatTool(tool));
return openAIOptions;
},
ModelId = null,
@@ -416,7 +484,7 @@ public async Task ChatOptions_DoNotOverwrite_NotNullPropertiesInRawRepresentatio
ResponseFormat = OpenAI.Chat.ChatResponseFormat.CreateTextFormat()
};
openAIOptions.StopSequences.Add("hello");
- openAIOptions.Tools.Add(ToOpenAIChatTool(tool));
+ openAIOptions.Tools.Add(OpenAIClientExtensions.AsOpenAIChatTool(tool));
return openAIOptions;
},
ModelId = null, // has no effect, you cannot change the model of an OpenAI's ChatClient.
@@ -600,20 +668,6 @@ public async Task ChatOptions_Overwrite_NullPropertiesInRawRepresentation_Stream
Assert.Equal("Hello! How can I assist you today?", responseText);
}
- /// <summary>Converts an Extensions function to an OpenAI chat tool.</summary>
- private static ChatTool ToOpenAIChatTool(AIFunction aiFunction)
- {
- bool? strict =
- aiFunction.AdditionalProperties.TryGetValue("strictJsonSchema", out object? strictObj) &&
- strictObj is bool strictValue ?
- strictValue : null;
-
- // Map to an intermediate model so that redundant properties are skipped.
- var tool = JsonSerializer.Deserialize<ChatToolJson>(aiFunction.JsonSchema)!;
- var functionParameters = BinaryData.FromBytes(JsonSerializer.SerializeToUtf8Bytes(tool));
- return ChatTool.CreateFunctionTool(aiFunction.Name, aiFunction.Description, functionParameters, strict);
- }
-
/// <summary>Used to create the JSON payload for an OpenAI chat tool description.</summary>
internal sealed class ChatToolJson
{
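The new test only covers the options-level fallback; when both levels are present, the function-level value still takes precedence because HasStrict consults AIFunction.AdditionalProperties first. A sketch of that override, assuming AIFunctionFactoryOptions.AdditionalProperties as the way to stamp the per-function key:

using System.Collections.Generic;
using Microsoft.Extensions.AI;

AIFunction getPersonAge = AIFunctionFactory.Create(() => 42, new AIFunctionFactoryOptions
{
    Name = "GetPersonAge",
    Description = "Gets the age of the specified person.",
    // Read by HasStrict before the ChatOptions-level fallback is consulted.
    AdditionalProperties = new Dictionary<string, object?> { ["strictJsonSchema"] = false },
});

ChatOptions options = new()
{
    Tools = [getPersonAge],
    AdditionalProperties = new() { ["strictJsonSchema"] = true }, // overridden by the function-level false
};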
diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs
index 8b27cd918a7..28125e462b7 100644
--- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs
@@ -288,6 +288,81 @@ public async Task BasicRequestResponse_Streaming()
Assert.Equal(36, usage.Details.TotalTokenCount);
}
+ [Fact]
+ public async Task ChatOptions_StrictRespected()
+ {
+ const string Input = """
+ {
+ "model": "gpt-4o-mini",
+ "input": [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "hello"
+ }
+ ]
+ }
+ ],
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "name": "GetPersonAge",
+ "description": "Gets the age of the specified person.",
+ "parameters": {
+ "type": "object",
+ "required": [],
+ "properties": {},
+ "additionalProperties": false
+ },
+ "strict": true
+ }
+ ]
+ }
+ """;
+
+ const string Output = """
+ {
+ "id": "resp_67d327649b288191aeb46a824e49dc40058a5e08c46a181d",
+ "object": "response",
+ "status": "completed",
+ "model": "gpt-4o-mini-2024-07-18",
+ "output": [
+ {
+ "type": "message",
+ "id": "msg_67d32764fcdc8191bcf2e444d4088804058a5e08c46a181d",
+ "status": "completed",
+ "role": "assistant",
+ "content": [
+ {
+ "type": "output_text",
+ "text": "Hello! How can I assist you today?",
+ "annotations": []
+ }
+ ]
+ }
+ ]
+ }
+ """;
+
+ using VerbatimHttpHandler handler = new(Input, Output);
+ using HttpClient httpClient = new(handler);
+ using IChatClient client = CreateResponseClient(httpClient, "gpt-4o-mini");
+
+ var response = await client.GetResponseAsync("hello", new()
+ {
+ Tools = [AIFunctionFactory.Create(() => 42, "GetPersonAge", "Gets the age of the specified person.")],
+ AdditionalProperties = new()
+ {
+ ["strictJsonSchema"] = true,
+ },
+ });
+ Assert.NotNull(response);
+ }
+
[Fact]
public async Task ChatOptions_DoNotOverwrite_NotNullPropertiesInRawRepresentation_NonStreaming()
{