diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs
index 5c4e630da1a..7a4d24abd5e 100644
--- a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs
@@ -7,6 +7,7 @@
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
+using System.Text.Json.Serialization.Metadata;
using System.Threading;
using System.Threading.Tasks;
using Azure.AI.Inference;
@@ -27,6 +28,9 @@ public sealed class AzureAIInferenceChatClient : IChatClient
/// The underlying .
private readonly ChatCompletionsClient _chatCompletionsClient;
+ /// <summary>The <see cref="JsonSerializerOptions"/> to use for any serialization activities related to tool call arguments and results.</summary>
+ private JsonSerializerOptions _toolCallJsonSerializerOptions = AIJsonUtilities.DefaultOptions;
+
/// Initializes a new instance of the class for the specified .
/// The underlying client.
/// The ID of the model to use. If null, it can be provided per request via .
@@ -51,7 +55,11 @@ public AzureAIInferenceChatClient(ChatCompletionsClient chatCompletionsClient, s
}
/// Gets or sets to use for any serialization activities related to tool call arguments and results.
- public JsonSerializerOptions? ToolCallJsonSerializerOptions { get; set; }
+ public JsonSerializerOptions ToolCallJsonSerializerOptions
+ {
+ get => _toolCallJsonSerializerOptions;
+ set => _toolCallJsonSerializerOptions = Throw.IfNull(value);
+ }
///
public ChatClientMetadata Metadata { get; }
@@ -304,7 +312,7 @@ private ChatCompletionsOptions ToAzureAIOptions(IList chatContents,
// These properties are strongly typed on ChatOptions but not on ChatCompletionsOptions.
if (options.TopK is int topK)
{
- result.AdditionalProperties["top_k"] = new BinaryData(JsonSerializer.SerializeToUtf8Bytes(topK, JsonContext.Default.Int32));
+ result.AdditionalProperties["top_k"] = new BinaryData(JsonSerializer.SerializeToUtf8Bytes(topK, AIJsonUtilities.DefaultOptions.GetTypeInfo(typeof(int))));
}
if (options.AdditionalProperties is { } props)
@@ -317,7 +325,7 @@ private ChatCompletionsOptions ToAzureAIOptions(IList chatContents,
default:
if (prop.Value is not null)
{
- byte[] data = JsonSerializer.SerializeToUtf8Bytes(prop.Value, JsonContext.GetTypeInfo(prop.Value.GetType(), ToolCallJsonSerializerOptions));
+ byte[] data = JsonSerializer.SerializeToUtf8Bytes(prop.Value, ToolCallJsonSerializerOptions.GetTypeInfo(typeof(object)));
result.AdditionalProperties[prop.Key] = new BinaryData(data);
}
@@ -419,7 +427,7 @@ private IEnumerable ToAzureAIInferenceChatMessages(IEnumerab
{
try
{
- result = JsonSerializer.Serialize(resultContent.Result, JsonContext.GetTypeInfo(typeof(object), ToolCallJsonSerializerOptions));
+ result = JsonSerializer.Serialize(resultContent.Result, ToolCallJsonSerializerOptions.GetTypeInfo(typeof(object)));
}
catch (NotSupportedException)
{
@@ -449,7 +457,7 @@ private IEnumerable ToAzureAIInferenceChatMessages(IEnumerab
callRequest.CallId,
new FunctionCall(
callRequest.Name,
- JsonSerializer.Serialize(callRequest.Arguments, JsonContext.GetTypeInfo(typeof(IDictionary<string, object?>), ToolCallJsonSerializerOptions)))));
+ JsonSerializer.Serialize(callRequest.Arguments, ToolCallJsonSerializerOptions.GetTypeInfo(typeof(IDictionary<string, object?>))))));
}
}
@@ -490,5 +498,6 @@ private static List GetContentParts(IList con
private static FunctionCallContent ParseCallContentFromJsonString(string json, string callId, string name) =>
FunctionCallContent.CreateFromParsedArguments(json, callId, name,
- argumentParser: static json => JsonSerializer.Deserialize(json, JsonContext.Default.IDictionaryStringObject)!);
+ argumentParser: static json => JsonSerializer.Deserialize(json,
+ (JsonTypeInfo<IDictionary<string, object?>>)AIJsonUtilities.DefaultOptions.GetTypeInfo(typeof(IDictionary<string, object?>)))!);
}
diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceEmbeddingGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceEmbeddingGenerator.cs
index 0c785cbbd6d..295b45627e8 100644
--- a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceEmbeddingGenerator.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceEmbeddingGenerator.cs
@@ -173,7 +173,7 @@ private EmbeddingsOptions ToAzureAIOptions(IEnumerable inputs, Embedding
{
if (prop.Value is not null)
{
- byte[] data = JsonSerializer.SerializeToUtf8Bytes(prop.Value, JsonContext.GetTypeInfo(prop.Value.GetType(), null));
+ byte[] data = JsonSerializer.SerializeToUtf8Bytes(prop.Value, AIJsonUtilities.DefaultOptions.GetTypeInfo(typeof(object)));
result.AdditionalProperties[prop.Key] = new BinaryData(data);
}
}
diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/JsonContext.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/JsonContext.cs
index 1e1dabffab7..89e0946d306 100644
--- a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/JsonContext.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/JsonContext.cs
@@ -1,12 +1,8 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
-using System;
-using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
using System.Text.Json;
using System.Text.Json.Serialization;
-using System.Text.Json.Serialization.Metadata;
namespace Microsoft.Extensions.AI;
@@ -16,55 +12,4 @@ namespace Microsoft.Extensions.AI;
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
WriteIndented = true)]
[JsonSerializable(typeof(AzureAIChatToolJson))]
-[JsonSerializable(typeof(IDictionary<string, object?>))]
-[JsonSerializable(typeof(JsonElement))]
-[JsonSerializable(typeof(int))]
-[JsonSerializable(typeof(long))]
-[JsonSerializable(typeof(float))]
-[JsonSerializable(typeof(double))]
-[JsonSerializable(typeof(bool))]
-[JsonSerializable(typeof(float[]))]
-[JsonSerializable(typeof(byte[]))]
-[JsonSerializable(typeof(sbyte[]))]
-internal sealed partial class JsonContext : JsonSerializerContext
-{
- /// Gets the singleton used as the default in JSON serialization operations.
- private static readonly JsonSerializerOptions _defaultToolJsonOptions = CreateDefaultToolJsonOptions();
-
- /// Gets JSON type information for the specified type.
- ///
- /// This first tries to get the type information from ,
- /// falling back to if it can't.
- ///
- public static JsonTypeInfo GetTypeInfo(Type type, JsonSerializerOptions? firstOptions) =>
- firstOptions?.TryGetTypeInfo(type, out JsonTypeInfo? info) is true ?
- info :
- _defaultToolJsonOptions.GetTypeInfo(type);
-
- /// Creates the default to use for serialization-related operations.
- [UnconditionalSuppressMessage("AotAnalysis", "IL3050", Justification = "DefaultJsonTypeInfoResolver is only used when reflection-based serialization is enabled")]
- [UnconditionalSuppressMessage("ReflectionAnalysis", "IL2026", Justification = "DefaultJsonTypeInfoResolver is only used when reflection-based serialization is enabled")]
- private static JsonSerializerOptions CreateDefaultToolJsonOptions()
- {
- // If reflection-based serialization is enabled by default, use it, as it's the most permissive in terms of what it can serialize,
- // and we want to be flexible in terms of what can be put into the various collections in the object model.
- // Otherwise, use the source-generated options to enable trimming and Native AOT.
-
- if (JsonSerializer.IsReflectionEnabledByDefault)
- {
- // Keep in sync with the JsonSourceGenerationOptions attribute on JsonContext above.
- JsonSerializerOptions options = new(JsonSerializerDefaults.Web)
- {
- TypeInfoResolver = new DefaultJsonTypeInfoResolver(),
- Converters = { new JsonStringEnumConverter() },
- DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
- WriteIndented = true,
- };
-
- options.MakeReadOnly();
- return options;
- }
-
- return Default.Options;
- }
-}
+internal sealed partial class JsonContext : JsonSerializerContext;
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/JsonContext.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/JsonContext.cs
index b90a28abb51..6de0144c7cf 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/JsonContext.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Ollama/JsonContext.cs
@@ -1,8 +1,6 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
-using System.Collections.Generic;
-using System.Text.Json;
using System.Text.Json.Serialization;
namespace Microsoft.Extensions.AI;
@@ -23,6 +21,4 @@ namespace Microsoft.Extensions.AI;
[JsonSerializable(typeof(OllamaToolCall))]
[JsonSerializable(typeof(OllamaEmbeddingRequest))]
[JsonSerializable(typeof(OllamaEmbeddingResponse))]
-[JsonSerializable(typeof(IDictionary<string, object?>))]
-[JsonSerializable(typeof(JsonElement))]
internal sealed partial class JsonContext : JsonSerializerContext;
diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
index 780b334cd93..abfa3f2b203 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs
@@ -30,6 +30,9 @@ public sealed class OllamaChatClient : IChatClient
/// The to use for sending requests.
private readonly HttpClient _httpClient;
+ /// <summary>The <see cref="JsonSerializerOptions"/> to use for any serialization activities related to tool call arguments and results.</summary>
+ private JsonSerializerOptions _toolCallJsonSerializerOptions = AIJsonUtilities.DefaultOptions;
+
/// Initializes a new instance of the class.
/// The endpoint URI where Ollama is hosted.
///
@@ -66,7 +69,11 @@ public OllamaChatClient(Uri endpoint, string? modelId = null, HttpClient? httpCl
public ChatClientMetadata Metadata { get; }
/// Gets or sets to use for any serialization activities related to tool call arguments and results.
- public JsonSerializerOptions? ToolCallJsonSerializerOptions { get; set; }
+ public JsonSerializerOptions ToolCallJsonSerializerOptions
+ {
+ get => _toolCallJsonSerializerOptions;
+ set => _toolCallJsonSerializerOptions = Throw.IfNull(value);
+ }
///
public async Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
@@ -388,7 +395,6 @@ private IEnumerable ToOllamaChatRequestMessages(ChatMe
case FunctionCallContent fcc:
{
- JsonSerializerOptions serializerOptions = ToolCallJsonSerializerOptions ?? JsonContext.Default.Options;
yield return new OllamaChatRequestMessage
{
Role = "assistant",
@@ -396,7 +402,7 @@ private IEnumerable ToOllamaChatRequestMessages(ChatMe
{
CallId = fcc.CallId,
Name = fcc.Name,
- Arguments = JsonSerializer.SerializeToElement(fcc.Arguments, serializerOptions.GetTypeInfo(typeof(IDictionary<string, object?>))),
+ Arguments = JsonSerializer.SerializeToElement(fcc.Arguments, ToolCallJsonSerializerOptions.GetTypeInfo(typeof(IDictionary<string, object?>))),
}, JsonContext.Default.OllamaFunctionCallContent)
};
break;
@@ -404,8 +410,7 @@ private IEnumerable ToOllamaChatRequestMessages(ChatMe
case FunctionResultContent frc:
{
- JsonSerializerOptions serializerOptions = ToolCallJsonSerializerOptions ?? JsonContext.Default.Options;
- JsonElement jsonResult = JsonSerializer.SerializeToElement(frc.Result, serializerOptions.GetTypeInfo(typeof(object)));
+ JsonElement jsonResult = JsonSerializer.SerializeToElement(frc.Result, ToolCallJsonSerializerOptions.GetTypeInfo(typeof(object)));
yield return new OllamaChatRequestMessage
{
Role = "tool",
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
index 6e4a8d8ec9b..90329a9b593 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
@@ -3,7 +3,6 @@
using System;
using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Text;
@@ -38,6 +37,9 @@ public sealed partial class OpenAIChatClient : IChatClient
/// The underlying .
private readonly ChatClient _chatClient;
+ /// <summary>The <see cref="JsonSerializerOptions"/> to use for any serialization activities related to tool call arguments and results.</summary>
+ private JsonSerializerOptions _toolCallJsonSerializerOptions = AIJsonUtilities.DefaultOptions;
+
/// Initializes a new instance of the class for the specified .
/// The underlying client.
/// The model to use.
@@ -80,7 +82,11 @@ public OpenAIChatClient(ChatClient chatClient)
}
/// Gets or sets to use for any serialization activities related to tool call arguments and results.
- public JsonSerializerOptions? ToolCallJsonSerializerOptions { get; set; }
+ public JsonSerializerOptions ToolCallJsonSerializerOptions
+ {
+ get => _toolCallJsonSerializerOptions;
+ set => _toolCallJsonSerializerOptions = Throw.IfNull(value);
+ }
///
public ChatClientMetadata Metadata { get; }
@@ -593,7 +599,7 @@ private sealed class OpenAIChatToolJson
{
try
{
- result = JsonSerializer.Serialize(resultContent.Result, JsonContext.GetTypeInfo(typeof(object), ToolCallJsonSerializerOptions));
+ result = JsonSerializer.Serialize(resultContent.Result, ToolCallJsonSerializerOptions.GetTypeInfo(typeof(object)));
}
catch (NotSupportedException)
{
@@ -622,7 +628,7 @@ private sealed class OpenAIChatToolJson
callRequest.Name,
new(JsonSerializer.SerializeToUtf8Bytes(
callRequest.Arguments,
- JsonContext.GetTypeInfo(typeof(IDictionary<string, object?>), ToolCallJsonSerializerOptions)))));
+ ToolCallJsonSerializerOptions.GetTypeInfo(typeof(IDictionary<string, object?>))))));
}
}
@@ -668,11 +674,13 @@ private static List GetContentParts(IList con
private static FunctionCallContent ParseCallContentFromJsonString(string json, string callId, string name) =>
FunctionCallContent.CreateFromParsedArguments(json, callId, name,
- argumentParser: static json => JsonSerializer.Deserialize(json, JsonContext.Default.IDictionaryStringObject)!);
+ argumentParser: static json => JsonSerializer.Deserialize(json,
+ (JsonTypeInfo<IDictionary<string, object?>>)AIJsonUtilities.DefaultOptions.GetTypeInfo(typeof(IDictionary<string, object?>)))!);
private static FunctionCallContent ParseCallContentFromBinaryData(BinaryData ut8Json, string callId, string name) =>
FunctionCallContent.CreateFromParsedArguments(ut8Json, callId, name,
- argumentParser: static json => JsonSerializer.Deserialize(json, JsonContext.Default.IDictionaryStringObject)!);
+ argumentParser: static json => JsonSerializer.Deserialize(json,
+ (JsonTypeInfo<IDictionary<string, object?>>)AIJsonUtilities.DefaultOptions.GetTypeInfo(typeof(IDictionary<string, object?>)))!);
/// Source-generated JSON type information.
[JsonSourceGenerationOptions(JsonSerializerDefaults.Web,
@@ -680,48 +688,5 @@ private static FunctionCallContent ParseCallContentFromBinaryData(BinaryData ut8
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
WriteIndented = true)]
[JsonSerializable(typeof(OpenAIChatToolJson))]
- [JsonSerializable(typeof(IDictionary<string, object?>))]
- [JsonSerializable(typeof(JsonElement))]
- private sealed partial class JsonContext : JsonSerializerContext
- {
- /// Gets the singleton used as the default in JSON serialization operations.
- private static readonly JsonSerializerOptions _defaultToolJsonOptions = CreateDefaultToolJsonOptions();
-
- /// Gets JSON type information for the specified type.
- ///
- /// This first tries to get the type information from ,
- /// falling back to if it can't.
- ///
- public static JsonTypeInfo GetTypeInfo(Type type, JsonSerializerOptions? firstOptions) =>
- firstOptions?.TryGetTypeInfo(type, out JsonTypeInfo? info) is true ?
- info :
- _defaultToolJsonOptions.GetTypeInfo(type);
-
- /// Creates the default to use for serialization-related operations.
- [UnconditionalSuppressMessage("AotAnalysis", "IL3050", Justification = "DefaultJsonTypeInfoResolver is only used when reflection-based serialization is enabled")]
- [UnconditionalSuppressMessage("ReflectionAnalysis", "IL2026", Justification = "DefaultJsonTypeInfoResolver is only used when reflection-based serialization is enabled")]
- private static JsonSerializerOptions CreateDefaultToolJsonOptions()
- {
- // If reflection-based serialization is enabled by default, use it, as it's the most permissive in terms of what it can serialize,
- // and we want to be flexible in terms of what can be put into the various collections in the object model.
- // Otherwise, use the source-generated options to enable trimming and Native AOT.
-
- if (JsonSerializer.IsReflectionEnabledByDefault)
- {
- // Keep in sync with the JsonSourceGenerationOptions attribute on JsonContext above.
- JsonSerializerOptions options = new(JsonSerializerDefaults.Web)
- {
- TypeInfoResolver = new DefaultJsonTypeInfoResolver(),
- Converters = { new JsonStringEnumConverter() },
- DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
- WriteIndented = true,
- };
-
- options.MakeReadOnly();
- return options;
- }
-
- return Default.Options;
- }
- }
+ private sealed partial class JsonContext : JsonSerializerContext;
}
diff --git a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs
index f404f5e61ef..476ad973ddc 100644
--- a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs
@@ -6,6 +6,7 @@
using System.ComponentModel;
using System.Linq;
using System.Net.Http;
+using System.Text.Json;
using System.Threading.Tasks;
using Azure;
using Azure.AI.Inference;
@@ -29,6 +30,19 @@ public void Ctor_InvalidArgs_Throws()
Assert.Throws("modelId", () => new AzureAIInferenceChatClient(client, " "));
}
+ [Fact]
+ public void ToolCallJsonSerializerOptions_HasExpectedValue()
+ {
+ using AzureAIInferenceChatClient client = new(new(new("http://somewhere"), new AzureKeyCredential("key")), "mode");
+
+ Assert.Same(client.ToolCallJsonSerializerOptions, AIJsonUtilities.DefaultOptions);
+ Assert.Throws<ArgumentNullException>("value", () => client.ToolCallJsonSerializerOptions = null!);
+
+ JsonSerializerOptions options = new();
+ client.ToolCallJsonSerializerOptions = options;
+ Assert.Same(options, client.ToolCallJsonSerializerOptions);
+ }
+
[Fact]
public void AsChatClient_InvalidArgs_Throws()
{
diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs
index 67b10e3f24b..3879e9e2ec3 100644
--- a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs
@@ -6,6 +6,7 @@
using System.ComponentModel;
using System.Linq;
using System.Net.Http;
+using System.Text.Json;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
@@ -26,6 +27,19 @@ public void Ctor_InvalidArgs_Throws()
Assert.Throws("modelId", () => new OllamaChatClient("http://localhost", " "));
}
+ [Fact]
+ public void ToolCallJsonSerializerOptions_HasExpectedValue()
+ {
+ using OllamaChatClient client = new("http://localhost", "model");
+
+ Assert.Same(client.ToolCallJsonSerializerOptions, AIJsonUtilities.DefaultOptions);
+ Assert.Throws<ArgumentNullException>("value", () => client.ToolCallJsonSerializerOptions = null!);
+
+ JsonSerializerOptions options = new();
+ client.ToolCallJsonSerializerOptions = options;
+ Assert.Same(options, client.ToolCallJsonSerializerOptions);
+ }
+
[Fact]
public void GetService_SuccessfullyReturnsUnderlyingClient()
{
diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs
index 05d2f5a22ff..fb912235cfc 100644
--- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs
@@ -8,6 +8,7 @@
using System.ComponentModel;
using System.Linq;
using System.Net.Http;
+using System.Text.Json;
using System.Threading.Tasks;
using Azure.AI.OpenAI;
using Microsoft.Extensions.Caching.Distributed;
@@ -34,6 +35,19 @@ public void Ctor_InvalidArgs_Throws()
Assert.Throws("modelId", () => new OpenAIChatClient(openAIClient, " "));
}
+ [Fact]
+ public void ToolCallJsonSerializerOptions_HasExpectedValue()
+ {
+ using OpenAIChatClient client = new(new("key"), "model");
+
+ Assert.Same(client.ToolCallJsonSerializerOptions, AIJsonUtilities.DefaultOptions);
+ Assert.Throws<ArgumentNullException>("value", () => client.ToolCallJsonSerializerOptions = null!);
+
+ JsonSerializerOptions options = new();
+ client.ToolCallJsonSerializerOptions = options;
+ Assert.Same(options, client.ToolCallJsonSerializerOptions);
+ }
+
[Fact]
public void AsChatClient_InvalidArgs_Throws()
{