From b23bf7b37a8878a4c0be69689edba95a72ccd7db Mon Sep 17 00:00:00 2001 From: Stephen Toub Date: Fri, 7 Feb 2025 16:50:08 -0500 Subject: [PATCH] Address some M.E.AI API feedback - Remove DataContent.ContainsData. While ContainsData could be used to avoid lazily-instantiating a `ReadOnlyMemory` from a data URI, in all cases examined ContainsData was being used to guard accessing Data, in which case Data.HasValue is sufficient. - Make ToolMode optional. We'd previously said that an implementation could use null to imply None if it wanted, but with this being optional we'd want null to be the same as auto, so I added in an explicit null options. That matches as well with what various other client libs do. - Remove IChatClient/IEmbeddingGenerator.Metadata. GetService can be used instead, reducing what an implementer must implement and what's exposed to a consumer. - Add Complete{Streaming}Async for a single message. We have such accelerators in other places but not here, and it provides a natural grow-up from string to ChatMessage to `IList`. - Change UsageDetails.XxTokenCount properties from int? to long?. The AdditionalCounts is already based on longs, and in theory the token counts could exceed int, especially in a situation where UsageData is being used to sum many other call data. - Rename GenerateEmbeddingVectorAsync's TEmbedding to TEmbeddingElement. Everywhere else TEmbedding is used where it represents an Embedding, but here it represents a numerical element in an embedding vector. - Remove setters on FunctionCall/ResultContent for required ctor parameters. Having those setters goes against .NET design guidelines. - Remove FunctionResultContent.Name. It's unnecessary and is actually causing us to do more work in places. 
--- .../ChatCompletion/ChatClientExtensions.cs | 40 ++++++++++++- .../ChatCompletion/ChatOptions.cs | 3 +- .../ChatCompletion/ChatToolMode.cs | 17 +++++- .../ChatCompletion/DelegatingChatClient.cs | 23 ++++---- .../ChatCompletion/IChatClient.cs | 3 - .../ChatCompletion/NoneChatToolMode.cs | 28 ++++++++++ .../Contents/DataContent.cs | 19 ++----- .../Contents/FunctionCallContent.cs | 10 ++-- .../Contents/FunctionResultContent.cs | 13 +---- .../DelegatingEmbeddingGenerator.cs | 24 ++++---- .../EmbeddingGeneratorExtensions.cs | 6 +- .../Embeddings/IEmbeddingGenerator.cs | 3 - .../README.md | 26 +++++---- .../UsageDetails.cs | 14 ++--- .../AzureAIInferenceChatClient.cs | 24 +++++--- .../AzureAIInferenceEmbeddingGenerator.cs | 13 +++-- .../OllamaChatClient.cs | 26 +++++---- .../OllamaEmbeddingGenerator.cs | 18 +++--- .../OpenAIChatClient.cs | 13 +++-- .../OpenAIEmbeddingGenerator.cs | 45 +++++++-------- .../OpenAIModelMapper.ChatCompletion.cs | 34 +++++++---- .../OpenAIModelMapper.ChatMessage.cs | 8 +-- .../ChatClientStructuredOutputExtensions.cs | 46 ++++++++++++++- .../FunctionInvokingChatClient.cs | 6 +- .../ChatCompletion/LoggingChatClient.cs | 4 +- .../ChatCompletion/OpenTelemetryChatClient.cs | 28 +++++----- .../Embeddings/LoggingEmbeddingGenerator.cs | 2 +- .../OpenTelemetryEmbeddingGenerator.cs | 20 ++++--- .../ChatClientExtensionsTests.cs | 4 +- .../ChatCompletion/ChatCompletionTests.cs | 2 +- .../ChatCompletion/ChatMessageTests.cs | 4 +- .../ChatCompletion/ChatOptionsTests.cs | 4 +- .../ChatCompletion/ChatToolModeTests.cs | 18 ++++++ .../DelegatingChatClientTests.cs | 9 --- .../StreamingChatCompletionUpdateTests.cs | 2 +- .../Contents/DataContentTests.cs | 3 - .../Contents/FunctionCallContentTests..cs | 2 - .../Contents/FunctionResultContentTests.cs | 16 ++---- .../DelegatingEmbeddingGeneratorTests.cs | 9 --- .../TestChatClient.cs | 2 - .../TestEmbeddingGenerator.cs | 2 - .../AzureAIInferenceChatClientTests.cs | 11 ++-- 
...AzureAIInferenceEmbeddingGeneratorTests.cs | 7 ++- .../ChatClientIntegrationTests.cs | 2 +- .../EmbeddingGeneratorIntegrationTests.cs | 6 +- .../QuantizationEmbeddingGenerator.cs | 2 - .../OllamaChatClientTests.cs | 9 +-- .../OllamaEmbeddingGeneratorTests.cs | 9 +-- .../OpenAIChatClientTests.cs | 18 +++--- .../OpenAIEmbeddingGeneratorTests.cs | 13 +++-- .../OpenAISerializationTests.cs | 3 +- ...atClientStructuredOutputExtensionsTests.cs | 20 +++---- .../DistributedCachingChatClientTest.cs | 38 ++++++------- .../FunctionInvokingChatClientTests.cs | 56 +++++++++---------- .../OpenTelemetryChatClientTests.cs | 8 ++- .../OpenTelemetryEmbeddingGeneratorTests.cs | 4 +- 56 files changed, 460 insertions(+), 339 deletions(-) create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/NoneChatToolMode.cs diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientExtensions.cs index 655b9f3a281..163cde97f58 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatClientExtensions.cs @@ -42,7 +42,25 @@ public static Task CompleteAsync( _ = Throw.IfNull(client); _ = Throw.IfNull(chatMessage); - return client.CompleteAsync([new ChatMessage(ChatRole.User, chatMessage)], options, cancellationToken); + return client.CompleteAsync(new ChatMessage(ChatRole.User, chatMessage), options, cancellationToken); + } + + /// Sends a chat message to the model and returns the response messages. + /// The chat client. + /// The chat message to send. + /// The chat options to configure the request. + /// The to monitor for cancellation requests. The default is . + /// The response messages generated by the client. + public static Task CompleteAsync( + this IChatClient client, + ChatMessage chatMessage, + ChatOptions? 
options = null, + CancellationToken cancellationToken = default) + { + _ = Throw.IfNull(client); + _ = Throw.IfNull(chatMessage); + + return client.CompleteAsync([chatMessage], options, cancellationToken); } /// Sends a user chat text message to the model and streams the response messages. @@ -60,6 +78,24 @@ public static IAsyncEnumerable CompleteStreamingA _ = Throw.IfNull(client); _ = Throw.IfNull(chatMessage); - return client.CompleteStreamingAsync([new ChatMessage(ChatRole.User, chatMessage)], options, cancellationToken); + return client.CompleteStreamingAsync(new ChatMessage(ChatRole.User, chatMessage), options, cancellationToken); + } + + /// Sends a chat message to the model and streams the response messages. + /// The chat client. + /// The chat message to send. + /// The chat options to configure the request. + /// The to monitor for cancellation requests. The default is . + /// The response messages generated by the client. + public static IAsyncEnumerable CompleteStreamingAsync( + this IChatClient client, + ChatMessage chatMessage, + ChatOptions? options = null, + CancellationToken cancellationToken = default) + { + _ = Throw.IfNull(client); + _ = Throw.IfNull(chatMessage); + + return client.CompleteStreamingAsync([chatMessage], options, cancellationToken); } } diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatOptions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatOptions.cs index f3d3621aa69..69adc8392fd 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatOptions.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatOptions.cs @@ -51,7 +51,8 @@ public class ChatOptions public IList? StopSequences { get; set; } /// Gets or sets the tool mode for the chat request. - public ChatToolMode ToolMode { get; set; } = ChatToolMode.Auto; + /// The default value is , which is treated the same as . + public ChatToolMode? 
ToolMode { get; set; } /// Gets or sets the list of tools to include with a chat request. [JsonIgnore] diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatToolMode.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatToolMode.cs index 0e279042abd..05e1f28f476 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatToolMode.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatToolMode.cs @@ -9,10 +9,11 @@ namespace Microsoft.Extensions.AI; /// Describes how tools should be selected by a . /// /// -/// The predefined values and are provided. +/// The predefined values , , and are provided. /// To nominate a specific function, use . /// [JsonPolymorphic(TypeDiscriminatorPropertyName = "$type")] +[JsonDerivedType(typeof(NoneChatToolMode), typeDiscriminator: "none")] [JsonDerivedType(typeof(AutoChatToolMode), typeDiscriminator: "auto")] [JsonDerivedType(typeof(RequiredChatToolMode), typeDiscriminator: "required")] #pragma warning disable CA1052 // Static holder types should be Static or NotInheritable @@ -32,7 +33,19 @@ private protected ChatToolMode() /// can contain zero or more /// instances, and the is free to invoke zero or more of them. /// - public static AutoChatToolMode Auto { get; } = new AutoChatToolMode(); + public static AutoChatToolMode Auto { get; } = new(); + + /// + /// Gets a predefined indicating that tool usage is unsupported. + /// + /// + /// can contain zero or more + /// instances, but the should not request the invocation of + /// any of them. This can be used when the should know about + /// tools in order to provide information about them or plan out their usage, but should + /// not request the invocation of any of them. 
+ /// + public static NoneChatToolMode None { get; } = new(); /// /// Gets a predefined indicating that tool usage is required, diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/DelegatingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/DelegatingChatClient.cs index 941ffeb722b..875aa31e87e 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/DelegatingChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/DelegatingChatClient.cs @@ -37,19 +37,6 @@ public void Dispose() /// Gets the inner . protected IChatClient InnerClient { get; } - /// Provides a mechanism for releasing unmanaged resources. - /// if being called from ; otherwise, . - protected virtual void Dispose(bool disposing) - { - if (disposing) - { - InnerClient.Dispose(); - } - } - - /// - public virtual ChatClientMetadata Metadata => InnerClient.Metadata; - /// public virtual Task CompleteAsync(IList chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) { @@ -72,4 +59,14 @@ public virtual IAsyncEnumerable CompleteStreaming serviceKey is null && serviceType.IsInstanceOfType(this) ? this : InnerClient.GetService(serviceType, serviceKey); } + + /// Provides a mechanism for releasing unmanaged resources. + /// if being called from ; otherwise, . + protected virtual void Dispose(bool disposing) + { + if (disposing) + { + InnerClient.Dispose(); + } + } } diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/IChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/IChatClient.cs index 54e1dd9da98..5d86b1fd985 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/IChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/IChatClient.cs @@ -53,9 +53,6 @@ IAsyncEnumerable CompleteStreamingAsync( ChatOptions? 
options = null, CancellationToken cancellationToken = default); - /// Gets metadata that describes the . - ChatClientMetadata Metadata { get; } - /// Asks the for an object of the specified type . /// The type of object being requested. /// An optional key that can be used to help identify the target service. diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/NoneChatToolMode.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/NoneChatToolMode.cs new file mode 100644 index 00000000000..6c326504a47 --- /dev/null +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/NoneChatToolMode.cs @@ -0,0 +1,28 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Diagnostics; + +namespace Microsoft.Extensions.AI; + +/// +/// Indicates that an should not request the invocation of any tools. +/// +/// +/// Use to get an instance of . +/// +[DebuggerDisplay("None")] +public sealed class NoneChatToolMode : ChatToolMode +{ + /// Initializes a new instance of the class. + /// Use to get an instance of . + public NoneChatToolMode() + { + } // must exist in support of polymorphic deserialization of a ChatToolMode + + /// + public override bool Equals(object? obj) => obj is NoneChatToolMode; + + /// + public override int GetHashCode() => typeof(NoneChatToolMode).GetHashCode(); +} diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/DataContent.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/DataContent.cs index 1e158a8801b..4a386d062d7 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/DataContent.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/DataContent.cs @@ -172,22 +172,13 @@ public string Uri [JsonPropertyOrder(1)] public string? 
MediaType { get; private set; } - /// - /// Gets a value indicating whether the content contains data rather than only being a reference to data. - /// - /// - /// If the instance is constructed from a or from a data URI, this property returns , - /// as the instance actually contains all of the data it represents. If, however, the instance was constructed from another form of URI, one - /// that simply references where the data can be found but doesn't actually contain the data, this property returns . - /// - [MemberNotNullWhen(true, nameof(Data))] - [JsonIgnore] - public bool ContainsData => _dataUri is not null || _data is not null; - /// Gets the data represented by this instance. /// - /// If is , this property returns the represented data. - /// If is , this property returns . + /// If the instance was constructed from a , this property returns that data. + /// If the instance was constructed from a data URI, this property the data contained within the data URI. + /// If, however, the instance was constructed from another form of URI, one that simply references where the + /// data can be found but doesn't actually contain the data, this property returns ; + /// no attempt is made to retrieve the data from that URI. /// [JsonIgnore] public ReadOnlyMemory? Data diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/FunctionCallContent.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/FunctionCallContent.cs index b42c41e7cc8..6facf2e297c 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/FunctionCallContent.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/FunctionCallContent.cs @@ -25,20 +25,20 @@ public sealed class FunctionCallContent : AIContent [JsonConstructor] public FunctionCallContent(string callId, string name, IDictionary? 
arguments = null) { + CallId = Throw.IfNull(callId); Name = Throw.IfNull(name); - CallId = callId; Arguments = arguments; } /// - /// Gets or sets the function call ID. + /// Gets the function call ID. /// - public string CallId { get; set; } + public string CallId { get; } /// - /// Gets or sets the name of the function requested. + /// Gets the name of the function requested. /// - public string Name { get; set; } + public string Name { get; } /// /// Gets or sets the arguments requested to be provided to the function. diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/FunctionResultContent.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/FunctionResultContent.cs index 2c9778e1d03..e85b59d3536 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/FunctionResultContent.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/FunctionResultContent.cs @@ -19,33 +19,26 @@ public sealed class FunctionResultContent : AIContent /// Initializes a new instance of the class. /// /// The function call ID for which this is the result. - /// The function name that produced the result. /// /// if the function returned or was void-returning /// and thus had no result, or if the function call failed. Typically, however, to provide meaningfully representative /// information to an AI service, a human-readable representation of those conditions should be supplied. /// [JsonConstructor] - public FunctionResultContent(string callId, string name, object? result) + public FunctionResultContent(string callId, object? result) { CallId = Throw.IfNull(callId); - Name = Throw.IfNull(name); Result = result; } /// - /// Gets or sets the ID of the function call for which this is the result. + /// Gets the ID of the function call for which this is the result. /// /// /// If this is the result for a , this property should contain the same /// value. 
/// - public string CallId { get; set; } - - /// - /// Gets or sets the name of the function that was called. - /// - public string Name { get; set; } + public string CallId { get; } /// /// Gets or sets the result of the function call, or a generic error message if the function call failed. diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/DelegatingEmbeddingGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/DelegatingEmbeddingGenerator.cs index 7edbe7cf5bd..f1a4c3aa7a2 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/DelegatingEmbeddingGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/DelegatingEmbeddingGenerator.cs @@ -40,20 +40,6 @@ public void Dispose() GC.SuppressFinalize(this); } - /// Provides a mechanism for releasing unmanaged resources. - /// if being called from ; otherwise, . - protected virtual void Dispose(bool disposing) - { - if (disposing) - { - InnerGenerator.Dispose(); - } - } - - /// - public virtual EmbeddingGeneratorMetadata Metadata => - InnerGenerator.Metadata; - /// public virtual Task> GenerateAsync(IEnumerable values, EmbeddingGenerationOptions? options = null, CancellationToken cancellationToken = default) => InnerGenerator.GenerateAsync(values, options, cancellationToken); @@ -68,4 +54,14 @@ public virtual Task> GenerateAsync(IEnumerableProvides a mechanism for releasing unmanaged resources. + /// if being called from ; otherwise, . 
+ protected virtual void Dispose(bool disposing) + { + if (disposing) + { + InnerGenerator.Dispose(); + } + } } diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/EmbeddingGeneratorExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/EmbeddingGeneratorExtensions.cs index 1593cdd33a8..1165d299edf 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/EmbeddingGeneratorExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/EmbeddingGeneratorExtensions.cs @@ -54,7 +54,7 @@ public static class EmbeddingGeneratorExtensions /// Generates an embedding vector from the specified . /// The type from which embeddings will be generated. - /// The numeric type of the embedding data. + /// The numeric type of the embedding data. /// The embedding generator. /// A value from which an embedding will be generated. /// The embedding generation options to configure the request. @@ -64,8 +64,8 @@ public static class EmbeddingGeneratorExtensions /// This operation is equivalent to using and returning the /// resulting 's property. /// - public static async Task> GenerateEmbeddingVectorAsync( - this IEmbeddingGenerator> generator, + public static async Task> GenerateEmbeddingVectorAsync( + this IEmbeddingGenerator> generator, TInput value, EmbeddingGenerationOptions? options = null, CancellationToken cancellationToken = default) diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/IEmbeddingGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/IEmbeddingGenerator.cs index 84d02c0de34..74913b87e8a 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/IEmbeddingGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Embeddings/IEmbeddingGenerator.cs @@ -37,9 +37,6 @@ Task> GenerateAsync( EmbeddingGenerationOptions? 
options = null, CancellationToken cancellationToken = default); - /// Gets metadata that describes the . - EmbeddingGeneratorMetadata Metadata { get; } - /// Asks the for an object of the specified type . /// The type of object being requested. /// An optional key that can be used to help identify the target service. diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/README.md b/src/Libraries/Microsoft.Extensions.AI.Abstractions/README.md index 338a34e0f1c..ea0d6d9f74f 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/README.md +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/README.md @@ -40,10 +40,10 @@ using Microsoft.Extensions.AI; public class SampleChatClient : IChatClient { - public ChatClientMetadata Metadata { get; } + private readonly ChatClientMetadata _metadata; public SampleChatClient(Uri endpoint, string modelId) => - Metadata = new("SampleChatClient", endpoint, modelId); + _metadata = new("SampleChatClient", endpoint, modelId); public async Task CompleteAsync( IList chatMessages, @@ -61,11 +61,11 @@ public class SampleChatClient : IChatClient "This is yet another response message." ]; - return new([new ChatMessage() + return new(new ChatMessage() { Role = ChatRole.Assistant, Text = responses[Random.Shared.Next(responses.Length)], - }]); + }); } public async IAsyncEnumerable CompleteStreamingAsync( @@ -89,9 +89,12 @@ public class SampleChatClient : IChatClient } } - public TService? GetService(object? key = null) where TService : class => - this as TService; - + object? IChatClient.GetService(Type serviceType, object? serviceKey = null) => + serviceKey is not null ? null : + serviceType == typeof(ChatClientMetadata) ? _metadata : + serviceType?.IsInstanceOfType(this) is true ? 
this : + null; + void IDisposable.Dispose() { } } ``` @@ -446,7 +449,7 @@ using Microsoft.Extensions.AI; public class SampleEmbeddingGenerator(Uri endpoint, string modelId) : IEmbeddingGenerator> { - public EmbeddingGeneratorMetadata Metadata { get; } = new("SampleEmbeddingGenerator", endpoint, modelId); + private readonly EmbeddingGeneratorMetadata _metadata = new("SampleEmbeddingGenerator", endpoint, modelId); public async Task>> GenerateAsync( IEnumerable values, @@ -463,8 +466,11 @@ public class SampleEmbeddingGenerator(Uri endpoint, string modelId) : IEmbedding Enumerable.Range(0, 384).Select(_ => Random.Shared.NextSingle()).ToArray())); } - public TService? GetService(object? key = null) where TService : class => - this as TService; + object? IChatClient.GetService(Type serviceType, object? serviceKey = null) => + serviceKey is not null ? null : + serviceType == typeof(EmbeddingGeneratorMetadata) ? _metadata : + serviceType?.IsInstanceOfType(this) is true ? this : + null; void IDisposable.Dispose() { } } diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/UsageDetails.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/UsageDetails.cs index c3b84d47bf8..7d4e7ddbea2 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/UsageDetails.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/UsageDetails.cs @@ -13,13 +13,13 @@ namespace Microsoft.Extensions.AI; public class UsageDetails { /// Gets or sets the number of tokens in the input. - public int? InputTokenCount { get; set; } + public long? InputTokenCount { get; set; } /// Gets or sets the number of tokens in the output. - public int? OutputTokenCount { get; set; } + public long? OutputTokenCount { get; set; } /// Gets or sets the total number of tokens used to produce the response. - public int? TotalTokenCount { get; set; } + public long? TotalTokenCount { get; set; } /// Gets or sets a dictionary of additional usage counts. 
/// @@ -62,17 +62,17 @@ internal string DebuggerDisplay { List parts = []; - if (InputTokenCount is int input) + if (InputTokenCount is { } input) { parts.Add($"{nameof(InputTokenCount)} = {input}"); } - if (OutputTokenCount is int output) + if (OutputTokenCount is { } output) { parts.Add($"{nameof(OutputTokenCount)} = {output}"); } - if (TotalTokenCount is int total) + if (TotalTokenCount is { } total) { parts.Add($"{nameof(TotalTokenCount)} = {total}"); } @@ -89,5 +89,5 @@ internal string DebuggerDisplay } } - private static int? NullableSum(int? a, int? b) => (a.HasValue || b.HasValue) ? (a ?? 0) + (b ?? 0) : null; + private static long? NullableSum(long? a, long? b) => (a.HasValue || b.HasValue) ? (a ?? 0) + (b ?? 0) : null; } diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs index 511d9426c39..137b2d80e70 100644 --- a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs @@ -24,6 +24,9 @@ namespace Microsoft.Extensions.AI; /// Represents an for an Azure AI Inference . public sealed class AzureAIInferenceChatClient : IChatClient { + /// Metadata about the client. + private readonly ChatClientMetadata _metadata; + /// The underlying . private readonly ChatCompletionsClient _chatCompletionsClient; @@ -50,7 +53,7 @@ public AzureAIInferenceChatClient(ChatCompletionsClient chatCompletionsClient, s var providerUrl = typeof(ChatCompletionsClient).GetField("_endpoint", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) ?.GetValue(chatCompletionsClient) as Uri; - Metadata = new("az.ai.inference", providerUrl, modelId); + _metadata = new("az.ai.inference", providerUrl, modelId); } /// Gets or sets to use for any serialization activities related to tool call arguments and results. 
@@ -61,16 +64,14 @@ public JsonSerializerOptions ToolCallJsonSerializerOptions } /// - public ChatClientMetadata Metadata { get; } - - /// - public object? GetService(Type serviceType, object? serviceKey = null) + object? IChatClient.GetService(Type serviceType, object? serviceKey) { _ = Throw.IfNull(serviceType); return serviceKey is not null ? null : serviceType == typeof(ChatCompletionsClient) ? _chatCompletionsClient : + serviceType == typeof(ChatClientMetadata) ? _metadata : serviceType.IsInstanceOfType(this) ? this : null; } @@ -288,7 +289,7 @@ private ChatCompletionsOptions ToAzureAIOptions(IList chatContents, { ChatCompletionsOptions result = new(ToAzureAIInferenceChatMessages(chatContents)) { - Model = options?.ModelId ?? Metadata.ModelId ?? throw new InvalidOperationException("No model id was provided when either constructing the client or in the chat options.") + Model = options?.ModelId ?? _metadata.ModelId ?? throw new InvalidOperationException("No model id was provided when either constructing the client or in the chat options.") }; if (options is not null) @@ -345,7 +346,12 @@ private ChatCompletionsOptions ToAzureAIOptions(IList chatContents, switch (options.ToolMode) { + case NoneChatToolMode: + result.ToolChoice = ChatCompletionsToolChoice.None; + break; + case AutoChatToolMode: + case null: result.ToolChoice = ChatCompletionsToolChoice.Auto; break; @@ -374,8 +380,8 @@ private ChatCompletionsOptions ToAzureAIOptions(IList chatContents, private static ChatCompletionsToolDefinition ToAzureAIChatTool(AIFunction aiFunction) { // Map to an intermediate model so that redundant properties are skipped. 
- AzureAIChatToolJson tool = JsonSerializer.Deserialize(aiFunction.Metadata.Schema, JsonContext.Default.AzureAIChatToolJson)!; - BinaryData functionParameters = BinaryData.FromBytes(JsonSerializer.SerializeToUtf8Bytes(tool, JsonContext.Default.AzureAIChatToolJson)); + var tool = JsonSerializer.Deserialize(aiFunction.Metadata.Schema, JsonContext.Default.AzureAIChatToolJson)!; + var functionParameters = BinaryData.FromBytes(JsonSerializer.SerializeToUtf8Bytes(tool, JsonContext.Default.AzureAIChatToolJson)); return new(new FunctionDefinition(aiFunction.Metadata.Name) { Description = aiFunction.Metadata.Description, @@ -462,7 +468,7 @@ private static List GetContentParts(IList con break; case DataContent dataContent when dataContent.MediaTypeStartsWith("image/"): - if (dataContent.ContainsData) + if (dataContent.Data.HasValue) { parts.Add(new ChatMessageImageContentItem(BinaryData.FromBytes(dataContent.Data.Value), dataContent.MediaType)); } diff --git a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceEmbeddingGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceEmbeddingGenerator.cs index 295b45627e8..17bd4fa4662 100644 --- a/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceEmbeddingGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceEmbeddingGenerator.cs @@ -25,6 +25,9 @@ namespace Microsoft.Extensions.AI; public sealed class AzureAIInferenceEmbeddingGenerator : IEmbeddingGenerator> { + /// Metadata about the embedding generator. + private readonly EmbeddingGeneratorMetadata _metadata; + /// The underlying . 
private readonly EmbeddingsClient _embeddingsClient; @@ -63,20 +66,18 @@ public AzureAIInferenceEmbeddingGenerator( var providerUrl = typeof(EmbeddingsClient).GetField("_endpoint", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) ?.GetValue(embeddingsClient) as Uri; - Metadata = new("az.ai.inference", providerUrl, modelId, dimensions); + _metadata = new("az.ai.inference", providerUrl, modelId, dimensions); } /// - public EmbeddingGeneratorMetadata Metadata { get; } - - /// - public object? GetService(Type serviceType, object? serviceKey = null) + object? IEmbeddingGenerator>.GetService(Type serviceType, object? serviceKey) { _ = Throw.IfNull(serviceType); return serviceKey is not null ? null : serviceType == typeof(EmbeddingsClient) ? _embeddingsClient : + serviceType == typeof(EmbeddingGeneratorMetadata) ? _metadata : serviceType.IsInstanceOfType(this) ? this : null; } @@ -163,7 +164,7 @@ private EmbeddingsOptions ToAzureAIOptions(IEnumerable inputs, Embedding EmbeddingsOptions result = new(inputs) { Dimensions = options?.Dimensions ?? _dimensions, - Model = options?.ModelId ?? Metadata.ModelId, + Model = options?.ModelId ?? _metadata.ModelId, EncodingFormat = format, }; diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs index 4038ac8e761..450b83bd6a2 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaChatClient.cs @@ -16,6 +16,7 @@ #pragma warning disable EA0011 // Consider removing unnecessary conditional access operator (?) 
#pragma warning disable SA1204 // Static elements should appear before instance elements +#pragma warning disable S3358 // Ternary operators should not be nested namespace Microsoft.Extensions.AI; @@ -24,6 +25,9 @@ public sealed class OllamaChatClient : IChatClient { private static readonly JsonElement _schemalessJsonResponseFormatValue = JsonDocument.Parse("\"json\"").RootElement; + /// Metadata about the client. + private readonly ChatClientMetadata _metadata; + /// The api/chat endpoint URI. private readonly Uri _apiChatEndpoint; @@ -62,11 +66,9 @@ public OllamaChatClient(Uri endpoint, string? modelId = null, HttpClient? httpCl _apiChatEndpoint = new Uri(endpoint, "api/chat"); _httpClient = httpClient ?? OllamaUtilities.SharedClient; - Metadata = new("ollama", endpoint, modelId); - } - /// - public ChatClientMetadata Metadata { get; } + _metadata = new("ollama", endpoint, modelId); + } /// Gets or sets to use for any serialization activities related to tool call arguments and results. public JsonSerializerOptions ToolCallJsonSerializerOptions @@ -103,7 +105,7 @@ public async Task CompleteAsync(IList chatMessages, return new([FromOllamaMessage(response.Message!)]) { CompletionId = response.CreatedAt, - ModelId = response.Model ?? options?.ModelId ?? Metadata.ModelId, + ModelId = response.Model ?? options?.ModelId ?? _metadata.ModelId, CreatedAt = DateTimeOffset.TryParse(response.CreatedAt, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTimeOffset createdAt) ? createdAt : null, FinishReason = ToFinishReason(response), Usage = ParseOllamaChatResponseUsage(response), @@ -148,7 +150,7 @@ public async IAsyncEnumerable CompleteStreamingAs continue; } - string? modelId = chunk.Model ?? Metadata.ModelId; + string? modelId = chunk.Model ?? _metadata.ModelId; StreamingChatCompletionUpdate update = new() { @@ -189,12 +191,14 @@ public async IAsyncEnumerable CompleteStreamingAs } /// - public object? GetService(Type serviceType, object? 
serviceKey = null) + object? IChatClient.GetService(Type serviceType, object? serviceKey) { _ = Throw.IfNull(serviceType); return - serviceKey is null && serviceType.IsInstanceOfType(this) ? this : + serviceKey is not null ? null : + serviceType == typeof(ChatClientMetadata) ? _metadata : + serviceType.IsInstanceOfType(this) ? this : null; } @@ -292,9 +296,9 @@ private OllamaChatRequest ToOllamaChatRequest(IList chatMessages, C { Format = ToOllamaChatResponseFormat(options?.ResponseFormat), Messages = chatMessages.SelectMany(ToOllamaChatRequestMessages).ToArray(), - Model = options?.ModelId ?? Metadata.ModelId ?? string.Empty, + Model = options?.ModelId ?? _metadata.ModelId ?? string.Empty, Stream = stream, - Tools = options?.Tools is { Count: > 0 } tools ? tools.OfType().Select(ToOllamaTool) : null, + Tools = options?.ToolMode is not NoneChatToolMode && options?.Tools is { Count: > 0 } tools ? tools.OfType().Select(ToOllamaTool) : null, }; if (options is not null) @@ -385,7 +389,7 @@ private IEnumerable ToOllamaChatRequestMessages(ChatMe OllamaChatRequestMessage? currentTextMessage = null; foreach (var item in content.Contents) { - if (item is DataContent { ContainsData: true } dataContent && dataContent.MediaTypeStartsWith("image/")) + if (item is DataContent dataContent && dataContent.MediaTypeStartsWith("image/") && dataContent.Data.HasValue) { IList images = currentTextMessage?.Images ?? 
[]; images.Add(Convert.ToBase64String(dataContent.Data.Value diff --git a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingGenerator.cs index ac5bd608dc7..3d869f3f278 100644 --- a/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI.Ollama/OllamaEmbeddingGenerator.cs @@ -10,11 +10,16 @@ using System.Threading.Tasks; using Microsoft.Shared.Diagnostics; +#pragma warning disable S3358 // Ternary operators should not be nested + namespace Microsoft.Extensions.AI; /// Represents an for Ollama. public sealed class OllamaEmbeddingGenerator : IEmbeddingGenerator> { + /// Metadata about the embedding generator. + private readonly EmbeddingGeneratorMetadata _metadata; + /// The api/embeddings endpoint URI. private readonly Uri _apiEmbeddingsEndpoint; @@ -50,19 +55,18 @@ public OllamaEmbeddingGenerator(Uri endpoint, string? modelId = null, HttpClient _apiEmbeddingsEndpoint = new Uri(endpoint, "api/embed"); _httpClient = httpClient ?? OllamaUtilities.SharedClient; - Metadata = new("ollama", endpoint, modelId); + _metadata = new("ollama", endpoint, modelId); } /// - public EmbeddingGeneratorMetadata Metadata { get; } - - /// - public object? GetService(Type serviceType, object? serviceKey = null) + object? IEmbeddingGenerator>.GetService(Type serviceType, object? serviceKey) { _ = Throw.IfNull(serviceType); return - serviceKey is null && serviceType.IsInstanceOfType(this) ? this : + serviceKey is not null ? null : + serviceType == typeof(EmbeddingGeneratorMetadata) ? _metadata : + serviceType.IsInstanceOfType(this) ? this : null; } @@ -83,7 +87,7 @@ public async Task>> GenerateAsync( // Create request. string[] inputs = values.ToArray(); - string? requestModel = options?.ModelId ?? Metadata.ModelId; + string? requestModel = options?.ModelId ?? _metadata.ModelId; var request = new OllamaEmbeddingRequest { Model = requestModel ?? 
string.Empty, diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs index d0ec35d1e22..61f4fedcb46 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs @@ -24,6 +24,9 @@ public sealed class OpenAIChatClient : IChatClient /// Default OpenAI endpoint. private static readonly Uri _defaultOpenAIEndpoint = new("https://api.openai.com/v1"); + /// Metadata about the client. + private readonly ChatClientMetadata _metadata; + /// The underlying . private readonly OpenAIClient? _openAIClient; @@ -51,7 +54,7 @@ public OpenAIChatClient(OpenAIClient openAIClient, string modelId) Uri providerUrl = typeof(OpenAIClient).GetField("_endpoint", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) ?.GetValue(openAIClient) as Uri ?? _defaultOpenAIEndpoint; - Metadata = new("openai", providerUrl, modelId); + _metadata = new("openai", providerUrl, modelId); } /// Initializes a new instance of the class for the specified . @@ -71,7 +74,7 @@ public OpenAIChatClient(ChatClient chatClient) string? model = typeof(ChatClient).GetField("_model", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) ?.GetValue(chatClient) as string; - Metadata = new("openai", providerUrl, model); + _metadata = new("openai", providerUrl, model); } /// Gets or sets to use for any serialization activities related to tool call arguments and results. @@ -82,15 +85,13 @@ public JsonSerializerOptions ToolCallJsonSerializerOptions } /// - public ChatClientMetadata Metadata { get; } - - /// - public object? GetService(Type serviceType, object? serviceKey = null) + object? IChatClient.GetService(Type serviceType, object? serviceKey) { _ = Throw.IfNull(serviceType); return serviceKey is not null ? null : + serviceType == typeof(ChatClientMetadata) ? _metadata : serviceType == typeof(OpenAIClient) ? 
_openAIClient : serviceType == typeof(ChatClient) ? _chatClient : serviceType.IsInstanceOfType(this) ? this : diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIEmbeddingGenerator.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIEmbeddingGenerator.cs index 5c34a8028a2..55c887ba108 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIEmbeddingGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIEmbeddingGenerator.cs @@ -22,6 +22,9 @@ public sealed class OpenAIEmbeddingGenerator : IEmbeddingGeneratorDefault OpenAI endpoint. private const string DefaultOpenAIEndpoint = "https://api.openai.com/v1"; + /// Metadata about the embedding generator. + private readonly EmbeddingGeneratorMetadata _metadata; + /// The underlying . private readonly OpenAIClient? _openAIClient; @@ -57,7 +60,7 @@ public OpenAIEmbeddingGenerator( ?.GetValue(openAIClient) as Uri)?.ToString() ?? DefaultOpenAIEndpoint; - Metadata = CreateMetadata("openai", providerUrl, modelId, dimensions); + _metadata = CreateMetadata("openai", providerUrl, modelId, dimensions); } /// Initializes a new instance of the class. @@ -85,27 +88,7 @@ public OpenAIEmbeddingGenerator(EmbeddingClient embeddingClient, int? dimensions FieldInfo? modelField = typeof(EmbeddingClient).GetField("_model", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); string? model = modelField?.GetValue(embeddingClient) as string; - Metadata = CreateMetadata("openai", providerUrl, model, dimensions); - } - - /// Creates the for this instance. - private static EmbeddingGeneratorMetadata CreateMetadata(string providerName, string providerUrl, string? model, int? dimensions) => - new(providerName, Uri.TryCreate(providerUrl, UriKind.Absolute, out Uri? providerUri) ? providerUri : null, model, dimensions); - - /// - public EmbeddingGeneratorMetadata Metadata { get; } - - /// - public object? GetService(Type serviceType, object? 
serviceKey = null) - { - _ = Throw.IfNull(serviceType); - - return - serviceKey is not null ? null : - serviceType == typeof(OpenAIClient) ? _openAIClient : - serviceType == typeof(EmbeddingClient) ? _embeddingClient : - serviceType.IsInstanceOfType(this) ? this : - null; + _metadata = CreateMetadata("openai", providerUrl, model, dimensions); } /// @@ -136,6 +119,24 @@ void IDisposable.Dispose() // Nothing to dispose. Implementation required for the IEmbeddingGenerator interface. } + /// + object? IEmbeddingGenerator>.GetService(Type serviceType, object? serviceKey) + { + _ = Throw.IfNull(serviceType); + + return + serviceKey is not null ? null : + serviceType == typeof(EmbeddingGeneratorMetadata) ? _metadata : + serviceType == typeof(OpenAIClient) ? _openAIClient : + serviceType == typeof(EmbeddingClient) ? _embeddingClient : + serviceType.IsInstanceOfType(this) ? this : + null; + } + + /// Creates the for this instance. + private static EmbeddingGeneratorMetadata CreateMetadata(string providerName, string providerUrl, string? model, int? dimensions) => + new(providerName, Uri.TryCreate(providerUrl, UriKind.Absolute, out Uri? providerUri) ? providerUri : null, model, dimensions); + /// Converts an extensions options instance to an OpenAI options instance. private OpenAI.Embeddings.EmbeddingGenerationOptions? ToOpenAIOptions(EmbeddingGenerationOptions? 
options) { diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs index 92e956da9e6..b5671232f8d 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs @@ -34,7 +34,7 @@ public static OpenAI.Chat.ChatCompletion ToOpenAIChatCompletion(ChatCompletion c throw new NotSupportedException("Creating OpenAI ChatCompletion models with multiple choices is currently not supported."); } - List? toolCalls = null; + List? toolCalls = null; foreach (AIContent content in chatCompletion.Message.Contents) { if (content is FunctionCallContent callRequest) @@ -49,7 +49,7 @@ public static OpenAI.Chat.ChatCompletion ToOpenAIChatCompletion(ChatCompletion c } } - OpenAI.Chat.ChatTokenUsage? chatTokenUsage = null; + ChatTokenUsage? chatTokenUsage = null; if (chatCompletion.Usage is UsageDetails usageDetails) { chatTokenUsage = ToOpenAIUsage(usageDetails); @@ -143,7 +143,7 @@ public static ChatCompletion FromOpenAIChatCompletion(OpenAI.Chat.ChatCompletion return completion; } - public static ChatOptions FromOpenAIOptions(OpenAI.Chat.ChatCompletionOptions? options) + public static ChatOptions FromOpenAIOptions(ChatCompletionOptions? options) { ChatOptions result = new(); @@ -220,6 +220,7 @@ public static ChatOptions FromOpenAIOptions(OpenAI.Chat.ChatCompletionOptions? o result.ToolMode = jsonElement.GetString() switch { "required" => ChatToolMode.RequireAny, + "none" => ChatToolMode.None, _ => ChatToolMode.Auto, }; @@ -239,7 +240,7 @@ public static ChatOptions FromOpenAIOptions(OpenAI.Chat.ChatCompletionOptions? o } /// Converts an extensions options instance to an OpenAI options instance. - public static OpenAI.Chat.ChatCompletionOptions ToOpenAIOptions(ChatOptions? options) + public static ChatCompletionOptions ToOpenAIOptions(ChatOptions? 
options) { ChatCompletionOptions result = new(); @@ -318,7 +319,12 @@ public static OpenAI.Chat.ChatCompletionOptions ToOpenAIOptions(ChatOptions? opt switch (options.ToolMode) { + case NoneChatToolMode: + result.ToolChoice = ChatToolChoice.CreateNoneChoice(); + break; + case AutoChatToolMode: + case null: result.ToolChoice = ChatToolChoice.CreateAutoChoice(); break; @@ -351,7 +357,7 @@ public static OpenAI.Chat.ChatCompletionOptions ToOpenAIOptions(ChatOptions? opt private static AITool FromOpenAIChatTool(ChatTool chatTool) { - AdditionalPropertiesDictionary additionalProperties = new(); + AdditionalPropertiesDictionary additionalProperties = []; if (chatTool.FunctionSchemaIsStrict is bool strictValue) { additionalProperties["Strict"] = strictValue; @@ -399,8 +405,8 @@ strictObj is bool strictValue ? strictValue : null; // Map to an intermediate model so that redundant properties are skipped. - OpenAIChatToolJson tool = JsonSerializer.Deserialize(aiFunction.Metadata.Schema, OpenAIJsonContext.Default.OpenAIChatToolJson)!; - BinaryData functionParameters = BinaryData.FromBytes(JsonSerializer.SerializeToUtf8Bytes(tool, OpenAIJsonContext.Default.OpenAIChatToolJson)); + var tool = JsonSerializer.Deserialize(aiFunction.Metadata.Schema, OpenAIJsonContext.Default.OpenAIChatToolJson)!; + var functionParameters = BinaryData.FromBytes(JsonSerializer.SerializeToUtf8Bytes(tool, OpenAIJsonContext.Default.OpenAIChatToolJson)); return ChatTool.CreateFunctionTool(aiFunction.Metadata.Name, aiFunction.Metadata.Description, functionParameters, strict); } @@ -411,7 +417,7 @@ private static UsageDetails FromOpenAIUsage(ChatTokenUsage tokenUsage) InputTokenCount = tokenUsage.InputTokenCount, OutputTokenCount = tokenUsage.OutputTokenCount, TotalTokenCount = tokenUsage.TotalTokenCount, - AdditionalCounts = new(), + AdditionalCounts = [], }; if (tokenUsage.InputTokenDetails is ChatInputTokenUsageDetails inputDetails) @@ -478,11 +484,17 @@ private static ChatTokenUsage 
ToOpenAIUsage(UsageDetails usageDetails) } return OpenAIChatModelFactory.ChatTokenUsage( - inputTokenCount: usageDetails.InputTokenCount ?? 0, - outputTokenCount: usageDetails.OutputTokenCount ?? 0, - totalTokenCount: usageDetails.TotalTokenCount ?? 0, + inputTokenCount: ToInt32Saturate(usageDetails.InputTokenCount), + outputTokenCount: ToInt32Saturate(usageDetails.OutputTokenCount), + totalTokenCount: ToInt32Saturate(usageDetails.TotalTokenCount), outputTokenDetails: outputTokenUsageDetails, inputTokenDetails: inputTokenUsageDetails); + + static int ToInt32Saturate(long? value) => + value is null ? 0 : + value > int.MaxValue ? int.MaxValue : + value < int.MinValue ? int.MinValue : + (int)value; } /// Converts an OpenAI role to an Extensions role. diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatMessage.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatMessage.cs index 96a71b83650..9294e2137e7 100644 --- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatMessage.cs +++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatMessage.cs @@ -32,8 +32,6 @@ public static IEnumerable FromOpenAIChatMessages(IEnumerable? 
functionCalls = null; - foreach (OpenAI.Chat.ChatMessage input in inputs) { switch (input) @@ -73,11 +71,10 @@ public static IEnumerable FromOpenAIChatMessages(IEnumerable FromOpenAIChatMessages(IEnumerable ToOpenAIChatContent(IList break; case DataContent dataContent when dataContent.MediaTypeStartsWith("image/"): - if (dataContent.ContainsData) + if (dataContent.Data.HasValue) { parts.Add(ChatMessageContentPart.CreateImagePart(BinaryData.FromBytes(dataContent.Data.Value), dataContent.MediaType)); } diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatClientStructuredOutputExtensions.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatClientStructuredOutputExtensions.cs index 0e5adb6d811..008ad280dc5 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatClientStructuredOutputExtensions.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/ChatClientStructuredOutputExtensions.cs @@ -67,7 +67,27 @@ public static Task> CompleteAsync( ChatOptions? options = null, bool? useNativeJsonSchema = null, CancellationToken cancellationToken = default) => - CompleteAsync(chatClient, [new ChatMessage(ChatRole.User, chatMessage)], options, useNativeJsonSchema, cancellationToken); + CompleteAsync(chatClient, new ChatMessage(ChatRole.User, chatMessage), options, useNativeJsonSchema, cancellationToken); + + /// Sends a chat message to the model, requesting a response matching the type . + /// The . + /// The chat message to send. + /// The chat options to configure the request. + /// + /// Optionally specifies whether to set a JSON schema on the . + /// This improves reliability if the underlying model supports native structured output with a schema, but may cause an error if the model does not support it. + /// If not specified, the underlying provider's default will be used. + /// + /// The to monitor for cancellation requests. The default is . + /// The response messages generated by the client. 
+ /// The type of structured output to request. + public static Task> CompleteAsync( + this IChatClient chatClient, + ChatMessage chatMessage, + ChatOptions? options = null, + bool? useNativeJsonSchema = null, + CancellationToken cancellationToken = default) => + CompleteAsync(chatClient, [chatMessage], options, useNativeJsonSchema, cancellationToken); /// Sends a user chat text message to the model, requesting a response matching the type . /// The . @@ -89,7 +109,29 @@ public static Task> CompleteAsync( ChatOptions? options = null, bool? useNativeJsonSchema = null, CancellationToken cancellationToken = default) => - CompleteAsync(chatClient, [new ChatMessage(ChatRole.User, chatMessage)], serializerOptions, options, useNativeJsonSchema, cancellationToken); + CompleteAsync(chatClient, new ChatMessage(ChatRole.User, chatMessage), serializerOptions, options, useNativeJsonSchema, cancellationToken); + + /// Sends a chat message to the model, requesting a response matching the type . + /// The . + /// The chat message to send. + /// The JSON serialization options to use. + /// The chat options to configure the request. + /// + /// Optionally specifies whether to set a JSON schema on the . + /// This improves reliability if the underlying model supports native structured output with a schema, but may cause an error if the model does not support it. + /// If not specified, the underlying provider's default will be used. + /// + /// The to monitor for cancellation requests. The default is . + /// The response messages generated by the client. + /// The type of structured output to request. + public static Task> CompleteAsync( + this IChatClient chatClient, + ChatMessage chatMessage, + JsonSerializerOptions serializerOptions, + ChatOptions? options = null, + bool? 
useNativeJsonSchema = null, + CancellationToken cancellationToken = default) => + CompleteAsync(chatClient, [chatMessage], serializerOptions, options, useNativeJsonSchema, cancellationToken); /// Sends chat messages to the model, requesting a response matching the type . /// The . diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/FunctionInvokingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/FunctionInvokingChatClient.cs index 73aa10f65b8..59716092b7a 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/FunctionInvokingChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/FunctionInvokingChatClient.cs @@ -292,7 +292,7 @@ public override async Task CompleteAsync(IList chat case ContinueMode.Continue when options.ToolMode is RequiredChatToolMode: // We have to reset this after the first iteration, otherwise we'll be in an infinite loop. options = options.Clone(); - options.ToolMode = ChatToolMode.Auto; + options.ToolMode = null; break; case ContinueMode.AllowOneMoreRoundtrip: @@ -408,7 +408,7 @@ public override async IAsyncEnumerable CompleteSt case ContinueMode.Continue when options.ToolMode is RequiredChatToolMode: // We have to reset this after the first iteration, otherwise we'll be in an infinite loop. 
options = options.Clone(); - options.ToolMode = ChatToolMode.Auto; + options.ToolMode = null; break; case ContinueMode.AllowOneMoreRoundtrip: @@ -643,7 +643,7 @@ FunctionResultContent CreateFunctionResultContent(FunctionInvocationResult resul functionResult = message; } - return new FunctionResultContent(result.CallContent.CallId, result.CallContent.Name, functionResult) { Exception = result.Exception }; + return new FunctionResultContent(result.CallContent.CallId, functionResult) { Exception = result.Exception }; } } diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/LoggingChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/LoggingChatClient.cs index b816af150b7..9e8fb07c43f 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/LoggingChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/LoggingChatClient.cs @@ -50,7 +50,7 @@ public override async Task CompleteAsync( { if (_logger.IsEnabled(LogLevel.Trace)) { - LogInvokedSensitive(nameof(CompleteAsync), AsJson(chatMessages), AsJson(options), AsJson(Metadata)); + LogInvokedSensitive(nameof(CompleteAsync), AsJson(chatMessages), AsJson(options), AsJson(this.GetService())); } else { @@ -96,7 +96,7 @@ public override async IAsyncEnumerable CompleteSt { if (_logger.IsEnabled(LogLevel.Trace)) { - LogInvokedSensitive(nameof(CompleteStreamingAsync), AsJson(chatMessages), AsJson(options), AsJson(Metadata)); + LogInvokedSensitive(nameof(CompleteStreamingAsync), AsJson(chatMessages), AsJson(options), AsJson(this.GetService())); } else { diff --git a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryChatClient.cs b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryChatClient.cs index cf7df564e73..65905f8688b 100644 --- a/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryChatClient.cs +++ b/src/Libraries/Microsoft.Extensions.AI/ChatCompletion/OpenTelemetryChatClient.cs @@ -55,11 +55,13 @@ public 
OpenTelemetryChatClient(IChatClient innerClient, ILogger? logger = null, _logger = logger ?? NullLogger.Instance; - ChatClientMetadata metadata = innerClient!.Metadata; - _modelId = metadata.ModelId; - _system = metadata.ProviderName; - _serverAddress = metadata.ProviderUri?.GetLeftPart(UriPartial.Path); - _serverPort = metadata.ProviderUri?.Port ?? 0; + if (innerClient!.GetService() is ChatClientMetadata metadata) + { + _modelId = metadata.ModelId; + _system = metadata.ProviderName; + _serverAddress = metadata.ProviderUri?.GetLeftPart(UriPartial.Path); + _serverPort = metadata.ProviderUri?.Port ?? 0; + } string name = string.IsNullOrEmpty(sourceName) ? OpenTelemetryConsts.DefaultSourceName : sourceName!; _activitySource = new(name); @@ -333,20 +335,20 @@ private void TraceCompletion( if (_tokenUsageHistogram.Enabled && completion?.Usage is { } usage) { - if (usage.InputTokenCount is int inputTokens) + if (usage.InputTokenCount is long inputTokens) { TagList tags = default; tags.Add(OpenTelemetryConsts.GenAI.Token.Type, "input"); AddMetricTags(ref tags, requestModelId, completion); - _tokenUsageHistogram.Record(inputTokens); + _tokenUsageHistogram.Record((int)inputTokens); } - if (usage.OutputTokenCount is int outputTokens) + if (usage.OutputTokenCount is long outputTokens) { TagList tags = default; tags.Add(OpenTelemetryConsts.GenAI.Token.Type, "output"); AddMetricTags(ref tags, requestModelId, completion); - _tokenUsageHistogram.Record(outputTokens); + _tokenUsageHistogram.Record((int)outputTokens); } } @@ -380,14 +382,14 @@ private void TraceCompletion( _ = activity.AddTag(OpenTelemetryConsts.GenAI.Response.Model, completion.ModelId); } - if (completion.Usage?.InputTokenCount is int inputTokens) + if (completion.Usage?.InputTokenCount is long inputTokens) { - _ = activity.AddTag(OpenTelemetryConsts.GenAI.Response.InputTokens, inputTokens); + _ = activity.AddTag(OpenTelemetryConsts.GenAI.Response.InputTokens, (int)inputTokens); } - if 
(completion.Usage?.OutputTokenCount is int outputTokens) + if (completion.Usage?.OutputTokenCount is long outputTokens) { - _ = activity.AddTag(OpenTelemetryConsts.GenAI.Response.OutputTokens, outputTokens); + _ = activity.AddTag(OpenTelemetryConsts.GenAI.Response.OutputTokens, (int)outputTokens); } if (_system is not null) diff --git a/src/Libraries/Microsoft.Extensions.AI/Embeddings/LoggingEmbeddingGenerator.cs b/src/Libraries/Microsoft.Extensions.AI/Embeddings/LoggingEmbeddingGenerator.cs index 87757849b2e..24770df1052 100644 --- a/src/Libraries/Microsoft.Extensions.AI/Embeddings/LoggingEmbeddingGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI/Embeddings/LoggingEmbeddingGenerator.cs @@ -52,7 +52,7 @@ public override async Task> GenerateAsync(IEnume { if (_logger.IsEnabled(LogLevel.Trace)) { - LogInvokedSensitive(AsJson(values), AsJson(options), AsJson(Metadata)); + LogInvokedSensitive(AsJson(values), AsJson(options), AsJson(this.GetService())); } else { diff --git a/src/Libraries/Microsoft.Extensions.AI/Embeddings/OpenTelemetryEmbeddingGenerator.cs b/src/Libraries/Microsoft.Extensions.AI/Embeddings/OpenTelemetryEmbeddingGenerator.cs index 497e87f2d7a..8ef13ce368b 100644 --- a/src/Libraries/Microsoft.Extensions.AI/Embeddings/OpenTelemetryEmbeddingGenerator.cs +++ b/src/Libraries/Microsoft.Extensions.AI/Embeddings/OpenTelemetryEmbeddingGenerator.cs @@ -50,13 +50,15 @@ public OpenTelemetryEmbeddingGenerator(IEmbeddingGenerator i { Debug.Assert(innerGenerator is not null, "Should have been validated by the base ctor."); - EmbeddingGeneratorMetadata metadata = innerGenerator!.Metadata; - _system = metadata.ProviderName; - _modelId = metadata.ModelId; - _modelProvider = metadata.ProviderName; - _endpointAddress = metadata.ProviderUri?.GetLeftPart(UriPartial.Path); - _endpointPort = metadata.ProviderUri?.Port ?? 
0; - _dimensions = metadata.Dimensions; + if (innerGenerator!.GetService() is EmbeddingGeneratorMetadata metadata) + { + _system = metadata.ProviderName; + _modelId = metadata.ModelId; + _modelProvider = metadata.ProviderName; + _endpointAddress = metadata.ProviderUri?.GetLeftPart(UriPartial.Path); + _endpointPort = metadata.ProviderUri?.Port ?? 0; + _dimensions = metadata.Dimensions; + } string name = string.IsNullOrEmpty(sourceName) ? OpenTelemetryConsts.DefaultSourceName : sourceName!; _activitySource = new(name); @@ -187,9 +189,9 @@ private void TraceCompletion( if (embeddings is not null) { responseModelId = embeddings.FirstOrDefault()?.ModelId; - if (embeddings.Usage?.InputTokenCount is int i) + if (embeddings.Usage?.InputTokenCount is long i) { - inputTokens = inputTokens.GetValueOrDefault() + i; + inputTokens = inputTokens.GetValueOrDefault() + (int)i; } } diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatClientExtensionsTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatClientExtensionsTests.cs index 3732e80503f..41e92f1ad66 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatClientExtensionsTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatClientExtensionsTests.cs @@ -27,7 +27,7 @@ public void CompleteAsync_InvalidArgs_Throws() Assert.Throws("chatMessage", () => { - _ = ChatClientExtensions.CompleteAsync(new TestChatClient(), null!); + _ = ChatClientExtensions.CompleteAsync(new TestChatClient(), (ChatMessage)null!); }); } @@ -41,7 +41,7 @@ public void CompleteStreamingAsync_InvalidArgs_Throws() Assert.Throws("chatMessage", () => { - _ = ChatClientExtensions.CompleteStreamingAsync(new TestChatClient(), null!); + _ = ChatClientExtensions.CompleteStreamingAsync(new TestChatClient(), (ChatMessage)null!); }); } diff --git 
a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatCompletionTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatCompletionTests.cs index 7ff4d781c98..261682ab801 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatCompletionTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatCompletionTests.cs @@ -249,7 +249,7 @@ public void ToStreamingChatCompletionUpdates_MultiChoice() new ChatMessage(ChatRole.System, [ new FunctionCallContent("call123", "name"), - new FunctionResultContent("call123", "name", 42), + new FunctionResultContent("call123", 42), ]) { AdditionalProperties = new() { ["choice2Key"] = "choice2Value" }, diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatMessageTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatMessageTests.cs index 17ccb373e75..b67fb1de4a5 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatMessageTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatMessageTests.cs @@ -129,7 +129,7 @@ public void Text_GetSet_UsesFirstTextContent() new FunctionCallContent("callId1", "fc1"), new TextContent("text-1"), new TextContent("text-2"), - new FunctionResultContent("callId1", "fc2", "result"), + new FunctionResultContent("callId1", "result"), ]); TextContent textContent = Assert.IsType(message.Contents[3]); @@ -278,7 +278,7 @@ public void ItCanBeSerializeAndDeserialized() AdditionalProperties = new() { ["metadata-key-4"] = "metadata-value-4" } }, new FunctionCallContent("function-id", "plugin-name-function-name", new Dictionary { ["parameter"] = "argument" }), - new FunctionResultContent("function-id", "plugin-name-function-name", "function-result"), + new FunctionResultContent("function-id", "function-result"), ]; // Act diff --git 
a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatOptionsTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatOptionsTests.cs index 349623d7b08..4e3ceadd793 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatOptionsTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatOptionsTests.cs @@ -23,7 +23,7 @@ public void Constructor_Parameterless_PropsDefaulted() Assert.Null(options.ResponseFormat); Assert.Null(options.ModelId); Assert.Null(options.StopSequences); - Assert.Same(ChatToolMode.Auto, options.ToolMode); + Assert.Null(options.ToolMode); Assert.Null(options.Tools); Assert.Null(options.AdditionalProperties); @@ -38,7 +38,7 @@ public void Constructor_Parameterless_PropsDefaulted() Assert.Null(clone.ResponseFormat); Assert.Null(clone.ModelId); Assert.Null(clone.StopSequences); - Assert.Same(ChatToolMode.Auto, clone.ToolMode); + Assert.Null(clone.ToolMode); Assert.Null(clone.Tools); Assert.Null(clone.AdditionalProperties); } diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatToolModeTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatToolModeTests.cs index 7cdda8ef975..e0c00769277 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatToolModeTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/ChatToolModeTests.cs @@ -12,6 +12,7 @@ public class ChatToolModeTests public void Singletons_Idempotent() { Assert.Same(ChatToolMode.Auto, ChatToolMode.Auto); + Assert.Same(ChatToolMode.None, ChatToolMode.None); Assert.Same(ChatToolMode.RequireAny, ChatToolMode.RequireAny); } @@ -25,6 +26,13 @@ public void Equality_ComparersProduceExpectedResults() Assert.False(ChatToolMode.Auto.Equals(new RequiredChatToolMode("func"))); Assert.Equal(ChatToolMode.Auto.GetHashCode(), 
ChatToolMode.Auto.GetHashCode()); + Assert.True(ChatToolMode.None == ChatToolMode.None); + Assert.True(ChatToolMode.None.Equals(ChatToolMode.None)); + Assert.False(ChatToolMode.None.Equals(ChatToolMode.RequireAny)); + Assert.False(ChatToolMode.None.Equals(new RequiredChatToolMode(null))); + Assert.False(ChatToolMode.None.Equals(new RequiredChatToolMode("func"))); + Assert.Equal(ChatToolMode.None.GetHashCode(), ChatToolMode.None.GetHashCode()); + Assert.True(ChatToolMode.RequireAny == ChatToolMode.RequireAny); Assert.True(ChatToolMode.RequireAny.Equals(ChatToolMode.RequireAny)); Assert.False(ChatToolMode.RequireAny.Equals(ChatToolMode.Auto)); @@ -54,6 +62,16 @@ public void Serialization_AutoRoundtrips() Assert.Equal(ChatToolMode.Auto, result); } + [Fact] + public void Serialization_NoneRoundtrips() + { + string json = JsonSerializer.Serialize(ChatToolMode.None, TestJsonSerializerContext.Default.ChatToolMode); + Assert.Equal("""{"$type":"none"}""", json); + + ChatToolMode? result = JsonSerializer.Deserialize(json, TestJsonSerializerContext.Default.ChatToolMode); + Assert.Equal(ChatToolMode.None, result); + } + [Fact] public void Serialization_RequireAnyRoundtrips() { diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/DelegatingChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/DelegatingChatClientTests.cs index 8245452210c..a6b6e024681 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/DelegatingChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/DelegatingChatClientTests.cs @@ -17,15 +17,6 @@ public void RequiresInnerChatClient() Assert.Throws("innerClient", () => new NoOpDelegatingChatClient(null!)); } - [Fact] - public void MetadataDefaultsToInnerClient() - { - using var inner = new TestChatClient(); - using var delegating = new NoOpDelegatingChatClient(inner); - - Assert.Same(inner.Metadata, 
delegating.Metadata); - } - [Fact] public async Task ChatAsyncDefaultsToInnerClientAsync() { diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/StreamingChatCompletionUpdateTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/StreamingChatCompletionUpdateTests.cs index 371d9c70bad..725d9cea57f 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/StreamingChatCompletionUpdateTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/ChatCompletion/StreamingChatCompletionUpdateTests.cs @@ -96,7 +96,7 @@ public void Text_GetSet_UsesFirstTextContent() new FunctionCallContent("callId1", "fc1"), new TextContent("text-1"), new TextContent("text-2"), - new FunctionResultContent("callId1", "fc2", "result"), + new FunctionResultContent("callId1", "result"), ], }; diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Contents/DataContentTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Contents/DataContentTests.cs index dab6e7f3eed..dfa28373d48 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Contents/DataContentTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Contents/DataContentTests.cs @@ -142,7 +142,6 @@ public void Deserialize_MatchesExpectedData() Assert.NotNull(content.Data); Assert.Equal([0x01, 0x02, 0x03, 0x04], content.Data.Value.ToArray()); Assert.Equal("application/octet-stream", content.MediaType); - Assert.True(content.ContainsData); // Uri referenced content-only content = JsonSerializer.Deserialize("""{"mediaType":"application/octet-stream","uri":"http://localhost/"}""", TestJsonSerializerContext.Default.Options)!; @@ -150,7 +149,6 @@ public void Deserialize_MatchesExpectedData() Assert.Null(content.Data); Assert.Equal("http://localhost/", content.Uri); Assert.Equal("application/octet-stream", content.MediaType); - Assert.False(content.ContainsData); // Using extra 
metadata content = JsonSerializer.Deserialize(""" @@ -169,7 +167,6 @@ public void Deserialize_MatchesExpectedData() Assert.NotNull(content.Data); Assert.Equal([0x01, 0x02, 0x03, 0x04], content.Data.Value.ToArray()); Assert.Equal("text/plain", content.MediaType); - Assert.True(content.ContainsData); Assert.Equal("value", content.AdditionalProperties!["key"]!.ToString()); } diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Contents/FunctionCallContentTests..cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Contents/FunctionCallContentTests..cs index 82b1a518aca..747708602cd 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Contents/FunctionCallContentTests..cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Contents/FunctionCallContentTests..cs @@ -61,8 +61,6 @@ public void Constructor_PropsRoundtrip() Assert.Same(props, c.AdditionalProperties); Assert.Equal("callId1", c.CallId); - c.CallId = "id"; - Assert.Equal("id", c.CallId); Assert.Null(c.Arguments); AdditionalPropertiesDictionary args = new() { { "key", "value" } }; diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Contents/FunctionResultContentTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Contents/FunctionResultContentTests.cs index ef3382b430e..1542a4b823a 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Contents/FunctionResultContentTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Contents/FunctionResultContentTests.cs @@ -12,9 +12,8 @@ public class FunctionResultContentTests [Fact] public void Constructor_PropsDefault() { - FunctionResultContent c = new("callId1", "functionName", null); + FunctionResultContent c = new("callId1", null); Assert.Equal("callId1", c.CallId); - Assert.Equal("functionName", c.Name); Assert.Null(c.RawRepresentation); Assert.Null(c.AdditionalProperties); Assert.Null(c.Result); @@ -24,10 +23,9 @@ public void 
Constructor_PropsDefault() [Fact] public void Constructor_String_PropsRoundtrip() { - FunctionResultContent c = new("id", "name", "result"); + FunctionResultContent c = new("id", "result"); Assert.Null(c.RawRepresentation); Assert.Null(c.AdditionalProperties); - Assert.Equal("name", c.Name); Assert.Equal("id", c.CallId); Assert.Equal("result", c.Result); Assert.Null(c.Exception); @@ -36,7 +34,7 @@ public void Constructor_String_PropsRoundtrip() [Fact] public void Constructor_PropsRoundtrip() { - FunctionResultContent c = new("callId1", "functionName", null); + FunctionResultContent c = new("callId1", null); Assert.Null(c.RawRepresentation); object raw = new(); @@ -49,8 +47,6 @@ public void Constructor_PropsRoundtrip() Assert.Same(props, c.AdditionalProperties); Assert.Equal("callId1", c.CallId); - c.CallId = "id"; - Assert.Equal("id", c.CallId); Assert.Null(c.Result); c.Result = "result"; @@ -66,7 +62,7 @@ public void Constructor_PropsRoundtrip() public void ItShouldBeSerializableAndDeserializable() { // Arrange - var sut = new FunctionResultContent("id", "p1-f1", "result"); + var sut = new FunctionResultContent("id", "result"); // Act var json = JsonSerializer.Serialize(sut, TestJsonSerializerContext.Default.Options); @@ -75,7 +71,6 @@ public void ItShouldBeSerializableAndDeserializable() // Assert Assert.NotNull(deserializedSut); - Assert.Equal(sut.Name, deserializedSut.Name); Assert.Equal(sut.CallId, deserializedSut.CallId); Assert.Equal(sut.Result, deserializedSut.Result?.ToString()); } @@ -84,7 +79,7 @@ public void ItShouldBeSerializableAndDeserializable() public void ItShouldBeSerializableAndDeserializableWithException() { // Arrange - var sut = new FunctionResultContent("callId1", "functionName", null) { Exception = new InvalidOperationException("hello") }; + var sut = new FunctionResultContent("callId1", null) { Exception = new InvalidOperationException("hello") }; // Act var json = JsonSerializer.Serialize(sut, TestJsonSerializerContext.Default.Options); 
@@ -92,7 +87,6 @@ public void ItShouldBeSerializableAndDeserializableWithException() // Assert Assert.NotNull(deserializedSut); - Assert.Equal(sut.Name, deserializedSut.Name); Assert.Equal(sut.CallId, deserializedSut.CallId); Assert.Equal(sut.Result, deserializedSut.Result?.ToString()); Assert.Null(deserializedSut.Exception); diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Embeddings/DelegatingEmbeddingGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Embeddings/DelegatingEmbeddingGeneratorTests.cs index 7ba6de333e0..ededd588383 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Embeddings/DelegatingEmbeddingGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Embeddings/DelegatingEmbeddingGeneratorTests.cs @@ -17,15 +17,6 @@ public void RequiresInnerService() Assert.Throws("innerGenerator", () => new NoOpDelegatingEmbeddingGenerator(null!)); } - [Fact] - public void MetadataDefaultsToInnerService() - { - using var inner = new TestEmbeddingGenerator(); - using var delegating = new NoOpDelegatingEmbeddingGenerator(inner); - - Assert.Same(inner.Metadata, delegating.Metadata); - } - [Fact] public async Task GenerateEmbeddingsDefaultsToInnerServiceAsync() { diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestChatClient.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestChatClient.cs index e0f8c7fe982..09723371191 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestChatClient.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestChatClient.cs @@ -17,8 +17,6 @@ public TestChatClient() public IServiceProvider? Services { get; set; } - public ChatClientMetadata Metadata { get; set; } = new(); - public Func, ChatOptions?, CancellationToken, Task>? CompleteAsyncCallback { get; set; } public Func, ChatOptions?, CancellationToken, IAsyncEnumerable>? 
CompleteStreamingAsyncCallback { get; set; } diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestEmbeddingGenerator.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestEmbeddingGenerator.cs index 3908a5f8cca..e0d747cfc9d 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestEmbeddingGenerator.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestEmbeddingGenerator.cs @@ -15,8 +15,6 @@ public TestEmbeddingGenerator() GetServiceCallback = DefaultGetServiceCallback; } - public EmbeddingGeneratorMetadata Metadata { get; set; } = new(); - public Func, EmbeddingGenerationOptions?, CancellationToken, Task>>>? GenerateAsyncCallback { get; set; } public Func GetServiceCallback { get; set; } diff --git a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs index 599b4403fa9..1f306063085 100644 --- a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs @@ -63,9 +63,10 @@ public void AsChatClient_ProducesExpectedMetadata() ChatCompletionsClient client = new(endpoint, new AzureKeyCredential("key")); IChatClient chatClient = client.AsChatClient(model); - Assert.Equal("az.ai.inference", chatClient.Metadata.ProviderName); - Assert.Equal(endpoint, chatClient.Metadata.ProviderUri); - Assert.Equal(model, chatClient.Metadata.ModelId); + var metadata = chatClient.GetService(); + Assert.Equal("az.ai.inference", metadata?.ProviderName); + Assert.Equal(endpoint, metadata?.ProviderUri); + Assert.Equal(model, metadata?.ModelId); } [Fact] @@ -508,7 +509,7 @@ public async Task MultipleMessages_NonStreaming() new(ChatRole.Assistant, "hi, how are you?"), new(ChatRole.User, "i'm good. 
how are you?"), new(ChatRole.Assistant, [new FunctionCallContent("abcd123", "GetMood")]), - new(ChatRole.Tool, [new FunctionResultContent("abcd123", "GetMood", "happy")]), + new(ChatRole.Tool, [new FunctionResultContent("abcd123", "happy")]), ]; var response = await client.CompleteAsync(messages, new() @@ -672,6 +673,7 @@ public async Task NullAssistantText_ContentEmpty_NonStreaming() public static IEnumerable FunctionCallContent_NonStreaming_MemberData() { yield return [ChatToolMode.Auto]; + yield return [ChatToolMode.None]; yield return [ChatToolMode.RequireAny]; yield return [ChatToolMode.RequireSpecific("GetPersonAge")]; } @@ -709,6 +711,7 @@ public async Task FunctionCallContent_NonStreaming(ChatToolMode mode) } ], "tool_choice": {{( + mode is NoneChatToolMode ? "\"none\"" : mode is AutoChatToolMode ? "\"auto\"" : mode is RequiredChatToolMode { RequiredFunctionName: not null } f ? "{\"type\":\"function\",\"function\":{\"name\":\"GetPersonAge\"}}" : "\"required\"" diff --git a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceEmbeddingGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceEmbeddingGeneratorTests.cs index d28ea111157..c092bc87ced 100644 --- a/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceEmbeddingGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceEmbeddingGeneratorTests.cs @@ -49,9 +49,10 @@ public void AsEmbeddingGenerator_OpenAIClient_ProducesExpectedMetadata() EmbeddingsClient client = new(endpoint, new AzureKeyCredential("key")); IEmbeddingGenerator> embeddingGenerator = client.AsEmbeddingGenerator(model); - Assert.Equal("az.ai.inference", embeddingGenerator.Metadata.ProviderName); - Assert.Equal(endpoint, embeddingGenerator.Metadata.ProviderUri); - Assert.Equal(model, embeddingGenerator.Metadata.ModelId); + var metadata = embeddingGenerator.GetService(); + Assert.Equal("az.ai.inference", 
metadata?.ProviderName); + Assert.Equal(endpoint, metadata?.ProviderUri); + Assert.Equal(model, metadata?.ModelId); } [Fact] diff --git a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ChatClientIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ChatClientIntegrationTests.cs index 3d43912aab6..6f8d0ddee29 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ChatClientIntegrationTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/ChatClientIntegrationTests.cs @@ -621,7 +621,7 @@ public virtual async Task OpenTelemetry_CanEmitTracesAndMetrics() var activity = Assert.Single(activities); Assert.StartsWith("chat", activity.DisplayName); Assert.StartsWith("http", (string)activity.GetTagItem("server.address")!); - Assert.Equal(chatClient.Metadata.ProviderUri?.Port, (int)activity.GetTagItem("server.port")!); + Assert.Equal(chatClient.GetService()?.ProviderUri?.Port, (int)activity.GetTagItem("server.port")!); Assert.NotNull(activity.Id); Assert.NotEmpty(activity.Id); Assert.NotEqual(0, (int)activity.GetTagItem("gen_ai.response.input_tokens")!); diff --git a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/EmbeddingGeneratorIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/EmbeddingGeneratorIntegrationTests.cs index aacd07b561b..bc5aaac88a7 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/EmbeddingGeneratorIntegrationTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/EmbeddingGeneratorIntegrationTests.cs @@ -50,7 +50,7 @@ public virtual async Task GenerateEmbedding_CreatesEmbeddingSuccessfully() Assert.NotNull(embeddings.Usage.InputTokenCount); Assert.NotNull(embeddings.Usage.TotalTokenCount); Assert.Single(embeddings); - Assert.Equal(_embeddingGenerator.Metadata.ModelId, embeddings[0].ModelId); + Assert.Equal(_embeddingGenerator.GetService()?.ModelId, embeddings[0].ModelId); Assert.NotEmpty(embeddings[0].Vector.ToArray()); 
} @@ -71,7 +71,7 @@ public virtual async Task GenerateEmbeddings_CreatesEmbeddingsSuccessfully() Assert.NotNull(embeddings.Usage.TotalTokenCount); Assert.All(embeddings, embedding => { - Assert.Equal(_embeddingGenerator.Metadata.ModelId, embedding.ModelId); + Assert.Equal(_embeddingGenerator.GetService()?.ModelId, embedding.ModelId); Assert.NotEmpty(embedding.Vector.ToArray()); }); } @@ -122,7 +122,7 @@ public virtual async Task OpenTelemetry_CanEmitTracesAndMetrics() var activity = activities.Single(); Assert.StartsWith("embed", activity.DisplayName); Assert.StartsWith("http", (string)activity.GetTagItem("server.address")!); - Assert.Equal(embeddingGenerator.Metadata.ProviderUri?.Port, (int)activity.GetTagItem("server.port")!); + Assert.Equal(embeddingGenerator.GetService()?.ProviderUri?.Port, (int)activity.GetTagItem("server.port")!); Assert.NotNull(activity.Id); Assert.NotEmpty(activity.Id); Assert.NotEqual(0, (int)activity.GetTagItem("gen_ai.response.input_tokens")!); diff --git a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/QuantizationEmbeddingGenerator.cs b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/QuantizationEmbeddingGenerator.cs index c48dc2e23e8..3bf33988146 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/QuantizationEmbeddingGenerator.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Integration.Tests/QuantizationEmbeddingGenerator.cs @@ -25,8 +25,6 @@ public QuantizationEmbeddingGenerator(IEmbeddingGenerator _floatService.Metadata; - void IDisposable.Dispose() => _floatService.Dispose(); public object? GetService(Type serviceType, object? 
serviceKey = null) => diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs index 507666e52fc..c8816d5cf85 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaChatClientTests.cs @@ -71,9 +71,10 @@ public void AsChatClient_ProducesExpectedMetadata() string model = "amazingModel"; using IChatClient chatClient = new OllamaChatClient(endpoint, model); - Assert.Equal("ollama", chatClient.Metadata.ProviderName); - Assert.Equal(endpoint, chatClient.Metadata.ProviderUri); - Assert.Equal(model, chatClient.Metadata.ModelId); + var metadata = chatClient.GetService(); + Assert.Equal("ollama", metadata?.ProviderName); + Assert.Equal(endpoint, metadata?.ProviderUri); + Assert.Equal(model, metadata?.ModelId); } [Fact] @@ -459,7 +460,7 @@ public async Task FunctionResultContent_NonStreaming() [ new(ChatRole.User, "How old is Alice?"), new(ChatRole.Assistant, [new FunctionCallContent("abcd1234", "GetPersonAge", new Dictionary { ["personName"] = "Alice" })]), - new(ChatRole.Tool, [new FunctionResultContent("abcd1234", "GetPersonAge", 42)]), + new(ChatRole.Tool, [new FunctionResultContent("abcd1234", 42)]), ], new() { diff --git a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaEmbeddingGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaEmbeddingGeneratorTests.cs index e044ef1d468..bc6d5500bd9 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaEmbeddingGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Ollama.Tests/OllamaEmbeddingGeneratorTests.cs @@ -49,10 +49,11 @@ public void AsEmbeddingGenerator_ProducesExpectedMetadata() Uri endpoint = new("http://localhost/some/endpoint"); string model = "amazingModel"; - using IEmbeddingGenerator> chatClient = new OllamaEmbeddingGenerator(endpoint, model); - 
Assert.Equal("ollama", chatClient.Metadata.ProviderName); - Assert.Equal(endpoint, chatClient.Metadata.ProviderUri); - Assert.Equal(model, chatClient.Metadata.ModelId); + using IEmbeddingGenerator> generator = new OllamaEmbeddingGenerator(endpoint, model); + var metadata = generator.GetService(); + Assert.Equal("ollama", metadata?.ProviderName); + Assert.Equal(endpoint, metadata?.ProviderUri); + Assert.Equal(model, metadata?.ModelId); } [Fact] diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs index ac1b397364d..9238b4fbba0 100644 --- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIChatClientTests.cs @@ -72,14 +72,16 @@ public void AsChatClient_OpenAIClient_ProducesExpectedMetadata(bool useAzureOpen new OpenAIClient(new ApiKeyCredential("key"), new OpenAIClientOptions { Endpoint = endpoint }); IChatClient chatClient = client.AsChatClient(model); - Assert.Equal("openai", chatClient.Metadata.ProviderName); - Assert.Equal(endpoint, chatClient.Metadata.ProviderUri); - Assert.Equal(model, chatClient.Metadata.ModelId); + var metadata = chatClient.GetService(); + Assert.Equal("openai", metadata?.ProviderName); + Assert.Equal(endpoint, metadata?.ProviderUri); + Assert.Equal(model, metadata?.ModelId); chatClient = client.GetChatClient(model).AsChatClient(); - Assert.Equal("openai", chatClient.Metadata.ProviderName); - Assert.Equal(endpoint, chatClient.Metadata.ProviderUri); - Assert.Equal(model, chatClient.Metadata.ModelId); + metadata = chatClient.GetService(); + Assert.Equal("openai", metadata?.ProviderName); + Assert.Equal(endpoint, metadata?.ProviderUri); + Assert.Equal(model, metadata?.ModelId); } [Fact] @@ -994,8 +996,8 @@ public async Task AssistantMessageWithBothToolsAndContent_NonStreaming() ]), new (ChatRole.Tool, [ - new FunctionResultContent("12345", 
"SayHello", "Said hello"), - new FunctionResultContent("12346", "SayHi", "Said hi"), + new FunctionResultContent("12345", "Said hello"), + new FunctionResultContent("12346", "Said hi"), ]), new(ChatRole.Assistant, "You are great."), new(ChatRole.User, "Thanks!"), diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIEmbeddingGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIEmbeddingGeneratorTests.cs index 4a8b7aad83a..3ceba5a9d00 100644 --- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIEmbeddingGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIEmbeddingGeneratorTests.cs @@ -55,14 +55,16 @@ public void AsEmbeddingGenerator_OpenAIClient_ProducesExpectedMetadata(bool useA new OpenAIClient(new ApiKeyCredential("key"), new OpenAIClientOptions { Endpoint = endpoint }); IEmbeddingGenerator> embeddingGenerator = client.AsEmbeddingGenerator(model); - Assert.Equal("openai", embeddingGenerator.Metadata.ProviderName); - Assert.Equal(endpoint, embeddingGenerator.Metadata.ProviderUri); - Assert.Equal(model, embeddingGenerator.Metadata.ModelId); + var metadata = embeddingGenerator.GetService(); + Assert.Equal("openai", metadata?.ProviderName); + Assert.Equal(endpoint, metadata?.ProviderUri); + Assert.Equal(model, metadata?.ModelId); embeddingGenerator = client.GetEmbeddingClient(model).AsEmbeddingGenerator(); - Assert.Equal("openai", embeddingGenerator.Metadata.ProviderName); - Assert.Equal(endpoint, embeddingGenerator.Metadata.ProviderUri); - Assert.Equal(model, embeddingGenerator.Metadata.ModelId); + metadata = embeddingGenerator.GetService(); + Assert.Equal("openai", metadata?.ProviderName); + Assert.Equal(endpoint, metadata?.ProviderUri); + Assert.Equal(model, metadata?.ModelId); } [Fact] diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAISerializationTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAISerializationTests.cs index 72461ce2a48..71ec7bd7f5f 100644 ---
a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAISerializationTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAISerializationTests.cs @@ -366,7 +366,7 @@ public static async Task RequestDeserialization_ToolCall() Assert.Null(request.Options.Seed); Assert.Null(request.Options.StopSequences); - Assert.Equal(ChatToolMode.Auto, request.Options.ToolMode); + Assert.Same(ChatToolMode.Auto, request.Options.ToolMode); Assert.NotNull(request.Options.Tools); AIFunction function = Assert.IsAssignableFrom(Assert.Single(request.Options.Tools)); @@ -455,7 +455,6 @@ public static async Task RequestDeserialization_ToolChatMessage() Assert.Null(msg.AdditionalProperties); FunctionResultContent frc = Assert.IsType(Assert.Single(msg.Contents)); - Assert.Equal("SayHello", frc.Name); Assert.Equal("12345", frc.CallId); Assert.Equal(42, Assert.IsType(frc.Result).GetInt32()); Assert.Null(frc.AdditionalProperties); diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/ChatClientStructuredOutputExtensionsTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/ChatClientStructuredOutputExtensionsTests.cs index 73e231ccbdb..aab999d12d3 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/ChatClientStructuredOutputExtensionsTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/ChatClientStructuredOutputExtensionsTests.cs @@ -17,7 +17,7 @@ public class ChatClientStructuredOutputExtensionsTests public async Task SuccessUsage() { var expectedResult = new Animal { Id = 1, FullName = "Tigger", Species = Species.Tiger }; - var expectedCompletion = new ChatCompletion([new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))]) + var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))) { CompletionId = "test", CreatedAt = DateTimeOffset.UtcNow, @@ -78,7 +78,7 @@ public async Task SuccessUsage() public async Task 
WrapsNonObjectValuesInDataProperty() { var expectedResult = new { data = 123 }; - var expectedCompletion = new ChatCompletion([new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))]); + var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))); using var client = new TestChatClient { @@ -110,7 +110,7 @@ public async Task WrapsNonObjectValuesInDataProperty() [Fact] public async Task FailureUsage_InvalidJson() { - var expectedCompletion = new ChatCompletion([new ChatMessage(ChatRole.Assistant, "This is not valid JSON")]); + var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, "This is not valid JSON")); using var client = new TestChatClient { CompleteAsyncCallback = (messages, options, cancellationToken) => Task.FromResult(expectedCompletion), @@ -129,7 +129,7 @@ public async Task FailureUsage_InvalidJson() [Fact] public async Task FailureUsage_NullJson() { - var expectedCompletion = new ChatCompletion([new ChatMessage(ChatRole.Assistant, "null")]); + var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, "null")); using var client = new TestChatClient { CompleteAsyncCallback = (messages, options, cancellationToken) => Task.FromResult(expectedCompletion), @@ -148,7 +148,7 @@ public async Task FailureUsage_NullJson() [Fact] public async Task FailureUsage_NoJsonInResponse() { - var expectedCompletion = new ChatCompletion([new ChatMessage(ChatRole.Assistant, [new DataContent("https://example.com")])]); + var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, [new DataContent("https://example.com")])); using var client = new TestChatClient { CompleteAsyncCallback = (messages, options, cancellationToken) => Task.FromResult(expectedCompletion), @@ -168,7 +168,7 @@ public async Task FailureUsage_NoJsonInResponse() public async Task CanUseNativeStructuredOutput() { var expectedResult = new Animal { Id = 1, 
FullName = "Tigger", Species = Species.Tiger }; - var expectedCompletion = new ChatCompletion([new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))]); + var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))); using var client = new TestChatClient { @@ -212,7 +212,7 @@ public async Task CanUseNativeStructuredOutput() public async Task CanUseNativeStructuredOutputWithSanitizedTypeName() { var expectedResult = new Data { Value = new Animal { Id = 1, FullName = "Tigger", Species = Species.Tiger } }; - var expectedCompletion = new ChatCompletion([new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))]); + var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult))); using var client = new TestChatClient { @@ -247,7 +247,7 @@ public async Task CanUseNativeStructuredOutputWithArray() { var expectedResult = new[] { new Animal { Id = 1, FullName = "Tigger", Species = Species.Tiger } }; var payload = new { data = expectedResult }; - var expectedCompletion = new ChatCompletion([new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(payload))]); + var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(payload))); using var client = new TestChatClient { @@ -278,7 +278,7 @@ public async Task CanSpecifyCustomJsonSerializationOptions() PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower, }; var expectedResult = new Animal { Id = 1, FullName = "Tigger", Species = Species.Tiger }; - var expectedCompletion = new ChatCompletion([new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult, jso))]); + var expectedCompletion = new ChatCompletion(new ChatMessage(ChatRole.Assistant, JsonSerializer.Serialize(expectedResult, jso))); using var client = new TestChatClient { @@ -324,7 +324,7 @@ public async Task 
HandlesBackendReturningMultipleObjects() { CompleteAsyncCallback = (messages, options, cancellationToken) => { - return Task.FromResult(new ChatCompletion([new ChatMessage(ChatRole.Assistant, resultDuplicatedJson)])); + return Task.FromResult(new ChatCompletion(new ChatMessage(ChatRole.Assistant, resultDuplicatedJson))); }, }; diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/DistributedCachingChatClientTest.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/DistributedCachingChatClientTest.cs index f66ce1cbd5b..730d0739f9d 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/DistributedCachingChatClientTest.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/DistributedCachingChatClientTest.cs @@ -83,19 +83,19 @@ public async Task CachesSuccessResultsAsync() }; // Make the initial request and do a quick sanity check - var result1 = await outer.CompleteAsync([new ChatMessage(ChatRole.User, "some input")]); + var result1 = await outer.CompleteAsync("some input"); Assert.Same(expectedCompletion, result1); Assert.Equal(1, innerCallCount); // Act - var result2 = await outer.CompleteAsync([new ChatMessage(ChatRole.User, "some input")]); + var result2 = await outer.CompleteAsync("some input"); // Assert Assert.Equal(1, innerCallCount); AssertCompletionsEqual(expectedCompletion, result2); // Act/Assert 2: Cache misses do not return cached results - await outer.CompleteAsync([new ChatMessage(ChatRole.User, "some modified input")]); + await outer.CompleteAsync("some modified input"); Assert.Equal(2, innerCallCount); } @@ -120,8 +120,8 @@ public async Task AllowsConcurrentCallsAsync() }; // Act 1: Concurrent calls before resolution are passed into the inner client - var result1 = outer.CompleteAsync([new ChatMessage(ChatRole.User, "some input")]); - var result2 = outer.CompleteAsync([new ChatMessage(ChatRole.User, "some input")]); + var result1 = outer.CompleteAsync("some input"); + var result2 = 
outer.CompleteAsync("some input"); // Assert 1 Assert.Equal(2, innerCallCount); @@ -132,7 +132,7 @@ public async Task AllowsConcurrentCallsAsync() Assert.Equal("Hello", (await result2).Message.Text); // Act 2: Subsequent calls after completion are resolved from the cache - var result3 = outer.CompleteAsync([new ChatMessage(ChatRole.User, "some input")]); + var result3 = outer.CompleteAsync("some input"); Assert.Equal(2, innerCallCount); Assert.Equal("Hello", (await result3).Message.Text); } @@ -270,19 +270,19 @@ public async Task StreamingCachesSuccessResultsAsync() }; // Make the initial request and do a quick sanity check - var result1 = outer.CompleteStreamingAsync([new ChatMessage(ChatRole.User, "some input")]); + var result1 = outer.CompleteStreamingAsync("some input"); await AssertCompletionsEqualAsync(actualCompletion, result1); Assert.Equal(1, innerCallCount); // Act - var result2 = outer.CompleteStreamingAsync([new ChatMessage(ChatRole.User, "some input")]); + var result2 = outer.CompleteStreamingAsync("some input"); // Assert Assert.Equal(1, innerCallCount); await AssertCompletionsEqualAsync(expectedCachedCompletion, result2); // Act/Assert 2: Cache misses do not return cached results - await ToListAsync(outer.CompleteStreamingAsync([new ChatMessage(ChatRole.User, "some modified input")])); + await ToListAsync(outer.CompleteStreamingAsync("some modified input")); Assert.Equal(2, innerCallCount); } @@ -317,11 +317,11 @@ public async Task StreamingCoalescesConsecutiveTextChunksAsync(bool? 
coalesce) outer.CoalesceStreamingUpdates = coalesce.Value; } - var result1 = outer.CompleteStreamingAsync([new ChatMessage(ChatRole.User, "some input")]); + var result1 = outer.CompleteStreamingAsync("some input"); await ToListAsync(result1); // Act - var result2 = outer.CompleteStreamingAsync([new ChatMessage(ChatRole.User, "some input")]); + var result2 = outer.CompleteStreamingAsync("some input"); // Assert if (coalesce is null or true) @@ -389,11 +389,11 @@ public async Task StreamingCoalescingPropagatesMetadataAsync() JsonSerializerOptions = TestJsonSerializerContext.Default.Options }; - var result1 = outer.CompleteStreamingAsync([new ChatMessage(ChatRole.User, "some input")]); + var result1 = outer.CompleteStreamingAsync("some input"); await ToListAsync(result1); // Act - var result2 = outer.CompleteStreamingAsync([new ChatMessage(ChatRole.User, "some input")]); + var result2 = outer.CompleteStreamingAsync("some input"); // Assert var items = await ToListAsync(result2); @@ -432,8 +432,8 @@ public async Task StreamingAllowsConcurrentCallsAsync() }; // Act 1: Concurrent calls before resolution are passed into the inner client - var result1 = outer.CompleteStreamingAsync([new ChatMessage(ChatRole.User, "some input")]); - var result2 = outer.CompleteStreamingAsync([new ChatMessage(ChatRole.User, "some input")]); + var result1 = outer.CompleteStreamingAsync("some input"); + var result2 = outer.CompleteStreamingAsync("some input"); // Assert 1 Assert.NotSame(result1, result2); @@ -447,7 +447,7 @@ public async Task StreamingAllowsConcurrentCallsAsync() Assert.Equal(2, innerCallCount); // Act 2: Subsequent calls after completion are resolved from the cache - var result3 = outer.CompleteStreamingAsync([new ChatMessage(ChatRole.User, "some input")]); + var result3 = outer.CompleteStreamingAsync("some input"); await AssertCompletionsEqualAsync(expectedCompletion, result3); Assert.Equal(2, innerCallCount); } @@ -654,11 +654,11 @@ public async Task 
CanCacheCustomContentTypesAsync() }; // Make the initial request and do a quick sanity check - var result1 = await outer.CompleteAsync([new ChatMessage(ChatRole.User, "some input")]); + var result1 = await outer.CompleteAsync("some input"); AssertCompletionsEqual(expectedCompletion, result1); // Act - var result2 = await outer.CompleteAsync([new ChatMessage(ChatRole.User, "some input")]); + var result2 = await outer.CompleteAsync("some input"); // Assert Assert.Equal(1, innerCallCount); @@ -692,7 +692,7 @@ public async Task CanResolveIDistributedCacheFromDI() // Act: Make a request that should populate the cache Assert.Empty(_storage.Keys); - var result = await outer.CompleteAsync([new ChatMessage(ChatRole.User, "some input")]); + var result = await outer.CompleteAsync("some input"); // Assert Assert.NotNull(result); diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/FunctionInvokingChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/FunctionInvokingChatClientTests.cs index 02c61d5941e..540ca7c8431 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/FunctionInvokingChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/FunctionInvokingChatClientTests.cs @@ -56,11 +56,11 @@ public async Task SupportsSingleFunctionCallPerRequestAsync() [ new ChatMessage(ChatRole.User, "hello"), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId1", "Func1")]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId1", "Func1", result: "Result 1")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId1", result: "Result 1")]), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId2", "Func2", arguments: new Dictionary { { "i", 42 } })]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId2", "Func2", result: "Result 2: 42")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId2", result: 
"Result 2: 42")]), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId3", "VoidReturn", arguments: new Dictionary { { "i", 43 } })]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId3", "VoidReturn", result: "Success: Function completed.")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId3", result: "Success: Function completed.")]), new ChatMessage(ChatRole.Assistant, "world"), ]; @@ -94,9 +94,9 @@ public async Task SupportsMultipleFunctionCallsPerRequestAsync(bool concurrentIn ]), new ChatMessage(ChatRole.Tool, [ - new FunctionResultContent("callId1", "Func1", result: "Result 1"), - new FunctionResultContent("callId2", "Func2", result: "Result 2: 34"), - new FunctionResultContent("callId3", "Func2", result: "Result 2: 56"), + new FunctionResultContent("callId1", result: "Result 1"), + new FunctionResultContent("callId2", result: "Result 2: 34"), + new FunctionResultContent("callId3", result: "Result 2: 56"), ]), new ChatMessage(ChatRole.Assistant, [ @@ -105,8 +105,8 @@ public async Task SupportsMultipleFunctionCallsPerRequestAsync(bool concurrentIn ]), new ChatMessage(ChatRole.Tool, [ - new FunctionResultContent("callId4", "Func2", result: "Result 2: 78"), - new FunctionResultContent("callId5", "Func1", result: "Result 1") + new FunctionResultContent("callId4", result: "Result 2: 78"), + new FunctionResultContent("callId5", result: "Result 1") ]), new ChatMessage(ChatRole.Assistant, "world"), ]; @@ -145,8 +145,8 @@ public async Task ParallelFunctionCallsMayBeInvokedConcurrentlyAsync() ]), new ChatMessage(ChatRole.Tool, [ - new FunctionResultContent("callId1", "Func", result: "hellohello"), - new FunctionResultContent("callId2", "Func", result: "worldworld"), + new FunctionResultContent("callId1", result: "hellohello"), + new FunctionResultContent("callId2", result: "worldworld"), ]), new ChatMessage(ChatRole.Assistant, "done"), ]; @@ -188,8 +188,8 @@ public async Task 
ConcurrentInvocationOfParallelCallsDisabledByDefaultAsync() ]), new ChatMessage(ChatRole.Tool, [ - new FunctionResultContent("callId1", "Func", result: "hellohello"), - new FunctionResultContent("callId2", "Func", result: "worldworld"), + new FunctionResultContent("callId1", result: "hellohello"), + new FunctionResultContent("callId2", result: "worldworld"), ]), new ChatMessage(ChatRole.Assistant, "done"), ]; @@ -218,11 +218,11 @@ public async Task RemovesFunctionCallingMessagesWhenRequestedAsync(bool keepFunc [ new ChatMessage(ChatRole.User, "hello"), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId1", "Func1")]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId1", "Func1", result: "Result 1")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId1", result: "Result 1")]), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId2", "Func2", arguments: new Dictionary { { "i", 42 } })]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId2", "Func2", result: "Result 2: 42")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId2", result: "Result 2: 42")]), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId3", "VoidReturn", arguments: new Dictionary { { "i", 43 } })]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId3", "VoidReturn", result: "Success: Function completed.")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId3", result: "Success: Function completed.")]), new ChatMessage(ChatRole.Assistant, "world"), ]; @@ -270,11 +270,11 @@ public async Task RemovesFunctionCallingContentWhenRequestedAsync(bool keepFunct [ new ChatMessage(ChatRole.User, "hello"), new ChatMessage(ChatRole.Assistant, [new TextContent("extra"), new FunctionCallContent("callId1", "Func1"), new TextContent("stuff")]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId2", "Func1", result: "Result 1")]), + new 
ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId2", result: "Result 1")]), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId2", "Func2", arguments: new Dictionary { { "i", 42 } })]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId2", "Func2", result: "Result 2: 42")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId2", result: "Result 2: 42")]), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId3", "VoidReturn", arguments: new Dictionary { { "i", 43 } }), new TextContent("more")]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId3", "VoidReturn", result: "Success: Function completed.")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId3", result: "Success: Function completed.")]), new ChatMessage(ChatRole.Assistant, "world"), ]; @@ -293,11 +293,11 @@ public async Task RemovesFunctionCallingContentWhenRequestedAsync(bool keepFunct [ new ChatMessage(ChatRole.User, "hello"), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId1", "Func1")]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId2", "Func1", result: "Result 1")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId2", result: "Result 1")]), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId2", "Func2", arguments: new Dictionary { { "i", 42 } })]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId2", "Func2", result: "Result 2: 42")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId2", result: "Result 2: 42")]), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId3", "VoidReturn", arguments: new Dictionary { { "i", 43 } })]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId3", "VoidReturn", result: "Success: Function completed.")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId3", result: "Success: Function 
completed.")]), new ChatMessage(ChatRole.Assistant, "extrastuffmoreworld"), ] : [ @@ -336,7 +336,7 @@ public async Task ExceptionDetailsOnlyReportedWhenRequestedAsync(bool detailedEr [ new ChatMessage(ChatRole.User, "hello"), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId1", "Func1")]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId1", "Func1", result: detailedErrors ? "Error: Function failed. Exception: Oh no!" : "Error: Function failed.")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId1", result: detailedErrors ? "Error: Function failed. Exception: Oh no!" : "Error: Function failed.")]), new ChatMessage(ChatRole.Assistant, "world"), ]; @@ -395,7 +395,7 @@ public async Task FunctionInvocationsLogged(LogLevel level) [ new ChatMessage(ChatRole.User, "hello"), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId1", "Func1", new Dictionary { ["arg1"] = "value1" })]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId1", "Func1", result: "Result 1")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId1", result: "Result 1")]), new ChatMessage(ChatRole.Assistant, "world"), ]; @@ -451,7 +451,7 @@ public async Task FunctionInvocationTrackedWithActivity(bool enableTelemetry) [ new ChatMessage(ChatRole.User, "hello"), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId1", "Func1", new Dictionary { ["arg1"] = "value1" })]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId1", "Func1", result: "Result 1")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId1", result: "Result 1")]), new ChatMessage(ChatRole.Assistant, "world"), ]; @@ -583,9 +583,9 @@ public async Task CanAccesssFunctionInvocationContextFromFunctionCall() [ new ChatMessage(ChatRole.User, "hello"), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId1", "Func1", new Dictionary { ["i"] = 41 })]), - new ChatMessage(ChatRole.Tool, 
[new FunctionResultContent("callId1", "Func1", result: "Result 41")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId1", result: "Result 41")]), new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId2", "Func1", new Dictionary { ["i"] = 42 })]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId2", "Func1", result: "Result 42")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId2", result: "Result 42")]), ]; // The full plan should never be fulfilled @@ -593,7 +593,7 @@ public async Task CanAccesssFunctionInvocationContextFromFunctionCall() [ .. planBeforeTermination, new ChatMessage(ChatRole.Assistant, [new FunctionCallContent("callId3", "Func1", new Dictionary { ["i"] = 43 })]), - new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId3", "Func1", result: "Result 43")]), + new ChatMessage(ChatRole.Tool, [new FunctionResultContent("callId3", result: "Result 43")]), new ChatMessage(ChatRole.Assistant, "world"), ]; @@ -653,7 +653,7 @@ private static async Task> InvokeAndAssertAsync( using CancellationTokenSource cts = new(); List chat = [plan[0]]; - var expectedTotalTokenCounts = 0; + long expectedTotalTokenCounts = 0; using var innerClient = new TestChatClient { diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/OpenTelemetryChatClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/OpenTelemetryChatClientTests.cs index d9dace62191..f4673023d4c 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/OpenTelemetryChatClientTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/ChatCompletion/OpenTelemetryChatClientTests.cs @@ -35,11 +35,10 @@ public async Task ExpectedInformationLogged_Async(bool enableSensitiveData, bool using var innerClient = new TestChatClient { - Metadata = new("testservice", new Uri("http://localhost:12345/something"), "amazingmodel"), CompleteAsyncCallback = async (messages, options, 
cancellationToken) => { await Task.Yield(); - return new ChatCompletion([new ChatMessage(ChatRole.Assistant, "The blue whale, I think.")]) + return new ChatCompletion(new ChatMessage(ChatRole.Assistant, "The blue whale, I think.")) { CompletionId = "id123", FinishReason = ChatFinishReason.Stop, @@ -57,6 +56,9 @@ public async Task ExpectedInformationLogged_Async(bool enableSensitiveData, bool }; }, CompleteStreamingAsyncCallback = CallbackAsync, + GetServiceCallback = (serviceType, serviceKey) => + serviceType == typeof(ChatClientMetadata) ? new ChatClientMetadata("testservice", new Uri("http://localhost:12345/something"), "amazingmodel") : + null, }; async static IAsyncEnumerable CallbackAsync( @@ -110,7 +112,7 @@ async static IAsyncEnumerable CallbackAsync( new(ChatRole.System, "You are a close friend."), new(ChatRole.User, "Hey!"), new(ChatRole.Assistant, [new FunctionCallContent("12345", "GetPersonName")]), - new(ChatRole.Tool, [new FunctionResultContent("12345", "GetPersonName", "John")]), + new(ChatRole.Tool, [new FunctionResultContent("12345", "John")]), new(ChatRole.Assistant, "Hey John, what's up?"), new(ChatRole.User, "What's the biggest animal?") ]; diff --git a/test/Libraries/Microsoft.Extensions.AI.Tests/Embeddings/OpenTelemetryEmbeddingGeneratorTests.cs b/test/Libraries/Microsoft.Extensions.AI.Tests/Embeddings/OpenTelemetryEmbeddingGeneratorTests.cs index e5dc014d6aa..b1d7221e552 100644 --- a/test/Libraries/Microsoft.Extensions.AI.Tests/Embeddings/OpenTelemetryEmbeddingGeneratorTests.cs +++ b/test/Libraries/Microsoft.Extensions.AI.Tests/Embeddings/OpenTelemetryEmbeddingGeneratorTests.cs @@ -30,7 +30,6 @@ public async Task ExpectedInformationLogged_Async() using var innerGenerator = new TestEmbeddingGenerator { - Metadata = new("testservice", new Uri("http://localhost:12345/something"), "amazingmodel", 384), GenerateAsyncCallback = async (values, options, cancellationToken) => { await Task.Yield(); @@ -48,6 +47,9 @@ public async Task 
ExpectedInformationLogged_Async() } }; }, + GetServiceCallback = (serviceType, serviceKey) => + serviceType == typeof(EmbeddingGeneratorMetadata) ? new EmbeddingGeneratorMetadata("testservice", new Uri("http://localhost:12345/something"), "amazingmodel", 384) : + null, }; using var generator = innerGenerator