diff --git a/BotSharp.sln b/BotSharp.sln
index d281450e8..a1c1ddb91 100644
--- a/BotSharp.sln
+++ b/BotSharp.sln
@@ -65,6 +65,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BotSharp.Plugin.GoogleAI",
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BotSharp.Plugin.SemanticKernel", "src\Plugins\BotSharp.Plugin.SemanticKernel\BotSharp.Plugin.SemanticKernel.csproj", "{73EE2CD0-3B27-4F02-A67B-762CBDD740D0}"
 EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BotSharp.Plugin.MicrosoftExtensionsAI", "src\Plugins\BotSharp.Plugin.MicrosoftExtensionsAI\BotSharp.Plugin.MicrosoftExtensionsAI.csproj", "{72CA059E-6AAA-406C-A1EB-A2243E652F5F}"
+EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BotSharp.Plugin.SemanticKernel.UnitTests", "tests\BotSharp.Plugin.SemanticKernel.UnitTests\BotSharp.Plugin.SemanticKernel.UnitTests.csproj", "{BC57D428-A1A4-4D38-A2D0-AC6CA943F247}"
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BotSharp.Plugin.Twilio", "src\Plugins\BotSharp.Plugin.Twilio\BotSharp.Plugin.Twilio.csproj", "{E627F1E3-BE03-443A-83A2-86A855A278EB}"
@@ -283,6 +285,14 @@ Global
     {73EE2CD0-3B27-4F02-A67B-762CBDD740D0}.Release|Any CPU.Build.0 = Release|Any CPU
     {73EE2CD0-3B27-4F02-A67B-762CBDD740D0}.Release|x64.ActiveCfg = Release|Any CPU
     {73EE2CD0-3B27-4F02-A67B-762CBDD740D0}.Release|x64.Build.0 = Release|Any CPU
+    {72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+    {72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Debug|Any CPU.Build.0 = Debug|Any CPU
+    {72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Debug|x64.ActiveCfg = Debug|Any CPU
+    {72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Debug|x64.Build.0 = Debug|Any CPU
+    {72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Release|Any CPU.ActiveCfg = Release|Any CPU
+    {72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Release|Any CPU.Build.0 = Release|Any CPU
+    {72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Release|x64.ActiveCfg = Release|Any CPU
+    {72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Release|x64.Build.0 = Release|Any CPU
     {BC57D428-A1A4-4D38-A2D0-AC6CA943F247}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
     {BC57D428-A1A4-4D38-A2D0-AC6CA943F247}.Debug|Any CPU.Build.0 = Debug|Any CPU
     {BC57D428-A1A4-4D38-A2D0-AC6CA943F247}.Debug|x64.ActiveCfg = Debug|Any CPU
@@ -491,6 +501,7 @@ Global
     {DB3DE37B-1208-4ED3-9615-A52AD0AAD69C} = {5CD330E1-9E5A-4112-8346-6E31CA98EF78}
     {8BC29F8A-78D6-422C-B522-10687ADC38ED} = {D5293208-2BEF-42FC-A64C-5954F61720BA}
     {73EE2CD0-3B27-4F02-A67B-762CBDD740D0} = {D5293208-2BEF-42FC-A64C-5954F61720BA}
+    {72CA059E-6AAA-406C-A1EB-A2243E652F5F} = {D5293208-2BEF-42FC-A64C-5954F61720BA}
     {BC57D428-A1A4-4D38-A2D0-AC6CA943F247} = {32FAFFFE-A4CB-4FEE-BF7C-84518BBC6DCC}
     {E627F1E3-BE03-443A-83A2-86A855A278EB} = {64264688-0F5C-4AB0-8F2B-B59B717CCE00}
     {F06B22CB-B143-4680-8FFF-35B9E50E6C47} = {51AFE054-AE99-497D-A593-69BAEFB5106F}
diff --git a/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/BotSharp.Plugin.MicrosoftExtensionsAI.csproj b/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/BotSharp.Plugin.MicrosoftExtensionsAI.csproj
new file mode 100644
index 000000000..53518b388
--- /dev/null
+++ b/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/BotSharp.Plugin.MicrosoftExtensionsAI.csproj
@@ -0,0 +1,23 @@
+
+
+
+    $(TargetFramework)
+    enable
+    $(LangVersion)
+    $(BotSharpVersion)
+    $(GeneratePackageOnBuild)
+    $(GeneratePackageOnBuild)
+    $(SolutionDir)packages
+    $(NoWarn);NU5104
+
+
+
+
+
+
+
+
+
+
+
+
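The XML markup of the new project file is not legible in the hunk above; only element values and blank added lines remain visible. For orientation, a BotSharp plugin project of this shape typically looks roughly like the sketch below, where every element name, the Microsoft.Extensions.AI.Abstractions package reference, and the BotSharp.Abstraction project path are assumptions rather than text taken from this PR. The NU5104 suppression is presumably needed because Microsoft.Extensions.AI only shipped prerelease packages at the time, and packing a stable-versioned package against a prerelease dependency would otherwise raise that warning.

    <Project Sdk="Microsoft.NET.Sdk">

      <PropertyGroup>
        <TargetFramework>$(TargetFramework)</TargetFramework>
        <Nullable>enable</Nullable>
        <LangVersion>$(LangVersion)</LangVersion>
        <Version>$(BotSharpVersion)</Version>
        <GeneratePackageOnBuild>$(GeneratePackageOnBuild)</GeneratePackageOnBuild>
        <GenerateDocumentationFile>$(GeneratePackageOnBuild)</GenerateDocumentationFile>
        <PackageOutputPath>$(SolutionDir)packages</PackageOutputPath>
        <NoWarn>$(NoWarn);NU5104</NoWarn>
      </PropertyGroup>

      <ItemGroup>
        <!-- Assumed: the exact package reference and version are not visible in the diff. -->
        <PackageReference Include="Microsoft.Extensions.AI.Abstractions" Version="$(MicrosoftExtensionsAIVersion)" />
      </ItemGroup>

      <ItemGroup>
        <!-- Assumed: the customary BotSharp plugin reference back to the abstractions project. -->
        <ProjectReference Include="..\..\Infrastructure\BotSharp.Abstraction\BotSharp.Abstraction.csproj" />
      </ItemGroup>

    </Project>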
diff --git a/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/MicrosoftExtensionsAIChatCompletionProvider.cs b/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/MicrosoftExtensionsAIChatCompletionProvider.cs
new file mode 100644
index 000000000..67f1e5132
--- /dev/null
+++ b/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/MicrosoftExtensionsAIChatCompletionProvider.cs
@@ -0,0 +1,185 @@
+using BotSharp.Abstraction.Agents.Enums;
+using BotSharp.Abstraction.Agents.Models;
+using BotSharp.Abstraction.Agents;
+using BotSharp.Abstraction.Conversations.Models;
+using BotSharp.Abstraction.Conversations;
+using BotSharp.Abstraction.Files;
+using BotSharp.Abstraction.Files.Utilities;
+using BotSharp.Abstraction.Loggers;
+using BotSharp.Abstraction.MLTasks;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using System;
+using System.Linq;
+using System.Text.Json;
+using System.Threading;
+
+namespace BotSharp.Plugin.MicrosoftExtensionsAI;
+
+/// <summary>
+/// Provides an implementation of <see cref="IChatCompletion"/> for Microsoft.Extensions.AI.
+/// </summary>
+public sealed class MicrosoftExtensionsAIChatCompletionProvider : IChatCompletion
+{
+    private readonly IChatClient _client;
+    private readonly ILogger<MicrosoftExtensionsAIChatCompletionProvider> _logger;
+    private readonly IServiceProvider _services;
+    private string? _model;
+
+    /// <summary>
+    /// Creates an instance of the <see cref="MicrosoftExtensionsAIChatCompletionProvider"/> class.
+    /// </summary>
+    public MicrosoftExtensionsAIChatCompletionProvider(
+        IChatClient client,
+        ILogger<MicrosoftExtensionsAIChatCompletionProvider> logger,
+        IServiceProvider services)
+    {
+        _client = client;
+        _model = _client.Metadata.ModelId;
+        _logger = logger;
+        _services = services;
+    }
+
+    /// <inheritdoc/>
+    public string Provider => "microsoft.extensions.ai";
+
+    /// <inheritdoc/>
+    public void SetModelName(string model) => _model = model;
+
+    /// <inheritdoc/>
+    public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDialogModel> conversations)
+    {
+        // Before chat completion hook
+        var hooks = _services.GetServices<IContentGeneratingHook>().ToArray();
+        await Task.WhenAll(hooks.Select(hook => hook.BeforeGenerating(agent, conversations)));
+
+        // Configure options
+        var state = _services.GetRequiredService<IConversationStateService>();
+        var options = new ChatOptions()
+        {
+            Temperature = float.Parse(state.GetState("temperature", "0.0")),
+            MaxOutputTokens = int.Parse(state.GetState("max_tokens", "1024"))
+        };
+
+        if (_services.GetService<IAgentService>() is { } agentService)
+        {
+            foreach (var function in agent.Functions)
+            {
+                if (agentService.RenderFunction(agent, function))
+                {
+                    var property = agentService.RenderFunctionProperty(agent, function);
+                    (options.Tools ??= []).Add(new NopAIFunction(new(function.Name)
+                    {
+                        Description = function.Description,
+                        Parameters = property?.Properties.RootElement.Deserialize<Dictionary<string, JsonElement>>()?.Select(p => new AIFunctionParameterMetadata(p.Key)
+                        {
+                            Schema = p.Value,
+                        }).ToList() ?? [],
+                    }));
+                }
+            }
+        }
+
+        // Configure messages
+        List<ChatMessage> messages = [];
+
+        if (_services.GetRequiredService<IAgentService>().RenderedInstruction(agent) is string instruction &&
+            instruction.Length > 0)
+        {
+            messages.Add(new(ChatRole.System, instruction));
+        }
+
+        if (!string.IsNullOrEmpty(agent.Knowledges))
+        {
+            messages.Add(new(ChatRole.System, agent.Knowledges));
+        }
+
+        foreach (var sample in ProviderHelper.GetChatSamples(agent.Samples))
+        {
+            messages.Add(new(sample.Role == AgentRole.Assistant ? ChatRole.Assistant : ChatRole.User, sample.Content));
+        }
+
+        var fileStorage = _services.GetService<IFileStorageService>();
+        bool allowMultiModal = fileStorage is not null && _services.GetService<ILlmProviderService>()?.GetSetting(Provider, _model ?? "default")?.MultiModal is true;
+        foreach (var x in conversations)
+        {
+            if (x.Role == AgentRole.Function && x.FunctionName is not null)
+            {
+                messages.Add(new(ChatRole.Assistant,
+                [
+                    new FunctionCallContent(x.FunctionName, x.FunctionName, JsonSerializer.Deserialize<Dictionary<string, object?>>(x.FunctionArgs ?? "{}")),
+                    new FunctionResultContent(x.FunctionName, x.FunctionName, x.Content)
+                ]));
+            }
+            else if (x.Role == AgentRole.System || x.Role == AgentRole.Assistant)
+            {
+                messages.Add(new(x.Role == AgentRole.System ? ChatRole.System : ChatRole.Assistant, x.Content));
+            }
+            else if (x.Role == AgentRole.User)
+            {
+                List<AIContent> contents = [new TextContent(!string.IsNullOrWhiteSpace(x.Payload) ? x.Payload : x.Content)];
+                if (allowMultiModal)
+                {
+                    foreach (var file in x.Files)
+                    {
+                        if (!string.IsNullOrEmpty(file.FileData))
+                        {
+                            contents.Add(new ImageContent(file.FileData));
+                        }
+                        else if (!string.IsNullOrEmpty(file.FileStorageUrl))
+                        {
+                            var contentType = FileUtility.GetFileContentType(file.FileStorageUrl);
+                            var bytes = fileStorage!.GetFileBytes(file.FileStorageUrl);
+                            contents.Add(new ImageContent(bytes, contentType));
+                        }
+                        else if (!string.IsNullOrEmpty(file.FileUrl))
+                        {
+                            contents.Add(new ImageContent(file.FileUrl));
+                        }
+                    }
+                }
+
+                messages.Add(new(ChatRole.User, contents) { AuthorName = x.FunctionName });
+            }
+        }
+
+        var completion = await _client.CompleteAsync(messages, options);
+
+        RoleDialogModel result = new(AgentRole.Assistant, string.Concat(completion.Message.Contents.OfType<TextContent>()))
+        {
+            CurrentAgentId = agent.Id
+        };
+
+        if (completion.Message.Contents.OfType<FunctionCallContent>().FirstOrDefault() is { } fcc)
+        {
+            result.Role = AgentRole.Function;
+            result.MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty;
+            result.FunctionName = fcc.Name;
+            result.FunctionArgs = fcc.Arguments is not null ? JsonSerializer.Serialize(fcc.Arguments) : null;
+        }
+
+        // After chat completion hook
+        await Task.WhenAll(hooks.Select(hook => hook.AfterGenerated(result, new() { Model = _model ?? "default" })));
+
+        return result;
+    }
+
+    /// <inheritdoc/>
+    public Task<bool> GetChatCompletionsAsync(Agent agent, List<RoleDialogModel> conversations, Func<RoleDialogModel, Task> onMessageReceived, Func<RoleDialogModel, Task> onFunctionExecuting) =>
+        throw new NotImplementedException();
+
+    /// <inheritdoc/>
+    public Task<bool> GetChatCompletionsStreamingAsync(Agent agent, List<RoleDialogModel> conversations, Func<RoleDialogModel, Task> onMessageReceived) =>
+        throw new NotImplementedException();
+
+    private sealed class NopAIFunction(AIFunctionMetadata metadata) : AIFunction
+    {
+        public override AIFunctionMetadata Metadata { get; } = metadata;
+
+        protected override Task<object?> InvokeCoreAsync(IEnumerable<KeyValuePair<string, object?>> arguments, CancellationToken cancellationToken) =>
+            throw new NotSupportedException();
+    }
+}
\ No newline at end of file
diff --git a/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/MicrosoftExtensionsAIPlugin.cs b/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/MicrosoftExtensionsAIPlugin.cs
new file mode 100644
index 000000000..105e00846
--- /dev/null
+++ b/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/MicrosoftExtensionsAIPlugin.cs
@@ -0,0 +1,29 @@
+using BotSharp.Abstraction.MLTasks;
+using BotSharp.Abstraction.Plugins;
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.DependencyInjection;
+
+namespace BotSharp.Plugin.MicrosoftExtensionsAI;
+
+/// <summary>
+/// Use Microsoft.Extensions.AI as a BotSharp plugin.
+/// </summary>
+public sealed class MicrosoftExtensionsAIPlugin : IBotSharpPlugin
+{
+    /// <inheritdoc/>
+    public string Id => "B7F2AB8D-1BBA-41CE-9642-2D5E6B5F86A0";
+
+    /// <inheritdoc/>
+    public string Name => "Microsoft.Extensions.AI";
+
+    /// <inheritdoc/>
+    public string Description => "Microsoft.Extensions.AI Service";
+
+    /// <inheritdoc/>
+    public void RegisterDI(IServiceCollection services, IConfiguration config)
+    {
+        services.AddScoped<IChatCompletion, MicrosoftExtensionsAIChatCompletionProvider>();
+        services.AddScoped<ITextCompletion, MicrosoftExtensionsAITextCompletionProvider>();
+        services.AddScoped<ITextEmbedding, MicrosoftExtensionsAITextEmbeddingProvider>();
+    }
+}
\ No newline at end of file
diff --git a/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/MicrosoftExtensionsAITextCompletionProvider.cs b/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/MicrosoftExtensionsAITextCompletionProvider.cs
new file mode 100644
index 000000000..152213c61
--- /dev/null
+++ b/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/MicrosoftExtensionsAITextCompletionProvider.cs
@@ -0,0 +1,71 @@
+using BotSharp.Abstraction.Agents.Enums;
+using BotSharp.Abstraction.Agents.Models;
+using BotSharp.Abstraction.Conversations;
+using BotSharp.Abstraction.Conversations.Models;
+using BotSharp.Abstraction.Loggers;
+using BotSharp.Abstraction.MLTasks;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.DependencyInjection;
+using System;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace BotSharp.Plugin.MicrosoftExtensionsAI;
+
+/// <summary>
+/// Provides an implementation of <see cref="ITextCompletion"/> for Microsoft.Extensions.AI.
+/// </summary>
+public sealed class MicrosoftExtensionsAITextCompletionProvider : ITextCompletion
+{
+    private readonly IChatClient _chatClient;
+    private readonly IServiceProvider _services;
+    private readonly ITokenStatistics _tokenStatistics;
+    private string? _model = null;
+
+    /// <inheritdoc/>
+    public string Provider => "microsoft-extensions-ai";
+
+    /// <summary>
+    /// Creates an instance of the <see cref="MicrosoftExtensionsAITextCompletionProvider"/> class.
+    /// </summary>
+    public MicrosoftExtensionsAITextCompletionProvider(
+        IChatClient chatClient,
+        IServiceProvider services,
+        ITokenStatistics tokenStatistics)
+    {
+        _chatClient = chatClient;
+        _services = services;
+        _tokenStatistics = tokenStatistics;
+    }
+
+    /// <inheritdoc/>
+    public async Task<string> GetCompletion(string text, string agentId, string messageId)
+    {
+        var hooks = _services.GetServices<IContentGeneratingHook>().ToArray();
+
+        // Before chat completion hook
+        Agent agent = new() { Id = agentId };
+        RoleDialogModel userMessage = new(AgentRole.User, text) { MessageId = messageId };
+        await Task.WhenAll(hooks.Select(hook => hook.BeforeGenerating(agent, [userMessage])));
+
+        _tokenStatistics.StartTimer();
+        var completion = await _chatClient.CompleteAsync(text);
+        var result = string.Concat(completion.Message.Contents.OfType<TextContent>());
+        _tokenStatistics.StopTimer();
+
+        // After chat completion hook
+        await Task.WhenAll(hooks.Select(hook =>
+            hook.AfterGenerated(new(AgentRole.Assistant, result), new() { Model = _model ?? "default" })));
+
+        return result;
+    }
+
+    /// <inheritdoc/>
+    public void SetModelName(string model)
+    {
+        if (!string.IsNullOrWhiteSpace(model))
+        {
+            _model = model;
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/MicrosoftExtensionsAITextEmbeddingProvider.cs b/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/MicrosoftExtensionsAITextEmbeddingProvider.cs
new file mode 100644
index 000000000..958c46944
--- /dev/null
+++ b/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/MicrosoftExtensionsAITextEmbeddingProvider.cs
@@ -0,0 +1,59 @@
+using BotSharp.Abstraction.MLTasks;
+using Microsoft.Extensions.AI;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace BotSharp.Plugin.MicrosoftExtensionsAI;
+
+/// <summary>
+/// Provides an implementation of <see cref="ITextEmbedding"/> for Microsoft.Extensions.AI.
+/// </summary>
+public sealed class MicrosoftExtensionsAITextEmbeddingProvider : ITextEmbedding
+{
+    private readonly IEmbeddingGenerator<string, Embedding<float>> _generator;
+    private string? _model;
+    private int? _dimensions;
+
+    /// <summary>
+    /// Creates an instance of the <see cref="MicrosoftExtensionsAITextEmbeddingProvider"/> class.
+    /// </summary>
+    public MicrosoftExtensionsAITextEmbeddingProvider(IEmbeddingGenerator<string, Embedding<float>> generator) =>
+        _generator = generator;
+
+    /// <inheritdoc/>
+    public string Provider => "microsoft-extensions-ai";
+
+    /// <inheritdoc/>
+    public async Task<float[]> GetVectorAsync(string text) =>
+        (await _generator.GenerateEmbeddingVectorAsync(text, CreateOptions())).ToArray();
+
+    /// <inheritdoc/>
+    public async Task<List<float[]>> GetVectorsAsync(List<string> texts)
+    {
+        var embeddings = await _generator.GenerateAsync(texts, CreateOptions());
+        return embeddings.Select(e => e.Vector.ToArray()).ToList();
+    }
+
+    /// <inheritdoc/>
+    public void SetModelName(string model) => _model = model;
+
+    /// <inheritdoc/>
+    public void SetDimension(int dimension)
+    {
+        if (dimension > 0)
+        {
+            _dimensions = dimension;
+        }
+    }
+
+    /// <inheritdoc/>
+    public int GetDimension() => _dimensions ?? 0;
+
+    private EmbeddingGenerationOptions CreateOptions() =>
+        new()
+        {
+            ModelId = _model,
+            Dimensions = _dimensions,
+        };
+}
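With the providers above registered, callers go through BotSharp's usual abstractions rather than IChatClient directly: GetChatCompletions renders the agent's instruction, knowledge, samples, and dialog history into ChatMessage instances, exposes rendered function definitions via ChatOptions.Tools, and maps any FunctionCallContent in the reply back onto a RoleDialogModel. A caller-side sketch follows; the resolution pattern, variable names, and model id are illustrative assumptions, and the usual BotSharp.Abstraction and Microsoft.Extensions.DependencyInjection usings are taken as given.

    // Hypothetical call site, not part of this PR.
    var chat = serviceProvider.GetServices<IChatCompletion>()
        .First(x => x.Provider == "microsoft.extensions.ai");
    chat.SetModelName("gpt-4o-mini"); // assumed model id

    var reply = await chat.GetChatCompletions(agent,
    [
        new RoleDialogModel(AgentRole.User, "What's the weather in Tokyo?")
    ]);

    if (reply.Role == AgentRole.Function)
    {
        // The underlying IChatClient returned a FunctionCallContent: execute the named
        // function, append the result as an AgentRole.Function message, and call
        // GetChatCompletions again so the model can continue.
        Console.WriteLine($"{reply.FunctionName}({reply.FunctionArgs})");
    }
    else
    {
        Console.WriteLine(reply.Content);
    }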
diff --git a/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/ProviderHelper.cs b/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/ProviderHelper.cs
new file mode 100644
index 000000000..17f0b82e1
--- /dev/null
+++ b/src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/ProviderHelper.cs
@@ -0,0 +1,26 @@
+using BotSharp.Abstraction.Conversations.Models;
+using System.Collections.Generic;
+
+namespace BotSharp.Plugin.MicrosoftExtensionsAI;
+
+internal static class ProviderHelper
+{
+    public static IEnumerable<RoleDialogModel> GetChatSamples(List<string> lines)
+    {
+        foreach (string line in lines)
+        {
+            if (!string.IsNullOrWhiteSpace(line))
+            {
+                int pos = line.IndexOf(' ');
+                if (pos > 0)
+                {
+                    string role = line.Substring(0, pos - 1).Trim();
+                    if (role != "##") // skip comments
+                    {
+                        yield return new RoleDialogModel(role, line.Substring(line.IndexOf(' ') + 1).Trim());
+                    }
+                }
+            }
+        }
+    }
+}
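ProviderHelper.GetChatSamples assumes BotSharp's sample format of one "role: content" exchange per line: the text before the first space, minus its trailing colon, becomes the role, and the remainder becomes the content. A small illustration with made-up sample lines (the helper is internal, so this only compiles from inside the plugin assembly):

    // Illustrative input only; the sample strings are not from this PR.
    var samples = ProviderHelper.GetChatSamples(
    [
        "user: What is the weather like today?",
        "assistant: It is sunny and 25 degrees."
    ]).ToList();

    // samples[0]: Role = "user",      Content = "What is the weather like today?"
    // samples[1]: Role = "assistant", Content = "It is sunny and 25 degrees."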