11 changes: 11 additions & 0 deletions BotSharp.sln
@@ -65,6 +65,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BotSharp.Plugin.GoogleAI",
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BotSharp.Plugin.SemanticKernel", "src\Plugins\BotSharp.Plugin.SemanticKernel\BotSharp.Plugin.SemanticKernel.csproj", "{73EE2CD0-3B27-4F02-A67B-762CBDD740D0}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BotSharp.Plugin.MicrosoftExtensionsAI", "src\Plugins\BotSharp.Plugin.MicrosoftExtensionsAI\BotSharp.Plugin.MicrosoftExtensionsAI.csproj", "{72CA059E-6AAA-406C-A1EB-A2243E652F5F}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BotSharp.Plugin.SemanticKernel.UnitTests", "tests\BotSharp.Plugin.SemanticKernel.UnitTests\BotSharp.Plugin.SemanticKernel.UnitTests.csproj", "{BC57D428-A1A4-4D38-A2D0-AC6CA943F247}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BotSharp.Plugin.Twilio", "src\Plugins\BotSharp.Plugin.Twilio\BotSharp.Plugin.Twilio.csproj", "{E627F1E3-BE03-443A-83A2-86A855A278EB}"
@@ -283,6 +285,14 @@ Global
{73EE2CD0-3B27-4F02-A67B-762CBDD740D0}.Release|Any CPU.Build.0 = Release|Any CPU
{73EE2CD0-3B27-4F02-A67B-762CBDD740D0}.Release|x64.ActiveCfg = Release|Any CPU
{73EE2CD0-3B27-4F02-A67B-762CBDD740D0}.Release|x64.Build.0 = Release|Any CPU
{72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Debug|x64.ActiveCfg = Debug|Any CPU
{72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Debug|x64.Build.0 = Debug|Any CPU
{72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Release|Any CPU.Build.0 = Release|Any CPU
{72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Release|x64.ActiveCfg = Release|Any CPU
{72CA059E-6AAA-406C-A1EB-A2243E652F5F}.Release|x64.Build.0 = Release|Any CPU
{BC57D428-A1A4-4D38-A2D0-AC6CA943F247}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{BC57D428-A1A4-4D38-A2D0-AC6CA943F247}.Debug|Any CPU.Build.0 = Debug|Any CPU
{BC57D428-A1A4-4D38-A2D0-AC6CA943F247}.Debug|x64.ActiveCfg = Debug|Any CPU
@@ -491,6 +501,7 @@ Global
{DB3DE37B-1208-4ED3-9615-A52AD0AAD69C} = {5CD330E1-9E5A-4112-8346-6E31CA98EF78}
{8BC29F8A-78D6-422C-B522-10687ADC38ED} = {D5293208-2BEF-42FC-A64C-5954F61720BA}
{73EE2CD0-3B27-4F02-A67B-762CBDD740D0} = {D5293208-2BEF-42FC-A64C-5954F61720BA}
{72CA059E-6AAA-406C-A1EB-A2243E652F5F} = {D5293208-2BEF-42FC-A64C-5954F61720BA}
{BC57D428-A1A4-4D38-A2D0-AC6CA943F247} = {32FAFFFE-A4CB-4FEE-BF7C-84518BBC6DCC}
{E627F1E3-BE03-443A-83A2-86A855A278EB} = {64264688-0F5C-4AB0-8F2B-B59B717CCE00}
{F06B22CB-B143-4680-8FFF-35B9E50E6C47} = {51AFE054-AE99-497D-A593-69BAEFB5106F}
23 changes: 23 additions & 0 deletions src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/BotSharp.Plugin.MicrosoftExtensionsAI.csproj
@@ -0,0 +1,23 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>$(TargetFramework)</TargetFramework>
<Nullable>enable</Nullable>
<LangVersion>$(LangVersion)</LangVersion>
<VersionPrefix>$(BotSharpVersion)</VersionPrefix>
<GeneratePackageOnBuild>$(GeneratePackageOnBuild)</GeneratePackageOnBuild>
<GenerateDocumentationFile>$(GeneratePackageOnBuild)</GenerateDocumentationFile>
<OutputPath>$(SolutionDir)packages</OutputPath>
<NoWarn>$(NoWarn);NU5104</NoWarn>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Microsoft.Extensions.AI.Abstractions" Version="9.0.0-preview.9.24525.1" />
<PackageReference Include="System.Text.Encodings.Web" Version="8.0.0" />
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\..\Infrastructure\BotSharp.Abstraction\BotSharp.Abstraction.csproj" />
</ItemGroup>

</Project>
185 changes: 185 additions & 0 deletions src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/MicrosoftExtensionsAIChatCompletionProvider.cs
@@ -0,0 +1,185 @@
using BotSharp.Abstraction.Agents.Enums;
using BotSharp.Abstraction.Agents.Models;
using BotSharp.Abstraction.Agents;
using BotSharp.Abstraction.Conversations.Models;
using BotSharp.Abstraction.Conversations;
using BotSharp.Abstraction.Files;
using BotSharp.Abstraction.Files.Utilities;
using BotSharp.Abstraction.Loggers;
using BotSharp.Abstraction.MLTasks;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using System.Collections.Generic;
using System.Threading.Tasks;
using System;
using System.Linq;
using System.Text.Json;
using System.Threading;

namespace BotSharp.Plugin.MicrosoftExtensionsAI;

/// <summary>
/// Provides an implementation of <see cref="IChatCompletion"/> for Microsoft.Extensions.AI.
/// </summary>
public sealed class MicrosoftExtensionsAIChatCompletionProvider : IChatCompletion
{
private readonly IChatClient _client;
private readonly ILogger<MicrosoftExtensionsAIChatCompletionProvider> _logger;
private readonly IServiceProvider _services;
private string? _model;

/// <summary>
/// Creates an instance of the <see cref="MicrosoftExtensionsAIChatCompletionProvider"/> class.
/// </summary>
public MicrosoftExtensionsAIChatCompletionProvider(
IChatClient client,
ILogger<MicrosoftExtensionsAIChatCompletionProvider> logger,
IServiceProvider services)
{
_client = client;
_model = _client.Metadata.ModelId;
_logger = logger;
_services = services;
}

/// <inheritdoc/>
public string Provider => "microsoft.extensions.ai";

/// <inheritdoc/>
public void SetModelName(string model) => _model = model;

/// <inheritdoc/>
public async Task<RoleDialogModel> GetChatCompletions(Agent agent, List<RoleDialogModel> conversations)
{
// Before chat completion hook
var hooks = _services.GetServices<IContentGeneratingHook>().ToArray();
await Task.WhenAll(hooks.Select(hook => hook.BeforeGenerating(agent, conversations)));

// Configure options
var state = _services.GetRequiredService<IConversationStateService>();
var options = new ChatOptions()
{
Temperature = float.Parse(state.GetState("temperature", "0.0")),
MaxOutputTokens = int.Parse(state.GetState("max_tokens", "1024"))
};

if (_services.GetService<IAgentService>() is { } agentService)
{
foreach (var function in agent.Functions)
{
if (agentService.RenderFunction(agent, function))
{
var property = agentService.RenderFunctionProperty(agent, function);
(options.Tools ??= []).Add(new NopAIFunction(new(function.Name)
{
Description = function.Description,
Parameters = property?.Properties.RootElement.Deserialize<Dictionary<string, object?>>()?.Select(p => new AIFunctionParameterMetadata(p.Key)
{
Schema = p.Value,
}).ToList() ?? [],
}));
}
}
}

// Configure messages
List<ChatMessage> messages = [];

if (_services.GetRequiredService<IAgentService>().RenderedInstruction(agent) is string instruction &&
instruction.Length > 0)
{
messages.Add(new(ChatRole.System, instruction));
}

if (!string.IsNullOrEmpty(agent.Knowledges))
{
messages.Add(new(ChatRole.System, agent.Knowledges));
}

foreach (var sample in ProviderHelper.GetChatSamples(agent.Samples))
{
messages.Add(new(sample.Role == AgentRole.Assistant ? ChatRole.Assistant : ChatRole.User, sample.Content));
}

var fileStorage = _services.GetService<IFileStorageService>();
bool allowMultiModal = fileStorage is not null && _services.GetService<ILlmProviderService>()?.GetSetting(Provider, _model ?? "default")?.MultiModal is true;
foreach (var x in conversations)
{
if (x.Role == AgentRole.Function && x.FunctionName is not null)
{
messages.Add(new(ChatRole.Assistant,
[
new FunctionCallContent(x.FunctionName, x.FunctionName, JsonSerializer.Deserialize<Dictionary<string, object?>>(x.FunctionArgs ?? "{}")),
new FunctionResultContent(x.FunctionName, x.FunctionName, x.Content)
]));
}
else if (x.Role == AgentRole.System || x.Role == AgentRole.Assistant)
{
messages.Add(new(x.Role == AgentRole.System ? ChatRole.System : ChatRole.Assistant, x.Content));
}
else if (x.Role == AgentRole.User)
{
List<AIContent> contents = [new TextContent(!string.IsNullOrWhiteSpace(x.Payload) ? x.Payload : x.Content)];
if (allowMultiModal)
{
foreach (var file in x.Files)
{
if (!string.IsNullOrEmpty(file.FileData))
{
contents.Add(new ImageContent(file.FileData));
}
else if (!string.IsNullOrEmpty(file.FileStorageUrl))
{
var contentType = FileUtility.GetFileContentType(file.FileStorageUrl);
var bytes = fileStorage!.GetFileBytes(file.FileStorageUrl);
contents.Add(new ImageContent(bytes, contentType));
}
else if (!string.IsNullOrEmpty(file.FileUrl))
{
contents.Add(new ImageContent(file.FileUrl));
}
}
}

messages.Add(new(ChatRole.User, contents) { AuthorName = x.FunctionName });
}
}

var completion = await _client.CompleteAsync(messages);

RoleDialogModel result = new(AgentRole.Assistant, string.Concat(completion.Message.Contents.OfType<TextContent>()))
{
CurrentAgentId = agent.Id
};

if (completion.Message.Contents.OfType<FunctionCallContent>().FirstOrDefault() is { } fcc)
{
result.Role = AgentRole.Function;
result.MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty;
result.FunctionName = fcc.Name;
result.FunctionArgs = fcc.Arguments is not null ? JsonSerializer.Serialize(fcc.Arguments) : null;
}

// After chat completion hook
await Task.WhenAll(hooks.Select(hook => hook.AfterGenerated(result, new() { Model = _model ?? "default" })));

return result;
}

/// <inheritdoc/>
public Task<bool> GetChatCompletionsAsync(Agent agent, List<RoleDialogModel> conversations, Func<RoleDialogModel, Task> onMessageReceived, Func<RoleDialogModel, Task> onFunctionExecuting) =>
throw new NotImplementedException();

/// <inheritdoc/>
public Task<bool> GetChatCompletionsStreamingAsync(Agent agent, List<RoleDialogModel> conversations, Func<RoleDialogModel, Task> onMessageReceived) =>
throw new NotImplementedException();

private sealed class NopAIFunction(AIFunctionMetadata metadata) : AIFunction
{
public override AIFunctionMetadata Metadata { get; } = metadata;

protected override Task<object?> InvokeCoreAsync(IEnumerable<KeyValuePair<string, object?>> arguments, CancellationToken cancellationToken) =>
throw new NotSupportedException();
}
}
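
Note (not part of the diff): a minimal sketch of how the response-mapping step at the end of `GetChatCompletions` treats a completion that mixes text and a tool call, using the same Microsoft.Extensions.AI content types the provider consumes above; the message contents below are fabricated.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using Microsoft.Extensions.AI;

// Illustrative assistant message mixing text with a function call (values are made up).
var message = new ChatMessage(ChatRole.Assistant,
[
    new TextContent("Let me check the weather."),
    new FunctionCallContent("get_weather", "get_weather",
        new Dictionary<string, object?> { ["city"] = "Seattle" })
]);

// Text parts are concatenated into the dialog content, as the provider does above.
var text = string.Concat(message.Contents.OfType<TextContent>());

// The first FunctionCallContent, if present, switches the resulting dialog to the
// Function role, carrying the call name and serialized arguments.
if (message.Contents.OfType<FunctionCallContent>().FirstOrDefault() is { } call)
{
    Console.WriteLine($"{call.Name}({JsonSerializer.Serialize(call.Arguments)})");
}
Console.WriteLine(text);
```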
29 changes: 29 additions & 0 deletions src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/MicrosoftExtensionsAIPlugin.cs
@@ -0,0 +1,29 @@
using BotSharp.Abstraction.MLTasks;
using BotSharp.Abstraction.Plugins;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;

namespace BotSharp.Plugin.MicrosoftExtensionsAI;

/// <summary>
/// Use Microsoft.Extensions.AI as a BotSharp plugin.
/// </summary>
public sealed class MicrosoftExtensionsAIPlugin : IBotSharpPlugin
{
/// <inheritdoc/>
public string Id => "B7F2AB8D-1BBA-41CE-9642-2D5E6B5F86A0";

/// <inheritdoc/>
public string Name => "Microsoft.Extensions.AI";

/// <inheritdoc/>
public string Description => "Microsoft.Extensions.AI Service";

/// <inheritdoc/>
public void RegisterDI(IServiceCollection services, IConfiguration config)
{
services.AddScoped<ITextCompletion, MicrosoftExtensionsAITextCompletionProvider>();
services.AddScoped<IChatCompletion, MicrosoftExtensionsAIChatCompletionProvider>();
services.AddScoped<ITextEmbedding, MicrosoftExtensionsAITextEmbeddingProvider>();
}
}
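
Note (not part of the diff): `RegisterDI` adds the BotSharp-facing providers but not an `IChatClient`, which the chat and text completion providers require in their constructors, so the hosting application still has to register a concrete Microsoft.Extensions.AI client. A hypothetical helper for that extra registration (the extension method and its name are illustrative only):

```csharp
using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;

public static class ChatClientRegistrationExtensions
{
    /// <summary>
    /// Registers the IChatClient that the MicrosoftExtensionsAI providers resolve from DI.
    /// The concrete client comes from whichever Microsoft.Extensions.AI implementation
    /// package the host application uses.
    /// </summary>
    public static IServiceCollection AddBotSharpChatClient(
        this IServiceCollection services, IChatClient client)
        => services.AddSingleton(client);
}
```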
71 changes: 71 additions & 0 deletions src/Plugins/BotSharp.Plugin.MicrosoftExtensionsAI/MicrosoftExtensionsAITextCompletionProvider.cs
@@ -0,0 +1,71 @@
using BotSharp.Abstraction.Agents.Enums;
using BotSharp.Abstraction.Agents.Models;
using BotSharp.Abstraction.Conversations;
using BotSharp.Abstraction.Conversations.Models;
using BotSharp.Abstraction.Loggers;
using BotSharp.Abstraction.MLTasks;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;
using System;
using System.Linq;
using System.Threading.Tasks;

namespace BotSharp.Plugin.MicrosoftExtensionsAI;

/// <summary>
/// Provides an implementation of <see cref="ITextCompletion"/> for Microsoft.Extensions.AI.
/// </summary>
public sealed class MicrosoftExtensionsAITextCompletionProvider : ITextCompletion
{
private readonly IChatClient _chatClient;
private readonly IServiceProvider _services;
private readonly ITokenStatistics _tokenStatistics;
private string? _model = null;

/// <inheritdoc/>
public string Provider => "microsoft-extensions-ai";

/// <summary>
/// Creates an instance of the <see cref="MicrosoftExtensionsAITextCompletionProvider"/> class.
/// </summary>
public MicrosoftExtensionsAITextCompletionProvider(
IChatClient chatClient,
IServiceProvider services,
ITokenStatistics tokenStatistics)
{
_chatClient = chatClient;
_services = services;
_tokenStatistics = tokenStatistics;
}

/// <inheritdoc/>
public async Task<string> GetCompletion(string text, string agentId, string messageId)
{
var hooks = _services.GetServices<IContentGeneratingHook>().ToArray();

// Before chat completion hook
Agent agent = new() { Id = agentId };
RoleDialogModel userMessage = new(AgentRole.User, text) { MessageId = messageId };
await Task.WhenAll(hooks.Select(hook => hook.BeforeGenerating(agent, [userMessage])));

_tokenStatistics.StartTimer();
var completion = await _chatClient.CompleteAsync(text);
var result = string.Concat(completion.Message.Contents.OfType<TextContent>());
_tokenStatistics.StopTimer();

// After chat completion hook
await Task.WhenAll(hooks.Select(hook =>
hook.AfterGenerated(new(AgentRole.Assistant, result), new() { Model = _model ?? "default" })));

return result;
}

/// <inheritdoc/>
public void SetModelName(string model)
{
if (!string.IsNullOrWhiteSpace(model))
{
_model = model;
}
}
}
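
Note (not part of the diff): a usage sketch for the text-completion path, assuming a fully configured BotSharp host (core services, this plugin, and an `IChatClient` registration); the model name and ids are placeholders.

```csharp
using System;
using System.Linq;
using System.Threading.Tasks;
using BotSharp.Abstraction.MLTasks;
using Microsoft.Extensions.DependencyInjection;

// `serviceProvider` is assumed to come from a fully configured BotSharp host.
static async Task<string> CompleteOnceAsync(IServiceProvider serviceProvider)
{
    using var scope = serviceProvider.CreateScope();

    // Select this plugin's provider by its Provider id among all registered ITextCompletion services.
    var textCompletion = scope.ServiceProvider
        .GetServices<ITextCompletion>()
        .First(x => x.Provider == "microsoft-extensions-ai");

    textCompletion.SetModelName("gpt-4o-mini"); // placeholder model name

    return await textCompletion.GetCompletion(
        "Summarize the conversation in one sentence.",
        "placeholder-agent-id",
        "placeholder-message-id");
}
```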