diff --git a/docs/architecture/logging.md b/docs/architecture/logging.md
new file mode 100644
index 000000000..f788789d4
--- /dev/null
+++ b/docs/architecture/logging.md
@@ -0,0 +1,24 @@
+# Logging
+
+## Setting
+To initialize the logging feature, configure the following flags in the `Conversation` section. Each flag controls whether specific content is displayed or recorded during a conversation.
+
+* `ShowVerboseLog`: print conversation details, such as the prompt, in the console.
+* `EnableLlmCompletionLog`: log LLM completion results, e.g., the real-time prompt sent to the LLM and the response generated by the LLM.
+* `EnableExecutionLog`: log details after events, e.g., receiving a message, executing a function, generating a response, etc.
+
+
+```json
+"Conversation": {
+  "ShowVerboseLog": false,
+  "EnableLlmCompletionLog": false,
+  "EnableExecutionLog": true
+}
+```
+
+### Usage
+To enable the logging functionality, add the following line to `Program.cs`.
+
+```csharp
+builder.Services.AddBotSharpLogger(builder.Configuration);
+```
diff --git a/docs/index.rst b/docs/index.rst
index 8b3d08f6d..94f8d08dd 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -102,6 +102,7 @@ The main documentation for the site is organized into the following sections:
    architecture/plugin
    architecture/hooks
    architecture/routing
+   architecture/logging
    architecture/data-persistence
 
 If you feel that this project is helpful to you, please Star us on the project, we will be very grateful.
diff --git a/src/Infrastructure/BotSharp.Abstraction/Loggers/IVerboseLogHook.cs b/src/Infrastructure/BotSharp.Abstraction/Loggers/IVerboseLogHook.cs
deleted file mode 100644
index 9ebb5c664..000000000
--- a/src/Infrastructure/BotSharp.Abstraction/Loggers/IVerboseLogHook.cs
+++ /dev/null
@@ -1,6 +0,0 @@
-namespace BotSharp.Abstraction.Loggers;
-
-public interface IVerboseLogHook
-{
-    void GenerateLog(string text);
-}
diff --git a/src/Infrastructure/BotSharp.Logger/BotSharpLoggerExtensions.cs b/src/Infrastructure/BotSharp.Logger/BotSharpLoggerExtensions.cs
index 06c9eb4c3..4e98fd7f6 100644
--- a/src/Infrastructure/BotSharp.Logger/BotSharpLoggerExtensions.cs
+++ b/src/Infrastructure/BotSharp.Logger/BotSharpLoggerExtensions.cs
@@ -6,7 +6,7 @@ public static IServiceCollection AddBotSharpLogger(this IServiceCollection servi
     {
         services.AddScoped();
         services.AddScoped();
-        services.AddScoped<IVerboseLogHook, VerboseLogHook>();
+        services.AddScoped<IContentGeneratingHook, VerboseLogHook>();
         return services;
     }
 }
diff --git a/src/Infrastructure/BotSharp.Logger/Hooks/CommonContentGeneratingHook.cs b/src/Infrastructure/BotSharp.Logger/Hooks/CommonContentGeneratingHook.cs
index 748dfce1c..7a9a0d994 100644
--- a/src/Infrastructure/BotSharp.Logger/Hooks/CommonContentGeneratingHook.cs
+++ b/src/Infrastructure/BotSharp.Logger/Hooks/CommonContentGeneratingHook.cs
@@ -7,10 +7,6 @@ public CommonContentGeneratingHook(IServiceProvider services)
     {
         _services = services;
     }
-    /// <summary>
-    /// After content generated.
-    /// </summary>
-    /// <returns></returns>
     public async Task AfterGenerated(RoleDialogModel message, TokenStatsModel tokenStats)
     {
         SaveLlmCompletionLog(message, tokenStats);
diff --git a/src/Infrastructure/BotSharp.Logger/Hooks/VerboseLogHook.cs b/src/Infrastructure/BotSharp.Logger/Hooks/VerboseLogHook.cs
index 4ba21f8ef..f93532d80 100644
--- a/src/Infrastructure/BotSharp.Logger/Hooks/VerboseLogHook.cs
+++ b/src/Infrastructure/BotSharp.Logger/Hooks/VerboseLogHook.cs
@@ -1,20 +1,45 @@
+using BotSharp.Abstraction.Agents;
+using BotSharp.Abstraction.Agents.Enums;
+
 namespace BotSharp.Logger.Hooks;
 
-public class VerboseLogHook : IVerboseLogHook
+public class VerboseLogHook : IContentGeneratingHook
 {
     private readonly ConversationSetting _convSettings;
     private readonly ILogger<VerboseLogHook> _logger;
+    private readonly IServiceProvider _services;
 
-    public VerboseLogHook(ConversationSetting convSettings, ILogger<VerboseLogHook> logger)
+    public VerboseLogHook(
+        ConversationSetting convSettings,
+        IServiceProvider services,
+        ILogger<VerboseLogHook> logger)
     {
         _convSettings = convSettings;
+        _services = services;
         _logger = logger;
     }
 
-    public void GenerateLog(string text)
+    public async Task BeforeGenerating(Agent agent, List<RoleDialogModel> conversations)
+    {
+        if (!_convSettings.ShowVerboseLog) return;
+
+        var dialog = conversations.Last();
+        var log = $"{dialog.Role}: {dialog.Content}";
+        _logger.LogInformation(log);
+    }
+
+    public async Task AfterGenerated(RoleDialogModel message, TokenStatsModel tokenStats)
     {
         if (!_convSettings.ShowVerboseLog) return;
 
-        _logger.LogInformation(text);
+        var agentService = _services.GetRequiredService<IAgentService>();
+        var agent = await agentService.LoadAgent(message.CurrentAgentId);
+
+        var log = message.Role == AgentRole.Function ?
+            $"[{agent.Name}]: {message.FunctionName}({message.FunctionArgs})" :
+            $"[{agent.Name}]: {message.Content}";
+
+        _logger.LogInformation(tokenStats.Prompt);
+        _logger.LogInformation(log);
     }
 }
diff --git a/src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/ChatCompletionProvider.cs b/src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/ChatCompletionProvider.cs
index 1f6bb5fb0..0a5a18c22 100644
--- a/src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/ChatCompletionProvider.cs
+++ b/src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/ChatCompletionProvider.cs
@@ -4,7 +4,6 @@
 using BotSharp.Abstraction.Agents.Models;
 using BotSharp.Abstraction.Conversations;
 using BotSharp.Abstraction.Conversations.Models;
-using BotSharp.Abstraction.Conversations.Settings;
 using BotSharp.Abstraction.Loggers;
 using BotSharp.Abstraction.MLTasks;
 using BotSharp.Plugin.AzureOpenAI.Settings;
@@ -39,7 +38,6 @@ public ChatCompletionProvider(AzureOpenAiSettings settings,
     public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> conversations)
     {
         var contentHooks = _services.GetServices<IContentGeneratingHook>().ToList();
-        var logHook = _services.GetService<IVerboseLogHook>();
 
         // Before chat completion hook
         Task.WaitAll(contentHooks.Select(hook =>
@@ -75,11 +73,6 @@ public RoleDialogModel GetChatCompletions(Agent agent, List con
             }
         }
-        var log = responseMessage.Role == AgentRole.Function ?
- $"[{agent.Name}]: {responseMessage.FunctionName}({responseMessage.FunctionArgs})" : - $"[{agent.Name}]: {responseMessage.Content}"; - logHook?.GenerateLog(log); - // After chat completion hook Task.WaitAll(contentHooks.Select(hook => hook.AfterGenerated(responseMessage, new TokenStatsModel @@ -192,7 +185,6 @@ public async Task GetChatCompletionsStreamingAsync(Agent agent, List conversations) { var agentService = _services.GetRequiredService(); - var logHook = _services.GetService(); var chatCompletionsOptions = new ChatCompletionsOptions(); @@ -248,7 +240,6 @@ public async Task GetChatCompletionsStreamingAsync(Agent agent, List GetCompletion(string text, string agentId, string messageId) { var contentHooks = _services.GetServices().ToList(); - var logHook = _services.GetService(); // Before chat completion hook var agent = new Agent() @@ -63,7 +61,6 @@ public async Task GetCompletion(string text, string agentId, string mess MaxTokens = 256, }; completionsOptions.StopSequences.Add($"{AgentRole.Assistant}:"); - logHook?.GenerateLog(text); var state = _services.GetRequiredService(); var temperature = float.Parse(state.GetState("temperature", "0.5")); @@ -80,8 +77,6 @@ public async Task GetCompletion(string text, string agentId, string mess completion += t.Text; }; - logHook?.GenerateLog(completion); - // After chat completion hook var responseMessage = new RoleDialogModel(AgentRole.Assistant, completion) {