Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
namespace BotSharp.Abstraction.Agents.Constants;

/// <summary>
/// Shared default values for LLM configuration.
/// </summary>
public static class LlmConstant
{
    /// <summary>
    /// Fallback maximum output token count. Used by the LLM provider plugins when
    /// neither the conversation state ("max_tokens") nor the agent's
    /// <c>LlmConfig.MaxOutputTokens</c> supplies a value.
    /// </summary>
    public const int DEFAULT_MAX_OUTPUT_TOKEN = 1024;
}
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,16 @@ public class AgentLlmConfig
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Model { get; set; }

/// <summary>
/// Max recursion depth
/// </summary>
[JsonPropertyName("max_recursion_depth")]
public int MaxRecursionDepth { get; set; } = 3;

/// <summary>
/// Max output token
/// </summary>
[JsonPropertyName("max_output_tokens")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? MaxOutputTokens { get; set; }
}
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ public override string ToString()
public static (DateTime, DateTime) BuildTimeInterval(DateTime recordTime, StatsInterval interval)
{
DateTime startTime = recordTime;
DateTime endTime = DateTime.UtcNow;
DateTime endTime = startTime;

switch (interval)
{
Expand All @@ -70,6 +70,7 @@ public static (DateTime, DateTime) BuildTimeInterval(DateTime recordTime, StatsI
break;
}

endTime = endTime.AddSeconds(-1);
startTime = DateTime.SpecifyKind(startTime, DateTimeKind.Utc);
endTime = DateTime.SpecifyKind(endTime, DateTimeKind.Utc);
return (startTime, endTime);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ public Agent ToAgent()
MaxMessageCount = MaxMessageCount,
Profiles = Profiles,
Labels = Labels,
LlmConfig = LlmConfig,
LlmConfig = LlmConfig ?? new(),
KnowledgeBases = KnowledgeBases,
Rules = Rules,
RoutingRules = RoutingRules?.Select(x => RoutingRuleUpdateModel.ToDomainElement(x))?.ToList() ?? [],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ public Agent ToAgent()
Utilities = Utilities ?? [],
KnowledgeBases = KnowledgeBases ?? [],
Rules = Rules ?? [],
LlmConfig = LlmConfig
LlmConfig = LlmConfig ?? new()
};

return agent;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -170,12 +170,14 @@ public Task<bool> GetChatCompletionsStreamingAsync(Agent agent, List<RoleDialogM

var state = _services.GetRequiredService<IConversationStateService>();
var temperature = decimal.Parse(state.GetState("temperature", "0.0"));
var maxToken = int.Parse(state.GetState("max_tokens", "512"));
var maxTokens = int.TryParse(state.GetState("max_tokens"), out var tokens)
? tokens
: agent.LlmConfig?.MaxOutputTokens ?? LlmConstant.DEFAULT_MAX_OUTPUT_TOKEN;

var parameters = new MessageParameters()
{
Messages = messages,
MaxTokens = maxToken,
MaxTokens = maxTokens,
Model = settings.Name,
Stream = false,
Temperature = temperature,
Expand Down
1 change: 1 addition & 0 deletions src/Plugins/BotSharp.Plugin.AnthropicAI/Using.cs
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
global using Anthropic.SDK.Constants;
global using Anthropic.SDK.Messaging;
global using BotSharp.Abstraction.Agents;
global using BotSharp.Abstraction.Agents.Constants;
global using BotSharp.Abstraction.Agents.Enums;
global using BotSharp.Abstraction.Agents.Models;
global using BotSharp.Abstraction.Conversations.Models;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -225,7 +225,10 @@ public async Task<bool> GetChatCompletionsStreamingAsync(Agent agent, List<RoleD
var messages = new List<ChatMessage>();

var temperature = float.Parse(state.GetState("temperature", "0.0"));
var maxTokens = int.Parse(state.GetState("max_tokens", "1024"));
var maxTokens = int.TryParse(state.GetState("max_tokens"), out var tokens)
? tokens
: agent.LlmConfig?.MaxOutputTokens ?? LlmConstant.DEFAULT_MAX_OUTPUT_TOKEN;

var options = new ChatCompletionOptions()
{
Temperature = temperature,
Expand Down
1 change: 1 addition & 0 deletions src/Plugins/BotSharp.Plugin.AzureOpenAI/Using.cs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
global using System.Threading.Tasks;
global using Microsoft.Extensions.DependencyInjection;
global using Microsoft.Extensions.Logging;
global using BotSharp.Abstraction.Agents.Constants;
global using BotSharp.Abstraction.Agents.Enums;
global using BotSharp.Abstraction.Agents.Models;
global using BotSharp.Abstraction.Conversations;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,9 @@ public void SetModelName(string model)
var messages = new List<ChatMessage>();

var temperature = float.Parse(state.GetState("temperature", "0.0"));
var maxTokens = int.Parse(state.GetState("max_tokens", "1024"));
var maxTokens = int.TryParse(state.GetState("max_tokens"), out var tokens)
? tokens
: agent.LlmConfig?.MaxOutputTokens ?? LlmConstant.DEFAULT_MAX_OUTPUT_TOKEN;
var options = new ChatCompletionOptions()
{
Temperature = temperature,
Expand Down
1 change: 1 addition & 0 deletions src/Plugins/BotSharp.Plugin.DeepSeekAI/Using.cs
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
global using BotSharp.Abstraction.Agents.Models;
global using BotSharp.Abstraction.MLTasks;
global using BotSharp.Abstraction.Agents;
global using BotSharp.Abstraction.Agents.Constants;
global using BotSharp.Abstraction.Agents.Enums;
global using BotSharp.Abstraction.Conversations;
global using BotSharp.Abstraction.Loggers;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
using BotSharp.Abstraction.Agents;
using BotSharp.Abstraction.Agents.Enums;
using BotSharp.Abstraction.Conversations;
using BotSharp.Abstraction.Loggers;
using Google.Protobuf.WellKnownTypes;
using Microsoft.Extensions.Logging;
Expand Down Expand Up @@ -188,10 +189,20 @@ public void SetModelName(string model)
}
}

var state = _services.GetRequiredService<IConversationStateService>();
var temperature = float.Parse(state.GetState("temperature", "0.0"));
var maxTokens = int.TryParse(state.GetState("max_tokens"), out var tokens)
? tokens
: agent.LlmConfig?.MaxOutputTokens ?? LlmConstant.DEFAULT_MAX_OUTPUT_TOKEN;
var request = new GenerateContentRequest
{
Contents = contents,
Tools = tools
Tools = tools,
GenerationConfig = new()
{
Temperature = temperature,
MaxOutputTokens = maxTokens
}
};

var prompt = GetPrompt(systemPrompts, funcPrompts, convPrompts);
Expand Down
1 change: 1 addition & 0 deletions src/Plugins/BotSharp.Plugin.GoogleAI/Using.cs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
global using System.Linq;
global using System.Text.Json;
global using BotSharp.Abstraction.Conversations.Models;
global using BotSharp.Abstraction.Agents.Constants;
global using BotSharp.Abstraction.Agents.Models;
global using BotSharp.Abstraction.MLTasks;
global using Microsoft.Extensions.Configuration;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ public class AgentLlmConfigMongoElement
public string? Model { get; set; }
public bool IsInherit { get; set; }
public int MaxRecursionDepth { get; set; }
public int? MaxOutputTokens { get; set; }

public static AgentLlmConfigMongoElement? ToMongoElement(AgentLlmConfig? config)
{
Expand All @@ -19,6 +20,7 @@ public class AgentLlmConfigMongoElement
Model = config.Model,
IsInherit = config.IsInherit,
MaxRecursionDepth = config.MaxRecursionDepth,
MaxOutputTokens = config.MaxOutputTokens,
};
}

Expand All @@ -32,6 +34,7 @@ public class AgentLlmConfigMongoElement
Model = config.Model,
IsInherit = config.IsInherit,
MaxRecursionDepth = config.MaxRecursionDepth,
MaxOutputTokens = config.MaxOutputTokens,
};
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,9 @@ public async Task<bool> GetChatCompletionsStreamingAsync(Agent agent, List<RoleD
var messages = new List<ChatMessage>();

var temperature = float.Parse(state.GetState("temperature", "0.0"));
var maxTokens = int.Parse(state.GetState("max_tokens", "1024"));
var maxTokens = int.TryParse(state.GetState("max_tokens"), out var tokens)
? tokens
: agent.LlmConfig?.MaxOutputTokens ?? LlmConstant.DEFAULT_MAX_OUTPUT_TOKEN;
var options = new ChatCompletionOptions()
{
Temperature = temperature,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,9 @@ public async Task<RealtimeSession> CreateSession(Agent agent, List<RoleDialogMod
var messages = new List<ChatMessage>();

var temperature = float.Parse(state.GetState("temperature", "0.0"));
var maxTokens = int.Parse(state.GetState("max_tokens", "1024"));
var maxTokens = int.TryParse(state.GetState("max_tokens"), out var tokens)
? tokens
: agent.LlmConfig?.MaxOutputTokens ?? LlmConstant.DEFAULT_MAX_OUTPUT_TOKEN;
var options = new ChatCompletionOptions()
{
ToolChoice = ChatToolChoice.CreateAutoChoice(),
Expand Down
1 change: 1 addition & 0 deletions src/Plugins/BotSharp.Plugin.OpenAI/Using.cs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
global using Microsoft.Extensions.DependencyInjection;
global using Microsoft.Extensions.Logging;
global using BotSharp.Abstraction.Agents.Enums;
global using BotSharp.Abstraction.Agents.Constants;
global using BotSharp.Abstraction.Agents.Models;
global using BotSharp.Abstraction.Conversations;
global using BotSharp.Abstraction.Conversations.Models;
Expand Down