Skip to content

Commit c6f64a6

Browse files
authored
Merge branch 'SciSharp:master' into master
2 parents 6197ac5 + 9d44878 commit c6f64a6

File tree

5 files changed

+122
-11
lines changed

5 files changed

+122
-11
lines changed
Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
using System.Text.Json;
using System.Text.Json.Serialization; // JsonPropertyNameAttribute lives here, not in System.Text.Json

namespace BotSharp.Abstraction.Functions.Models;

/// <summary>
/// Defines the LLM response output when a function call is needed.
/// Deserialized from the model's JSON reply (see the "response_with_function"
/// prompt template for the expected shape).
/// </summary>
public class FunctionCallingResponse
{
    /// <summary>Responder role; defaults to assistant when the model omits it.</summary>
    [JsonPropertyName("role")]
    public string Role { get; set; } = AgentRole.Assistant;

    /// <summary>Plain-text answer; null when the model chose a function call instead.</summary>
    [JsonPropertyName("content")]
    public string? Content { get; set; }

    /// <summary>Name of the function the model wants invoked; null for plain replies.</summary>
    [JsonPropertyName("function_name")]
    public string? FunctionName { get; set; }

    /// <summary>Raw JSON arguments for the function call; kept as JsonDocument so the
    /// caller can re-serialize them without committing to a schema.</summary>
    [JsonPropertyName("args")]
    public JsonDocument? Args { get; set; }
}

src/Infrastructure/BotSharp.Abstraction/Functions/Models/FunctionDef.cs

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,10 @@ public class FunctionDef
44
{
55
public string Name { get; set; }
66
public string Description { get; set; }
7+
8+
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
79
public string? Impact { get; set; }
10+
811
public FunctionParametersDef Parameters { get; set; } = new FunctionParametersDef();
912

1013
public override string ToString()

src/Plugins/BotSharp.Plugin.GoogleAI/Providers/ChatCompletionProvider.cs

Lines changed: 88 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,12 @@
11
using BotSharp.Abstraction.Agents;
22
using BotSharp.Abstraction.Agents.Enums;
3-
using BotSharp.Abstraction.Conversations;
43
using BotSharp.Abstraction.Loggers;
4+
using BotSharp.Abstraction.Functions.Models;
5+
using BotSharp.Abstraction.Routing;
56
using BotSharp.Plugin.GoogleAI.Settings;
67
using LLMSharp.Google.Palm;
78
using Microsoft.Extensions.Logging;
9+
using LLMSharp.Google.Palm.DiscussService;
810

911
namespace BotSharp.Plugin.GoogleAI.Providers;
1012

@@ -34,29 +36,105 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con
3436
hook.BeforeGenerating(agent, conversations)).ToArray());
3537

3638
var client = new GooglePalmClient(apiKey: _settings.PaLM.ApiKey);
37-
var messages = conversations.Select(c => new PalmChatMessage(c.Content, c.Role == AgentRole.User ? "user" : "AI"))
38-
.ToList();
3939

40-
var agentService = _services.GetRequiredService<IAgentService>();
41-
var instruction = agentService.RenderedInstruction(agent);
42-
var response = client.ChatAsync(messages, instruction, null).Result;
40+
var (prompt, messages, hasFunctions) = PrepareOptions(agent, conversations);
4341

44-
var message = response.Candidates.First();
45-
var msg = new RoleDialogModel(AgentRole.Assistant, message.Content)
42+
RoleDialogModel msg;
43+
44+
if (hasFunctions)
45+
{
46+
// use text completion
47+
// var response = client.GenerateTextAsync(prompt, null).Result;
48+
var response = client.ChatAsync(new PalmChatCompletionRequest
49+
{
50+
Context = prompt,
51+
Messages = messages,
52+
Temperature = 0.1f
53+
}).Result;
54+
55+
var message = response.Candidates.First();
56+
57+
// check if returns function calling
58+
var llmResponse = message.Content.JsonContent<FunctionCallingResponse>();
59+
60+
msg = new RoleDialogModel(llmResponse.Role, llmResponse.Content)
61+
{
62+
CurrentAgentId = agent.Id,
63+
FunctionName = llmResponse.FunctionName,
64+
FunctionArgs = JsonSerializer.Serialize(llmResponse.Args)
65+
};
66+
}
67+
else
4668
{
47-
CurrentAgentId = agent.Id
48-
};
69+
var response = client.ChatAsync(messages, context: prompt, examples: null, options: null).Result;
70+
71+
var message = response.Candidates.First();
72+
73+
// check if returns function calling
74+
var llmResponse = message.Content.JsonContent<FunctionCallingResponse>();
75+
76+
msg = new RoleDialogModel(llmResponse.Role, llmResponse.Content ?? message.Content)
77+
{
78+
CurrentAgentId = agent.Id
79+
};
80+
}
4981

5082
// After chat completion hook
5183
Task.WaitAll(hooks.Select(hook =>
5284
hook.AfterGenerated(msg, new TokenStatsModel
5385
{
86+
Prompt = prompt,
5487
Model = _model
5588
})).ToArray());
5689

5790
return msg;
5891
}
5992

93+
/// <summary>
/// Builds the prompt context and message list for the PaLM chat request.
/// </summary>
/// <param name="agent">Agent whose rendered instruction and function definitions seed the prompt.</param>
/// <param name="conversations">Dialog history to send (or to inline into the prompt in function mode).</param>
/// <returns>
/// (prompt, messages, hasFunctions). When the agent defines functions, the prompt embeds the
/// function JSON schema, the conversation transcript and the "response_with_function" template,
/// and <c>messages</c> contains a single instruction asking the model to pick a function;
/// otherwise <c>messages</c> is the plain conversation history.
/// </returns>
private (string, List<PalmChatMessage>, bool) PrepareOptions(Agent agent, List<RoleDialogModel> conversations)
{
    var agentService = _services.GetRequiredService<IAgentService>();

    var prompt = string.Empty;
    if (!string.IsNullOrEmpty(agent.Instruction))
    {
        prompt += agentService.RenderedInstruction(agent);
    }

    var messages = conversations
        .Select(c => new PalmChatMessage(c.Content, c.Role == AgentRole.User ? "user" : "AI"))
        .ToList();

    // Plain chat: no function definitions, send the history as-is.
    if (agent.Functions == null || agent.Functions.Count == 0)
    {
        return (prompt, messages, false);
    }

    // Function-calling mode: embed the function schema and transcript in the prompt.
    prompt += "\r\n\r\n[Functions] defined in JSON Schema:\r\n";
    prompt += JsonSerializer.Serialize(agent.Functions, new JsonSerializerOptions
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = true
    });

    prompt += "\r\n\r\n[Conversations]\r\n";
    foreach (var dialog in conversations)
    {
        prompt += dialog.Role == AgentRole.Function ?
            $"{dialog.Role}: {dialog.FunctionName} => {dialog.Content}\r\n" :
            $"{dialog.Role}: {dialog.Content}\r\n";
    }

    // Only resolve the routing service when it is actually needed, and guard
    // against a missing template instead of throwing NullReferenceException.
    var routing = _services.GetRequiredService<IRoutingService>();
    var template = routing.Router.Templates.FirstOrDefault(x => x.Name == "response_with_function");
    prompt += "\r\n\r\n" + (template?.Content ?? string.Empty);

    return (prompt, new List<PalmChatMessage>
    {
        new PalmChatMessage("Which function should be used for the next step based on latest user or function response, output your response in JSON:", AgentRole.User),
    }, true);
}
137+
60138
public Task<bool> GetChatCompletionsAsync(Agent agent, List<RoleDialogModel> conversations, Func<RoleDialogModel, Task> onMessageReceived, Func<RoleDialogModel, Task> onFunctionExecuting)
61139
{
62140
throw new NotImplementedException();
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
What is the next step based on the CONVERSATION?
2-
Response must be in appropriate JSON format.
2+
Response must be in required JSON format without any other contents.
33
Route to the Agent that last handled the conversation if necessary.
44
If user wants to speak to customer service, use function human_intervention_needed.
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
[Output Requirements]
2+
1. Read the [Functions] definition, you can utilize the function to retrieve data or execute actions.
3+
2. Think step by step, check if a specific function will provide data to help complete the user request based on the conversation.
4+
3. If you need to call a function to decide how to response user,
5+
response in format: {"role": "function", "reason":"why choose this function", "function_name": "", "args": {}},
6+
otherwise response in format: {"role": "assistant", "reason":"why response to user", "content":"next step question"}.
7+
4. If the conversation already contains the function execution result, don't need to call it again.
8+
5. If the user mentioned some specific requirement, don't ask this question in your response.
9+
6. Don't repeat the same question in your response.

0 commit comments

Comments
 (0)