
Commit c933a71

Merge pull request #226 from xbotter/kernel-memory
feat: adapts to SK Kernel Memory.
2 parents 98147d0 + c52e99a commit c933a71

File tree

10 files changed: +311 -9 lines changed

Binary file (182 KB) not shown.

LLama.Examples/LLama.Examples.csproj

Lines changed: 4 additions & 0 deletions
@@ -33,6 +33,7 @@
   </ItemGroup>

   <ItemGroup>
+    <ProjectReference Include="..\LLama.KernelMemory\LLamaSharp.KernelMemory.csproj" />
     <ProjectReference Include="..\LLama.SemanticKernel\LLamaSharp.SemanticKernel.csproj" />
     <ProjectReference Include="..\LLama\LLamaSharp.csproj" />
   </ItemGroup>
@@ -62,6 +63,9 @@
     <None Update="Assets\reason-act.txt">
       <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
     </None>
+    <None Update="Assets\sample-SK-Readme.pdf">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </None>
   </ItemGroup>

 </Project>
Lines changed: 47 additions & 0 deletions
@@ -0,0 +1,47 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using LLamaSharp.KernelMemory;
using Microsoft.KernelMemory;
using Microsoft.KernelMemory.Handlers;

namespace LLama.Examples.NewVersion
{
    public class KernelMemory
    {
        public static async Task Run()
        {
            Console.WriteLine("Example from: https://github.com/microsoft/kernel-memory/blob/main/examples/101-using-core-nuget/Program.cs");
            Console.Write("Please input your model path: ");
            var modelPath = Console.ReadLine();
            var memory = new KernelMemoryBuilder()
                .WithLLamaSharpDefaults(new LLamaSharpConfig(modelPath))
                .With(new TextPartitioningOptions
                {
                    MaxTokensPerParagraph = 300,
                    MaxTokensPerLine = 100,
                    OverlappingTokens = 30
                })
                .BuildServerlessClient();

            await memory.ImportDocumentAsync(@"./Assets/sample-SK-Readme.pdf", steps: Constants.PipelineWithoutSummary);

            var question = "What's Semantic Kernel?";

            Console.WriteLine($"\n\nQuestion: {question}");

            var answer = await memory.AskAsync(question);

            Console.WriteLine($"\nAnswer: {answer.Result}");

            Console.WriteLine("\n\n  Sources:\n");

            foreach (var x in answer.RelevantSources)
            {
                Console.WriteLine($"  - {x.SourceName} - {x.Link} [{x.Partitions.First().LastUpdate:D}]");
            }
        }
    }
}

LLama.Examples/NewVersion/TestRunner.cs

Lines changed: 14 additions & 9 deletions
@@ -23,6 +23,7 @@ public static async Task Run()
             Console.WriteLine("13: Semantic Kernel Memory.");
             Console.WriteLine("14: Coding Assistant.");
             Console.WriteLine("15: Batch Decoding.");
+            Console.WriteLine("16: SK Kernel Memory.");

             while (true)
             {
@@ -37,31 +38,31 @@ public static async Task Run()
                 {
                     await ChatSessionStripRoleName.Run();
                 }
-                else if(choice == 2)
+                else if (choice == 2)
                 {
                     await InteractiveModeExecute.Run();
                 }
-                else if(choice == 3)
+                else if (choice == 3)
                 {
                     await InstructModeExecute.Run();
                 }
-                else if(choice == 4)
+                else if (choice == 4)
                 {
                     await StatelessModeExecute.Run();
                 }
-                else if(choice == 5)
+                else if (choice == 5)
                 {
                     await SaveAndLoadSession.Run();
                 }
-                else if(choice == 6)
+                else if (choice == 6)
                 {
                     await LoadAndSaveState.Run();
                 }
-                else if(choice == 7)
+                else if (choice == 7)
                 {
                     GetEmbeddings.Run();
                 }
-                else if(choice == 8)
+                else if (choice == 8)
                 {
                     QuantizeModel.Run();
                 }
@@ -85,14 +86,18 @@ public static async Task Run()
                 {
                     await SemanticKernelMemory.Run();
                 }
-                else if(choice == 14)
+                else if (choice == 14)
                 {
                     await CodingAssistant.Run();
                 }
                 else if (choice == 15)
                 {
                     await BatchedDecoding.Run();
                 }
+                else if (choice == 16)
+                {
+                    await KernelMemory.Run();
+                }
                 else
                 {
                     Console.WriteLine("Cannot parse your choice. Please select again.");
@@ -103,5 +108,5 @@ public static async Task Run()
             }
         }

-
+
     }
Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
using Microsoft.KernelMemory;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace LLamaSharp.KernelMemory
{
    /// <summary>
    /// Provides extension methods for the KernelMemoryBuilder class.
    /// </summary>
    public static class BuilderExtensions
    {
        /// <summary>
        /// Adds LLamaSharpTextEmbeddingGeneration to the KernelMemoryBuilder.
        /// </summary>
        /// <param name="builder">The KernelMemoryBuilder instance.</param>
        /// <param name="config">The LLamaSharpConfig instance.</param>
        /// <returns>The KernelMemoryBuilder instance with LLamaSharpTextEmbeddingGeneration added.</returns>
        public static KernelMemoryBuilder WithLLamaSharpTextEmbeddingGeneration(this KernelMemoryBuilder builder, LLamaSharpConfig config)
        {
            builder.WithCustomEmbeddingGeneration(new LLamaSharpTextEmbeddingGeneration(config));
            return builder;
        }

        /// <summary>
        /// Adds LlamaSharpTextGeneration to the KernelMemoryBuilder.
        /// </summary>
        /// <param name="builder">The KernelMemoryBuilder instance.</param>
        /// <param name="config">The LLamaSharpConfig instance.</param>
        /// <returns>The KernelMemoryBuilder instance with LlamaSharpTextGeneration added.</returns>
        public static KernelMemoryBuilder WithLLamaSharpTextGeneration(this KernelMemoryBuilder builder, LLamaSharpConfig config)
        {
            builder.WithCustomTextGeneration(new LlamaSharpTextGeneration(config));
            return builder;
        }

        /// <summary>
        /// Adds LLamaSharpTextEmbeddingGeneration and LlamaSharpTextGeneration to the KernelMemoryBuilder.
        /// </summary>
        /// <param name="builder">The KernelMemoryBuilder instance.</param>
        /// <param name="config">The LLamaSharpConfig instance.</param>
        /// <returns>The KernelMemoryBuilder instance with LLamaSharpTextEmbeddingGeneration and LlamaSharpTextGeneration added.</returns>
        public static KernelMemoryBuilder WithLLamaSharpDefaults(this KernelMemoryBuilder builder, LLamaSharpConfig config)
        {
            builder.WithLLamaSharpTextEmbeddingGeneration(config);
            builder.WithLLamaSharpTextGeneration(config);
            return builder;
        }
    }
}
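
For orientation, a minimal usage sketch of the extension methods above; the model path is a placeholder, and the builder calls are the same ones used in the KernelMemory example earlier in this commit:

// Sketch only: wire LLamaSharp into Kernel Memory via the extensions above.
// "path/to/model.gguf" is a placeholder for a local model file.
using LLamaSharp.KernelMemory;
using Microsoft.KernelMemory;

var config = new LLamaSharpConfig("path/to/model.gguf");

// Register embedding and text generation in one call...
var memory = new KernelMemoryBuilder()
    .WithLLamaSharpDefaults(config)
    .BuildServerlessClient();

// ...or register them individually.
var memoryExplicit = new KernelMemoryBuilder()
    .WithLLamaSharpTextEmbeddingGeneration(config)
    .WithLLamaSharpTextGeneration(config)
    .BuildServerlessClient();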
Lines changed: 17 additions & 0 deletions
@@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net6.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.KernelMemory.Core" Version="0.5.231030.1-preview" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\LLama\LLamaSharp.csproj" />
  </ItemGroup>

</Project>
Lines changed: 54 additions & 0 deletions
@@ -0,0 +1,54 @@
using LLama;
using LLama.Common;
using Microsoft.SemanticKernel.AI.Embeddings;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace LLamaSharp.KernelMemory
{
    /// <summary>
    /// Provides text embedding generation for LLamaSharp.
    /// </summary>
    public class LLamaSharpTextEmbeddingGeneration : ITextEmbeddingGeneration, IDisposable
    {
        private readonly LLamaSharpConfig _config;
        private readonly LLamaEmbedder _embedder;
        private readonly LLamaWeights _weights;

        /// <summary>
        /// Initializes a new instance of the <see cref="LLamaSharpTextEmbeddingGeneration"/> class.
        /// </summary>
        /// <param name="config">The configuration for LLamaSharp.</param>
        public LLamaSharpTextEmbeddingGeneration(LLamaSharpConfig config)
        {
            this._config = config;
            var @params = new ModelParams(_config.ModelPath);
            _weights = LLamaWeights.LoadFromFile(@params);
            _embedder = new LLamaEmbedder(_weights, @params);
        }

        /// <inheritdoc/>
        public void Dispose()
        {
            _embedder.Dispose();
            _weights.Dispose();
        }

        /// <inheritdoc/>
        public Task<IList<ReadOnlyMemory<float>>> GenerateEmbeddingsAsync(IList<string> data, CancellationToken cancellationToken = default)
        {
            IList<ReadOnlyMemory<float>> results = new List<ReadOnlyMemory<float>>();

            foreach (var d in data)
            {
                var embeddings = _embedder.GetEmbeddings(d);
                results.Add(new ReadOnlyMemory<float>(embeddings));
            }

            return Task.FromResult(results);
        }
    }
}
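
A short sketch of calling the embedding generator on its own, based only on the constructor and GenerateEmbeddingsAsync signature above; the model path and input text are placeholders:

// Sketch only: generate embeddings directly, without KernelMemoryBuilder.
using System;
using System.Collections.Generic;
using LLamaSharp.KernelMemory;

using var embeddings = new LLamaSharpTextEmbeddingGeneration(new LLamaSharpConfig("path/to/model.gguf"));
IList<ReadOnlyMemory<float>> vectors = await embeddings.GenerateEmbeddingsAsync(new List<string> { "Hello, Kernel Memory!" });
Console.WriteLine($"Embedding length: {vectors[0].Length}");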
Lines changed: 43 additions & 0 deletions
@@ -0,0 +1,43 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace LLamaSharp.KernelMemory
{
    /// <summary>
    /// Represents the configuration for LLamaSharp.
    /// </summary>
    public class LLamaSharpConfig
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="LLamaSharpConfig"/> class.
        /// </summary>
        /// <param name="modelPath">The path to the model file.</param>
        public LLamaSharpConfig(string modelPath)
        {
            ModelPath = modelPath;
        }

        /// <summary>
        /// Gets or sets the path to the model file.
        /// </summary>
        public string ModelPath { get; set; }

        /// <summary>
        /// Gets or sets the size of the context.
        /// </summary>
        public uint? ContextSize { get; set; }

        /// <summary>
        /// Gets or sets the seed value.
        /// </summary>
        public uint? Seed { get; set; }

        /// <summary>
        /// Gets or sets the number of GPU layers.
        /// </summary>
        public int? GpuLayerCount { get; set; }
    }
}
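
In this commit the optional properties default to null and are only consulted by LlamaSharpTextGeneration below; a brief sketch with all of them set via an object initializer (the values are illustrative, not recommended defaults):

// Sketch only: LLamaSharpConfig with the optional settings set explicitly.
// The numbers are illustrative, not recommended defaults.
var config = new LLamaSharpConfig("path/to/model.gguf")
{
    ContextSize = 4096,   // uint? - context window in tokens
    Seed = 1337,          // uint? - sampling seed
    GpuLayerCount = 20    // int?  - layers offloaded to the GPU
};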
Lines changed: 66 additions & 0 deletions
@@ -0,0 +1,66 @@
using LLama;
using LLama.Common;
using Microsoft.KernelMemory.AI;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace LLamaSharp.KernelMemory
{
    /// <summary>
    /// Provides text generation for LLamaSharp.
    /// </summary>
    public class LlamaSharpTextGeneration : ITextGeneration, IDisposable
    {
        private readonly LLamaSharpConfig _config;
        private readonly LLamaWeights _weights;
        private readonly StatelessExecutor _executor;
        private readonly LLamaContext _context;

        /// <summary>
        /// Initializes a new instance of the <see cref="LlamaSharpTextGeneration"/> class.
        /// </summary>
        /// <param name="config">The configuration for LLamaSharp.</param>
        public LlamaSharpTextGeneration(LLamaSharpConfig config)
        {
            this._config = config;
            var parameters = new ModelParams(config.ModelPath)
            {
                ContextSize = config?.ContextSize ?? 2048,
                Seed = config?.Seed ?? 0,
                GpuLayerCount = config?.GpuLayerCount ?? 20
            };
            _weights = LLamaWeights.LoadFromFile(parameters);
            _context = _weights.CreateContext(parameters);
            _executor = new StatelessExecutor(_weights, parameters);
        }

        /// <inheritdoc/>
        public void Dispose()
        {
            _context.Dispose();
            _weights.Dispose();
        }

        /// <inheritdoc/>
        public IAsyncEnumerable<string> GenerateTextAsync(string prompt, TextGenerationOptions options, CancellationToken cancellationToken = default)
        {
            return _executor.InferAsync(prompt, OptionsToParams(options), cancellationToken: cancellationToken);
        }

        private static InferenceParams OptionsToParams(TextGenerationOptions options)
        {
            return new InferenceParams()
            {
                AntiPrompts = options.StopSequences.ToList().AsReadOnly(),
                Temperature = (float)options.Temperature,
                MaxTokens = options.MaxTokens ?? 1024,
                FrequencyPenalty = (float)options.FrequencyPenalty,
                PresencePenalty = (float)options.PresencePenalty,
                TopP = (float)options.TopP,
            };
        }
    }
}
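
For completeness, a sketch of driving the generator directly with Kernel Memory's TextGenerationOptions; only the option properties that OptionsToParams reads above are set, the values are placeholders, and it is assumed that those properties are settable via an object initializer in this preview package version:

// Sketch only: stream tokens from LlamaSharpTextGeneration without the memory pipeline.
// Option values are illustrative; settable TextGenerationOptions properties are assumed.
using System;
using System.Collections.Generic;
using LLamaSharp.KernelMemory;
using Microsoft.KernelMemory.AI;

using var generator = new LlamaSharpTextGeneration(new LLamaSharpConfig("path/to/model.gguf"));
var options = new TextGenerationOptions
{
    Temperature = 0.7,
    MaxTokens = 256,
    StopSequences = new List<string> { "\n\n" }
};

await foreach (var token in generator.GenerateTextAsync("Briefly explain what Kernel Memory does.", options))
{
    Console.Write(token);
}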
