Commit bfe9cc8

Merge pull request #78 from SciSharp/rinne-dev
feat: update the llama backends.
2 parents 5e15077 + 1b15d28 commit bfe9cc8

File tree

12 files changed (+23, −12 lines)


.github/workflows/main.yml

Lines changed: 7 additions & 7 deletions
@@ -12,20 +12,20 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        build: [linux-debug, linux-release, macos-debug, macos-release, windows-debug, windows-release]
+        build: [linux-debug, linux-release, windows-debug, windows-release]
         include:
           - build: linux-debug
             os: ubuntu-latest
             config: debug
           - build: linux-release
             os: ubuntu-latest
             config: release
-          - build: macos-debug
-            os: macos-latest
-            config: debug
-          - build: macos-release
-            os: macos-latest
-            config: release
+          # - build: macos-debug
+          #   os: macos-latest
+          #   config: debug
+          # - build: macos-release
+          #   os: macos-latest
+          #   config: release
           - build: windows-debug
             os: windows-2019
             config: debug

LLama/Common/FixedSizeQueue.cs

Lines changed: 10 additions & 4 deletions
@@ -30,8 +30,11 @@ public FixedSizeQueue(int size)
         /// <param name="data"></param>
         public FixedSizeQueue(int size, IEnumerable<T> data)
         {
-#if NETCOREAPP3_0_OR_GREATER
-            // Try an early check on the amount of data supplied (if possible)
+#if NETSTANDARD2_0
+            var dataCount = data.Count();
+            if (data.Count() > size)
+                throw new ArgumentException($"The max size set for the quene is {size}, but got {dataCount} initial values.");
+#else
             if (data.TryGetNonEnumeratedCount(out var count) && count > size)
                 throw new ArgumentException($"The max size set for the quene is {size}, but got {count} initial values.");
 #endif
@@ -42,9 +45,12 @@ public FixedSizeQueue(int size, IEnumerable<T> data)
 
             // Now check if that list is a valid size
             if (_storage.Count > _maxSize)
-                throw new ArgumentException($"The max size set for the quene is {size}, but got {_storage.Count} initial values.");
+#if NETSTANDARD2_0
+                throw new ArgumentException($"The max size set for the quene is {size}, but got {dataCount} initial values.");
+#else
+                throw new ArgumentException($"The max size set for the quene is {size}, but got {count} initial values.");
+#endif
         }
-
         /// <summary>
         /// Replace every item in the queue with the given value
         /// </summary>

LLama/LLamaSharp.Runtime.targets

Lines changed: 4 additions & 0 deletions
@@ -31,6 +31,10 @@
       <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
       <Link>libllama.dylib</Link>
     </None>
+    <None Include="$(MSBuildThisFileDirectory)runtimes/libllama-metal.dylib">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+      <Link>libllama-metal.dylib</Link>
+    </None>
     <None Include="$(MSBuildThisFileDirectory)runtimes/ggml-metal.metal">
       <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
       <Link>ggml-metal.metal</Link>

LLama/Native/NativeApi.cs

Lines changed: 2 additions & 1 deletion
@@ -21,7 +21,8 @@ static NativeApi()
                 "1. No LLamaSharp backend was installed. Please search LLamaSharp.Backend and install one of them. \n" +
                 "2. You are using a device with only CPU but installed cuda backend. Please install cpu backend instead. \n" +
                 "3. The backend is not compatible with your system cuda environment. Please check and fix it. If the environment is " +
-                "expected not to be changed, then consider build llama.cpp from source or submit an issue to LLamaSharp.");
+                "expected not to be changed, then consider build llama.cpp from source or submit an issue to LLamaSharp.\n" +
+                "4. One of the dependency of the native library is missed.\n");
         }
         NativeApi.llama_backend_init(false);
     }
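
For context, a minimal sketch of the kind of load-time probe that surfaces a message like this. The four hint strings are taken from the diff above; the class name, method name, library name passed to TryLoad, and the opening sentence of the message are illustrative assumptions, not LLamaSharp's actual loader code.

using System;
using System.Runtime.InteropServices;

// Illustrative only (assumes .NET Core 3.0+ for NativeLibrary): probes the native
// library up front so a multi-hint error can be thrown instead of a bare
// DllNotFoundException on the first P/Invoke.
internal static class LlamaNativeProbe
{
    public static void EnsureLoaded()
    {
        if (NativeLibrary.TryLoad("libllama", out _))
            return;

        throw new DllNotFoundException(
            "Failed to load the native llama library. Possible causes:\n" +
            "1. No LLamaSharp backend was installed. Please search LLamaSharp.Backend and install one of them. \n" +
            "2. You are using a device with only CPU but installed cuda backend. Please install cpu backend instead. \n" +
            "3. The backend is not compatible with your system cuda environment. Please check and fix it. If the environment is " +
            "expected not to be changed, then consider build llama.cpp from source or submit an issue to LLamaSharp.\n" +
            "4. One of the dependency of the native library is missed.\n");
    }
}

In the actual static constructor shown in the hunk, the check is followed by NativeApi.llama_backend_init(false).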

LLama/runtimes/libllama-cuda11.dll (2.59 MB): Binary file not shown.

LLama/runtimes/libllama-cuda11.so (2.92 MB): Binary file not shown.

LLama/runtimes/libllama-cuda12.dll (2.88 MB): Binary file not shown.

LLama/runtimes/libllama-cuda12.so (3.36 MB): Binary file not shown.

531 KB: Binary file not shown.

LLama/runtimes/libllama.dll (4.5 KB): Binary file not shown.

0 commit comments
