
Commit 3875611

Merge pull request #957 from m0nsky/split-nuspec

Split platform-specific binaries for NuGet backends

2 parents 6862216 + 0d4b328

9 files changed: +204 -64 lines
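In short: each backend package (Cuda11, Cuda12, Vulkan) becomes a metapackage. The native .dll/.so binaries move into new platform-specific .Windows and .Linux packages, and the parent package keeps only its metadata plus <dependencies> entries pointing at them. Consumers keep referencing the same package id and NuGet restores the platform packages transitively; anyone who wants a single platform can reference the .Windows or .Linux package directly. A minimal sketch of a consuming project (the version number is illustrative, not from this commit):

  <!-- MyApp.csproj: one reference to the metapackage pulls in both platform packages -->
  <ItemGroup>
    <PackageReference Include="LLamaSharp.Backend.Cuda11" Version="0.12.0" />
  </ItemGroup>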
LLama/runtimes/build/LLamaSharp.Backend.Cuda11.Linux.nuspec

Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="utf-8"?>
+<package >
+  <metadata>
+    <id>LLamaSharp.Backend.Cuda11.Linux</id>
+    <version>$version$</version>
+    <title>LLamaSharp.Backend.Cuda11.Linux</title>
+    <authors>llama.cpp Authors</authors>
+    <requireLicenseAcceptance>false</requireLicenseAcceptance>
+    <license type="expression">MIT</license>
+    <projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
+    <description>LLamaSharp.Backend.Cuda11.Linux contains the Linux binaries for LLamaSharp with Cuda11 support.</description>
+    <releaseNotes></releaseNotes>
+    <copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
+    <tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
+  </metadata>
+
+  <files>
+    <file src="LLamaSharpBackend.props" target="build/netstandard2.0/LLamaSharp.Backend.Cuda11.props" />
+    <file src="runtimes/deps/cu11.7.1/libllava_shared.so" target="runtimes/linux-x64/native/cuda11/libllava_shared.so" />
+    <file src="runtimes/deps/cu11.7.1/libggml.so" target="runtimes/linux-x64/native/cuda11/libggml.so" />
+    <file src="runtimes/deps/cu11.7.1/libllama.so" target="runtimes/linux-x64/native/cuda11/libllama.so" />
+
+    <file src="icon512.png" target="icon512.png" />
+  </files>
+</package>
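A note on the $version$ placeholder used throughout these files: it is a NuGet replacement token, substituted at pack time rather than stored in the nuspec. Presumably the build pipeline supplies it, along the lines of `nuget pack LLamaSharp.Backend.Cuda11.Linux.nuspec -Properties version=0.12.0` (value illustrative). Because every nuspec in this commit uses the same token, one pack run produces a matched set of meta- and platform packages.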
LLama/runtimes/build/LLamaSharp.Backend.Cuda11.Windows.nuspec

Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="utf-8"?>
+<package >
+  <metadata>
+    <id>LLamaSharp.Backend.Cuda11.Windows</id>
+    <version>$version$</version>
+    <title>LLamaSharp.Backend.Cuda11.Windows</title>
+    <authors>llama.cpp Authors</authors>
+    <requireLicenseAcceptance>false</requireLicenseAcceptance>
+    <license type="expression">MIT</license>
+    <projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
+    <description>LLamaSharp.Backend.Cuda11.Windows contains the Windows binaries for LLamaSharp with Cuda11 support.</description>
+    <releaseNotes></releaseNotes>
+    <copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
+    <tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
+  </metadata>
+
+  <files>
+    <file src="LLamaSharpBackend.props" target="build/netstandard2.0/LLamaSharp.Backend.Cuda11.props" />
+    <file src="runtimes/deps/cu11.7.1/llava_shared.dll" target="runtimes\win-x64\native\cuda11\llava_shared.dll" />
+    <file src="runtimes/deps/cu11.7.1/ggml.dll" target="runtimes\win-x64\native\cuda11\ggml.dll" />
+    <file src="runtimes/deps/cu11.7.1/llama.dll" target="runtimes\win-x64\native\cuda11\llama.dll" />
+
+    <file src="icon512.png" target="icon512.png" />
+  </files>
+</package>
LLama/runtimes/build/LLamaSharp.Backend.Cuda11.nuspec

Lines changed: 22 additions & 26 deletions
@@ -1,31 +1,27 @@
 <?xml version="1.0" encoding="utf-8"?>
 <package >
-  <metadata>
-    <id>LLamaSharp.Backend.Cuda11</id>
-    <version>$version$</version>
-    <title>LLamaSharp.Backend.Cuda11, the backend for LLamaSharp</title>
-    <authors>llama.cpp Authors</authors>
-    <requireLicenseAcceptance>false</requireLicenseAcceptance>
-    <license type="expression">MIT</license>
-    <icon>icon512.png</icon>
-    <projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
-    <description>LLamaSharp.Backend.Cuda11 is a backend for LLamaSharp to use with Cuda11.</description>
-    <releaseNotes></releaseNotes>
-    <copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
-    <tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
-  </metadata>
+  <metadata>
+    <id>LLamaSharp.Backend.Cuda11</id>
+    <version>$version$</version>
+    <title>LLamaSharp.Backend.Cuda11, the backend for LLamaSharp</title>
+    <authors>llama.cpp Authors</authors>
+    <requireLicenseAcceptance>false</requireLicenseAcceptance>
+    <license type="expression">MIT</license>
+    <icon>icon512.png</icon>
+    <projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
+    <description>LLamaSharp.Backend.Cuda11 is a backend for LLamaSharp to use with Cuda11.</description>
+    <releaseNotes></releaseNotes>
+    <copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
+    <tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
 
-  <files>
-    <file src="LLamaSharpBackend.props" target="build/netstandard2.0/LLamaSharp.Backend.Cuda11.props" />
+    <!-- Dependencies on platform-specific packages -->
+    <dependencies>
+      <dependency id="LLamaSharp.Backend.Cuda11.Windows" version="$version$" />
+      <dependency id="LLamaSharp.Backend.Cuda11.Linux" version="$version$" />
+    </dependencies>
+  </metadata>
 
-  <file src="runtimes/deps/cu11.7.1/libllava_shared.so" target="runtimes\linux-x64\native\cuda11\libllava_shared.so" />
-  <file src="runtimes/deps/cu11.7.1/llava_shared.dll" target="runtimes\win-x64\native\cuda11\llava_shared.dll" />
-
-  <file src="runtimes/deps/cu11.7.1/ggml.dll" target="runtimes\win-x64\native\cuda11\ggml.dll" />
-  <file src="runtimes/deps/cu11.7.1/llama.dll" target="runtimes\win-x64\native\cuda11\llama.dll" />
-  <file src="runtimes/deps/cu11.7.1/libggml.so" target="runtimes\linux-x64\native\cuda11\libggml.so" />
-  <file src="runtimes/deps/cu11.7.1/libllama.so" target="runtimes\linux-x64\native\cuda11\libllama.so" />
-
-  <file src="icon512.png" target="icon512.png" />
-  </files>
+  <files>
+    <file src="icon512.png" target="icon512.png" />
+  </files>
 </package>
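With the binaries gone, the metapackage's only job is to fan out to the platform packages. Note that nuspec <dependencies> cannot be conditioned on the consumer's operating system, so restoring LLamaSharp.Backend.Cuda11 brings down both the Windows and the Linux package; the unused set simply never gets loaded. Getting the native files into the consumer's build output is the job of LLamaSharpBackend.props, which each platform package installs under build/netstandard2.0/. That props file is not shown in this diff; below is a minimal sketch of the pattern such a file typically follows (the paths and item metadata are assumptions for illustration, not the shipped content):

  <?xml version="1.0" encoding="utf-8"?>
  <Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
    <ItemGroup>
      <!-- Copy the package's native libraries next to the built app,
           preserving the runtimes/<rid>/native/<backend>/ layout so the
           loader can find the right binary at run time. -->
      <None Include="$(MSBuildThisFileDirectory)..\..\runtimes\**\*.*">
        <Link>runtimes\%(RecursiveDir)%(Filename)%(Extension)</Link>
        <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
        <Visible>false</Visible>
      </None>
    </ItemGroup>
  </Project>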
LLama/runtimes/build/LLamaSharp.Backend.Cuda12.Linux.nuspec

Lines changed: 26 additions & 0 deletions
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<package >
+  <metadata>
+    <id>LLamaSharp.Backend.Cuda12.Linux</id>
+    <version>$version$</version>
+    <title>LLamaSharp.Backend.Cuda12.Linux</title>
+    <authors>llama.cpp Authors</authors>
+    <requireLicenseAcceptance>false</requireLicenseAcceptance>
+    <license type="expression">MIT</license>
+    <projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
+    <description>LLamaSharp.Backend.Cuda12.Linux contains the Linux binaries for LLamaSharp with Cuda12 support.</description>
+    <releaseNotes></releaseNotes>
+    <copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
+    <tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
+  </metadata>
+
+  <files>
+    <file src="LLamaSharpBackend.props" target="build/netstandard2.0/LLamaSharp.Backend.Cuda12.props" />
+
+    <file src="runtimes/deps/cu12.2.0/libllava_shared.so" target="runtimes/linux-x64/native/cuda12/libllava_shared.so" />
+    <file src="runtimes/deps/cu12.2.0/libggml.so" target="runtimes/linux-x64/native/cuda12/libggml.so" />
+    <file src="runtimes/deps/cu12.2.0/libllama.so" target="runtimes/linux-x64/native/cuda12/libllama.so" />
+
+    <file src="icon512.png" target="icon512.png" />
+  </files>
+</package>
LLama/runtimes/build/LLamaSharp.Backend.Cuda12.Windows.nuspec

Lines changed: 26 additions & 0 deletions
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<package >
+  <metadata>
+    <id>LLamaSharp.Backend.Cuda12.Windows</id>
+    <version>$version$</version>
+    <title>LLamaSharp.Backend.Cuda12.Windows</title>
+    <authors>llama.cpp Authors</authors>
+    <requireLicenseAcceptance>false</requireLicenseAcceptance>
+    <license type="expression">MIT</license>
+    <projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
+    <description>LLamaSharp.Backend.Cuda12.Windows contains the Windows binaries for LLamaSharp with Cuda12 support.</description>
+    <releaseNotes></releaseNotes>
+    <copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
+    <tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
+  </metadata>
+
+  <files>
+    <file src="LLamaSharpBackend.props" target="build/netstandard2.0/LLamaSharp.Backend.Cuda12.props" />
+
+    <file src="runtimes/deps/cu12.2.0/llava_shared.dll" target="runtimes\win-x64\native\cuda12\llava_shared.dll" />
+    <file src="runtimes/deps/cu12.2.0/ggml.dll" target="runtimes\win-x64\native\cuda12\ggml.dll" />
+    <file src="runtimes/deps/cu12.2.0/llama.dll" target="runtimes\win-x64\native\cuda12\llama.dll" />
+
+    <file src="icon512.png" target="icon512.png" />
+  </files>
+</package>
LLama/runtimes/build/LLamaSharp.Backend.Cuda12.nuspec

Lines changed: 23 additions & 26 deletions
@@ -1,31 +1,28 @@
 <?xml version="1.0" encoding="utf-8"?>
 <package >
-  <metadata>
-    <id>LLamaSharp.Backend.Cuda12</id>
-    <version>$version$</version>
-    <title>LLamaSharp.Backend.Cuda12, the backend for LLamaSharp</title>
-    <authors>llama.cpp Authors</authors>
-    <requireLicenseAcceptance>false</requireLicenseAcceptance>
-    <license type="expression">MIT</license>
-    <icon>icon512.png</icon>
-    <projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
-    <description>LLamaSharp.Backend.Cuda12 is a backend for LLamaSharp to use with Cuda12.</description>
-    <releaseNotes></releaseNotes>
-    <copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
-    <tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
-  </metadata>
+  <metadata>
+    <id>LLamaSharp.Backend.Cuda12</id>
+    <version>$version$</version>
+    <title>LLamaSharp.Backend.Cuda12, the backend for LLamaSharp</title>
+    <authors>llama.cpp Authors</authors>
+    <requireLicenseAcceptance>false</requireLicenseAcceptance>
+    <license type="expression">MIT</license>
+    <icon>icon512.png</icon>
+    <projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
+    <description>LLamaSharp.Backend.Cuda12 is a backend for LLamaSharp to use with Cuda12.</description>
+    <releaseNotes></releaseNotes>
+    <copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
+    <tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
 
-  <files>
-    <file src="LLamaSharpBackend.props" target="build/netstandard2.0/LLamaSharp.Backend.Cuda12.props" />
+    <!-- Dependencies on platform-specific packages -->
+    <dependencies>
+      <dependency id="LLamaSharp.Backend.Cuda12.Windows" version="$version$" />
+      <dependency id="LLamaSharp.Backend.Cuda12.Linux" version="$version$" />
+    </dependencies>
+  </metadata>
 
-  <file src="runtimes/deps/cu12.2.0/libllava_shared.so" target="runtimes\linux-x64\native\cuda12\libllava_shared.so" />
-  <file src="runtimes/deps/cu12.2.0/llava_shared.dll" target="runtimes\win-x64\native\cuda12\llava_shared.dll" />
-
-  <file src="runtimes/deps/cu12.2.0/ggml.dll" target="runtimes\win-x64\native\cuda12\ggml.dll" />
-  <file src="runtimes/deps/cu12.2.0/llama.dll" target="runtimes\win-x64\native\cuda12\llama.dll" />
-  <file src="runtimes/deps/cu12.2.0/libggml.so" target="runtimes\linux-x64\native\cuda12\libggml.so" />
-  <file src="runtimes/deps/cu12.2.0/libllama.so" target="runtimes\linux-x64\native\cuda12\libllama.so" />
-
-  <file src="icon512.png" target="icon512.png" />
-  </files>
+  <files>
+    <file src="LLamaSharpBackend.props" target="build/netstandard2.0/LLamaSharp.Backend.Cuda12.props" />
+    <file src="icon512.png" target="icon512.png" />
+  </files>
 </package>
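One nuance of the version="$version$" pins above: in nuspec dependency syntax a bare version is a lower bound (>= that version), not an exact match, so NuGet could in principle resolve a newer platform package than the metapackage that requested it. An exact pin would use bracket range notation, e.g. (a hypothetical variant, not what this commit ships):

  <dependency id="LLamaSharp.Backend.Cuda12.Windows" version="[$version$]" />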
LLama/runtimes/build/LLamaSharp.Backend.Vulkan.Linux.nuspec

Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="utf-8"?>
+<package >
+  <metadata>
+    <id>LLamaSharp.Backend.Vulkan.Linux</id>
+    <version>$version$</version>
+    <title>LLamaSharp.Backend.Vulkan.Linux</title>
+    <authors>llama.cpp Authors</authors>
+    <requireLicenseAcceptance>false</requireLicenseAcceptance>
+    <license type="expression">MIT</license>
+    <projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
+    <description>LLamaSharp.Backend.Vulkan.Linux contains the Linux binaries for LLamaSharp with Vulkan support.</description>
+    <releaseNotes></releaseNotes>
+    <copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
+    <tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
+  </metadata>
+
+  <files>
+    <file src="LLamaSharpBackend.props" target="build/netstandard2.0/LLamaSharp.Backend.Vulkan.props" />
+    <file src="runtimes/deps/vulkan/libllava_shared.so" target="runtimes/linux-x64/native/vulkan/libllava_shared.so" />
+    <file src="runtimes/deps/vulkan/libggml.so" target="runtimes/linux-x64/native/vulkan/libggml.so" />
+    <file src="runtimes/deps/vulkan/libllama.so" target="runtimes/linux-x64/native/vulkan/libllama.so" />
+
+    <file src="icon512.png" target="icon512.png" />
+  </files>
+</package>
LLama/runtimes/build/LLamaSharp.Backend.Vulkan.Windows.nuspec

Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="utf-8"?>
+<package >
+  <metadata>
+    <id>LLamaSharp.Backend.Vulkan.Windows</id>
+    <version>$version$</version>
+    <title>LLamaSharp.Backend.Vulkan.Windows</title>
+    <authors>llama.cpp Authors</authors>
+    <requireLicenseAcceptance>false</requireLicenseAcceptance>
+    <license type="expression">MIT</license>
+    <projectUrl>https://github.com/SciSharp/LLamaSharp</projectUrl>
+    <description>LLamaSharp.Backend.Vulkan.Windows contains the Windows binaries for LLamaSharp with Vulkan support.</description>
+    <releaseNotes></releaseNotes>
+    <copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
+    <tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
+  </metadata>
+
+  <files>
+    <file src="LLamaSharpBackend.props" target="build/netstandard2.0/LLamaSharp.Backend.Vulkan.props" />
+    <file src="runtimes/deps/vulkan/llava_shared.dll" target="runtimes\win-x64\native\vulkan\llava_shared.dll" />
+    <file src="runtimes/deps/vulkan/ggml.dll" target="runtimes\win-x64\native\vulkan\ggml.dll" />
+    <file src="runtimes/deps/vulkan/llama.dll" target="runtimes\win-x64\native\vulkan\llama.dll" />
+    <file src="icon512.png" target="icon512.png" />
+  </files>
+</package>

LLama/runtimes/build/LLamaSharp.Backend.Vulkan.nuspec

Lines changed: 8 additions & 12 deletions
@@ -13,19 +13,15 @@
     <releaseNotes></releaseNotes>
     <copyright>Copyright 2023 The llama.cpp Authors. All rights reserved.</copyright>
     <tags>LLamaSharp LLama LLM GPT AI ChatBot SciSharp</tags>
+
+    <!-- Dependencies on platform-specific packages -->
+    <dependencies>
+      <dependency id="LLamaSharp.Backend.Vulkan.Windows" version="$version$" />
+      <dependency id="LLamaSharp.Backend.Vulkan.Linux" version="$version$" />
+    </dependencies>
   </metadata>
-
+
   <files>
-    <file src="LLamaSharpBackend.props" target="build/netstandard2.0/LLamaSharp.Backend.Vulkan.props" />
-
-    <file src="runtimes/deps/vulkan/libllava_shared.so" target="runtimes\linux-x64\native\vulkan\libllava_shared.so" />
-    <file src="runtimes/deps/vulkan/llava_shared.dll" target="runtimes\win-x64\native\vulkan\llava_shared.dll" />
-
-    <file src="runtimes/deps/vulkan/ggml.dll" target="runtimes\win-x64\native\vulkan\ggml.dll" />
-    <file src="runtimes/deps/vulkan/llama.dll" target="runtimes\win-x64\native\vulkan\llama.dll" />
-    <file src="runtimes/deps/vulkan/libggml.so" target="runtimes\linux-x64\native\vulkan\libggml.so" />
-    <file src="runtimes/deps/vulkan/libllama.so" target="runtimes\linux-x64\native\vulkan\libllama.so" />
-
     <file src="icon512.png" target="icon512.png" />
   </files>
-</package>
+</package>
