@@ -25,6 +25,9 @@ public unsafe partial class NativeApi
 {
     static NativeApi()
     {
+        // Try to load a preferred library, based on CPU feature detection
+        TryLoadLibrary();
+
         try
         {
            llama_empty_call();
@@ -35,11 +38,69 @@ static NativeApi()
                 "1. No LLamaSharp backend was installed. Please search LLamaSharp.Backend and install one of them. \n" +
                 "2. You are using a device with only CPU but installed cuda backend. Please install cpu backend instead. \n" +
                 "3. The backend is not compatible with your system cuda environment. Please check and fix it. If the environment is " +
-                "expected not to be changed, then consider build llama.cpp from source or submit an issue to LLamaSharp.\n" +
+                "expected not to be changed, then consider build llama.cpp from source or submit an issue to LLamaSharp.\n" +
                 "4. One of the dependency of the native library is missed.\n");
         }
-        NativeApi.llama_backend_init(false);
+        llama_backend_init(false);
+    }
+
+    /// <summary>
+    /// Try to load libllama, using CPU feature detection to try and load a more specialised DLL if possible
+    /// </summary>
+    /// <returns>The library handle to unload later, or IntPtr.Zero if no library was loaded</returns>
+    private static IntPtr TryLoadLibrary()
+    {
+#if NET6_0_OR_GREATER
+
+        if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
+        {
+            // All of the Windows libraries, in order of preference
+            return TryLoad("cu12.1.0/libllama.dll")
+                ?? TryLoad("cu11.7.1/libllama.dll")
+#if NET8_0_OR_GREATER
+                ?? TryLoad("avx512/libllama.dll", System.Runtime.Intrinsics.X86.Avx512F.IsSupported)
+#endif
+                ?? TryLoad("avx2/libllama.dll", System.Runtime.Intrinsics.X86.Avx2.IsSupported)
+                ?? TryLoad("avx/libllama.dll", System.Runtime.Intrinsics.X86.Avx.IsSupported)
+                ?? IntPtr.Zero;
+        }
+
+        if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
+        {
+            // All of the Linux libraries, in order of preference
+            return TryLoad("cu12.1.0/libllama.so")
+                ?? TryLoad("cu11.7.1/libllama.so")
+#if NET8_0_OR_GREATER
+                ?? TryLoad("avx512/libllama.so", System.Runtime.Intrinsics.X86.Avx512F.IsSupported)
+#endif
+                ?? TryLoad("avx2/libllama.so", System.Runtime.Intrinsics.X86.Avx2.IsSupported)
+                ?? TryLoad("avx/libllama.so", System.Runtime.Intrinsics.X86.Avx.IsSupported)
+                ?? IntPtr.Zero;
+        }
+
+        if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
+        {
+            return IntPtr.Zero;
+        }
+#endif
+
+        return IntPtr.Zero;
+
+#if NET6_0_OR_GREATER
+        // Try to load a DLL from the path if supported. Returns null if nothing is loaded.
+        static IntPtr? TryLoad(string path, bool supported = true)
+        {
+            if (!supported)
+                return null;
+
+            if (NativeLibrary.TryLoad(path, out var handle))
+                return handle;
+
+            return null;
+        }
+#endif
     }
+
     private const string libraryName = "libllama";

     /// <summary>
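
As a reference for the probing pattern above, here is a minimal standalone sketch (assuming a .NET 6+ console project; the Probe class, the candidate paths, and the messages are illustrative only, not part of LLamaSharp) that checks the same x86 feature flags and walks a preference-ordered list with NativeLibrary.TryLoad:

using System;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics.X86;

internal static class Probe
{
    public static void Main()
    {
        // Report which instruction sets the current CPU exposes (these are false on non-x86 hosts).
        Console.WriteLine($"AVX:  {Avx.IsSupported}");
        Console.WriteLine($"AVX2: {Avx2.IsSupported}");

        // Walk a preference-ordered candidate list and stop at the first library that loads.
        var candidates = new (string Path, bool Supported)[]
        {
            ("avx2/libllama.dll", Avx2.IsSupported),
            ("avx/libllama.dll", Avx.IsSupported),
        };

        foreach (var (path, supported) in candidates)
        {
            if (supported && NativeLibrary.TryLoad(path, out var handle))
            {
                Console.WriteLine($"Loaded {path}");
                return;
            }
        }

        Console.WriteLine("No specialised build found; the runtime's default probing would apply.");
    }
}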
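
TryLoadLibrary returns the handle of whichever library it managed to load, but the static constructor above discards it and appears to rely on the runtime finding the already-loaded module again when the libraryName P/Invokes are resolved. If that choice ever needs to be pinned explicitly, one possible approach (a sketch only, not part of this change; LlamaResolver and Install are hypothetical names) is to register a DllImportResolver that hands the preloaded handle back to the runtime:

using System;
using System.Reflection;
using System.Runtime.InteropServices;

internal static class LlamaResolver
{
    // Handle produced by an earlier, feature-aware NativeLibrary.TryLoad call.
    private static IntPtr _preloaded = IntPtr.Zero;

    public static void Install(IntPtr preloaded)
    {
        _preloaded = preloaded;

        // Route every [DllImport("libllama")] in this assembly to the preloaded module.
        NativeLibrary.SetDllImportResolver(typeof(LlamaResolver).Assembly, Resolve);
    }

    private static IntPtr Resolve(string libraryName, Assembly assembly, DllImportSearchPath? searchPath)
    {
        if (libraryName == "libllama" && _preloaded != IntPtr.Zero)
            return _preloaded;

        // Returning IntPtr.Zero falls back to the default resolution behaviour.
        return IntPtr.Zero;
    }
}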