Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit 8d75016

Browse files
committed
Install required runtime dlls to package directory on windows
1 parent acf18fc commit 8d75016

File tree

3 files changed

+12
-3
lines changed

3 files changed

+12
-3
lines changed

CMakeLists.txt

+10-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
cmake_minimum_required(VERSION 3.4...3.22)
1+
cmake_minimum_required(VERSION 3.21)
22

33
project(llama_cpp)
44

@@ -33,4 +33,13 @@ if (LLAMA_BUILD)
3333
FRAMEWORK DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
3434
RESOURCE DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
3535
)
36+
# Workaround for Windows + CUDA https://github.com/abetlen/llama-cpp-python/issues/563
37+
install(
38+
FILES $<TARGET_RUNTIME_DLLS:llama>
39+
DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
40+
)
41+
install(
42+
FILES $<TARGET_RUNTIME_DLLS:llama>
43+
DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
44+
)
3645
endif()

llama_cpp/llama_cpp.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ def _load_shared_library(lib_base_name: str):
5858
if "CUDA_PATH" in os.environ:
5959
os.add_dll_directory(os.path.join(os.environ["CUDA_PATH"], "bin"))
6060
os.add_dll_directory(os.path.join(os.environ["CUDA_PATH"], "lib"))
61-
cdll_args["winmode"] = 0
61+
cdll_args["winmode"] = ctypes.RTLD_GLOBAL
6262

6363
# Try to load the shared library, handling potential errors
6464
for _lib_path in _lib_paths:

pyproject.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ all = [
5454
[tool.scikit-build]
5555
wheel.packages = ["llama_cpp"]
5656
cmake.verbose = true
57-
cmake.minimum-version = "3.12"
57+
cmake.minimum-version = "3.21"
5858
minimum-version = "0.5"
5959
sdist.exclude = [".git", "vendor/llama.cpp/.git"]
6060

0 commit comments

Comments (0)