File: cmake-Install-to-private-directories.patch

From: Mathieu Baudier <mbaudier@argeo.org>
Date: Wed, 11 Jun 2025 18:30:05 +0200
Subject: cmake-Install-to-private-directories

Install the libraries and public headers to a private directory, as their
interfaces are not yet stable.
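
A usage sketch, for illustration only and not part of the patch: assuming
prefix=/usr, a multiarch libdir, and that the unchanged Libs/Cflags entries
in llama.pc.in keep referring to ${libdir} and ${includedir}, downstream
builds would then locate the private directories through pkg-config rather
than hard-coded paths (the multiarch triplet below is only an example):

    pkg-config --cflags llama   # -> -I/usr/include/llama
    pkg-config --libs llama     # -> -L/usr/lib/x86_64-linux-gnu/llama -lllama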
---
 CMakeLists.txt    | 10 +++++++---
 cmake/llama.pc.in |  4 ++--
 2 files changed, 9 insertions(+), 5 deletions(-)

Index: llama.cpp/CMakeLists.txt
===================================================================
--- llama.cpp.orig/CMakeLists.txt
+++ llama.cpp/CMakeLists.txt
@@ -223,8 +223,8 @@ endif()
 include(GNUInstallDirs)
 include(CMakePackageConfigHelpers)
 
-set(LLAMA_INCLUDE_INSTALL_DIR ${CMAKE_INSTALL_INCLUDEDIR} CACHE PATH "Location of header  files")
-set(LLAMA_LIB_INSTALL_DIR     ${CMAKE_INSTALL_LIBDIR}     CACHE PATH "Location of library files")
+set(LLAMA_INCLUDE_INSTALL_DIR ${CMAKE_INSTALL_INCLUDEDIR}/llama CACHE PATH "Location of header  files")
+set(LLAMA_LIB_INSTALL_DIR     ${CMAKE_INSTALL_LIBDIR}/llama     CACHE PATH "Location of library files")
 set(LLAMA_BIN_INSTALL_DIR     ${CMAKE_INSTALL_BINDIR}     CACHE PATH "Location of binary  files")
 
 set(LLAMA_PUBLIC_HEADERS
@@ -235,7 +235,11 @@ set_target_properties(llama
     PROPERTIES
         PUBLIC_HEADER "${LLAMA_PUBLIC_HEADERS}")
 
-install(TARGETS llama LIBRARY PUBLIC_HEADER)
+install(TARGETS llama
+    # On Debian, we install the library and headers to a private directory "llama"
+    LIBRARY DESTINATION ${LLAMA_LIB_INSTALL_DIR}
+    PUBLIC_HEADER DESTINATION ${LLAMA_INCLUDE_INSTALL_DIR}
+)
 
 configure_package_config_file(
         ${CMAKE_CURRENT_SOURCE_DIR}/cmake/llama-config.cmake.in
Index: llama.cpp/cmake/llama.pc.in
===================================================================
--- llama.cpp.orig/cmake/llama.pc.in
+++ llama.cpp/cmake/llama.pc.in
@@ -1,7 +1,7 @@
 prefix=@CMAKE_INSTALL_PREFIX@
 exec_prefix=@CMAKE_INSTALL_PREFIX@
-libdir=@CMAKE_INSTALL_FULL_LIBDIR@
-includedir=@CMAKE_INSTALL_FULL_INCLUDEDIR@
+libdir=${exec_prefix}/@LLAMA_LIB_INSTALL_DIR@
+includedir=${prefix}/@LLAMA_INCLUDE_INSTALL_DIR@
 
 Name: llama
 Description: Port of Facebook's LLaMA model in C/C++