File tree: 2 files changed, +6 −0 lines changed
lines changed Original file line number Diff line number Diff line change 3434 rocmGpuTargets ? builtins . concatStringsSep ";" rocmPackages . clr . gpuTargets ,
3535 enableCurl ? true ,
3636 useVulkan ? false ,
37+ useRpc ? false ,
3738 llamaVersion ? "0.0.0" , # Arbitrary version, substituted by the flake
3839
3940 # It's necessary to consistently use backendStdenv when building with CUDA support,
@@ -175,6 +176,7 @@ effectiveStdenv.mkDerivation (finalAttrs: {
175176 ( cmakeBool "GGML_METAL" useMetalKit )
176177 ( cmakeBool "GGML_VULKAN" useVulkan )
177178 ( cmakeBool "GGML_STATIC" enableStatic )
179+ ( cmakeBool "GGML_RPC" useRpc )
178180 ]
179181 ++ optionals useCuda [
180182 (
# Build script for the ggml RPC server tool.
set(RPC_SERVER_TARGET rpc-server)

add_executable(${RPC_SERVER_TARGET} rpc-server.cpp)
target_link_libraries(${RPC_SERVER_TARGET} PRIVATE ggml)
target_compile_features(${RPC_SERVER_TARGET} PRIVATE cxx_std_17)

# Install the rpc-server binary only when tool installation is enabled
# (LLAMA_TOOLS_INSTALL is defined by the top-level project).
if(LLAMA_TOOLS_INSTALL)
  install(TARGETS ${RPC_SERVER_TARGET} RUNTIME)
endif()
You can’t perform that action at this time.
0 commit comments