aboutsummaryrefslogtreecommitdiff
path: root/vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp
diff options
context:
space:
mode:
authorEthan Morgan <ethan@gweithio.com>2026-02-14 16:44:06 +0000
committerEthan Morgan <ethan@gweithio.com>2026-02-14 16:44:06 +0000
commit54409423f767d8b1cf30cb7d0efca6b4ca138823 (patch)
treed915ac7828703ce4b963efdd9728a1777ba18c1e /vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp
move to own git server (HEAD, master)
Diffstat (limited to 'vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp')
-rw-r--r--vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/portfile.cmake9
-rw-r--r--vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/project/CMakeLists.txt24
-rw-r--r--vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/project/main.cxx15
-rw-r--r--vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/vcpkg.json17
4 files changed, 65 insertions, 0 deletions
diff --git a/vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/portfile.cmake b/vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/portfile.cmake
new file mode 100644
index 0000000..0122f05
--- /dev/null
+++ b/vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/portfile.cmake
@@ -0,0 +1,9 @@
+# CI test port: builds the bundled sample project against an installed
+# llama-cpp. The successful configure+build IS the test; nothing is
+# packaged, hence the empty-package policy.
+set(VCPKG_POLICY_EMPTY_PACKAGE enabled)
+
+# Acquire a pkg-config binary and export it via PKG_CONFIG so that
+# find_package(PkgConfig) inside the test project can locate it.
+vcpkg_find_acquire_program(PKGCONFIG)
+set(ENV{PKG_CONFIG} "${PKGCONFIG}")
+
+# Configure and build the test project shipped next to this portfile.
+vcpkg_cmake_configure(
+ SOURCE_PATH "${CURRENT_PORT_DIR}/project"
+)
+vcpkg_cmake_build()
diff --git a/vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/project/CMakeLists.txt b/vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/project/CMakeLists.txt
new file mode 100644
index 0000000..a3aa216
--- /dev/null
+++ b/vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/project/CMakeLists.txt
@@ -0,0 +1,24 @@
+cmake_minimum_required(VERSION 3.16)
+project(llama-cpp-test LANGUAGES CXX)
+
+# Consume llama-cpp via its exported CMake config package.
+find_package(llama CONFIG REQUIRED)
+
+add_executable(test-cmake main.cxx)
+# Alias the imported target under a namespaced name to verify it can be
+# re-exported and linked through the :: form.
+add_library(imported::llama ALIAS llama)
+target_link_libraries(test-cmake PRIVATE imported::llama)
+
+# Consume llama-cpp a second time via its pkg-config metadata.
+find_package(PkgConfig REQUIRED)
+# Keywords precede the module spec, per the documented signature:
+# pkg_check_modules(<prefix> [REQUIRED] [IMPORTED_TARGET] <moduleSpec>...)
+pkg_check_modules(llama-cpp REQUIRED IMPORTED_TARGET llama)
+
+add_executable(test-pkgconfig main.cxx)
+target_link_libraries(test-pkgconfig PRIVATE PkgConfig::llama-cpp)
+
+# Verify that ggml::ggml-vulkan can be used with apps
+# which instantiate VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE.
+if(TARGET ggml::ggml-vulkan)
+ find_package(Vulkan REQUIRED)
+ target_link_libraries(test-cmake PRIVATE Vulkan::Vulkan)
+ target_compile_definitions(test-cmake PRIVATE VULKAN_HPP_DISPATCH_LOADER_DYNAMIC=1)
+ target_link_libraries(test-pkgconfig PRIVATE Vulkan::Vulkan)
+ target_compile_definitions(test-pkgconfig PRIVATE VULKAN_HPP_DISPATCH_LOADER_DYNAMIC=1)
+endif()
diff --git a/vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/project/main.cxx b/vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/project/main.cxx
new file mode 100644
index 0000000..f407142
--- /dev/null
+++ b/vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/project/main.cxx
@@ -0,0 +1,15 @@
+#include <llama.h>
+
+// Verify that ggml::ggml-vulkan can be used with apps which
+// instantiate VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE.
+#if defined(VULKAN_HPP_DISPATCH_LOADER_DYNAMIC) && VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+#include <vulkan/vulkan.hpp>
+VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
+#endif
+
+// Link smoke test: reference one llama symbol and one ggml backend symbol
+// so the linker must resolve both libraries.
+int main()
+{
+ auto context_params = llama_context_default_params();
+ // Silence -Wunused-variable; the call above is kept so that
+ // llama_context_default_params must still be resolved at link time.
+ (void)context_params;
+ ggml_backend_load_all();
+ return 0;
+}
diff --git a/vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/vcpkg.json b/vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/vcpkg.json
new file mode 100644
index 0000000..4a46322
--- /dev/null
+++ b/vcpkg/scripts/test_ports/vcpkg-ci-llama-cpp/vcpkg.json
@@ -0,0 +1,17 @@
+{
+ "name": "vcpkg-ci-llama-cpp",
+ "version-string": "ci",
+ "description": "Port to validate llama-cpp",
+ "homepage": "https://github.com/microsoft/vcpkg",
+ "license": "MIT",
+ "dependencies": [
+ {
+ "name": "llama-cpp",
+ "default-features": false
+ },
+ {
+ "name": "vcpkg-cmake",
+ "host": true
+ }
+ ]
+}