package/llama-cpp: bump to version b8117
Release notes: https://github.com/ggml-org/llama.cpp/releases
Merge BR2_PACKAGE_LLAMA_CPP_SERVER into BR2_PACKAGE_LLAMA_CPP_TOOLS, as
both of these options must be enabled to build tools like llama-cli and
llama-server. See upstream commit [1].
Since the Buildroot option BR2_PACKAGE_LLAMA_CPP_SERVER is removed, this
commit also removes it from support/testing/tests/package/test_aichat.py
which was using it.
[1] a180ba78c7
Signed-off-by: Joseph Kogut <joseph.kogut@gmail.com>
[Julien:
- reindent options in .mk
- remove BR2_PACKAGE_LLAMA_CPP_SERVER in test_aichat.py
]
Signed-off-by: Julien Olivain <ju.o@free.fr>
This commit is contained in:
Committed by: Julien Olivain
Parent commit: 5e78c6cf33
Commit: 05c36d5d87
@@ -146,6 +146,15 @@ endif
 
+comment "Legacy options removed in 2026.05"
+
+config BR2_PACKAGE_LLAMA_CPP_SERVER
+	bool "llama-cpp option removed"
+	select BR2_PACKAGE_LLAMA_CPP_TOOLS
+	select BR2_LEGACY
+	help
+	  Several tools (server, cli) now require building both
+	  tools and server, so enable both with the same config
+	  BR2_PACKAGE_LLAMA_CPP_TOOLS
+
 config BR2_PACKAGE_PYTHON3_OSSAUDIODEV
 	bool "python3 ossaudiodev module removed"
 	select BR2_LEGACY
 
||||
@@ -25,12 +25,8 @@ if BR2_PACKAGE_LLAMA_CPP
 
 config BR2_PACKAGE_LLAMA_CPP_TOOLS
 	bool "Enable tools"
 	help
-	  Build CLI tools like llama-cli, llama-bench, etc.
-
-config BR2_PACKAGE_LLAMA_CPP_SERVER
-	bool "Enable server"
-	help
-	  Build OpenAI API-compatible web server, llama-server.
+	  Build CLI tools like llama-cli, llama-server, llama-bench,
+	  etc.
 
 config BR2_PACKAGE_LLAMA_CPP_VULKAN
 	bool "Vulkan support"
 
||||
@@ -1,4 +1,4 @@
 # Locally calculated
-sha256 b2c4c89b6282c4ddc7c9c00c79af77d84811b6092aa47838a194e8a2981fa104 b7271.tar.gz
+sha256 4f9fdf018339783722936d537573d40cca596262e42714f597b1c93299113cea b8117.tar.gz
 # License
-sha256 e562a2ddfaf8280537795ac5ecd34e3012b6582a147ef69ba6a6a5c08c84757d LICENSE
+sha256 94f29bbed6a22c35b992c5c6ebf0e7c92f13b836b90f36f461c9cf2f0f1d010d LICENSE
 
||||
@@ -4,7 +4,7 @@
 #
 ################################################################################
 
-LLAMA_CPP_VERSION = b7271
+LLAMA_CPP_VERSION = b8117
 LLAMA_CPP_SOURCE = $(LLAMA_CPP_VERSION).tar.gz
 LLAMA_CPP_SITE = https://github.com/ggml-org/llama.cpp/archive/refs/tags
 LLAMA_CPP_LICENSE = MIT
||||
@@ -42,15 +42,13 @@ LLAMA_CPP_CONF_OPTS += -DLLAMA_CURL=OFF
 endif
 
 ifeq ($(BR2_PACKAGE_LLAMA_CPP_TOOLS),y)
-LLAMA_CPP_CONF_OPTS += -DLLAMA_BUILD_TOOLS=ON
+LLAMA_CPP_CONF_OPTS += \
+	-DLLAMA_BUILD_SERVER=ON \
+	-DLLAMA_BUILD_TOOLS=ON
 else
-LLAMA_CPP_CONF_OPTS += -DLLAMA_BUILD_TOOLS=OFF
-endif
-
-ifeq ($(BR2_PACKAGE_LLAMA_CPP_SERVER),y)
-LLAMA_CPP_CONF_OPTS += -DLLAMA_BUILD_SERVER=ON
-else
-LLAMA_CPP_CONF_OPTS += -DLLAMA_BUILD_SERVER=OFF
+LLAMA_CPP_CONF_OPTS += \
+	-DLLAMA_BUILD_SERVER=OFF \
+	-DLLAMA_BUILD_TOOLS=OFF
 endif
 
 ifeq ($(BR2_PACKAGE_LLAMA_CPP_VULKAN),y)
 
||||
@@ -23,7 +23,6 @@ class TestAiChat(infra.basetest.BRTest):
 BR2_PACKAGE_LIBCURL=y
 BR2_PACKAGE_LIBCURL_CURL=y
 BR2_PACKAGE_LLAMA_CPP=y
-BR2_PACKAGE_LLAMA_CPP_SERVER=y
 BR2_PACKAGE_LLAMA_CPP_TOOLS=y
 BR2_PACKAGE_OPENSSL=y
 BR2_ROOTFS_OVERLAY="{rootfs_overlay}"
 
||||
Reference in New Issue
Block a user