_realname=llama.cpp
pkgbase=mingw-w64-${_realname}
pkgname=("${MINGW_PACKAGE_PREFIX}-${_realname}")
epoch=1
pkgver=b6646
pkgrel=1
pkgdesc="Library and tools for running inference with Meta's LLaMA model (and derivatives) in C/C++ (mingw-w64)"
arch=('any')
mingw_arch=('ucrt64' 'clang64' 'clangarm64' 'mingw64')
url="https://github.com/ggml-org/llama.cpp"
license=('spdx:MIT')
depends=(
  "${MINGW_PACKAGE_PREFIX}-curl"
  "${MINGW_PACKAGE_PREFIX}-cc-libs"
  "${MINGW_PACKAGE_PREFIX}-ggml"
  "${MINGW_PACKAGE_PREFIX}-openssl"
)
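# ggml is a runtime dependency because the build below links against the
# system copy via -DLLAMA_USE_SYSTEM_GGML=ON instead of the sources bundled
# with llama.cpp.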
makedepends=(
  "${MINGW_PACKAGE_PREFIX}-cc"
  "${MINGW_PACKAGE_PREFIX}-cmake"
  "${MINGW_PACKAGE_PREFIX}-ninja"
)
source=("https://github.com/ggml-org/llama.cpp/archive/${pkgver}/${_realname}-${pkgver}.tar.gz")
sha256sums=('f480793817199febfc10a800115a7f23c7c8f1a07aabd3b3375d8a87bf223554')
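# After bumping pkgver, the checksum can be refreshed with `updpkgsums`
# (from pacman-contrib), or by copying the value printed by `makepkg -g`.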
build() {
  mkdir -p "${srcdir}/build-${MSYSTEM}" && cd "${srcdir}/build-${MSYSTEM}"

  # check_option succeeds when the makepkg "debug" option is in the given
  # state ("n" = disabled), so regular builds are configured as Release and
  # debug builds as Debug.
  declare -a extra_config
  if check_option "debug" "n"; then
    extra_config+=("-DCMAKE_BUILD_TYPE=Release")
  else
    extra_config+=("-DCMAKE_BUILD_TYPE=Debug")
  fi

  # MSYS2_ARG_CONV_EXCL keeps MSYS2 from rewriting the install prefix as a
  # Windows path; _WIN32_WINNT=0x0A00 targets the Windows 10 API level.
  CXXFLAGS+=" -D_WIN32_WINNT=0x0A00" \
  MSYS2_ARG_CONV_EXCL="-DCMAKE_INSTALL_PREFIX=" \
  cmake \
    -GNinja \
    -DCMAKE_INSTALL_PREFIX="${MINGW_PREFIX}" \
    "${extra_config[@]}" \
    -DBUILD_SHARED_LIBS=ON \
    -DLLAMA_BUILD_EXAMPLES=ON \
    -DLLAMA_ALL_WARNINGS=OFF \
    -DLLAMA_BUILD_TESTS=OFF \
    -DLLAMA_CURL=ON \
    -DLLAMA_OPENSSL=ON \
    -DLLAMA_USE_SYSTEM_GGML=ON \
    ../${_realname}-${pkgver}

  cmake --build .
}
package() {
  cd "${srcdir}/build-${MSYSTEM}"
  DESTDIR="${pkgdir}" cmake --install .

  # Prefix installed executables and scripts with "llama-" to avoid name
  # clashes with other packages; DLLs and already-prefixed files are left alone.
  for i in "${pkgdir}"${MINGW_PREFIX}/bin/*.{exe,py}; do
    filename=$(basename "$i")
    if [[ ! ("$filename" =~ ^llama) ]]; then
      mv "${i}" "${pkgdir}${MINGW_PREFIX}/bin/llama-${filename}"
    fi
  done

  install -Dm644 "${srcdir}"/${_realname}-${pkgver}/LICENSE \
    "${pkgdir}"${MINGW_PREFIX}/share/licenses/${_realname}/LICENSE
}
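
# A typical local build, assuming an MSYS2 UCRT64 shell with the base-devel
# and mingw-w64-ucrt-x86_64-toolchain groups installed (adjust MINGW_ARCH for
# the other environments listed in mingw_arch):
#   MINGW_ARCH=ucrt64 makepkg-mingw -sCLf
#   pacman -U mingw-w64-ucrt-x86_64-llama.cpp-*-any.pkg.tar.zst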