# Maintainer: robertfoster
pkgname=llama.cpp-clblas-git
pkgver=b4082.r11.4047be74d
pkgrel=1
pkgdesc="Port of Facebook's LLaMA model in C/C++ (with OpenCL optimizations)"
arch=('armv7h' 'aarch64' 'x86_64')
url="https://github.com/ggerganov/llama.cpp"
license=("MIT")
depends=('clblast')
makedepends=(
  'clblast'
  'cmake'
  'git'
)
conflicts=("llama.cpp")
provides=("llama.cpp")
source=("llama.cpp::git+${url}"
"kompute::git+https://github.com/nomic-ai/kompute.git"
"llama.cpp.conf"
"llama.cpp.service"
)
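# pkgver() below rewrites the `git describe --tags` output into a
# pacman-friendly version string, e.g. (when HEAD is ahead of the latest tag):
#   b4082-11-g4047be74d  ->  b4082.r11.4047be74d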
pkgver() {
cd "${srcdir}/llama.cpp"
printf "%s" "$(git describe --tags | sed 's/\([^-]*-\)g/r\1/;s/-/./g')"
}
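# prepare() below points the kompute submodule at the clone fetched via
# source=() above, so `git submodule update` works from the local copy
# (protocol.file.allow=always permits the local file transport for this).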
prepare() {
cd "${srcdir}/llama.cpp"
git submodule init
git config submodule.kompute.url "${srcdir}/kompute"
git -c protocol.file.allow=always submodule update
}
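# Note: -DGGML_CLBLAST=ON is assumed to be the switch that enables the CLBlast
# (OpenCL) backend; newer upstream revisions may rename or drop this option.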
build() {
  local _cmake_args=(
    -B build
    -S .
    -DCMAKE_INSTALL_PREFIX=/usr
    -DCMAKE_BUILD_TYPE=Release
    -DGGML_CLBLAST=ON
  )
  cd "${srcdir}/llama.cpp"
  cmake "${_cmake_args[@]}"
  cmake --build build
}
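# package() below also removes the installed ggml headers, presumably to avoid
# file conflicts with other ggml-shipping packages (an assumption, not
# documented here).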
package() {
cd "${srcdir}/llama.cpp"
DESTDIR="${pkgdir}" cmake --install build
# systemd
install -D -m644 "${srcdir}/llama.cpp.conf" \
"${pkgdir}/etc/conf.d/llama.cpp"
install -D -m644 "${srcdir}/llama.cpp.service" \
-t "${pkgdir}/usr/lib/systemd/system"
rm "${pkgdir}/usr/include/"ggml*
}
sha256sums=('SKIP'
            'SKIP'
            '53fa70cfe40cb8a3ca432590e4f76561df0f129a31b121c9b4b34af0da7c4d87'
            '1fc9d4f0cfa407404acc3859c26c53a79d14f5e5bc72f21084d87dde04e36f20')
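# Usage sketch (assumes building from the directory containing this PKGBUILD
# and the files listed in source=() above):
#   makepkg -si                                # build and install the package
#   systemctl enable --now llama.cpp.service   # start the installed unit
#   $EDITOR /etc/conf.d/llama.cpp              # presumably the unit's environment file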