From 8ef96f23b9239b81f59cf28bf98f0b824920fb96 Mon Sep 17 00:00:00 2001
From: Wu Zhenyu
Date: Thu, 20 Jul 2023 06:26:48 +0800
Subject: [PATCH] new package: llama-cpp

Fix #17453

Note (review): upstream ggerganov/llama.cpp is distributed under the
MIT license, not GPL-3.0 — TERMUX_PKG_LICENSE corrected accordingly.
The hunk below still adds exactly 22 lines, so the diffstat and hunk
header are unchanged.
---
 packages/llama-cpp/build.sh | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)
 create mode 100644 packages/llama-cpp/build.sh

diff --git a/packages/llama-cpp/build.sh b/packages/llama-cpp/build.sh
new file mode 100644
index 000000000000000..6914154ca67f1e4
--- /dev/null
+++ b/packages/llama-cpp/build.sh
@@ -0,0 +1,22 @@
+TERMUX_PKG_HOMEPAGE=https://github.com/ggerganov/llama.cpp
+TERMUX_PKG_DESCRIPTION="Port of Facebook's LLaMA model in C/C++"
+TERMUX_PKG_LICENSE=MIT
+TERMUX_PKG_MAINTAINER=@termux
+TERMUX_PKG_VERSION=294f424
+TERMUX_PKG_SRCURL=$TERMUX_PKG_HOMEPAGE/archive/master-$TERMUX_PKG_VERSION.tar.gz
+TERMUX_PKG_SHA256=95effaa75fdf1e7fb4819500f3aa6a9c970dbe36392a51a4ead904660841cd93
+TERMUX_PKG_AUTO_UPDATE=true
+TERMUX_PKG_DEPENDS="openmpi, libopenblas"
+TERMUX_PKG_RECOMMENDS="python-numpy, python-sentencepiece"
+TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
+-DLLAMA_MPI=ON
+-DBUILD_SHARED_LIBS=ON
+-DLLAMA_BLAS=ON
+-DLLAMA_BLAS_VENDOR=OpenBLAS
+"
+
+termux_step_post_make_install() {
+	cd "$TERMUX_PREFIX/bin" || exit 1
+	mv main llama
+	mv server llama-server
+}