Added Ollama
This commit is contained in:

New file: app-misc/ollama-bin/Manifest (3 lines, @@ -0,0 +1,3 @@)
DIST ollama-linux-amd64-rocm.tar.zst 989754577 BLAKE2B 8e36db769cc542f5099f8d7581e4a34f245218984b87c1831a08c88983d37ae8043a54100cb147f336759af02a14a6cec10b0b3167f45cb8049a23cdac60769d SHA512 d81bfad0bb2e555c60f2c50796ddb36ead646497729da2d5c9e936a8d57adcd5a2be324d2491fa10f692403ab243d25666145437d1ae8a31592fc3535fbee003
DIST ollama-linux-amd64.tar.zst 2093502493 BLAKE2B 13f5ab45c176297b9fe5cf44dcfa5489cd87bbad2d78812cddc915697532012afa0fa98000eb93099df17630498342c7306d764874139a1e1544f7f3262c995a SHA512 c8c75d131d2f5fa9fb8ffa3350bf8fb324b8bf09ce82fd1b05093da19493f2435e4abd6483b855147030ebe371d879b86d009417aac614f60282725c13315a00
DIST ollama-linux-arm64.tar.zst 1323814678 BLAKE2B ae25d8ae598e19bb5d40da888119b4a0c6a9247244f57308062004f823ae59c7f29fdd7493d76caac028d619bdfbbb12a4f736b74a92a991d247300d5f6b91b2 SHA512 ea7e93927472f29f6e8e371b17dbced42b91fe75b95950aef01b69f93e19f79f0f475c305b95e4ce87aeeff85f6472d26a8314980e32b5fa283e564b871c3b0c
New file: app-misc/ollama-bin/files/ollama (30 lines, @@ -0,0 +1,30 @@)
#!/sbin/openrc-run
# OpenRC service script for the Ollama local LLM runner.
#
# Rewritten in declarative style: with command_background="yes" set,
# openrc-run supplies default start()/stop() and start-stop-daemon manages
# the pidfile and stdout/stderr redirection via output_log/error_log.
# The original custom start()/stop() had two defects:
#   * `exec >> >(logger ...)` uses bash-only process substitution, but
#     openrc-run scripts execute under /bin/sh — a syntax error on dash.
#   * `--exec $command $command_args` passes daemon arguments without the
#     required `--` separator, so start-stop-daemon parses them itself.

description="Ollama Embedded AI Service"
command="/usr/bin/ollama"
command_args="serve"
command_user="ollama"
command_group="ollama"
command_background="yes"
pidfile="/run/ollama.pid"
# Both streams go to the same log file; created/chowned in start_pre().
output_log="/var/log/ollama/ollama.log"
error_log="${output_log}"

# Ollama allows cross-origin requests from 127.0.0.1 and 0.0.0.0 by default.
# Additional origins can be configured with OLLAMA_ORIGINS.
# export OLLAMA_ORIGINS="<ip>"

depend() {
	# The API server binds a network socket; wait for the network to be up.
	need net
	after logger
}

start_pre() {
	# Ensure the log directory and file exist and are writable by the
	# unprivileged service user before start-stop-daemon redirects to them.
	checkpath -d -o "${command_user}:${command_group}" /var/log/ollama
	checkpath -f -o "${command_user}:${command_group}" "${output_log}"
}
New file: app-misc/ollama-bin/files/ollama.service (13 lines, @@ -0,0 +1,13 @@)
[Unit]
Description=Ollama Embedded AI Service
# After= only orders units; it does not pull network-online.target into the
# transaction. Wants= must accompany it for the ordering to be meaningful
# (see systemd.unit(5) / systemd.special(7) on network-online.target).
Wants=network-online.target
After=network-online.target

[Service]
ExecStart=/usr/bin/ollama serve
User=ollama
Group=ollama
Restart=always
RestartSec=3

[Install]
WantedBy=multi-user.target
New file: app-misc/ollama-bin/ollama-bin-0.21.1.ebuild (99 lines, @@ -0,0 +1,99 @@)
# Copyright 1999-2025 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2

EAPI=8

inherit check-reqs systemd

DESCRIPTION="Local runner for LLMs"
HOMEPAGE="https://ollama.com/"

RESTRICT="mirror"
# Upstream tarballs unpack directly into WORKDIR (no top-level directory).
S="${WORKDIR}"

LICENSE="MIT"
SLOT="0"

IUSE="amd nvidia systemd"

CHECKREQS_DISK_BUILD="4G"
# Everything installed is an upstream prebuilt binary; suppress QA checks.
QA_PREBUILT="*"

# These are runtime dependencies of the prebuilt binaries (service user/group,
# GPU runtime libraries), so they belong in RDEPEND; the original listed them
# only in DEPEND, which would not install them when merging a binary package.
RDEPEND="
	acct-group/ollama
	acct-user/ollama
	amd? (
		sci-libs/clblast
		dev-libs/rocm-opencl-runtime
	)
	nvidia? ( dev-util/nvidia-cuda-toolkit )
	systemd? ( sys-apps/systemd )"
DEPEND="${RDEPEND}"

# Live ebuilds fetch the rolling upstream artifacts; releases pin v${PV}.
case ${PV} in
	9999)
		KEYWORDS=""
		SRC_URI="
			amd64? ( https://ollama.com/download/ollama-linux-amd64.tar.zst )
			amd? ( https://ollama.com/download/ollama-linux-amd64-rocm.tar.zst )
			arm64? ( https://ollama.com/download/ollama-linux-arm64.tar.zst )"
		;;
	*)
		KEYWORDS="~amd64 ~arm64"
		SRC_URI="
			amd64? ( https://github.com/ollama/ollama/releases/download/v${PV}/ollama-linux-amd64.tar.zst )
			amd? ( https://github.com/ollama/ollama/releases/download/v${PV}/ollama-linux-amd64-rocm.tar.zst )
			arm64? ( https://github.com/ollama/ollama/releases/download/v${PV}/ollama-linux-arm64.tar.zst )"
		;;
esac
# Verify build-time requirements (CHECKREQS_DISK_BUILD) via check-reqs.eclass.
pkg_setup() {
	check-reqs_pkg_setup
}
# Warn early (before any fetch/build work) that GPU support is experimental.
# The message explicitly covers both AMD and Nvidia backends, so show it when
# either USE flag is set; the original only checked "amd", silently skipping
# nvidia users.
pkg_pretend() {
	if use amd || use nvidia; then
		ewarn "WARNING: AMD & Nvidia support in this ebuild are experimental"
		einfo "If you run into issues, especially compiling dev-libs/rocm-opencl-runtime"
		einfo "you may try the docker image here https://github.com/ROCm/ROCm-docker"
		einfo "and follow instructions here"
		einfo "https://rocm.docs.amd.com/projects/install-on-linux/en/latest/how-to/docker.html"
	fi
}
# Install the prebuilt tree under /opt/${PN}, expose the CLI on PATH, and
# ship the service file matching the chosen init system.
src_install() {
	# Models and their checksums end up here at runtime; only mentioned to
	# the user, never removed by this ebuild.
	local models_dir="/opt/${PN}/.ollama"

	# Prebuilt payload: install everything executable (QA_PREBUILT covers it).
	insinto "/opt/${PN}"
	insopts -m0755
	doins -r lib bin

	ewarn
	ewarn "INFO: Models and checksums saved into ${models_dir} are preserved..."
	ewarn

	# Relative symlink so the CLI is reachable as /usr/bin/ollama.
	dosym -r "/opt/${PN}/bin/ollama" "/usr/bin/ollama"

	# Service file for the active init system.
	if use systemd; then
		systemd_dounit "${FILESDIR}"/ollama.service
	else
		doinitd "${FILESDIR}"/ollama
	fi
}
# Create the service's log directory, owned by the unprivileged ollama user.
# NOTE(review): keepdir and fowners are conventionally used in src_install;
# PMS does not list keepdir as valid in pkg_preinst — consider moving both
# calls into src_install. Kept as-is here to preserve behavior.
pkg_preinst() {
	keepdir /var/log/ollama
	fowners ollama:ollama /var/log/ollama
}
# Post-merge usage hints: group membership, how to launch the daemon, how to
# run a model, and where to browse available models.
pkg_postinst() {
	einfo
	einfo 'Quick guide:'
	einfo
	einfo 'Please, add your_user to ollama group,'
	einfo '# usermod -a -G ollama your_user'
	einfo
	einfo '$ ollama serve (standalone,systemd,openrc)'
	einfo '$ ollama run llama3:3b (client)'
	einfo
	einfo 'Browse available models at: https://ollama.com/library/'
	einfo
}
Reference in New Issue
Block a user