Updated versions and removed ebuilds that were upstreamed

This commit is contained in:
2025-07-09 08:33:04 +02:00
parent 95c068627d
commit 9da72d3b39
32 changed files with 117 additions and 497 deletions

View File

@@ -1,3 +0,0 @@
DIST ollama-linux-amd64-rocm.tgz 1214464625 BLAKE2B f84fbfe384bbaaa455e859f8814ca58c950ba975e954fa0078566b8b5e0d02e1ea2a2e626770a083ecdf7f841e108b4fcc5a21e9fd3991f21eb5d4fb2fc1f22c SHA512 c0e9e4830930a7049daad81e0a93667e9ea34d5faa7da6a262403ea3a9caaa7913c862ffc2153973ff8d3ca3d137dda6b508042587ebe8f9ac3f8e4c3a799b65
DIST ollama-linux-amd64.tgz 1682386464 BLAKE2B 5ad595d9e57d786363c85d153869769c4e6bddfda79f82553376b7474dc6595d27f883c41d87acf01bb6a3b178e35e20d92b39e1de5124508e4b40b2f5c5925d SHA512 e4e2bf2e640bebf92a31099bfcd23bbd1bf83e9447825a0c4cf1212b7aabb0193418f25f0c4815052076dc03468c770ee4eff4f4ba6b0014e8e47827c8a989ce
DIST ollama-linux-arm64.tgz 1540361219 BLAKE2B 5d9dc2931048dc0cfa19f7b15ce85d4edc6c55554aad03ef527899965efdf63a468f537b40462b0d3dd596735c66a202ac0eddbeca7083d7b2f2382653b51552 SHA512 4e18759ce95107407384f62b282a8b7eb1f38a07d4fcf309f067afdc69f4b983f3f25cf19227568ac82696fb244bb5a2d8036afa9a637b64617d802bae5493e6

View File

@@ -1,30 +0,0 @@
#!/sbin/openrc-run
# OpenRC service script for the Ollama local LLM server.
# Starts "ollama serve" in the background as the dedicated ollama
# user/group and appends the daemon's output to $log.

description="Ollama Embedded AI Service"
command="/usr/bin/ollama"
command_args="serve"
command_user="ollama"
command_group="ollama"
command_background="yes"
pidfile="/run/ollama.pid"
log="/var/log/ollama/ollama.log"

# Ollama allows cross-origin requests from 127.0.0.1 and 0.0.0.0 by default.
# Additional origins can be configured with OLLAMA_ORIGINS.
# export OLLAMA_ORIGINS="<ip>"

start() {
	ebegin "Starting $description"
	# Mirror this function's own stdout to syslog.
	# NOTE(review): process substitution requires a bash-capable
	# /sbin/openrc-run shell — confirm this on the target system.
	exec > >(logger -t "$RC_SVCNAME Start daemon" -p daemon.info)
	# -1/-2 (--stdout/--stderr) are start-stop-daemon options, so they
	# must precede --exec; the daemon's own arguments follow "--".
	# $command_args is intentionally unquoted so multiple words split.
	start-stop-daemon --start --background \
		--user "$command_user" --group "$command_group" \
		--pidfile "$pidfile" --make-pidfile \
		-1 "$log" -2 "$log" \
		--exec "$command" -- $command_args
	eend $?
}

stop() {
	ebegin "Stopping $description"
	exec > >(logger -t "$RC_SVCNAME Stop daemon" -p daemon.info)
	start-stop-daemon --stop --pidfile "$pidfile"
	eend $?
}

View File

@@ -1,13 +0,0 @@
[Unit]
Description=Ollama Embedded AI Service
# After= only orders this unit relative to network-online.target; it does
# not activate it. Wants= is required to actually pull the target in
# (see systemd.special(7) / systemd NetworkTarget documentation).
Wants=network-online.target
After=network-online.target

[Service]
ExecStart=/usr/bin/ollama serve
User=ollama
Group=ollama
Restart=always
RestartSec=3

[Install]
WantedBy=multi-user.target

View File

@@ -1,99 +0,0 @@
# Copyright 1999-2025 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2

EAPI=8

inherit check-reqs systemd

DESCRIPTION="Local runner for LLMs"
HOMEPAGE="https://ollama.com/"
RESTRICT="mirror"
S="${WORKDIR}"

LICENSE="MIT"
SLOT="0"
IUSE="amd nvidia systemd"

CHECKREQS_DISK_BUILD="4G"
QA_PREBUILT="*"

# These are runtime dependencies of the prebuilt binaries (the user/group
# must exist at merge time, GPU runtimes at run time), so they belong in
# RDEPEND; DEPEND alone is build-time only under EAPI 8.
RDEPEND="
	acct-group/ollama
	acct-user/ollama
	amd? ( sci-libs/clblast
		dev-libs/rocm-opencl-runtime )
	nvidia? ( dev-util/nvidia-cuda-toolkit )
	systemd? ( sys-apps/systemd )"
DEPEND="${RDEPEND}"

# The live (9999) ebuild fetches the latest upstream tarballs directly;
# release ebuilds pin the versioned GitHub release artifacts.
case ${PV} in
	9999)
		KEYWORDS=""
		SRC_URI="
			amd64? ( https://ollama.com/download/ollama-linux-amd64.tgz )
			amd? ( https://ollama.com/download/ollama-linux-amd64-rocm.tgz )
			arm64? ( https://ollama.com/download/ollama-linux-arm64.tgz )"
		;;
	*)
		KEYWORDS="~amd64 ~arm64"
		SRC_URI="
			amd64? ( https://github.com/ollama/ollama/releases/download/v${PV}/ollama-linux-amd64.tgz )
			amd? ( https://github.com/ollama/ollama/releases/download/v${PV}/ollama-linux-amd64-rocm.tgz )
			arm64? ( https://github.com/ollama/ollama/releases/download/v${PV}/ollama-linux-arm64.tgz )"
		;;
esac

# Verify the CHECKREQS_DISK_BUILD disk-space requirement.
pkg_setup() {
	check-reqs_pkg_setup
}

# Warn early (before fetch/merge) that GPU support is experimental.
pkg_pretend() {
	if use amd; then
		ewarn "WARNING: AMD & Nvidia support in this ebuild are experimental"
		einfo "If you run into issues, especially compiling dev-libs/rocm-opencl-runtime"
		einfo "you may try the docker image here https://github.com/ROCm/ROCm-docker"
		einfo "and follow instructions here"
		einfo "https://rocm.docs.amd.com/projects/install-on-linux/en/latest/how-to/docker.html"
	fi
}

src_install() {
	# Install the prebuilt tree under /opt/${PN} with executable perms.
	insinto "/opt/${PN}"
	insopts -m0755
	doins -r lib
	doins -r bin

	DISTRIBUTED_ATOM="/opt/${PN}/.ollama"
	einfo
	einfo "INFO: Models and checksums saved into ${DISTRIBUTED_ATOM} are preserved..."
	einfo

	dosym -r "/opt/${PN}/bin/ollama" "/usr/bin/ollama"

	# Log directory: keepdir/fowners operate on the install image and
	# belong here in src_install, not in pkg_preinst.
	keepdir /var/log/ollama
	fowners ollama:ollama /var/log/ollama

	if use systemd; then
		systemd_dounit "${FILESDIR}"/ollama.service
	else
		doinitd "${FILESDIR}"/ollama
	fi
}

pkg_postinst() {
	einfo
	einfo "Quick guide:"
	einfo
	einfo "Please, add your_user to ollama group,"
	einfo "# usermod -a -G ollama your_user"
	einfo
	einfo "$ ollama serve (standalone,systemd,openrc)"
	einfo "$ ollama run llama3:3b (client)"
	einfo
	einfo "Browse available models at: https://ollama.com/library/"
	einfo
}