Ollama added

Gerben Jan Dijkman 2025-01-11 16:24:19 +01:00
parent edfa368f53
commit 10b92a1fb4
4 changed files with 151 additions and 0 deletions

View File

@@ -0,0 +1,9 @@
AUX ollama 863 BLAKE2B 9bba172136ab3d0468d943b46621068891280b6d071f8bf42951f3cb3a83be716476dd4d43d29e14c9d38afe7bd6e23b2023074bf1337a1785203167c42b0a3c SHA512 4e0ffb8edb5e90c40e14f817c99e56793516deea2bce5a9f5da3723e18c868446f9251b3a760bed7c950df9901122783420d8f2481e529a3fbceb3e62e6ca976
AUX ollama.service 208 BLAKE2B abfa112923b252ffd18d4b36240173311bfb3b99b9e44ecac6eb5753a0576fc07473a477529cb65733d16d4faa216e33bbc758a4ff4f998f3c7d56a667c2981a SHA512 851f45ecd7d184f48e1a6c3a1218e7ebc3ddf4090b9926cab3b3a1c3b8555dce89f364021d7b88d62040df603b2d93bb1cd842c7fe191c9451b0a44704bc4c75
DIST ollama-linux-amd64-rocm.tgz 1214464623 BLAKE2B bc641ca4fc91879dc85aad86ec940e194e9e6d83aabd8dac3e5773ab78c488de16f838fee1ead668ad5c342c7249c21c290427563d506b880a96dc53295a9956 SHA512 5313ad703bdb6e759ad3bdee2f284f63712cda5e25ecf3b79d9e07c27b9807f10a96f6c6dd41c6a3d8f9746adc62a0eeb4da3826c31845c01922aa06e9f1a5b4
DIST ollama-linux-amd64.tgz 1682060543 BLAKE2B 8794e145f17c83de8a590d9065fe89223a45ffbe4f1013513697e7349c6e6ada302635a0782b19456850bf64696f2af4e87b8805c8b73638ceee6a8050fc426b SHA512 421476be8ea5236313ad17d97d9886adabcac8cac3784d5dd770aa4e688a86b3b77cfcc57896332ed481da2e7213294c41ff85876ff6189d9d07d2afc7cdec82
DIST ollama-linux-arm64.tgz 1540039203 BLAKE2B 47e96c64fbf72583ac0d741f89d8d8f977e106a18c0308b69236ef8ff41cd61b8d44018463dc088986071612ed65aded7ae784c36c7a3e1b841e0357269372bc SHA512 5f2f1499cc5531e63ca3e09638b68cc533010ec90b1af3b4c406c882ff67f1374257969819de723302b7c80f93b8cf2cb28e74026e957b7a275b3cef88e124bb
EBUILD ollama-bin-0.5.5-r0.ebuild 2398 BLAKE2B a0f4a287cb7629a93b9ffac5223d76095e053c902464a99583036be53c77bbf0d953ddb3e354c4ec643644b5300b3728fb657a6a3b86df03664056b6974246c8 SHA512 148c62df1613c44e4a87e45c1e8b13468c80537aa504627dd8d656ef0497b1986cc07c295414e3c9e66e524d4fae3fc5802ac3703eace311aeecf1a8e56af843
EBUILD ollama-bin-9999.ebuild 2398 BLAKE2B a0f4a287cb7629a93b9ffac5223d76095e053c902464a99583036be53c77bbf0d953ddb3e354c4ec643644b5300b3728fb657a6a3b86df03664056b6974246c8 SHA512 148c62df1613c44e4a87e45c1e8b13468c80537aa504627dd8d656ef0497b1986cc07c295414e3c9e66e524d4fae3fc5802ac3703eace311aeecf1a8e56af843
MISC ChangeLog 1089 BLAKE2B 0e8b129ab95e866bb9eba23554d4c6ea708b633c284f26ca932457eab95898de6ade3b5d0d8b7f83da4f76c1245f48c1bbb3d226b77830840fed5cd6781be364 SHA512 05a9d44504ce2e786ef9cebbab84c387df46358985ba6057b79584e9303eb26fc622b8475a9598ddb5447f8745b876ac611ccc17afc0e0c4b589ec66381d6d83
MISC metadata.xml 720 BLAKE2B 0a45aebe348ee34cde397ac7c519df144f7b1ad996e8a444a4cb48788e3f972e08fd277e6c3a62029ebdca1fe83bbdb81a442801fc619ce4b6e971cc65959672 SHA512 307421dba773bce138d8067673f3f02df0acea964a8cd6dbe8cc444879186c036f482dcc89502cf173b9860308a89298ab598218b852e9b698f08cfed4e92909
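
These checksum lines are the package Manifest. If the ebuild or the files/ directory changes, the Manifest is normally regenerated rather than edited by hand; a typical invocation, assuming pkgdev or the ebuild helper is available and run from the package directory, looks like:

    pkgdev manifest
    # or, equivalently, for a single ebuild:
    ebuild ollama-bin-0.5.5-r0.ebuild manifest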

View File

@@ -0,0 +1,13 @@
[Unit]
Description=Ollama Embedded AI Service
After=network-online.target

[Service]
ExecStart=/usr/bin/ollama serve
User=ollama
Group=ollama
Restart=always
RestartSec=3

[Install]
WantedBy=multi-user.target
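
For reference, once the package is installed with USE=systemd, the unit above would typically be enabled with plain systemctl usage (not part of this commit):

    # systemctl daemon-reload
    # systemctl enable --now ollama.service
    # journalctl -u ollama.service -f    # follow the service log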

View File

@@ -0,0 +1,30 @@
#!/sbin/openrc-run
description="Ollama Embedded AI Service"
command="/usr/bin/ollama"
command_args="serve"
command_user="ollama"
command_group="ollama"
command_background="yes"
pidfile="/run/ollama.pid"
log="/var/log/ollama/ollama.log"

# Ollama allows cross-origin requests from 127.0.0.1 and 0.0.0.0 by default.
# Additional origins can be configured with OLLAMA_ORIGINS, e.g. via
# /etc/conf.d/ollama (see the note after this script):
# export OLLAMA_ORIGINS="<ip>"

start() {
	ebegin "Starting $description"
	exec >> >(logger -t "$RC_SVCNAME Start daemon" -p daemon.info)
	start-stop-daemon --start --background \
		--user "$command_user" --group "$command_group" \
		--pidfile "$pidfile" --make-pidfile \
		--stdout "$log" --stderr "$log" \
		--exec "$command" -- $command_args
	eend $?
}

stop() {
	ebegin "Stopping $description"
	exec >> >(logger -t "$RC_SVCNAME Stop daemon" -p daemon.info)
	start-stop-daemon --stop --pidfile "$pidfile"
	eend $?
}
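
For reference, activating the OpenRC service and setting environment overrides such as OLLAMA_ORIGINS would typically look like the sketch below; /etc/conf.d/ollama is sourced by OpenRC before start(), and the values shown are only illustrative:

    # rc-update add ollama default
    # rc-service ollama start

    # /etc/conf.d/ollama (optional)
    export OLLAMA_ORIGINS="http://192.168.1.10:3000"   # illustrative origin
    export OLLAMA_HOST="0.0.0.0:11434"                 # illustrative; default is 127.0.0.1:11434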

View File

@@ -0,0 +1,99 @@
# Copyright 1999-2025 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2

EAPI=8

inherit check-reqs systemd

DESCRIPTION="Local runner for LLMs"
HOMEPAGE="https://ollama.com/"
RESTRICT="mirror"
S="${WORKDIR}"
LICENSE="MIT"
SLOT="0"
IUSE="amd nvidia systemd"

CHECKREQS_DISK_BUILD="4G"
QA_PREBUILT="*"

RDEPEND="
	acct-group/ollama
	acct-user/ollama
	amd? (
		sci-libs/clblast
		dev-libs/rocm-opencl-runtime
	)
	nvidia? ( dev-util/nvidia-cuda-toolkit )
	systemd? ( sys-apps/systemd )"
DEPEND="${RDEPEND}"

case ${PV} in
	9999)
		KEYWORDS=""
		SRC_URI="
			amd64? ( https://ollama.com/download/ollama-linux-amd64.tgz )
			amd? ( https://ollama.com/download/ollama-linux-amd64-rocm.tgz )
			arm64? ( https://ollama.com/download/ollama-linux-arm64.tgz )"
		;;
	*)
		KEYWORDS="~amd64 ~arm64"
		SRC_URI="
			amd64? ( https://github.com/ollama/ollama/releases/download/v${PV}/ollama-linux-amd64.tgz )
			amd? ( https://github.com/ollama/ollama/releases/download/v${PV}/ollama-linux-amd64-rocm.tgz )
			arm64? ( https://github.com/ollama/ollama/releases/download/v${PV}/ollama-linux-arm64.tgz )"
		;;
esac

pkg_setup() {
	check-reqs_pkg_setup
}

pkg_pretend() {
	if use amd || use nvidia; then
		ewarn "AMD and NVIDIA support in this ebuild is experimental."
	fi
	if use amd; then
		einfo "If you run into issues, especially when compiling dev-libs/rocm-opencl-runtime,"
		einfo "you may try the Docker image at https://github.com/ROCm/ROCm-docker"
		einfo "and follow the instructions at"
		einfo "https://rocm.docs.amd.com/projects/install-on-linux/en/latest/how-to/docker.html"
	fi
}

src_install() {
	insinto "/opt/${PN}"
	insopts -m0755
	doins -r lib
	doins -r bin
	DISTRIBUTED_ATOM="/opt/${PN}/.ollama"
	ewarn
	ewarn "Models and checksums saved in ${DISTRIBUTED_ATOM} are preserved across updates."
	ewarn
	dosym -r "/opt/${PN}/bin/ollama" "/usr/bin/ollama"
	if use systemd; then
		systemd_dounit "${FILESDIR}"/ollama.service
	else
		doinitd "${FILESDIR}"/ollama
	fi
}

pkg_preinst() {
	keepdir /var/log/ollama
	fowners ollama:ollama /var/log/ollama
}

pkg_postinst() {
	einfo
	einfo "Quick guide:"
	einfo
	einfo "Add your user to the ollama group:"
	einfo "  # usermod -a -G ollama your_user"
	einfo
	einfo "  $ ollama serve           (standalone, systemd, or openrc)"
	einfo "  $ ollama run llama3:3b   (client)"
	einfo
	einfo "Browse available models at https://ollama.com/library/"
	einfo
}
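
For completeness, a hedged sketch of installing this ebuild from a local overlay; the package category is not shown in this diff, so app-misc is assumed here purely for illustration, and the flag names come from IUSE above:

    # category and flag choice are illustrative only
    echo "app-misc/ollama-bin ~amd64" >> /etc/portage/package.accept_keywords/ollama-bin
    echo "app-misc/ollama-bin nvidia" >> /etc/portage/package.use/ollama-bin
    emerge --ask ollama-bin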