CanbiZ 2025-04-30 09:10:30 +02:00
parent 559ac41919
commit bbb6956233
4 changed files with 3 additions and 209 deletions


@@ -1,64 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
# Copyright (c) 2021-2025 tteck
# Author: havardthom | Co-Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://ollama.com/
APP="Ollama"
var_tags="${var_tags:-ai}"
var_cpu="${var_cpu:-4}"
var_ram="${var_ram:-4096}"
var_disk="${var_disk:-35}"
var_os="${var_os:-ubuntu}"
var_version="${var_version:-24.04}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /usr/local/lib/ollama ]]; then
msg_error "No Ollama Installation Found!"
exit
fi
# Resolve the latest upstream release tag via the GitHub API
RELEASE=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest | grep "tag_name" | awk -F '"' '{print $4}')
if [[ ! -f /opt/Ollama_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/Ollama_version.txt)" ]]; then
msg_info "Stopping Services"
systemctl stop ollama
msg_ok "Services Stopped"
TMP_TAR=$(mktemp --suffix=.tgz)
curl -fL# -o "${TMP_TAR}" "https://github.com/ollama/ollama/releases/download/${RELEASE}/ollama-linux-amd64.tgz"
msg_info "Updating Ollama to ${RELEASE}"
tar -xzf "${TMP_TAR}" -C /usr/local/lib/ollama
ln -sf /usr/local/lib/ollama/bin/ollama /usr/local/bin/ollama
echo "${RELEASE}" >/opt/Ollama_version.txt
msg_ok "Updated Ollama to ${RELEASE}"
msg_info "Starting Services"
systemctl start ollama
msg_ok "Started Services"
msg_info "Cleaning Up"
rm -f "${TMP_TAR}"
msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. Ollama is already at ${RELEASE}"
fi
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:11434${CL}"


@@ -1,35 +0,0 @@
{
"name": "Ollama",
"slug": "ollama",
"categories": [
20
],
"date_created": "2025-04-28",
"type": "ct",
"updateable": true,
"privileged": false,
"interface_port": 11434,
"documentation": "https://github.com/ollama/ollama/tree/main/docs",
"config_path": "/usr/local/lib/ollama",
"website": "https://ollama.com/",
"logo": "https://raw.githubusercontent.com/selfhst/icons/refs/heads/main/svg/ollama.svg",
"description": "Ollama is a tool that allows you to run large language models locally on your own computer. This means you can experiment with and use these AI models without needing an internet connection or relying on cloud-based services. It simplifies the process of managing and running these models, offering a way to keep your data private and potentially work faster. 1 You can use Ollama to create local chatbots, conduct AI research, develop privacy-focused AI applications, and integrate AI into existing systems.",
"install_methods": [
{
"type": "default",
"script": "ct/ollama.sh",
"resources": {
"cpu": 4,
"ram": 4096,
"hdd": 35,
"os": "Ubuntu",
"version": "24.04"
}
}
],
"default_credentials": {
"username": null,
"password": null
},
"notes": []
}
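
The manifest's interface_port (11434) is Ollama's HTTP API, so a freshly created container can be smoke-tested directly. A quick example (the container IP and model name are placeholders, and a model must be pulled before /api/generate can use it):

# Confirm the server answers
curl http://<container-ip>:11434/api/version
# Pull a model inside the container, then run a one-off generation
ollama pull llama3.2
curl http://<container-ip>:11434/api/generate \
  -d '{"model": "llama3.2", "prompt": "Say hello.", "stream": false}'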


@@ -1,107 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2025 tteck
# Author: havardthom | Co-Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://ollama.com/
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
msg_info "Installing Dependencies"
$STD apt-get install -y \
build-essential \
pkg-config
msg_ok "Installed Dependencies"
msg_info "Setting up Intel® Repositories"
mkdir -p /etc/apt/keyrings
curl -fsSL https://repositories.intel.com/gpu/intel-graphics.key | gpg --dearmor -o /etc/apt/keyrings/intel-graphics.gpg
echo "deb [arch=amd64,i386 signed-by=/etc/apt/keyrings/intel-graphics.gpg] https://repositories.intel.com/gpu/ubuntu jammy client" >/etc/apt/sources.list.d/intel-gpu-jammy.list
curl -fsSL https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | gpg --dearmor -o /etc/apt/keyrings/oneapi-archive-keyring.gpg
echo "deb [signed-by=/etc/apt/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" >/etc/apt/sources.list.d/oneAPI.list
$STD apt-get update
msg_ok "Set up Intel® Repositories"
msg_info "Setting Up Hardware Acceleration"
$STD apt-get -y install va-driver-all ocl-icd-libopencl1 intel-opencl-icd vainfo intel-gpu-tools intel-level-zero-gpu level-zero level-zero-dev
if [[ "$CTTYPE" == "0" ]]; then
chgrp video /dev/dri
chmod 755 /dev/dri
chmod 660 /dev/dri/*
$STD adduser $(id -u -n) video
$STD adduser $(id -u -n) render
fi
msg_ok "Set Up Hardware Acceleration"
msg_info "Installing Intel® oneAPI Base Toolkit (Patience)"
$STD apt-get install -y --no-install-recommends intel-basekit-2024.1
msg_ok "Installed Intel® oneAPI Base Toolkit"
msg_info "Installing Ollama (Patience)"
# Resolve the latest upstream release tag via the GitHub API
RELEASE=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest | grep "tag_name" | awk -F '"' '{print $4}')
OLLAMA_INSTALL_DIR="/usr/local/lib/ollama"
BINDIR="/usr/local/bin"
mkdir -p $OLLAMA_INSTALL_DIR
OLLAMA_URL="https://github.com/ollama/ollama/releases/download/${RELEASE}/ollama-linux-amd64.tgz"
TMP_TAR="/tmp/ollama.tgz"
echo -e "\n"
if curl -fL# -o "$TMP_TAR" "$OLLAMA_URL"; then
if tar -xzf "$TMP_TAR" -C "$OLLAMA_INSTALL_DIR"; then
ln -sf "$OLLAMA_INSTALL_DIR/bin/ollama" "$BINDIR/ollama"
echo "${RELEASE}" >/opt/Ollama_version.txt
msg_ok "Installed Ollama ${RELEASE}"
else
msg_error "Extraction failed archive corrupt or incomplete"
exit 1
fi
else
msg_error "Download failed $OLLAMA_URL not reachable"
exit 1
fi
msg_info "Creating ollama User and Group"
if ! id ollama >/dev/null 2>&1; then
useradd -r -s /usr/sbin/nologin -U -m -d /usr/share/ollama ollama
fi
$STD usermod -aG render ollama || true
$STD usermod -aG video ollama || true
$STD usermod -aG ollama $(id -u -n)
msg_ok "Created ollama User and adjusted Groups"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/ollama.service
[Unit]
Description=Ollama Service
Wants=network-online.target
After=network-online.target

[Service]
Type=exec
ExecStart=/usr/local/bin/ollama serve
Environment=HOME=$HOME
Environment=OLLAMA_INTEL_GPU=true
Environment=OLLAMA_HOST=0.0.0.0
Environment=OLLAMA_NUM_GPU=999
Environment=SYCL_CACHE_PERSISTENT=1
Environment=ZES_ENABLE_SYSMAN=1
Restart=always
RestartSec=3

[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now ollama
msg_ok "Created Service"
motd_ssh
customize
msg_info "Cleaning up"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"


@@ -81,7 +81,7 @@ cd /opt/paperless
 uv venv /opt/paperless/.venv
 source /opt/paperless/.venv/bin/activate
-uv sync --all-extras
+$STD uv sync --all-extras
 mv /opt/paperless/paperless.conf.example /opt/paperless/paperless.conf
 mkdir -p consume data media static
 sed -i -e 's|#PAPERLESS_REDIS=.*|PAPERLESS_REDIS=redis://localhost:6379|' \
@@ -96,7 +96,7 @@ msg_ok "Installed Paperless-ngx"
 if /opt/paperless/venv/bin/python3 -c "import nltk" &>/dev/null; then
   msg_info "Installing Natural Language Toolkit (Patience)"
   for d in snowball_data stopwords punkt_tab; do
-    /opt/paperless/venv/bin/python3 -m nltk.downloader -d /usr/share/nltk_data "$d"
+    $STD /opt/paperless/venv/bin/python3 -m nltk.downloader -d /usr/share/nltk_data "$d"
   done
   msg_ok "Installed NLTK components"
 else
@@ -126,7 +126,7 @@ sed -i -e "s|#PAPERLESS_DBHOST=.*|PAPERLESS_DBHOST=localhost|" \
   -e "s|#PAPERLESS_SECRET_KEY=.*|PAPERLESS_SECRET_KEY=$SECRET_KEY|" \
   /opt/paperless/paperless.conf
-/opt/paperless/venv/bin/python3 /opt/paperless/src/manage.py migrate
+$STD /opt/paperless/venv/bin/python3 /opt/paperless/src/manage.py migrate
 msg_ok "Set up PostgreSQL database"
 read -r -p "Would you like to add Adminer? <y/N> " prompt
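
For context on the three one-line additions in this file: each routes a noisy command through the repo's $STD helper so its output is suppressed unless verbose mode is enabled. A minimal sketch of how such a helper is typically wired up (the real definition lives in the repo's shared functions and may differ in detail):

# Output-suppression helper in the style of $STD (illustrative, not the repo's exact code).
silent() { "$@" >/dev/null 2>&1; }
if [[ "${VERBOSE:-no}" == "yes" ]]; then
  STD=""        # verbose: run commands unwrapped
else
  STD="silent"  # quiet: wrap commands in silent()
fi
$STD apt-get -y autoclean   # expands to: silent apt-get -y autoclean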