Refactor: Open WebUI
parent a6ae347dd4
commit 2e07fb6f60
ct/openwebui.sh (new file, 66 lines)
@@ -0,0 +1,66 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
# Copyright (c) 2021-2025 tteck
# Author: tteck | Co-Author: havardthom | Co-Author: Slaviša Arežina (tremor021)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://openwebui.com/

APP="Open WebUI"
var_tags="${var_tags:-ai;interface}"
var_cpu="${var_cpu:-4}"
var_ram="${var_ram:-8192}"
var_disk="${var_disk:-25}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"

header_info "$APP"
variables
color
catch_errors

function update_script() {
  header_info
  check_container_storage
  check_container_resources
  if [[ ! -d /root/.open-webui ]]; then
    msg_error "No ${APP} Installation Found!"
    exit
  fi

  if [ -x "/usr/bin/ollama" ]; then
    msg_info "Updating Ollama"
    OLLAMA_VERSION=$(ollama -v | awk '{print $NF}')
    RELEASE=$(curl -s https://api.github.com/repos/ollama/ollama/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4)}')
    if [ "$OLLAMA_VERSION" != "$RELEASE" ]; then
      msg_info "Stopping Service"
      systemctl stop ollama
      msg_ok "Stopped Service"
      curl -fsSLO -C - https://ollama.com/download/ollama-linux-amd64.tgz
      rm -rf /usr/lib/ollama
      rm -rf /usr/bin/ollama
      tar -C /usr -xzf ollama-linux-amd64.tgz
      rm -rf ollama-linux-amd64.tgz
      msg_info "Starting Service"
      systemctl start ollama
      msg_ok "Started Service"
      msg_ok "Ollama updated to version $RELEASE"
    else
      msg_ok "Ollama is already up to date."
    fi
  fi

  msg_info "Restarting Open WebUI to initiate update"
  systemctl restart open-webui
  msg_ok "Updated successfully!"
  exit
}

start
build_container
description

msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"
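For context on how this new CT script is consumed: community-scripts CT scripts are normally run straight from a Proxmox VE host shell, where the sourced build.func drives container creation, and re-running the same command inside the resulting LXC typically triggers update_script() rather than a fresh install. A minimal invocation sketch (assumption: the script is published under ct/ on the main branch, mirroring the build.func URL above):

# On the Proxmox VE host (hypothetical one-liner; URL assumed from the repository layout)
bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/ct/openwebui.sh)"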
frontend/public/json/openwebui.json (new file, 44 lines)
@@ -0,0 +1,44 @@
{
  "name": "Open WebUI",
  "slug": "openwebui",
  "categories": [
    20
  ],
  "date_created": "2024-10-24",
  "type": "ct",
  "updateable": true,
  "privileged": false,
  "interface_port": 8080,
  "documentation": "https://docs.openwebui.com/",
  "website": "https://openwebui.com/",
  "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/open-webui.webp",
  "config_path": "/root/.env",
  "description": "OpenWebUI is a self-hosted, web-based interface that allows you to run AI models entirely offline. It integrates with various LLM runners, such as OpenAI and Ollama, and supports features like markdown and LaTeX rendering, model management, and voice/video calls. It also offers multilingual support and the ability to generate images using APIs like DALL-E or ComfyUI.",
  "install_methods": [
    {
      "type": "default",
      "script": "ct/openwebui.sh",
      "resources": {
        "cpu": 4,
        "ram": 8192,
        "hdd": 25,
        "os": "debian",
        "version": "13"
      }
    }
  ],
  "default_credentials": {
    "username": null,
    "password": null
  },
  "notes": [
    {
      "text": "Script contains an optional installation of Ollama.",
      "type": "info"
    },
    {
      "text": "The initial run of the application/container can take some time, depending on your host speed, as the application is installed/updated at runtime. Please be patient!",
      "type": "warning"
    }
  ]
}
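The "config_path" above points at /root/.env, which the service defined in the install script below loads via EnvironmentFile. As a hedged sketch of how that file can be used, here is one way to point the UI at an Ollama instance on another host (the address is illustrative; OLLAMA_BASE_URL is an upstream Open WebUI setting, not something this diff defines):

# Inside the container (assumption: Ollama listens on 192.168.1.50:11434)
echo "ENABLE_OLLAMA_API=true" >>/root/.env
echo "OLLAMA_BASE_URL=http://192.168.1.50:11434" >>/root/.env
systemctl restart open-webui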
install/openwebui-install.sh (new file, 78 lines)
@@ -0,0 +1,78 @@
#!/usr/bin/env bash

# Copyright (c) 2021-2025 tteck
# Author: tteck | Co-Author: havardthom | Co-Author: Slaviša Arežina (tremor021)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://openwebui.com/

source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os

msg_info "Installing Dependencies"
$STD apt install -y ffmpeg
msg_ok "Installed Dependencies"

USE_UVX="YES" PYTHON_VERSION="3.12" setup_uv

read -r -p "${TAB3}Would you like to add Ollama? <y/N> " prompt
if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
  msg_info "Installing Ollama"
  curl -fsSLO -C - https://ollama.com/download/ollama-linux-amd64.tgz
  tar -C /usr -xzf ollama-linux-amd64.tgz
  rm -rf ollama-linux-amd64.tgz
  cat <<EOF >/etc/systemd/system/ollama.service
[Unit]
Description=Ollama Service
After=network-online.target

[Service]
Type=exec
ExecStart=/usr/bin/ollama serve
Environment=HOME=$HOME
Environment=OLLAMA_HOST=0.0.0.0
Restart=always
RestartSec=3

[Install]
WantedBy=multi-user.target
EOF
  systemctl enable -q --now ollama
  echo "ENABLE_OLLAMA_API=true" >/root/.env
  msg_ok "Installed Ollama"
fi

msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/open-webui.service
[Unit]
Description=Open WebUI Service
After=network.target

[Service]
Type=simple
EnvironmentFile=-/root/.env
Environment=DATA_DIR=/root/.open-webui
ExecStart=/usr/local/bin/uvx --python 3.12 open-webui@latest serve
WorkingDirectory=/root
Restart=on-failure
RestartSec=5
User=root

[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now open-webui
msg_ok "Created Service"

motd_ssh
customize

msg_info "Cleaning up"
$STD apt -y autoremove
$STD apt -y autoclean
$STD apt -y clean
msg_ok "Cleaned"
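As a quick sanity check after install (plain systemd and curl commands, not part of the script itself), something like the following confirms the service is up and that the port advertised in the JSON metadata answers:

# First start can be slow while uvx downloads open-webui; watch progress if needed
systemctl --no-pager status open-webui
curl -I http://localhost:8080
journalctl -u open-webui -f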