CanbiZ (MickLesk) 2026-01-29 10:37:03 +01:00
commit 025d688be3
3 changed files with 225 additions and 208 deletions

@@ -196,11 +196,17 @@ ensure_usr_local_bin_persist() {
download_with_progress() {
# $1 url, $2 dest
local url="$1" out="$2" cl
local url="$1" out="$2" content_length
need_tool curl pv || return 1
cl=$(curl -fsSLI "$url" 2>/dev/null | awk 'tolower($0) ~ /^content-length:/ {print $2}' | tr -d '\r')
if [ -n "$cl" ]; then
curl -fsSL "$url" | pv -s "$cl" >"$out" || {
content_length=$(
curl -fsSLI "$url" 2>/dev/null |
awk '(tolower($1) ~ /^content-length:/) && ($2 + 0 > 0) {print $2+0}' |
tail -1 | tr -cd '[:digit:]' || true
)
if [ -n "$content_length" ]; then
curl -fsSL "$url" | pv -s "$content_length" >"$out" || {
msg_error "Download failed: $url"
return 1
}
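
The probe above keeps only the last positive Content-Length value (a redirect chain can emit several headers) and strips everything except digits before the size is handed to pv. A minimal standalone sketch of that flow, assuming curl, awk and pv are installed; the URL and paths are illustrative, not part of this commit:

# Illustrative example of the header probe + pv progress pattern
url="https://example.com/file.tar.gz"
out="/tmp/file.tar.gz"

size=$(
  curl -fsSLI "$url" 2>/dev/null |
    awk '(tolower($1) ~ /^content-length:/) && ($2 + 0 > 0) {print $2 + 0}' |
    tail -1 | tr -cd '[:digit:]'
)

if [ -n "$size" ]; then
  # size known: pipe the body through pv for a byte-accurate progress bar
  curl -fsSL "$url" | pv -s "$size" >"$out"
else
  # size unknown: fall back to curl's own progress meter
  curl -fL# -o "$out" "$url"
fi
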
@@ -277,7 +283,7 @@ fetch_and_deploy_gh_release() {
# $1 app, $2 repo, [$3 mode], [$4 version], [$5 target], [$6 asset_pattern]
local app="$1" repo="$2" mode="${3:-tarball}" version="${4:-latest}" target="${5:-/opt/$1}" pattern="${6:-}"
local app_lc
app_lc="$(lower "$app" | tr -d ' ')"
app_lc=$(lower "$app" | tr -d ' ')
local vfile="$HOME/.${app_lc}"
local json url filename tmpd unpack
@@ -288,7 +294,7 @@ fetch_and_deploy_gh_release() {
need_tool curl jq tar || return 1
[ "$mode" = "prebuild" ] || [ "$mode" = "singlefile" ] && need_tool unzip >/dev/null 2>&1 || true
tmpd="$(mktemp -d)" || return 1
tmpd=$(mktemp -d) || return 1
mkdir -p "$target"
# Release JSON (with token/rate-limit handling)
@@ -305,10 +311,10 @@ fetch_and_deploy_gh_release() {
return 1
}
fi
json="$(cat "$tmpd/release.json")"
json=$(cat "$tmpd/release.json")
# determine the correct version
version="$(printf '%s' "$json" | jq -r '.tag_name // empty')"
version=$(printf '%s' "$json" | jq -r '.tag_name // empty')
version="${version#v}"
[ -z "$version" ] && {
@@ -317,9 +323,15 @@ fetch_and_deploy_gh_release() {
return 1
}
get_url() {
printf '%s' "$json" | jq -r '.assets[].browser_download_url' |
awk -v p="$pattern" 'BEGIN{IGNORECASE=1} $0 ~ p {print; exit}' |
tr -d '[:cntrl:]'
}
case "$mode" in
tarball | source)
url="$(printf '%s' "$json" | jq -r '.tarball_url // empty')"
url=$(printf '%s' "$json" | jq -r '.tarball_url // empty')
[ -z "$url" ] && url="https://github.com/$repo/archive/refs/tags/v$version.tar.gz"
filename="${app_lc}-${version}.tar.gz"
download_with_progress "$url" "$tmpd/$filename" || {
@@ -331,7 +343,7 @@ fetch_and_deploy_gh_release() {
rm -rf "$tmpd"
return 1
}
unpack="$(find "$tmpd" -mindepth 1 -maxdepth 1 -type d | head -n1)"
unpack=$(find "$tmpd" -mindepth 1 -maxdepth 1 -type d | head -n1)
[ "${CLEAN_INSTALL:-0}" = "1" ] && rm -rf "${target:?}/"*
# copy content of unpack to target
(cd "$unpack" && tar -cf - .) | (cd "$target" && tar -xf -) || {
@@ -342,7 +354,7 @@ fetch_and_deploy_gh_release() {
;;
binary)
[ -n "$pattern" ] || pattern="*.apk"
url="$(printf '%s' "$json" | jq -r '.assets[].browser_download_url' | awk -v p="$pattern" 'BEGIN{IGNORECASE=1} $0 ~ p {print; exit}')"
url=$(get_url)
[ -z "$url" ] && {
msg_error "binary asset not found for pattern: $pattern"
rm -rf "$tmpd"
@@ -374,10 +386,7 @@ fetch_and_deploy_gh_release() {
rm -rf "$tmpd"
return 1
}
url="$(printf '%s' "$json" | jq -r '.assets[].browser_download_url' | awk -v p="$pattern" '
BEGIN{IGNORECASE=1}
$0 ~ p {print; exit}
')"
url=$(get_url)
[ -z "$url" ] && {
msg_error "asset not found for pattern: $pattern"
rm -rf "$tmpd"
@@ -411,7 +420,7 @@ fetch_and_deploy_gh_release() {
[ "${CLEAN_INSTALL:-0}" = "1" ] && rm -rf "${target:?}/"*
# strip the top-level folder
if [ "$(find "$tmpd/unp" -mindepth 1 -maxdepth 1 -type d | wc -l)" -eq 1 ] && [ -z "$(find "$tmpd/unp" -mindepth 1 -maxdepth 1 -type f | head -n1)" ]; then
unpack="$(find "$tmpd/unp" -mindepth 1 -maxdepth 1 -type d)"
unpack=$(find "$tmpd/unp" -mindepth 1 -maxdepth 1 -type d)
(cd "$unpack" && tar -cf - .) | (cd "$target" && tar -xf -) || {
msg_error "copy failed"
rm -rf "$tmpd"
@@ -431,10 +440,7 @@ fetch_and_deploy_gh_release() {
rm -rf "$tmpd"
return 1
}
url="$(printf '%s' "$json" | jq -r '.assets[].browser_download_url' | awk -v p="$pattern" '
BEGIN{IGNORECASE=1}
$0 ~ p {print; exit}
')"
url=$(get_url)
[ -z "$url" ] && {
msg_error "asset not found for pattern: $pattern"
rm -rf "$tmpd"

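The repeated jq/awk pipelines for selecting a release asset are now centralized in the get_url() helper. A self-contained sketch of that selection logic, assuming jq, curl and gawk are available (IGNORECASE is a gawk feature); OWNER/REPO and the pattern are illustrative placeholders:

# Print the first release asset URL whose name matches a case-insensitive pattern
pick_asset_url() {
  # $1: GitHub release JSON, $2: regex pattern
  printf '%s' "$1" |
    jq -r '.assets[].browser_download_url' |
    awk -v p="$2" 'BEGIN{IGNORECASE=1} $0 ~ p {print; exit}' |
    tr -d '[:cntrl:]'
}

# Unauthenticated call, subject to GitHub API rate limits
json=$(curl -fsSL "https://api.github.com/repos/OWNER/REPO/releases/latest")
url=$(pick_asset_url "$json" 'linux.*amd64.*[.]tar[.]gz$')
[ -n "$url" ] || echo "no asset matched the pattern" >&2
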
@@ -363,7 +363,7 @@ validate_hostname() {
# Split by dots and validate each label
local IFS='.'
read -ra labels <<< "$hostname"
read -ra labels <<<"$hostname"
for label in "${labels[@]}"; do
# Each label: 1-63 chars, alphanumeric, hyphens allowed (not at start/end)
if [[ -z "$label" ]] || [[ ${#label} -gt 63 ]]; then
@@ -467,7 +467,7 @@ validate_ipv6_address() {
# Check that no segment exceeds 4 hex chars
local IFS=':'
local -a segments
read -ra segments <<< "$addr"
read -ra segments <<<"$addr"
for seg in "${segments[@]}"; do
if [[ ${#seg} -gt 4 ]]; then
return 1
@@ -517,14 +517,14 @@ validate_gateway_in_subnet() {
# Convert IPs to integers
local IFS='.'
read -r i1 i2 i3 i4 <<< "$ip"
read -r g1 g2 g3 g4 <<< "$gateway"
read -r i1 i2 i3 i4 <<<"$ip"
read -r g1 g2 g3 g4 <<<"$gateway"
local ip_int=$(( (i1 << 24) + (i2 << 16) + (i3 << 8) + i4 ))
local gw_int=$(( (g1 << 24) + (g2 << 16) + (g3 << 8) + g4 ))
local ip_int=$(((i1 << 24) + (i2 << 16) + (i3 << 8) + i4))
local gw_int=$(((g1 << 24) + (g2 << 16) + (g3 << 8) + g4))
# Check if both are in the same network
if (( (ip_int & mask) != (gw_int & mask) )); then
if (((ip_int & mask) != (gw_int & mask))); then
return 1
fi
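
For reference, the same-subnet test above reduces to integer arithmetic on the dotted quads. A small self-contained sketch; the addresses are illustrative, and the mask derivation from a /24 prefix is an assumption, since this hunk does not show how mask is computed:

ip="192.168.1.50" gateway="192.168.1.1" cidr=24
# Assumed mask derivation (not shown in the diff): all ones shifted left by the host bits
mask=$(((0xFFFFFFFF << (32 - cidr)) & 0xFFFFFFFF))

IFS=. read -r i1 i2 i3 i4 <<<"$ip"
IFS=. read -r g1 g2 g3 g4 <<<"$gateway"
ip_int=$(((i1 << 24) + (i2 << 16) + (i3 << 8) + i4))
gw_int=$(((g1 << 24) + (g2 << 16) + (g3 << 8) + g4))

if (((ip_int & mask) != (gw_int & mask))); then
  echo "gateway is outside the subnet" >&2
else
  echo "gateway is inside the subnet"
fi
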
@@ -1152,7 +1152,7 @@ load_vars_file() {
fi
fi
;;
var_fuse|var_tun|var_gpu|var_ssh|var_verbose|var_protection)
var_fuse | var_tun | var_gpu | var_ssh | var_verbose | var_protection)
if [[ "$var_val" != "yes" && "$var_val" != "no" ]]; then
msg_warn "Invalid boolean '$var_val' for $var_key in $file (must be yes/no), ignoring"
continue
@@ -3086,21 +3086,29 @@ check_container_resources() {
# ------------------------------------------------------------------------------
# check_container_storage()
#
# - Checks /boot partition usage
# - Checks root (/) partition usage
# - Warns if usage >80% and asks user confirmation before proceeding
# ------------------------------------------------------------------------------
check_container_storage() {
total_size=$(df /boot --output=size | tail -n 1)
local used_size=$(df /boot --output=used | tail -n 1)
usage=$((100 * used_size / total_size))
if ((usage > 80)); then
echo -e "${INFO}${HOLD} ${YWB}Warning: Storage is dangerously low (${usage}%).${CL}"
echo -ne "Continue anyway? <y/N> "
read -r prompt
if [[ ! ${prompt,,} =~ ^(y|yes)$ ]]; then
echo -e "${CROSS}${HOLD}${YWB}Exiting based on user input.${CL}"
usage=$(df / -P | awk 'NR==2 {print $5}' | tr -d '%')
if [ -z "$usage" ] || [ "$usage" -lt 0 ]; then
echo -e "${CROSS}${HOLD}${RD}Error: Failed to check disk usage.${CL}"
exit 1
fi
if [ "$usage" -gt 80 ]; then
echo -e "${INFO}${HOLD}${YWB}Warning: Storage is dangerously low (${usage}%).${CL}"
printf "Continue anyway? <y/N> "
read -r prompt
case "$prompt" in
[yY][eE][sS] | [yY]) ;;
*)
echo -e "${CROSS}${HOLD}${YWB}Exiting based on user input.${CL}"
exit 1
;;
esac
fi
}
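
The confirmation prompt above now relies on a POSIX case pattern instead of the bash-only ${prompt,,} lowercase expansion. A minimal sketch of that portable yes/no prompt; the wording is illustrative:

printf "Continue anyway? <y/N> "
read -r answer
case "$answer" in
[yY] | [yY][eE][sS]) echo "continuing" ;;
*)
  echo "aborting" >&2
  exit 1
  ;;
esac
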

@@ -1692,7 +1692,11 @@ function download_with_progress() {
# Get the Content-Length from the HTTP headers
local content_length
content_length=$(curl -fsSLI "$url" | awk '/Content-Length/ {print $2}' | tr -d '\r' || true)
content_length=$(
curl -fsSLI "$url" 2>/dev/null |
awk '(tolower($1) ~ /^content-length:/) && ($2 + 0 > 0) {print $2+0}' |
tail -1 | tr -cd '[:digit:]' || true
)
if [[ -z "$content_length" ]]; then
if ! curl -fL# -o "$output" "$url"; then
@@ -6205,4 +6209,3 @@ function fetch_and_deploy_archive() {
msg_ok "Successfully deployed archive to $directory"
return 0
}
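
Finally, a hedged usage sketch of the updated download_with_progress, which takes a URL and a destination path and returns non-zero when the download fails (per the return 1 paths above); the URL and path below are illustrative:

download_with_progress "https://example.com/archive.tar.gz" "/tmp/archive.tar.gz" || {
  echo "download failed" >&2
  exit 1
}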