From abaca2862477166cba7a83ac2c0160341c46cc97 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20B=C3=A9dard-Couture?= Date: Sat, 7 Feb 2026 08:19:03 -0500 Subject: [PATCH 01/87] Minor fixes following code review --- ct/zitadel.sh | 7 +++++-- install/zitadel-install.sh | 28 ++++++++++------------------ 2 files changed, 15 insertions(+), 20 deletions(-) diff --git a/ct/zitadel.sh b/ct/zitadel.sh index f98ad4052..a1c00fd2c 100644 --- a/ct/zitadel.sh +++ b/ct/zitadel.sh @@ -30,7 +30,8 @@ function update_script() { if check_for_gh_release "zitadel" "zitadel/zitadel"; then msg_info "Stopping Service" - systemctl stop zitadel-api zitadel-login + systemctl stop zitadel-api + systemctl stop zitadel-login msg_ok "Stopped Service" msg_info "Updating Zitadel" @@ -45,7 +46,9 @@ function update_script() { msg_ok "Updated Zitadel" msg_info "Starting Service" - systemctl start zitadel + systemctl start zitadel-api + sleep 5 + systemctl start zitadel-login msg_ok "Started Service" msg_ok "Updated successfully!" 
fi diff --git a/install/zitadel-install.sh b/install/zitadel-install.sh index e7988c311..ae219f0b0 100644 --- a/install/zitadel-install.sh +++ b/install/zitadel-install.sh @@ -30,10 +30,6 @@ LOGIN_PORT="3000" # Detect server IP address SERVER_IP=$(hostname -I | awk '{print $1}') -msg_info "Installing Dependencies (Patience)" -$STD apt install -y ca-certificates -msg_ok "Installed Dependecies" - # Create zitadel user msg_info "Creating zitadel system user" groupadd --system "${ZITADEL_GROUP}" @@ -59,7 +55,9 @@ msg_ok "Configured PostgreSQL" msg_info "Installing Zitadel" cd "${ZITADEL_DIR}" mkdir -p ${CONFIG_DIR} -echo "${MASTERKEY}" > ${CONFIG_DIR}/.masterkey +echo -n "${MASTERKEY}" > ${CONFIG_DIR}/.masterkey +chmod 600 "${CONFIG_DIR}/.masterkey" +chown "${ZITADEL_USER}:${ZITADEL_GROUP}" "${CONFIG_DIR}/.masterkey" # Update config.yaml for network access cat > "${CONFIG_DIR}/config.yaml" < "${CONFIG_DIR}/api.env" <~/zitadel-rerun.sh -systemctl stop zitadel -timeout --kill-after=5s 15s zitadel setup --masterkeyFile ${CONFIG_DIR}/.masterkey --config ${CONFIG_DIR}/config.yaml" -systemctl restart zitadel +systemctl stop zitadel-api zitadel-login +timeout --kill-after=5s 15s /opt/zitadel/zitadel setup --masterkeyFile ${CONFIG_DIR}/.masterkey --config ${CONFIG_DIR}/config.yaml +systemctl restart zitadel-api zitadel-login EOF msg_ok "Bash script for rerunning Zitadel after changing Zitadel config.yaml" From 8c0c4ce1f787a0ace6132e006f9fba33dde0ee2e Mon Sep 17 00:00:00 2001 From: Benjamin Zumhagen Date: Fri, 6 Feb 2026 13:15:33 -0600 Subject: [PATCH 02/87] Add skylite-ux install scripts --- ct/skylite-ux.sh | 75 ++++++++++++++++++++++++++++ frontend/public/json/skylite-ux.json | 44 ++++++++++++++++ install/skylite-ux-install.sh | 72 ++++++++++++++++++++++++++ 3 files changed, 191 insertions(+) create mode 100644 ct/skylite-ux.sh create mode 100644 frontend/public/json/skylite-ux.json create mode 100644 install/skylite-ux-install.sh diff --git a/ct/skylite-ux.sh 
b/ct/skylite-ux.sh new file mode 100644 index 000000000..41c217d28 --- /dev/null +++ b/ct/skylite-ux.sh @@ -0,0 +1,75 @@ +#!/usr/bin/env bash +source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func) +# Copyright (c) 2021-2026 community-scripts ORG +# Author: bzumhagen +# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE +# Source: https://github.com/Wetzel402/Skylite-UX + +APP="Skylite-UX" +var_tags="${var_tags:-family;productivity}" +var_cpu="${var_cpu:-2}" +var_ram="${var_ram:-4096}" +var_disk="${var_disk:-8}" +var_os="${var_os:-debian}" +var_version="${var_version:-13}" +var_unprivileged="${var_unprivileged:-1}" + +header_info "$APP" +variables +color +catch_errors + +function update_script() { + header_info + check_container_storage + check_container_resources + + if [[ ! -d /opt/skylite-ux ]]; then + msg_error "No ${APP} Installation Found!" + exit + fi + + if check_for_gh_release "skylite-ux" "Wetzel402/Skylite-UX"; then + msg_info "Stopping Service" + systemctl stop skylite-ux + msg_ok "Stopped Service" + + msg_info "Backing up Data" + cp /opt/skylite-ux/.env /tmp/skylite-ux.env.backup + msg_ok "Backed up Data" + + CLEAN_INSTALL=1 fetch_and_deploy_gh_release "skylite-ux" "Wetzel402/Skylite-UX" "tarball" + + msg_info "Restoring Data" + cp /tmp/skylite-ux.env.backup /opt/skylite-ux/.env + rm -f /tmp/skylite-ux.env.backup + msg_ok "Restored Data" + + msg_info "Building Skylite-UX" + cd /opt/skylite-ux + $STD npm ci + $STD npx prisma generate + $STD npm run build + msg_ok "Built Skylite-UX" + + msg_info "Running Database Migrations" + cd /opt/skylite-ux + $STD npx prisma migrate deploy + msg_ok "Database Migrations Complete" + + msg_info "Starting Service" + systemctl start skylite-ux + msg_ok "Started Service" + msg_ok "Updated successfully!" 
+ fi + exit +} + +start +build_container +description + +msg_ok "Completed successfully!\n" +echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}" +echo -e "${INFO}${YW} Access it using the following URL:${CL}" +echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}" diff --git a/frontend/public/json/skylite-ux.json b/frontend/public/json/skylite-ux.json new file mode 100644 index 000000000..2ff8107cb --- /dev/null +++ b/frontend/public/json/skylite-ux.json @@ -0,0 +1,44 @@ +{ + "name": "Skylite-UX", + "slug": "skylite-ux", + "categories": [ + 19 + ], + "date_created": "2026-02-06", + "type": "ct", + "updateable": true, + "privileged": false, + "interface_port": 3000, + "documentation": "https://github.com/Wetzel402/Skylite-UX", + "website": "https://github.com/Wetzel402/Skylite-UX", + "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/skylite-ux.webp", + "config_path": "/opt/skylite-ux/.env", + "description": "Skylite-UX is an open-source, self-hosted family management app with calendar, todos, shopping lists, and user management.", + "install_methods": [ + { + "type": "default", + "script": "ct/skylite-ux.sh", + "resources": { + "cpu": 2, + "ram": 4096, + "hdd": 8, + "os": "Debian", + "version": "13" + } + } + ], + "default_credentials": { + "username": null, + "password": null + }, + "notes": [ + { + "text": "Database credentials: `cat ~/skylite-ux.creds`", + "type": "info" + }, + { + "text": "Build process requires ~4GB RAM. 
Runtime usage is much lower β€” RAM can be reduced after installation.", + "type": "info" + } + ] +} diff --git a/install/skylite-ux-install.sh b/install/skylite-ux-install.sh new file mode 100644 index 000000000..4c3f9b43e --- /dev/null +++ b/install/skylite-ux-install.sh @@ -0,0 +1,72 @@ +#!/usr/bin/env bash + +# Copyright (c) 2021-2026 community-scripts ORG +# Author: bzumhagen +# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE +# Source: https://github.com/Wetzel402/Skylite-UX + +source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" +color +verb_ip6 +catch_errors +setting_up_container +network_check +update_os + +msg_info "Installing Dependencies" +$STD apt install -y openssl +msg_ok "Installed Dependencies" + +PG_VERSION="16" setup_postgresql +NODE_VERSION="20" setup_nodejs +PG_DB_NAME="skylite" PG_DB_USER="skylite" PG_DB_SCHEMA_PERMS="true" setup_postgresql_db + +fetch_and_deploy_gh_release "skylite-ux" "Wetzel402/Skylite-UX" "tarball" "2026.2.2" + +msg_info "Configuring skylite-ux" +cat </opt/skylite-ux/.env +DATABASE_URL=postgresql://${PG_DB_USER}:${PG_DB_PASS}@localhost:5432/${PG_DB_NAME} +NODE_ENV=production +HOST=0.0.0.0 +NUXT_PUBLIC_TZ=Etc/UTC +NUXT_PUBLIC_LOG_LEVEL=warn +EOF +msg_ok "Configured skylite-ux" + +msg_info "Building skylite-ux" +cd /opt/skylite-ux +$STD npm ci +$STD npx prisma generate +$STD npm run build +msg_ok "Built skylite-ux" + +msg_info "Running Database Migrations" +cd /opt/skylite-ux +$STD npx prisma migrate deploy +msg_ok "Ran Database Migrations" + +msg_info "Creating Service" +cat </etc/systemd/system/skylite-ux.service +[Unit] +Description=Skylite-UX +After=network.target postgresql.service +Wants=postgresql.service + +[Service] +Type=simple +User=root +WorkingDirectory=/opt/skylite-ux +EnvironmentFile=/opt/skylite-ux/.env +ExecStart=/usr/bin/node /opt/skylite-ux/.output/server/index.mjs +Restart=on-failure +RestartSec=5 + +[Install] +WantedBy=multi-user.target +EOF +systemctl enable -q --now skylite-ux 
+msg_ok "Created Service" + +motd_ssh +customize +cleanup_lxc From 146f681e9cbf654a01b74698958430f07453bb1c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slavi=C5=A1a=20Are=C5=BEina?= <58952836+tremor021@users.noreply.github.com> Date: Sat, 7 Feb 2026 19:43:29 +0100 Subject: [PATCH 03/87] Apply suggestion from @tremor021 --- install/zitadel-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install/zitadel-install.sh b/install/zitadel-install.sh index ae219f0b0..0b68be627 100644 --- a/install/zitadel-install.sh +++ b/install/zitadel-install.sh @@ -216,7 +216,7 @@ systemctl enable -q --now zitadel-api.service sleep 5 # Enable and start Login service -systemctl enable -q --now zitadel-login.service +systemctl enable -q --now zitadel-login msg_ok "Created Services" msg_info "Saving Credentials" From f67f382777329cb6a3d14284c1e660bc0f09fd1a Mon Sep 17 00:00:00 2001 From: Tobias <96661824+CrazyWolf13@users.noreply.github.com> Date: Sun, 8 Feb 2026 20:42:24 +0100 Subject: [PATCH 04/87] Update skylite-ux-install.sh --- install/skylite-ux-install.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/install/skylite-ux-install.sh b/install/skylite-ux-install.sh index 4c3f9b43e..845d175de 100644 --- a/install/skylite-ux-install.sh +++ b/install/skylite-ux-install.sh @@ -20,7 +20,6 @@ msg_ok "Installed Dependencies" PG_VERSION="16" setup_postgresql NODE_VERSION="20" setup_nodejs PG_DB_NAME="skylite" PG_DB_USER="skylite" PG_DB_SCHEMA_PERMS="true" setup_postgresql_db - fetch_and_deploy_gh_release "skylite-ux" "Wetzel402/Skylite-UX" "tarball" "2026.2.2" msg_info "Configuring skylite-ux" From cf38bc519590ea3f4bd9019cc24d557f467ac55c Mon Sep 17 00:00:00 2001 From: Tobias <96661824+CrazyWolf13@users.noreply.github.com> Date: Sun, 8 Feb 2026 20:43:34 +0100 Subject: [PATCH 05/87] Update success message for database migrations --- ct/skylite-ux.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ct/skylite-ux.sh b/ct/skylite-ux.sh index 
41c217d28..18e27a58a 100644 --- a/ct/skylite-ux.sh +++ b/ct/skylite-ux.sh @@ -55,7 +55,7 @@ function update_script() { msg_info "Running Database Migrations" cd /opt/skylite-ux $STD npx prisma migrate deploy - msg_ok "Database Migrations Complete" + msg_ok "Ran Database Migrations" msg_info "Starting Service" systemctl start skylite-ux From c193627217edc7110e6e4f4b94806a437bc194dc Mon Sep 17 00:00:00 2001 From: Tobias <96661824+CrazyWolf13@users.noreply.github.com> Date: Sun, 8 Feb 2026 20:52:07 +0100 Subject: [PATCH 06/87] Add workflow to manage stale pull requests --- .github/workflows/stale_pr_close.yml | 89 ++++++++++++++++++++++++++++ 1 file changed, 89 insertions(+) create mode 100644 .github/workflows/stale_pr_close.yml diff --git a/.github/workflows/stale_pr_close.yml b/.github/workflows/stale_pr_close.yml new file mode 100644 index 000000000..df62a6f8e --- /dev/null +++ b/.github/workflows/stale_pr_close.yml @@ -0,0 +1,89 @@ +name: Stale PR Management + +on: + schedule: + # Runs daily at midnight UTC + - cron: "0 0 * * *" + pull_request: + types: + - labeled + +jobs: + stale-prs: + runs-on: ubuntu-latest + steps: + - name: Handle stale label + uses: actions/github-script@v7 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const now = new Date(); + const owner = context.repo.owner; + const repo = context.repo.repo; + + // Handle when PR is labeled + if (context.eventName === "pull_request" && context.payload.action === "labeled") { + const label = context.payload.label?.name; + if (label === "stale") { + await github.issues.createComment({ + owner, + repo, + issue_number: context.payload.pull_request.number, + body: "This PR has been marked as stale. It will be closed if no new commits are added in 7 days." 
+ }); + } + return; + } + + // Scheduled run: fetch all open PRs + const { data: prs } = await github.pulls.list({ + owner, + repo, + state: "open", + per_page: 100 + }); + + for (const pr of prs) { + const hasStale = pr.labels.some(l => l.name === "stale"); + if (!hasStale) continue; + + // Fetch commits for this PR + const { data: commits } = await github.pulls.listCommits({ + owner, + repo, + pull_number: pr.number + }); + + const lastCommitDate = new Date(commits[commits.length - 1].commit.author.date); + const diffDays = (now - lastCommitDate) / (1000 * 60 * 60 * 24); + + if (diffDays > 7) { + // Close stale PR + await github.pulls.update({ + owner, + repo, + pull_number: pr.number, + state: "closed" + }); + await github.issues.createComment({ + owner, + repo, + issue_number: pr.number, + body: "Closing stale PR due to inactivity." + }); + } else if (lastCommitDate > new Date(now - 7*24*60*60*1000)) { + // Remove stale label if recent activity + await github.issues.removeLabel({ + owner, + repo, + issue_number: pr.number, + name: "stale" + }); + await github.issues.createComment({ + owner, + repo, + issue_number: pr.number, + body: "Recent activity detected. Removing stale label." 
+ }); + } + } From 7bd8140c6f5161404579083518a6a86b5b52e8a7 Mon Sep 17 00:00:00 2001 From: Tobias <96661824+CrazyWolf13@users.noreply.github.com> Date: Sun, 8 Feb 2026 20:58:24 +0100 Subject: [PATCH 07/87] Refactor stale PR management script for clarity --- .github/workflows/stale_pr_close.yml | 19 ++++--------------- 1 file changed, 4 insertions(+), 15 deletions(-) diff --git a/.github/workflows/stale_pr_close.yml b/.github/workflows/stale_pr_close.yml index df62a6f8e..5ffc9475a 100644 --- a/.github/workflows/stale_pr_close.yml +++ b/.github/workflows/stale_pr_close.yml @@ -1,5 +1,4 @@ name: Stale PR Management - on: schedule: # Runs daily at midnight UTC @@ -7,7 +6,6 @@ on: pull_request: types: - labeled - jobs: stale-prs: runs-on: ubuntu-latest @@ -20,8 +18,7 @@ jobs: const now = new Date(); const owner = context.repo.owner; const repo = context.repo.repo; - - // Handle when PR is labeled + // --- PR labeled event --- if (context.eventName === "pull_request" && context.payload.action === "labeled") { const label = context.payload.label?.name; if (label === "stale") { @@ -32,33 +29,26 @@ jobs: body: "This PR has been marked as stale. It will be closed if no new commits are added in 7 days." }); } - return; + return; // exit, nothing else to do } - - // Scheduled run: fetch all open PRs + // --- Scheduled run --- const { data: prs } = await github.pulls.list({ owner, repo, state: "open", per_page: 100 }); - for (const pr of prs) { const hasStale = pr.labels.some(l => l.name === "stale"); if (!hasStale) continue; - - // Fetch commits for this PR const { data: commits } = await github.pulls.listCommits({ owner, repo, pull_number: pr.number }); - const lastCommitDate = new Date(commits[commits.length - 1].commit.author.date); const diffDays = (now - lastCommitDate) / (1000 * 60 * 60 * 24); - if (diffDays > 7) { - // Close stale PR await github.pulls.update({ owner, repo, @@ -71,8 +61,7 @@ jobs: issue_number: pr.number, body: "Closing stale PR due to inactivity." 
}); - } else if (lastCommitDate > new Date(now - 7*24*60*60*1000)) { - // Remove stale label if recent activity + } else if (diffDays <= 7) { await github.issues.removeLabel({ owner, repo, From 79455ee41730d9f1898a221c1721401930e0d395 Mon Sep 17 00:00:00 2001 From: Tobias <96661824+CrazyWolf13@users.noreply.github.com> Date: Sun, 8 Feb 2026 21:01:56 +0100 Subject: [PATCH 08/87] Update GitHub API methods in stale PR workflow --- .github/workflows/stale_pr_close.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/stale_pr_close.yml b/.github/workflows/stale_pr_close.yml index 5ffc9475a..9fe3709ac 100644 --- a/.github/workflows/stale_pr_close.yml +++ b/.github/workflows/stale_pr_close.yml @@ -22,7 +22,7 @@ jobs: if (context.eventName === "pull_request" && context.payload.action === "labeled") { const label = context.payload.label?.name; if (label === "stale") { - await github.issues.createComment({ + await github.rest.issues.createComment({ owner, repo, issue_number: context.payload.pull_request.number, @@ -32,7 +32,7 @@ jobs: return; // exit, nothing else to do } // --- Scheduled run --- - const { data: prs } = await github.pulls.list({ + const { data: prs } = await github.rest.pulls.list({ owner, repo, state: "open", @@ -41,7 +41,7 @@ jobs: for (const pr of prs) { const hasStale = pr.labels.some(l => l.name === "stale"); if (!hasStale) continue; - const { data: commits } = await github.pulls.listCommits({ + const { data: commits } = await github.rest.pulls.listCommits({ owner, repo, pull_number: pr.number @@ -49,26 +49,26 @@ jobs: const lastCommitDate = new Date(commits[commits.length - 1].commit.author.date); const diffDays = (now - lastCommitDate) / (1000 * 60 * 60 * 24); if (diffDays > 7) { - await github.pulls.update({ + await github.rest.pulls.update({ owner, repo, pull_number: pr.number, state: "closed" }); - await github.issues.createComment({ + await github.rest.issues.createComment({ owner, repo, 
issue_number: pr.number, body: "Closing stale PR due to inactivity." }); } else if (diffDays <= 7) { - await github.issues.removeLabel({ + await github.rest.issues.removeLabel({ owner, repo, issue_number: pr.number, name: "stale" }); - await github.issues.createComment({ + await github.rest.issues.createComment({ owner, repo, issue_number: pr.number, From 2f2d0235b73cd5937833f8600e71474acad5190e Mon Sep 17 00:00:00 2001 From: Tobias <96661824+CrazyWolf13@users.noreply.github.com> Date: Sun, 8 Feb 2026 21:03:28 +0100 Subject: [PATCH 09/87] Enhance stale PR workflow with permissions --- .github/workflows/stale_pr_close.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/stale_pr_close.yml b/.github/workflows/stale_pr_close.yml index 9fe3709ac..f9229abdd 100644 --- a/.github/workflows/stale_pr_close.yml +++ b/.github/workflows/stale_pr_close.yml @@ -9,15 +9,18 @@ on: jobs: stale-prs: runs-on: ubuntu-latest + permissions: + pull-requests: write + issues: write steps: - name: Handle stale label uses: actions/github-script@v7 with: - github-token: ${{ secrets.GITHUB_TOKEN }} script: | const now = new Date(); const owner = context.repo.owner; const repo = context.repo.repo; + // --- PR labeled event --- if (context.eventName === "pull_request" && context.payload.action === "labeled") { const label = context.payload.label?.name; @@ -31,6 +34,7 @@ jobs: } return; // exit, nothing else to do } + // --- Scheduled run --- const { data: prs } = await github.rest.pulls.list({ owner, @@ -38,16 +42,20 @@ jobs: state: "open", per_page: 100 }); + for (const pr of prs) { const hasStale = pr.labels.some(l => l.name === "stale"); if (!hasStale) continue; + const { data: commits } = await github.rest.pulls.listCommits({ owner, repo, pull_number: pr.number }); + const lastCommitDate = new Date(commits[commits.length - 1].commit.author.date); const diffDays = (now - lastCommitDate) / (1000 * 60 * 60 * 24); + if (diffDays > 7) { await 
github.rest.pulls.update({ owner, From 460d68ecc9f91223d23dbe596bb911ee6ad5f9bb Mon Sep 17 00:00:00 2001 From: Tobias <96661824+CrazyWolf13@users.noreply.github.com> Date: Sun, 8 Feb 2026 21:04:45 +0100 Subject: [PATCH 10/87] Enhance stale PR workflow with debug logging Added logging for GitHub context and event name. --- .github/workflows/stale_pr_close.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/stale_pr_close.yml b/.github/workflows/stale_pr_close.yml index f9229abdd..07e697dd4 100644 --- a/.github/workflows/stale_pr_close.yml +++ b/.github/workflows/stale_pr_close.yml @@ -17,6 +17,10 @@ jobs: uses: actions/github-script@v7 with: script: | + console.log('github object:', typeof github); + console.log('github.rest:', typeof github?.rest); + console.log('context.eventName:', context.eventName); + const now = new Date(); const owner = context.repo.owner; const repo = context.repo.repo; From 509c37024c3d37126c02e65a03ff1e8283b59595 Mon Sep 17 00:00:00 2001 From: Tobias <96661824+CrazyWolf13@users.noreply.github.com> Date: Sun, 8 Feb 2026 21:06:11 +0100 Subject: [PATCH 11/87] Refactor stale PR close workflow permissions and script --- .github/workflows/stale_pr_close.yml | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/.github/workflows/stale_pr_close.yml b/.github/workflows/stale_pr_close.yml index 07e697dd4..9fe3709ac 100644 --- a/.github/workflows/stale_pr_close.yml +++ b/.github/workflows/stale_pr_close.yml @@ -9,22 +9,15 @@ on: jobs: stale-prs: runs-on: ubuntu-latest - permissions: - pull-requests: write - issues: write steps: - name: Handle stale label uses: actions/github-script@v7 with: + github-token: ${{ secrets.GITHUB_TOKEN }} script: | - console.log('github object:', typeof github); - console.log('github.rest:', typeof github?.rest); - console.log('context.eventName:', context.eventName); - const now = new Date(); const owner = context.repo.owner; const repo = context.repo.repo; - // --- PR 
labeled event --- if (context.eventName === "pull_request" && context.payload.action === "labeled") { const label = context.payload.label?.name; @@ -38,7 +31,6 @@ jobs: } return; // exit, nothing else to do } - // --- Scheduled run --- const { data: prs } = await github.rest.pulls.list({ owner, @@ -46,20 +38,16 @@ jobs: state: "open", per_page: 100 }); - for (const pr of prs) { const hasStale = pr.labels.some(l => l.name === "stale"); if (!hasStale) continue; - const { data: commits } = await github.rest.pulls.listCommits({ owner, repo, pull_number: pr.number }); - const lastCommitDate = new Date(commits[commits.length - 1].commit.author.date); const diffDays = (now - lastCommitDate) / (1000 * 60 * 60 * 24); - if (diffDays > 7) { await github.rest.pulls.update({ owner, From e0e1475d7455ea9ebee270fa0445a283c495bfbb Mon Sep 17 00:00:00 2001 From: Tobias <96661824+CrazyWolf13@users.noreply.github.com> Date: Sun, 8 Feb 2026 21:09:06 +0100 Subject: [PATCH 12/87] Update permissions in stale PR close workflow --- .github/workflows/stale_pr_close.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/stale_pr_close.yml b/.github/workflows/stale_pr_close.yml index 9fe3709ac..f7c1fcb72 100644 --- a/.github/workflows/stale_pr_close.yml +++ b/.github/workflows/stale_pr_close.yml @@ -9,6 +9,10 @@ on: jobs: stale-prs: runs-on: ubuntu-latest + permissions: + pull-requests: write + issues: write + contents: read steps: - name: Handle stale label uses: actions/github-script@v7 From 0a446423c62e024467bdecf7d6f155c945b914ad Mon Sep 17 00:00:00 2001 From: Tobias <96661824+CrazyWolf13@users.noreply.github.com> Date: Sun, 8 Feb 2026 21:15:15 +0100 Subject: [PATCH 13/87] Refactor stale PR management workflow --- .github/workflows/stale_pr_close.yml | 30 +++++++++------------------- 1 file changed, 9 insertions(+), 21 deletions(-) diff --git a/.github/workflows/stale_pr_close.yml b/.github/workflows/stale_pr_close.yml index f7c1fcb72..6b0531936 100644 --- 
a/.github/workflows/stale_pr_close.yml +++ b/.github/workflows/stale_pr_close.yml @@ -1,11 +1,9 @@ name: Stale PR Management on: schedule: - # Runs daily at midnight UTC - - cron: "0 0 * * *" - pull_request: - types: - - labeled + - cron: "0 0 * * *" # Daily at midnight UTC + workflow_dispatch: # Allow manual trigger for testing + jobs: stale-prs: runs-on: ubuntu-latest @@ -14,44 +12,34 @@ jobs: issues: write contents: read steps: - - name: Handle stale label + - name: Handle stale PRs uses: actions/github-script@v7 with: - github-token: ${{ secrets.GITHUB_TOKEN }} script: | const now = new Date(); const owner = context.repo.owner; const repo = context.repo.repo; - // --- PR labeled event --- - if (context.eventName === "pull_request" && context.payload.action === "labeled") { - const label = context.payload.label?.name; - if (label === "stale") { - await github.rest.issues.createComment({ - owner, - repo, - issue_number: context.payload.pull_request.number, - body: "This PR has been marked as stale. It will be closed if no new commits are added in 7 days." 
- }); - } - return; // exit, nothing else to do - } - // --- Scheduled run --- + const { data: prs } = await github.rest.pulls.list({ owner, repo, state: "open", per_page: 100 }); + for (const pr of prs) { const hasStale = pr.labels.some(l => l.name === "stale"); if (!hasStale) continue; + const { data: commits } = await github.rest.pulls.listCommits({ owner, repo, pull_number: pr.number }); + const lastCommitDate = new Date(commits[commits.length - 1].commit.author.date); const diffDays = (now - lastCommitDate) / (1000 * 60 * 60 * 24); + if (diffDays > 7) { await github.rest.pulls.update({ owner, From 28cfa9ccd268af0af54c86c5a3a05608a9b2ef71 Mon Sep 17 00:00:00 2001 From: Tobias <96661824+CrazyWolf13@users.noreply.github.com> Date: Sun, 8 Feb 2026 21:17:26 +0100 Subject: [PATCH 14/87] Modify stale PR workflow for labeled events --- .github/workflows/stale_pr_close.yml | 24 +++++++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/.github/workflows/stale_pr_close.yml b/.github/workflows/stale_pr_close.yml index 6b0531936..e16a66afa 100644 --- a/.github/workflows/stale_pr_close.yml +++ b/.github/workflows/stale_pr_close.yml @@ -1,8 +1,11 @@ name: Stale PR Management on: schedule: - - cron: "0 0 * * *" # Daily at midnight UTC - workflow_dispatch: # Allow manual trigger for testing + - cron: "0 0 * * *" + workflow_dispatch: + pull_request_target: # Changed from pull_request + types: + - labeled jobs: stale-prs: @@ -12,7 +15,7 @@ jobs: issues: write contents: read steps: - - name: Handle stale PRs + - name: Handle stale label uses: actions/github-script@v7 with: script: | @@ -20,6 +23,21 @@ jobs: const owner = context.repo.owner; const repo = context.repo.repo; + // --- PR labeled event --- + if (context.eventName === "pull_request_target" && context.payload.action === "labeled") { + const label = context.payload.label?.name; + if (label === "stale") { + await github.rest.issues.createComment({ + owner, + repo, + issue_number: 
context.payload.pull_request.number, + body: "This PR has been marked as stale. It will be closed if no new commits are added in 7 days." + }); + } + return; + } + + // --- Scheduled run --- const { data: prs } = await github.rest.pulls.list({ owner, repo, From 02dc3efc7b697de1d3cb8aa6ce97eb01d3be41ad Mon Sep 17 00:00:00 2001 From: Tobias <96661824+CrazyWolf13@users.noreply.github.com> Date: Sun, 8 Feb 2026 21:21:10 +0100 Subject: [PATCH 15/87] Update stale PR handling logic in workflow --- .github/workflows/stale_pr_close.yml | 59 +++++++++++++++++++--------- 1 file changed, 40 insertions(+), 19 deletions(-) diff --git a/.github/workflows/stale_pr_close.yml b/.github/workflows/stale_pr_close.yml index e16a66afa..aa6355f7d 100644 --- a/.github/workflows/stale_pr_close.yml +++ b/.github/workflows/stale_pr_close.yml @@ -3,7 +3,7 @@ on: schedule: - cron: "0 0 * * *" workflow_dispatch: - pull_request_target: # Changed from pull_request + pull_request_target: types: - labeled @@ -15,7 +15,7 @@ jobs: issues: write contents: read steps: - - name: Handle stale label + - name: Handle stale PRs uses: actions/github-script@v7 with: script: | @@ -23,7 +23,7 @@ jobs: const owner = context.repo.owner; const repo = context.repo.repo; - // --- PR labeled event --- + // --- When stale label is added, comment immediately --- if (context.eventName === "pull_request_target" && context.payload.action === "labeled") { const label = context.payload.label?.name; if (label === "stale") { @@ -37,7 +37,7 @@ jobs: return; } - // --- Scheduled run --- + // --- Scheduled run: check all stale PRs --- const { data: prs } = await github.rest.pulls.list({ owner, repo, @@ -49,6 +49,25 @@ jobs: const hasStale = pr.labels.some(l => l.name === "stale"); if (!hasStale) continue; + // Get timeline events to find when stale label was added + const { data: events } = await github.rest.issues.listEvents({ + owner, + repo, + issue_number: pr.number, + per_page: 100 + }); + + // Find the most recent time the 
stale label was added + const staleLabelEvents = events + .filter(e => e.event === "labeled" && e.label?.name === "stale") + .sort((a, b) => new Date(b.created_at) - new Date(a.created_at)); + + if (staleLabelEvents.length === 0) continue; + + const staleLabelDate = new Date(staleLabelEvents[0].created_at); + const daysSinceStale = (now - staleLabelDate) / (1000 * 60 * 60 * 24); + + // Check for new commits since stale label was added const { data: commits } = await github.rest.pulls.listCommits({ owner, repo, @@ -56,22 +75,9 @@ jobs: }); const lastCommitDate = new Date(commits[commits.length - 1].commit.author.date); - const diffDays = (now - lastCommitDate) / (1000 * 60 * 60 * 24); - if (diffDays > 7) { - await github.rest.pulls.update({ - owner, - repo, - pull_number: pr.number, - state: "closed" - }); - await github.rest.issues.createComment({ - owner, - repo, - issue_number: pr.number, - body: "Closing stale PR due to inactivity." - }); - } else if (diffDays <= 7) { + // If there are new commits after the stale label, remove it + if (lastCommitDate > staleLabelDate) { await github.rest.issues.removeLabel({ owner, repo, @@ -85,4 +91,19 @@ jobs: body: "Recent activity detected. Removing stale label." }); } + // If 7 days have passed since stale label, close the PR + else if (daysSinceStale > 7) { + await github.rest.pulls.update({ + owner, + repo, + pull_number: pr.number, + state: "closed" + }); + await github.rest.issues.createComment({ + owner, + repo, + issue_number: pr.number, + body: "Closing stale PR due to inactivity (no commits for 7 days after stale label)." 
+ }); + } } From 765a455be7e3533f6ac4637567f8ab6af23961a9 Mon Sep 17 00:00:00 2001 From: Tobias <96661824+CrazyWolf13@users.noreply.github.com> Date: Sun, 8 Feb 2026 21:24:03 +0100 Subject: [PATCH 16/87] Personalize comments with PR author mention --- .github/workflows/stale_pr_close.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/stale_pr_close.yml b/.github/workflows/stale_pr_close.yml index aa6355f7d..1a97a01a4 100644 --- a/.github/workflows/stale_pr_close.yml +++ b/.github/workflows/stale_pr_close.yml @@ -27,11 +27,12 @@ jobs: if (context.eventName === "pull_request_target" && context.payload.action === "labeled") { const label = context.payload.label?.name; if (label === "stale") { + const author = context.payload.pull_request.user.login; await github.rest.issues.createComment({ owner, repo, issue_number: context.payload.pull_request.number, - body: "This PR has been marked as stale. It will be closed if no new commits are added in 7 days." + body: `@${author} This PR has been marked as stale. It will be closed if no new commits are added in 7 days.` }); } return; @@ -75,6 +76,7 @@ jobs: }); const lastCommitDate = new Date(commits[commits.length - 1].commit.author.date); + const author = pr.user.login; // If there are new commits after the stale label, remove it if (lastCommitDate > staleLabelDate) { @@ -88,7 +90,7 @@ jobs: owner, repo, issue_number: pr.number, - body: "Recent activity detected. Removing stale label." + body: `@${author} Recent activity detected. Removing stale label.` }); } // If 7 days have passed since stale label, close the PR @@ -103,7 +105,7 @@ jobs: owner, repo, issue_number: pr.number, - body: "Closing stale PR due to inactivity (no commits for 7 days after stale label)." 
+ body: `@${author} Closing stale PR due to inactivity (no commits for 7 days after stale label).` }); } } From d750a97404742f70c996a2e7847c26f488cb4b4e Mon Sep 17 00:00:00 2001 From: Benjamin Zumhagen Date: Sun, 8 Feb 2026 15:49:19 -0600 Subject: [PATCH 17/87] Update node to v24 --- install/skylite-ux-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install/skylite-ux-install.sh b/install/skylite-ux-install.sh index 845d175de..c81f0f810 100644 --- a/install/skylite-ux-install.sh +++ b/install/skylite-ux-install.sh @@ -18,7 +18,7 @@ $STD apt install -y openssl msg_ok "Installed Dependencies" PG_VERSION="16" setup_postgresql -NODE_VERSION="20" setup_nodejs +NODE_VERSION="24" setup_nodejs PG_DB_NAME="skylite" PG_DB_USER="skylite" PG_DB_SCHEMA_PERMS="true" setup_postgresql_db fetch_and_deploy_gh_release "skylite-ux" "Wetzel402/Skylite-UX" "tarball" "2026.2.2" From 48179d26b6da3283f744b73dcb6d5e449ac236a9 Mon Sep 17 00:00:00 2001 From: CrazyWolf13 Date: Mon, 9 Feb 2026 07:35:32 +0100 Subject: [PATCH 18/87] paperless-exporter --- frontend/public/json/paperless-exporter.json | 35 ++++ tools/addon/paperless-exporter.sh | 188 +++++++++++++++++++ 2 files changed, 223 insertions(+) create mode 100644 frontend/public/json/paperless-exporter.json create mode 100644 tools/addon/paperless-exporter.sh diff --git a/frontend/public/json/paperless-exporter.json b/frontend/public/json/paperless-exporter.json new file mode 100644 index 000000000..ac5741c48 --- /dev/null +++ b/frontend/public/json/paperless-exporter.json @@ -0,0 +1,35 @@ +{ + "name": "Prometheus Paperless NGX Exporter", + "slug": "prometheus-paperless-ngx-exporter", + "categories": [ + 9 + ], + "date_created": "2025-02-07", + "type": "addon", + "updateable": true, + "privileged": false, + "interface_port": 8081, + "documentation": "https://github.com/hansmi/prometheus-paperless-exporter", + "website": "https://github.com/hansmi/prometheus-paperless-exporter", + "logo": 
"https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/paperless-ngx.webp", + "config_path": "/etc/prometheus-paperless-ngx-exporter/config.env", + "description": "Prometheus metrics exporter for Paperless-NGX, a document management system transforming physical documents into a searchable online archive. The exporter relies on Paperless' REST API.", + "install_methods": [ + { + "type": "default", + "script": "tools/addon/prometheus-paperless-ngx-exporter.sh", + "resources": { + "cpu": null, + "ram": null, + "hdd": null, + "os": null, + "version": null + } + } + ], + "default_credentials": { + "username": null, + "password": null + }, + "notes": [] +} diff --git a/tools/addon/paperless-exporter.sh b/tools/addon/paperless-exporter.sh new file mode 100644 index 000000000..6f320c92b --- /dev/null +++ b/tools/addon/paperless-exporter.sh @@ -0,0 +1,188 @@ +#!/usr/bin/env bash + +# Copyright (c) 2021-2026 community-scripts ORG +# Author: Andy Grunwald (andygrunwald) +# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE +# Source: https://github.com/hansmi/prometheus-paperless-exporter + +source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/core.func) +source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/tools.func) +source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/error_handler.func) + +# Enable error handling +set -Eeuo pipefail +trap 'error_handler' ERR +load_functions + +# ============================================================================== +# CONFIGURATION +# ============================================================================== +VERBOSE=${var_verbose:-no} +APP="prometheus-paperless-ngx-exporter" +APP_TYPE="tools" +BINARY_PATH="/usr/bin/prometheus-paperless-exporter" +CONFIG_PATH="/etc/prometheus-paperless-ngx-exporter/config.env" 
+SERVICE_PATH="/etc/systemd/system/prometheus-paperless-ngx-exporter.service" +AUTH_TOKEN_FILE="/etc/prometheus-paperless-ngx-exporter/paperless_auth_token_file" + +# ============================================================================== +# OS DETECTION +# ============================================================================== +if ! grep -qE 'ID=debian|ID=ubuntu' /etc/os-release 2>/dev/null; then + echo -e "${CROSS} Unsupported OS detected. This script only supports Debian and Ubuntu." + exit 1 +fi + +# ============================================================================== +# UNINSTALL +# ============================================================================== +function uninstall() { + msg_info "Uninstalling Prometheus-Paperless-NGX-Exporter" + systemctl disable -q --now prometheus-paperless-ngx-exporter + + if dpkg -l | grep -q prometheus-paperless-exporter; then + $STD apt-get remove -y prometheus-paperless-exporter || $STD dpkg -r prometheus-paperless-exporter + fi + + rm -f "$SERVICE_PATH" + rm -rf /etc/prometheus-paperless-ngx-exporter + rm -f "/usr/local/bin/update_prometheus-paperless-ngx-exporter" + rm -f "$HOME/.prometheus-paperless-ngx-exporter" + msg_ok "Prometheus-Paperless-NGX-Exporter has been uninstalled" +} + +# ============================================================================== +# UPDATE +# ============================================================================== +function update() { + if check_for_gh_release "prom-paperless-exp" "hansmi/prometheus-paperless-exporter"; then + msg_info "Stopping service" + systemctl stop prometheus-paperless-ngx-exporter + msg_ok "Stopped service" + + fetch_and_deploy_gh_release "prom-paperless-exp" "hansmi/prometheus-paperless-exporter" "binary" "latest" + + msg_info "Starting service" + systemctl start prometheus-paperless-ngx-exporter + msg_ok "Started service" + msg_ok "Updated successfully!" 
+ exit + fi +} + +# ============================================================================== +# INSTALL +# ============================================================================== +function install() { + read -erp "Enter URL of Paperless-NGX, example: (http://127.0.0.1:8000): " PAPERLESS_URL + read -rsp "Enter Paperless-NGX authentication token: " PAPERLESS_AUTH_TOKEN + printf "\n" + + fetch_and_deploy_gh_release "prom-paperless-exp" "hansmi/prometheus-paperless-exporter" "binary" "latest" + + msg_info "Creating configuration" + mkdir -p /etc/prometheus-paperless-ngx-exporter + cat <"$CONFIG_PATH" +# https://github.com/hansmi/prometheus-paperless-exporter +PAPERLESS_URL="${PAPERLESS_URL}" +EOF + echo "${PAPERLESS_AUTH_TOKEN}" >"$AUTH_TOKEN_FILE" + chmod 600 "$AUTH_TOKEN_FILE" + msg_ok "Created configuration" + + msg_info "Creating service" + cat <"$SERVICE_PATH" +[Unit] +Description=Prometheus Paperless NGX Exporter +Wants=network-online.target +After=network-online.target + +[Service] +User=root +EnvironmentFile=$CONFIG_PATH +ExecStart=$BINARY_PATH \\ + --paperless_url=\${PAPERLESS_URL} \\ + --paperless_auth_token_file=$AUTH_TOKEN_FILE +Restart=always + +[Install] +WantedBy=multi-user.target +EOF + systemctl daemon-reload + systemctl enable -q --now prometheus-paperless-ngx-exporter + msg_ok "Created and started service" + + # Create update script + msg_info "Creating update script" + ensure_usr_local_bin_persist + cat <<'UPDATEEOF' >/usr/local/bin/update_prometheus-paperless-ngx-exporter +#!/usr/bin/env bash +# prometheus-paperless-ngx-exporter Update Script +type=update bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/tools/addon/prometheus-paperless-ngx-exporter.sh)" +UPDATEEOF + chmod +x /usr/local/bin/update_prometheus-paperless-ngx-exporter + msg_ok "Created update script (/usr/local/bin/update_prometheus-paperless-ngx-exporter)" + + echo "" + msg_ok "Prometheus-Paperless-NGX-Exporter installed 
successfully" + msg_ok "Metrics: ${BL}http://${LOCAL_IP}:8081/metrics${CL}" + msg_ok "Config: ${BL}${CONFIG_PATH}${CL}" +} + +# ============================================================================== +# MAIN +# ============================================================================== +header_info +ensure_usr_local_bin_persist +get_lxc_ip + +# Handle type=update (called from update script) +if [[ "${type:-}" == "update" ]]; then + if [[ -f "$BINARY_PATH" ]]; then + update + else + msg_error "Prometheus-Paperless-NGX-Exporter is not installed. Nothing to update." + exit 1 + fi + exit 0 +fi + +# Check if already installed +if [[ -f "$BINARY_PATH" ]]; then + msg_warn "Prometheus-Paperless-NGX-Exporter is already installed." + echo "" + + echo -n "${TAB}Uninstall Prometheus-Paperless-NGX-Exporter? (y/N): " + read -r uninstall_prompt + if [[ "${uninstall_prompt,,}" =~ ^(y|yes)$ ]]; then + uninstall + exit 0 + fi + + echo -n "${TAB}Update Prometheus-Paperless-NGX-Exporter? (y/N): " + read -r update_prompt + if [[ "${update_prompt,,}" =~ ^(y|yes)$ ]]; then + update + exit 0 + fi + + msg_warn "No action selected. Exiting." + exit 0 +fi + +# Fresh installation +msg_warn "Prometheus-Paperless-NGX-Exporter is not installed." +echo "" +echo -e "${TAB}${INFO} This will install:" +echo -e "${TAB} - Prometheus Paperless NGX Exporter (binary)" +echo -e "${TAB} - Systemd service" +echo "" + +echo -n "${TAB}Install Prometheus-Paperless-NGX-Exporter? (y/N): " +read -r install_prompt +if [[ "${install_prompt,,}" =~ ^(y|yes)$ ]]; then + install +else + msg_warn "Installation cancelled. Exiting." 
+ exit 0 +fi From 6b3f1e3a0b7f399ad71caa68412b86866dcd497b Mon Sep 17 00:00:00 2001 From: CrazyWolf13 Date: Mon, 9 Feb 2026 07:38:38 +0100 Subject: [PATCH 19/87] fix json --- frontend/public/json/paperless-exporter.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/public/json/paperless-exporter.json b/frontend/public/json/paperless-exporter.json index ac5741c48..56dace551 100644 --- a/frontend/public/json/paperless-exporter.json +++ b/frontend/public/json/paperless-exporter.json @@ -17,7 +17,7 @@ "install_methods": [ { "type": "default", - "script": "tools/addon/prometheus-paperless-ngx-exporter.sh", + "script": "tools/addon/paperless-ngx-exporter.sh", "resources": { "cpu": null, "ram": null, From 4b36571661d3de58a0db69922881bda1c3c15761 Mon Sep 17 00:00:00 2001 From: CrazyWolf13 Date: Mon, 9 Feb 2026 07:39:52 +0100 Subject: [PATCH 20/87] fix json v2 --- frontend/public/json/paperless-exporter.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/public/json/paperless-exporter.json b/frontend/public/json/paperless-exporter.json index 56dace551..2b10c7c5e 100644 --- a/frontend/public/json/paperless-exporter.json +++ b/frontend/public/json/paperless-exporter.json @@ -17,7 +17,7 @@ "install_methods": [ { "type": "default", - "script": "tools/addon/paperless-ngx-exporter.sh", + "script": "tools/addon/paperless-exporter.sh", "resources": { "cpu": null, "ram": null, From 3232665b920f440e12dde94f4b03a0a8cec890cd Mon Sep 17 00:00:00 2001 From: Finn Joshua Bartels Date: Mon, 9 Feb 2026 11:49:02 +0100 Subject: [PATCH 21/87] feat(core): dev mode configurator menu - Implement interactive Whiptail checklist for dev_mode configuration - Add logic to read and pre-populate menu from existing dev_mode enviorment var --- misc/build.func | 67 +++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 62 insertions(+), 5 deletions(-) diff --git a/misc/build.func b/misc/build.func index 20a20e985..6a73e688c 100644 --- 
a/misc/build.func +++ b/misc/build.func @@ -2807,6 +2807,61 @@ EOF fi } +dev_mode_menu() { + local motd=OFF keep=OFF trace=OFF pause=OFF breakpoint=OFF logs=OFF dryrun=OFF verbose=OFF + + IFS=',' read -r -a _modes <<< "$dev_mode" + for m in "${_modes[@]}"; do + case "$m" in + motd) motd=ON ;; + keep) keep=ON ;; + trace) trace=ON ;; + pause) pause=ON ;; + breakpoint) breakpoint=ON ;; + logs) logs=ON ;; + dryrun) dryrun=ON ;; + esac + done + + [[ "$var_verbose" == "yes" ]] && verbose=ON + + local selection + selection=$(whiptail --backtitle "Proxmox VE Helper Scripts" \ + --title "DEV MODE" \ + --checklist "Choose one or more Options" 16 51 10 \ + "motd" "Early SSH/MOTD Setup" "$motd" \ + "keep" "Preserve Container on Failure" "$keep" \ + "trace" "Bash Command Tracing" "$trace" \ + "pause" "Step-by-Step Execution" "$pause" \ + "breakpoint" "Interactive Shell on Error" "$breakpoint" \ + "logs" "Persistent Logging" "$logs" \ + "dryrun" "Simulation Mode" "$dryrun" \ + "verbose" "Verbose logging" "$verbose" \ + 3>&1 1>&2 2>&3) || exit_script + + dev_mode="" + var_verbose="no" + local modes_out=() + + for tag in $selection; do + tag="${tag%\"}" + tag="${tag#\"}" + if [[ "$tag" == "verbose" ]]; then + var_verbose="yes" + else + modes_out+=("$tag") + fi + done + + dev_mode=$(IFS=,; echo "${modes_out[*]}") + unset DEV_MODE_MOTD DEV_MODE_KEEP DEV_MODE_TRACE DEV_MODE_PAUSE DEV_MODE_BREAKPOINT DEV_MODE_LOGS DEV_MODE_DRYRUN + parse_dev_mode + if [[ "${DEV_MODE_LOGS:-false}" == "true" ]]; then + mkdir -p /var/log/community-scripts + BUILD_LOG="/var/log/community-scripts/create-lxc-${SESSION_ID}-$(date +%Y%m%d_%H%M%S).log" + fi +} + diagnostics_menu() { if [ "${DIAGNOSTICS:-no}" = "yes" ]; then if whiptail --backtitle "Proxmox VE Helper Scripts" \ @@ -3024,12 +3079,13 @@ settings_menu() { local settings_items=( "1" "Manage API-Diagnostic Setting" "2" "Edit Default.vars" + "3" "Configure dev mode" ) if [ -f "$(get_app_defaults_path)" ]; then - settings_items+=("3" "Edit App.vars 
for ${APP}") - settings_items+=("4" "Back to Main Menu") + settings_items+=("4" "Edit App.vars for ${APP}") + settings_items+=("5" "Back to Main Menu") else - settings_items+=("3" "Back to Main Menu") + settings_items+=("4" "Back to Main Menu") fi local choice @@ -3043,7 +3099,8 @@ settings_menu() { case "$choice" in 1) diagnostics_menu ;; 2) nano /usr/local/community-scripts/default.vars ;; - 3) + 3) dev_mode_menu ;; + 4) if [ -f "$(get_app_defaults_path)" ]; then nano "$(get_app_defaults_path)" else @@ -3051,7 +3108,7 @@ settings_menu() { return fi ;; - 4) + 5) # Back to main menu return ;; From e61d4f997837b18e212f4b1a9cb6f156c1f65cb6 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 12:09:41 +0100 Subject: [PATCH 22/87] Add Linkding CT/install scripts and frontend Add Linkding integration: new CT template (ct/linkding.sh) and installer (install/linkding-install.sh) to deploy Linkding as a container. Installer fetches the GitHub release, builds the frontend, compiles the SQLite ICU extension, creates .env with generated admin password, and installs systemd services (linkding, linkding-tasks). Also add frontend metadata (frontend/public/json/linkding.json) for the app catalog. Minor tweak: remove unused import_local_ip from install/wger-install.sh. 
--- ct/linkding.sh | 80 ++++++++++++++++++++ frontend/public/json/linkding.json | 40 ++++++++++ install/linkding-install.sh | 116 +++++++++++++++++++++++++++++ install/wger-install.sh | 1 - 4 files changed, 236 insertions(+), 1 deletion(-) create mode 100644 ct/linkding.sh create mode 100644 frontend/public/json/linkding.json create mode 100644 install/linkding-install.sh diff --git a/ct/linkding.sh b/ct/linkding.sh new file mode 100644 index 000000000..d1fcc0fcd --- /dev/null +++ b/ct/linkding.sh @@ -0,0 +1,80 @@ +#!/usr/bin/env bash +source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func) +# Copyright (c) 2021-2026 community-scripts ORG +# Author: MickLesk (MickLesk) +# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE +# Source: https://linkding.link/ + +APP="linkding" +var_tags="${var_tags:-bookmarks;management}" +var_cpu="${var_cpu:-2}" +var_ram="${var_ram:-1024}" +var_disk="${var_disk:-4}" +var_os="${var_os:-debian}" +var_version="${var_version:-13}" +var_unprivileged="${var_unprivileged:-1}" + +header_info "$APP" +variables +color +catch_errors + +function update_script() { + header_info + check_container_storage + check_container_resources + + if [[ ! -d /opt/linkding ]]; then + msg_error "No ${APP} Installation Found!" + exit + fi + + if check_for_gh_release "linkding" "sissbruecker/linkding"; then + msg_info "Stopping Services" + systemctl stop linkding linkding-tasks + msg_ok "Stopped Services" + + msg_info "Backing up Data" + cp -r /opt/linkding/data /opt/linkding_data_backup + cp /opt/linkding/.env /opt/linkding_env_backup + cp /opt/linkding/libicu.so /opt/linkding_libicu_backup + msg_ok "Backed up Data" + + CLEAN_INSTALL=1 fetch_and_deploy_gh_release "linkding" "sissbruecker/linkding" + + msg_info "Restoring Data" + cp -r /opt/linkding_data_backup/. 
/opt/linkding/data + cp /opt/linkding_env_backup /opt/linkding/.env + cp /opt/linkding_libicu_backup /opt/linkding/libicu.so + rm -rf /opt/linkding_data_backup /opt/linkding_env_backup /opt/linkding_libicu_backup + msg_ok "Restored Data" + + msg_info "Updating ${APP}" + cd /opt/linkding + rm -f bookmarks/settings/dev.py + touch bookmarks/settings/custom.py + $STD npm ci + $STD npm run build + $STD uv sync --no-dev + $STD uv pip install gunicorn + set -a && source /opt/linkding/.env && set +a + $STD uv run python manage.py migrate + $STD uv run python manage.py collectstatic --no-input + msg_ok "Updated ${APP}" + + msg_info "Starting Services" + systemctl start linkding linkding-tasks + msg_ok "Started Services" + msg_ok "Updated Successfully" + fi + exit +} + +start +build_container +description + +msg_ok "Completed Successfully!\n" +echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}" +echo -e "${INFO}${YW} Access it using the following URL:${CL}" +echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:9090${CL}" diff --git a/frontend/public/json/linkding.json b/frontend/public/json/linkding.json new file mode 100644 index 000000000..cd07dd1b5 --- /dev/null +++ b/frontend/public/json/linkding.json @@ -0,0 +1,40 @@ +{ + "name": "linkding", + "slug": "linkding", + "categories": [ + 12 + ], + "date_created": "2026-02-09", + "type": "ct", + "updateable": true, + "privileged": false, + "interface_port": 9090, + "documentation": "https://linkding.link/", + "website": "https://linkding.link/", + "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/linkding.webp", + "config_path": "/opt/linkding/.env", + "description": "linkding is a self-hosted bookmark manager that is designed to be minimal, fast, and easy to set up. 
It features a clean UI, tag-based organization, bulk editing, Markdown notes, read it later functionality, sharing, REST API, and browser extensions for Firefox and Chrome.", + "install_methods": [ + { + "type": "default", + "script": "ct/linkding.sh", + "resources": { + "cpu": 2, + "ram": 1024, + "hdd": 4, + "os": "Debian", + "version": "13" + } + } + ], + "default_credentials": { + "username": "admin", + "password": null + }, + "notes": [ + { + "text": "Admin credentials are stored in /opt/linkding/.env", + "type": "info" + } + ] +} diff --git a/install/linkding-install.sh b/install/linkding-install.sh new file mode 100644 index 000000000..fa8f68fb9 --- /dev/null +++ b/install/linkding-install.sh @@ -0,0 +1,116 @@ +#!/usr/bin/env bash + +# Copyright (c) 2021-2026 community-scripts ORG +# Author: MickLesk (MickLesk) +# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE +# Source: https://linkding.link/ + +source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" +color +verb_ip6 +catch_errors +setting_up_container +network_check +update_os + +msg_info "Installing Dependencies" +$STD apt-get install -y \ + build-essential \ + pkg-config \ + libpq-dev \ + libicu-dev \ + libsqlite3-dev \ + libffi-dev \ + unzip \ + wget +msg_ok "Installed Dependencies" + +NODE_VERSION="22" setup_nodejs +setup_uv + +fetch_and_deploy_gh_release "linkding" "sissbruecker/linkding" + +msg_info "Building Frontend" +cd /opt/linkding +$STD npm ci +$STD npm run build +msg_ok "Built Frontend" + +msg_info "Compiling SQLite ICU Extension" +cd /tmp +SQLITE_RELEASE_YEAR=2023 +SQLITE_RELEASE=3430000 +$STD wget https://www.sqlite.org/${SQLITE_RELEASE_YEAR}/sqlite-amalgamation-${SQLITE_RELEASE}.zip +$STD unzip -o sqlite-amalgamation-${SQLITE_RELEASE}.zip +cp sqlite-amalgamation-${SQLITE_RELEASE}/sqlite3.h . +cp sqlite-amalgamation-${SQLITE_RELEASE}/sqlite3ext.h . 
+$STD wget "https://www.sqlite.org/src/raw/ext/icu/icu.c?name=91c021c7e3e8bbba286960810fa303295c622e323567b2e6def4ce58e4466e60" -O icu.c +$STD gcc -fPIC -shared icu.c $(pkg-config --libs --cflags icu-uc icu-io) -o /opt/linkding/libicu.so +rm -rf sqlite-amalgamation-${SQLITE_RELEASE}* icu.c sqlite3.h sqlite3ext.h +cd /opt/linkding +msg_ok "Compiled SQLite ICU Extension" + +msg_info "Setting up ${APP}" +rm -f bookmarks/settings/dev.py +touch bookmarks/settings/custom.py +$STD uv sync --no-dev +$STD uv pip install gunicorn +mkdir -p data/{favicons,previews,assets} +ADMIN_PASS=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | cut -c1-13) +cat </opt/linkding/.env +LD_SUPERUSER_NAME=admin +LD_SUPERUSER_PASSWORD=${ADMIN_PASS} +LD_CSRF_TRUSTED_ORIGINS=http://${LOCAL_IP}:9090 +EOF +set -a && source /opt/linkding/.env && set +a +$STD uv run python manage.py generate_secret_key +$STD uv run python manage.py migrate +$STD uv run python manage.py enable_wal +$STD uv run python manage.py create_initial_superuser +$STD uv run python manage.py collectstatic --no-input +msg_ok "Set up ${APP}" + +msg_info "Creating Services" +cat </etc/systemd/system/linkding.service +[Unit] +Description=linkding Bookmark Manager +After=network.target + +[Service] +User=root +WorkingDirectory=/opt/linkding +EnvironmentFile=/opt/linkding/.env +ExecStart=/opt/linkding/.venv/bin/gunicorn \ + --bind 0.0.0.0:9090 \ + --workers 3 \ + --threads 2 \ + --timeout 120 \ + bookmarks.wsgi:application +Restart=on-failure +RestartSec=5 + +[Install] +WantedBy=multi-user.target +EOF +cat </etc/systemd/system/linkding-tasks.service +[Unit] +Description=linkding Background Tasks +After=network.target + +[Service] +User=root +WorkingDirectory=/opt/linkding +EnvironmentFile=/opt/linkding/.env +ExecStart=/opt/linkding/.venv/bin/python manage.py run_huey +Restart=on-failure +RestartSec=5 + +[Install] +WantedBy=multi-user.target +EOF +systemctl enable -q --now linkding linkding-tasks +msg_ok "Created Services" + +motd_ssh 
+customize +cleanup_lxc diff --git a/install/wger-install.sh b/install/wger-install.sh index df8a089d0..8931520e9 100644 --- a/install/wger-install.sh +++ b/install/wger-install.sh @@ -21,7 +21,6 @@ $STD apt install -y \ libpq-dev msg_ok "Installed Dependencies" -import_local_ip NODE_VERSION="22" NODE_MODULE="sass" setup_nodejs setup_uv PG_VERSION="16" setup_postgresql From d6dc2279f6302ab2876028582ccf3f7b286e93ef Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 12:37:54 +0100 Subject: [PATCH 23/87] Use literal linkding in install messages Replace occurrences of the ${APP} placeholder with the explicit string "linkding" in the install/linkding-install.sh messaging to ensure consistent, clear output during installation. Also a trivial change to frontend/public/json/linkding.json (removed trailing newline) with no functional impact. --- frontend/public/json/linkding.json | 2 +- install/linkding-install.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/public/json/linkding.json b/frontend/public/json/linkding.json index cd07dd1b5..6ba919d81 100644 --- a/frontend/public/json/linkding.json +++ b/frontend/public/json/linkding.json @@ -37,4 +37,4 @@ "type": "info" } ] -} +} \ No newline at end of file diff --git a/install/linkding-install.sh b/install/linkding-install.sh index fa8f68fb9..84c754f2e 100644 --- a/install/linkding-install.sh +++ b/install/linkding-install.sh @@ -50,7 +50,7 @@ rm -rf sqlite-amalgamation-${SQLITE_RELEASE}* icu.c sqlite3.h sqlite3ext.h cd /opt/linkding msg_ok "Compiled SQLite ICU Extension" -msg_info "Setting up ${APP}" +msg_info "Setting up linkding" rm -f bookmarks/settings/dev.py touch bookmarks/settings/custom.py $STD uv sync --no-dev @@ -68,7 +68,7 @@ $STD uv run python manage.py migrate $STD uv run python manage.py enable_wal $STD uv run python manage.py create_initial_superuser $STD uv run python manage.py collectstatic --no-input -msg_ok 
"Set up ${APP}" +msg_ok "Set up linkding" msg_info "Creating Services" cat </etc/systemd/system/linkding.service From b9b62c61b87eb595cfd4363984c5af120b8ad30b Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 12:48:17 +0100 Subject: [PATCH 24/87] Update linkding-install.sh --- install/linkding-install.sh | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/install/linkding-install.sh b/install/linkding-install.sh index 84c754f2e..a0487e316 100644 --- a/install/linkding-install.sh +++ b/install/linkding-install.sh @@ -14,20 +14,18 @@ network_check update_os msg_info "Installing Dependencies" -$STD apt-get install -y \ +$STD apt install -y \ build-essential \ pkg-config \ + python3-dev \ libpq-dev \ libicu-dev \ libsqlite3-dev \ - libffi-dev \ - unzip \ - wget + libffi-dev msg_ok "Installed Dependencies" NODE_VERSION="22" setup_nodejs setup_uv - fetch_and_deploy_gh_release "linkding" "sissbruecker/linkding" msg_info "Building Frontend" From fdedcbe5f1e396bc02ef31a9b98a2dc937d902c9 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 13:01:26 +0100 Subject: [PATCH 25/87] add nginx --- ct/linkding.sh | 4 ++-- install/linkding-install.sh | 31 ++++++++++++++++++++++++++++--- 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/ct/linkding.sh b/ct/linkding.sh index d1fcc0fcd..66e0e7ac1 100644 --- a/ct/linkding.sh +++ b/ct/linkding.sh @@ -31,7 +31,7 @@ function update_script() { if check_for_gh_release "linkding" "sissbruecker/linkding"; then msg_info "Stopping Services" - systemctl stop linkding linkding-tasks + systemctl stop nginx linkding linkding-tasks msg_ok "Stopped Services" msg_info "Backing up Data" @@ -63,7 +63,7 @@ function update_script() { msg_ok "Updated ${APP}" msg_info "Starting Services" - systemctl start linkding linkding-tasks + systemctl start nginx linkding linkding-tasks msg_ok "Started 
Services" msg_ok "Updated Successfully" fi diff --git a/install/linkding-install.sh b/install/linkding-install.sh index a0487e316..f3fef5bf5 100644 --- a/install/linkding-install.sh +++ b/install/linkding-install.sh @@ -14,10 +14,11 @@ network_check update_os msg_info "Installing Dependencies" -$STD apt install -y \ +$STD apt-get install -y \ build-essential \ pkg-config \ python3-dev \ + nginx \ libpq-dev \ libicu-dev \ libsqlite3-dev \ @@ -79,7 +80,7 @@ User=root WorkingDirectory=/opt/linkding EnvironmentFile=/opt/linkding/.env ExecStart=/opt/linkding/.venv/bin/gunicorn \ - --bind 0.0.0.0:9090 \ + --bind 127.0.0.1:8000 \ --workers 3 \ --threads 2 \ --timeout 120 \ @@ -106,7 +107,31 @@ RestartSec=5 [Install] WantedBy=multi-user.target EOF -systemctl enable -q --now linkding linkding-tasks +cat <<'EOF' >/etc/nginx/sites-available/linkding +server { + listen 9090; + server_name _; + + client_max_body_size 20M; + + location /static/ { + alias /opt/linkding/static/; + expires 30d; + } + + location / { + proxy_pass http://127.0.0.1:8000; + proxy_set_header Host $host; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_redirect off; + } +} +EOF +$STD rm -f /etc/nginx/sites-enabled/default +$STD ln -sf /etc/nginx/sites-available/linkding /etc/nginx/sites-enabled/linkding +systemctl enable -q --now nginx linkding linkding-tasks +systemctl restart nginx msg_ok "Created Services" motd_ssh From e1d5a626b2169d5a77181a840bdb359d636d135d Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 13:17:12 +0100 Subject: [PATCH 26/87] Update linkding-install.sh --- install/linkding-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install/linkding-install.sh b/install/linkding-install.sh index f3fef5bf5..2e3ac3e4d 100644 --- a/install/linkding-install.sh +++ b/install/linkding-install.sh @@ -52,7 +52,7 @@ msg_ok "Compiled SQLite 
ICU Extension" msg_info "Setting up linkding" rm -f bookmarks/settings/dev.py touch bookmarks/settings/custom.py -$STD uv sync --no-dev +$STD uv sync --no-dev --frozen $STD uv pip install gunicorn mkdir -p data/{favicons,previews,assets} ADMIN_PASS=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | cut -c1-13) From d2bbeed04d9de1c232c07f2ba03c34947546fdc0 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 13:45:13 +0100 Subject: [PATCH 27/87] fixes --- ct/linkding.sh | 6 +++--- install/linkding-install.sh | 10 +++++----- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/ct/linkding.sh b/ct/linkding.sh index 66e0e7ac1..c019e7df0 100644 --- a/ct/linkding.sh +++ b/ct/linkding.sh @@ -55,11 +55,11 @@ function update_script() { touch bookmarks/settings/custom.py $STD npm ci $STD npm run build - $STD uv sync --no-dev + $STD uv sync --no-dev --frozen $STD uv pip install gunicorn set -a && source /opt/linkding/.env && set +a - $STD uv run python manage.py migrate - $STD uv run python manage.py collectstatic --no-input + $STD /opt/linkding/.venv/bin/python manage.py migrate + $STD /opt/linkding/.venv/bin/python manage.py collectstatic --no-input msg_ok "Updated ${APP}" msg_info "Starting Services" diff --git a/install/linkding-install.sh b/install/linkding-install.sh index 2e3ac3e4d..d60ea4a1e 100644 --- a/install/linkding-install.sh +++ b/install/linkding-install.sh @@ -62,11 +62,11 @@ LD_SUPERUSER_PASSWORD=${ADMIN_PASS} LD_CSRF_TRUSTED_ORIGINS=http://${LOCAL_IP}:9090 EOF set -a && source /opt/linkding/.env && set +a -$STD uv run python manage.py generate_secret_key -$STD uv run python manage.py migrate -$STD uv run python manage.py enable_wal -$STD uv run python manage.py create_initial_superuser -$STD uv run python manage.py collectstatic --no-input +$STD /opt/linkding/.venv/bin/python manage.py generate_secret_key +$STD /opt/linkding/.venv/bin/python manage.py migrate +$STD 
/opt/linkding/.venv/bin/python manage.py enable_wal +$STD /opt/linkding/.venv/bin/python manage.py create_initial_superuser +$STD /opt/linkding/.venv/bin/python manage.py collectstatic --no-input msg_ok "Set up linkding" msg_info "Creating Services" From 1eb0cc55ff88099c9c88be0553822ac09faf2fd2 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 14:41:22 +0100 Subject: [PATCH 28/87] Use system ICU module and remove local build Stop backing up and restoring a locally built libicu and instead symlink the distribution ICU module (/usr/lib/x86_64-linux-gnu/mod_icu.so) into /opt/linkding/libicu.so. Add libsqlite3-mod-icu to dependencies and create the symlink after building the frontend. Remove the entire manual download/compile flow for the SQLite ICU extension and its related backups, simplifying installation and relying on the system-provided ICU extension. --- ct/linkding.sh | 5 ++--- install/linkding-install.sh | 16 ++-------------- 2 files changed, 4 insertions(+), 17 deletions(-) diff --git a/ct/linkding.sh b/ct/linkding.sh index c019e7df0..d3f78ce8f 100644 --- a/ct/linkding.sh +++ b/ct/linkding.sh @@ -37,7 +37,6 @@ function update_script() { msg_info "Backing up Data" cp -r /opt/linkding/data /opt/linkding_data_backup cp /opt/linkding/.env /opt/linkding_env_backup - cp /opt/linkding/libicu.so /opt/linkding_libicu_backup msg_ok "Backed up Data" CLEAN_INSTALL=1 fetch_and_deploy_gh_release "linkding" "sissbruecker/linkding" @@ -45,8 +44,8 @@ function update_script() { msg_info "Restoring Data" cp -r /opt/linkding_data_backup/. 
/opt/linkding/data cp /opt/linkding_env_backup /opt/linkding/.env - cp /opt/linkding_libicu_backup /opt/linkding/libicu.so - rm -rf /opt/linkding_data_backup /opt/linkding_env_backup /opt/linkding_libicu_backup + rm -rf /opt/linkding_data_backup /opt/linkding_env_backup + ln -sf /usr/lib/x86_64-linux-gnu/mod_icu.so /opt/linkding/libicu.so msg_ok "Restored Data" msg_info "Updating ${APP}" diff --git a/install/linkding-install.sh b/install/linkding-install.sh index d60ea4a1e..e2080a082 100644 --- a/install/linkding-install.sh +++ b/install/linkding-install.sh @@ -22,6 +22,7 @@ $STD apt-get install -y \ libpq-dev \ libicu-dev \ libsqlite3-dev \ + libsqlite3-mod-icu \ libffi-dev msg_ok "Installed Dependencies" @@ -33,22 +34,9 @@ msg_info "Building Frontend" cd /opt/linkding $STD npm ci $STD npm run build +ln -sf /usr/lib/x86_64-linux-gnu/mod_icu.so /opt/linkding/libicu.so msg_ok "Built Frontend" -msg_info "Compiling SQLite ICU Extension" -cd /tmp -SQLITE_RELEASE_YEAR=2023 -SQLITE_RELEASE=3430000 -$STD wget https://www.sqlite.org/${SQLITE_RELEASE_YEAR}/sqlite-amalgamation-${SQLITE_RELEASE}.zip -$STD unzip -o sqlite-amalgamation-${SQLITE_RELEASE}.zip -cp sqlite-amalgamation-${SQLITE_RELEASE}/sqlite3.h . -cp sqlite-amalgamation-${SQLITE_RELEASE}/sqlite3ext.h . 
-$STD wget "https://www.sqlite.org/src/raw/ext/icu/icu.c?name=91c021c7e3e8bbba286960810fa303295c622e323567b2e6def4ce58e4466e60" -O icu.c -$STD gcc -fPIC -shared icu.c $(pkg-config --libs --cflags icu-uc icu-io) -o /opt/linkding/libicu.so -rm -rf sqlite-amalgamation-${SQLITE_RELEASE}* icu.c sqlite3.h sqlite3ext.h -cd /opt/linkding -msg_ok "Compiled SQLite ICU Extension" - msg_info "Setting up linkding" rm -f bookmarks/settings/dev.py touch bookmarks/settings/custom.py From 820d4551a1c19bb47e944ee57a8aa72f7b2e6f32 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 15:34:17 +0100 Subject: [PATCH 29/87] Replace Go API with PocketBase; update docs Remove the old Go/Mongo API (api/main.go, go.mod, go.sum, .env.example) and switch telemetry backend to PocketBase (http://db.community-scripts.org). Update documentation and flowcharts to reflect the PocketBase collection (_dev_telemetry_data), new REST endpoints (POST/PATCH/GET), field schema, and revised api.func integration (LXC/VM reporting and status updates). Misc scripts and helpers were adjusted (misc/api.func, misc/build.func, misc/error_handler.func) and a new misc/ingest.go was added. This consolidates telemetry to a hosted PocketBase instance and updates docs and integration points accordingly. 
--- api/.env.example | 5 - api/go.mod | 23 - api/go.sum | 56 -- api/main.go | 450 ---------- docs/api/README.md | 171 ++-- docs/misc/api.func/API_FLOWCHART.md | 554 ++++++------ docs/misc/api.func/API_FUNCTIONS_REFERENCE.md | 500 ++++++----- docs/misc/api.func/API_INTEGRATION.md | 593 ++++--------- docs/misc/api.func/API_USAGE_EXAMPLES.md | 794 ------------------ docs/misc/api.func/README.md | 134 +-- misc/api.func | 226 +++-- misc/build.func | 12 +- misc/error_handler.func | 176 ++-- misc/ingest.go | 0 14 files changed, 1137 insertions(+), 2557 deletions(-) delete mode 100644 api/.env.example delete mode 100644 api/go.mod delete mode 100644 api/go.sum delete mode 100644 api/main.go delete mode 100644 docs/misc/api.func/API_USAGE_EXAMPLES.md create mode 100644 misc/ingest.go diff --git a/api/.env.example b/api/.env.example deleted file mode 100644 index fc7bdbb59..000000000 --- a/api/.env.example +++ /dev/null @@ -1,5 +0,0 @@ -MONGO_USER= -MONGO_PASSWORD= -MONGO_IP= -MONGO_PORT= -MONGO_DATABASE= \ No newline at end of file diff --git a/api/go.mod b/api/go.mod deleted file mode 100644 index 044bc8428..000000000 --- a/api/go.mod +++ /dev/null @@ -1,23 +0,0 @@ -module proxmox-api - -go 1.24.0 - -require ( - github.com/gorilla/mux v1.8.1 - github.com/joho/godotenv v1.5.1 - github.com/rs/cors v1.11.1 - go.mongodb.org/mongo-driver v1.17.2 -) - -require ( - github.com/golang/snappy v0.0.4 // indirect - github.com/klauspost/compress v1.16.7 // indirect - github.com/montanaflynn/stats v0.7.1 // indirect - github.com/xdg-go/pbkdf2 v1.0.0 // indirect - github.com/xdg-go/scram v1.1.2 // indirect - github.com/xdg-go/stringprep v1.0.4 // indirect - github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect - golang.org/x/crypto v0.45.0 // indirect - golang.org/x/sync v0.18.0 // indirect - golang.org/x/text v0.31.0 // indirect -) diff --git a/api/go.sum b/api/go.sum deleted file mode 100644 index cb111bdb8..000000000 --- a/api/go.sum +++ /dev/null @@ -1,56 +0,0 @@ 
-github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= -github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= -github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= -github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= -github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= -github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= -github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I= -github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= -github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE= -github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= -github.com/rs/cors v1.11.1 h1:eU3gRzXLRK57F5rKMGMZURNdIG4EoAmX8k94r9wXWHA= -github.com/rs/cors v1.11.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= -github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= -github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= -github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= -github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= -github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= -github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= -github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM= 
-github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI= -github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.mongodb.org/mongo-driver v1.17.2 h1:gvZyk8352qSfzyZ2UMWcpDpMSGEr1eqE4T793SqyhzM= -go.mongodb.org/mongo-driver v1.17.2/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= -golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= -golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= -golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= -golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/api/main.go b/api/main.go deleted file mode 100644 index 55b2f24f8..000000000 --- a/api/main.go +++ /dev/null @@ -1,450 +0,0 @@ -// Copyright (c) 2021-2025 community-scripts ORG -// Author: Michel Roegl-Brunner (michelroegl-brunner) -// License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE - -package main - -import ( - "context" - "encoding/json" - "fmt" - "log" - "net/http" - "os" - "strconv" - "time" - - "github.com/gorilla/mux" - "github.com/joho/godotenv" - "github.com/rs/cors" - "go.mongodb.org/mongo-driver/bson" - "go.mongodb.org/mongo-driver/bson/primitive" - "go.mongodb.org/mongo-driver/mongo" - "go.mongodb.org/mongo-driver/mongo/options" -) - -var client *mongo.Client -var 
collection *mongo.Collection - -func loadEnv() { - if err := godotenv.Load(); err != nil { - log.Fatal("Error loading .env file") - } -} - -// DataModel represents a single document in MongoDB -type DataModel struct { - ID primitive.ObjectID `json:"id" bson:"_id,omitempty"` - CT_TYPE uint `json:"ct_type" bson:"ct_type"` - DISK_SIZE float32 `json:"disk_size" bson:"disk_size"` - CORE_COUNT uint `json:"core_count" bson:"core_count"` - RAM_SIZE uint `json:"ram_size" bson:"ram_size"` - OS_TYPE string `json:"os_type" bson:"os_type"` - OS_VERSION string `json:"os_version" bson:"os_version"` - DISABLEIP6 string `json:"disableip6" bson:"disableip6"` - NSAPP string `json:"nsapp" bson:"nsapp"` - METHOD string `json:"method" bson:"method"` - CreatedAt time.Time `json:"created_at" bson:"created_at"` - PVEVERSION string `json:"pve_version" bson:"pve_version"` - STATUS string `json:"status" bson:"status"` - RANDOM_ID string `json:"random_id" bson:"random_id"` - TYPE string `json:"type" bson:"type"` - ERROR string `json:"error" bson:"error"` -} - -type StatusModel struct { - RANDOM_ID string `json:"random_id" bson:"random_id"` - ERROR string `json:"error" bson:"error"` - STATUS string `json:"status" bson:"status"` -} - -type CountResponse struct { - TotalEntries int64 `json:"total_entries"` - StatusCount map[string]int64 `json:"status_count"` - NSAPPCount map[string]int64 `json:"nsapp_count"` -} - -// ConnectDatabase initializes the MongoDB connection -func ConnectDatabase() { - loadEnv() - - mongoURI := fmt.Sprintf("mongodb://%s:%s@%s:%s", - os.Getenv("MONGO_USER"), - os.Getenv("MONGO_PASSWORD"), - os.Getenv("MONGO_IP"), - os.Getenv("MONGO_PORT")) - - database := os.Getenv("MONGO_DATABASE") - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) - defer cancel() - - var err error - client, err = mongo.Connect(ctx, options.Client().ApplyURI(mongoURI)) - if err != nil { - log.Fatal("Failed to connect to MongoDB!", err) - } - collection = 
client.Database(database).Collection("data_models") - fmt.Println("Connected to MongoDB on 10.10.10.18") -} - -// UploadJSON handles API requests and stores data as a document in MongoDB -func UploadJSON(w http.ResponseWriter, r *http.Request) { - var input DataModel - - if err := json.NewDecoder(r.Body).Decode(&input); err != nil { - http.Error(w, err.Error(), http.StatusBadRequest) - return - } - input.CreatedAt = time.Now() - - _, err := collection.InsertOne(context.Background(), input) - if err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - - log.Println("Received data:", input) - w.WriteHeader(http.StatusCreated) - json.NewEncoder(w).Encode(map[string]string{"message": "Data saved successfully"}) -} - -// UpdateStatus updates the status of a record based on RANDOM_ID -func UpdateStatus(w http.ResponseWriter, r *http.Request) { - var input StatusModel - - if err := json.NewDecoder(r.Body).Decode(&input); err != nil { - http.Error(w, err.Error(), http.StatusBadRequest) - return - } - - filter := bson.M{"random_id": input.RANDOM_ID} - update := bson.M{"$set": bson.M{"status": input.STATUS, "error": input.ERROR}} - - _, err := collection.UpdateOne(context.Background(), filter, update) - if err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - - log.Println("Updated data:", input) - w.WriteHeader(http.StatusOK) - json.NewEncoder(w).Encode(map[string]string{"message": "Record updated successfully"}) -} - -// GetDataJSON fetches all data from MongoDB -func GetDataJSON(w http.ResponseWriter, r *http.Request) { - var records []DataModel - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) - defer cancel() - - cursor, err := collection.Find(ctx, bson.M{}) - if err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - defer cursor.Close(ctx) - - for cursor.Next(ctx) { - var record DataModel - if err := cursor.Decode(&record); err != nil { - 
http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - records = append(records, record) - } - - w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(records) -} -func GetPaginatedData(w http.ResponseWriter, r *http.Request) { - page, _ := strconv.Atoi(r.URL.Query().Get("page")) - limit, _ := strconv.Atoi(r.URL.Query().Get("limit")) - if page < 1 { - page = 1 - } - if limit < 1 { - limit = 10 - } - skip := (page - 1) * limit - var records []DataModel - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) - defer cancel() - - options := options.Find().SetSkip(int64(skip)).SetLimit(int64(limit)) - cursor, err := collection.Find(ctx, bson.M{}, options) - if err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - defer cursor.Close(ctx) - - for cursor.Next(ctx) { - var record DataModel - if err := cursor.Decode(&record); err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - records = append(records, record) - } - - w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(records) -} - -func GetSummary(w http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) - defer cancel() - - totalCount, err := collection.CountDocuments(ctx, bson.M{}) - if err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - - statusCount := make(map[string]int64) - nsappCount := make(map[string]int64) - - pipeline := []bson.M{ - {"$group": bson.M{"_id": "$status", "count": bson.M{"$sum": 1}}}, - } - cursor, err := collection.Aggregate(ctx, pipeline) - if err == nil { - for cursor.Next(ctx) { - var result struct { - ID string `bson:"_id"` - Count int64 `bson:"count"` - } - if err := cursor.Decode(&result); err == nil { - statusCount[result.ID] = result.Count - } - } - } - - pipeline = []bson.M{ - {"$group": bson.M{"_id": "$nsapp", "count": bson.M{"$sum": 
1}}}, - } - cursor, err = collection.Aggregate(ctx, pipeline) - if err == nil { - for cursor.Next(ctx) { - var result struct { - ID string `bson:"_id"` - Count int64 `bson:"count"` - } - if err := cursor.Decode(&result); err == nil { - nsappCount[result.ID] = result.Count - } - } - } - - response := CountResponse{ - TotalEntries: totalCount, - StatusCount: statusCount, - NSAPPCount: nsappCount, - } - - w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(response) -} - -func GetByNsapp(w http.ResponseWriter, r *http.Request) { - nsapp := r.URL.Query().Get("nsapp") - var records []DataModel - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) - defer cancel() - - cursor, err := collection.Find(ctx, bson.M{"nsapp": nsapp}) - if err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - defer cursor.Close(ctx) - - for cursor.Next(ctx) { - var record DataModel - if err := cursor.Decode(&record); err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - records = append(records, record) - } - - w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(records) -} - -func GetByDateRange(w http.ResponseWriter, r *http.Request) { - - startDate := r.URL.Query().Get("start_date") - endDate := r.URL.Query().Get("end_date") - - if startDate == "" || endDate == "" { - http.Error(w, "Both start_date and end_date are required", http.StatusBadRequest) - return - } - - start, err := time.Parse("2006-01-02T15:04:05.999999+00:00", startDate+"T00:00:00+00:00") - if err != nil { - http.Error(w, "Invalid start_date format", http.StatusBadRequest) - return - } - - end, err := time.Parse("2006-01-02T15:04:05.999999+00:00", endDate+"T23:59:59+00:00") - if err != nil { - http.Error(w, "Invalid end_date format", http.StatusBadRequest) - return - } - - var records []DataModel - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) - defer 
cancel() - - cursor, err := collection.Find(ctx, bson.M{ - "created_at": bson.M{ - "$gte": start, - "$lte": end, - }, - }) - if err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - defer cursor.Close(ctx) - - for cursor.Next(ctx) { - var record DataModel - if err := cursor.Decode(&record); err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - records = append(records, record) - } - - w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(records) -} -func GetByStatus(w http.ResponseWriter, r *http.Request) { - status := r.URL.Query().Get("status") - var records []DataModel - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) - defer cancel() - - cursor, err := collection.Find(ctx, bson.M{"status": status}) - if err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - defer cursor.Close(ctx) - - for cursor.Next(ctx) { - var record DataModel - if err := cursor.Decode(&record); err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - records = append(records, record) - } - - w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(records) -} - -func GetByOS(w http.ResponseWriter, r *http.Request) { - osType := r.URL.Query().Get("os_type") - osVersion := r.URL.Query().Get("os_version") - var records []DataModel - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) - defer cancel() - - cursor, err := collection.Find(ctx, bson.M{"os_type": osType, "os_version": osVersion}) - if err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - defer cursor.Close(ctx) - - for cursor.Next(ctx) { - var record DataModel - if err := cursor.Decode(&record); err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - records = append(records, record) - } - - w.Header().Set("Content-Type", 
"application/json") - json.NewEncoder(w).Encode(records) -} - -func GetErrors(w http.ResponseWriter, r *http.Request) { - errorCount := make(map[string]int) - - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) - defer cancel() - - cursor, err := collection.Find(ctx, bson.M{"error": bson.M{"$ne": ""}}) - if err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - defer cursor.Close(ctx) - - for cursor.Next(ctx) { - var record DataModel - if err := cursor.Decode(&record); err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - - if record.ERROR != "" { - errorCount[record.ERROR]++ - } - } - - type ErrorCountResponse struct { - Error string `json:"error"` - Count int `json:"count"` - } - - var errorCounts []ErrorCountResponse - for err, count := range errorCount { - errorCounts = append(errorCounts, ErrorCountResponse{ - Error: err, - Count: count, - }) - } - - w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(struct { - ErrorCounts []ErrorCountResponse `json:"error_counts"` - }{ - ErrorCounts: errorCounts, - }) -} - -func main() { - ConnectDatabase() - - router := mux.NewRouter() - router.HandleFunc("/upload", UploadJSON).Methods("POST") - router.HandleFunc("/upload/updatestatus", UpdateStatus).Methods("POST") - router.HandleFunc("/data/json", GetDataJSON).Methods("GET") - router.HandleFunc("/data/paginated", GetPaginatedData).Methods("GET") - router.HandleFunc("/data/summary", GetSummary).Methods("GET") - router.HandleFunc("/data/nsapp", GetByNsapp).Methods("GET") - router.HandleFunc("/data/date", GetByDateRange).Methods("GET") - router.HandleFunc("/data/status", GetByStatus).Methods("GET") - router.HandleFunc("/data/os", GetByOS).Methods("GET") - router.HandleFunc("/data/errors", GetErrors).Methods("GET") - - c := cors.New(cors.Options{ - AllowedOrigins: []string{"*"}, - AllowedMethods: []string{"GET", "POST"}, - AllowedHeaders: 
[]string{"Content-Type", "Authorization"}, - AllowCredentials: true, - }) - - handler := c.Handler(router) - - fmt.Println("Server running on port 8080") - log.Fatal(http.ListenAndServe(":8080", handler)) -} diff --git a/docs/api/README.md b/docs/api/README.md index 3780a0d00..83ad70f8c 100644 --- a/docs/api/README.md +++ b/docs/api/README.md @@ -1,38 +1,30 @@ -# API Integration Documentation (/api) - -This directory contains comprehensive documentation for API integration and the `/api` directory. +# API Integration Documentation ## Overview -The `/api` directory contains the Proxmox Community Scripts API backend for diagnostic reporting, telemetry, and analytics integration. +The telemetry and diagnostics API uses **PocketBase** as backend, hosted at `http://db.community-scripts.org`. All telemetry data is stored in the `_dev_telemetry_data` collection. + +The Go/MongoDB API server (`/api` directory) has been replaced entirely by PocketBase. ## Key Components -### Main API Service -Located in `/api/main.go`: +### PocketBase Backend +- **URL**: `http://db.community-scripts.org` +- **Collection**: `_dev_telemetry_data` +- **Admin UI**: `http://db.community-scripts.org/_/#/collections` - RESTful API for receiving telemetry data - Installation statistics tracking - Error reporting and analytics -- Performance monitoring ### Integration with Scripts The API is integrated into all installation scripts via `api.func`: - Sends installation start/completion events -- Reports errors and exit codes +- Reports errors and exit codes with numeric values - Collects anonymous usage statistics - Enables project analytics ## Documentation Structure -API documentation covers: -- API endpoint specifications -- Integration methods -- Data formats and schemas -- Error handling -- Privacy and data handling - -## Key Resources - - **[misc/api.func/](../misc/api.func/)** - API function library documentation - **[misc/api.func/README.md](../misc/api.func/README.md)** - Quick reference - 
**[misc/api.func/API_FUNCTIONS_REFERENCE.md](../misc/api.func/API_FUNCTIONS_REFERENCE.md)** - Complete function reference @@ -42,48 +34,92 @@ API documentation covers: The `api.func` library provides: ### `post_to_api()` -Send container installation data to API. +Send LXC container installation data to PocketBase. -**Usage**: -```bash -post_to_api CTID STATUS APP_NAME -``` +Creates a new record in `_dev_telemetry_data` with status `installing`. -### `post_update_to_api()` -Report application update status. +### `post_to_api_vm()` +Send VM installation data to PocketBase. -**Usage**: -```bash -post_update_to_api CTID APP_NAME VERSION -``` +Creates a new record with `type=vm` and `ct_type=2`. -### `get_error_description()` +### `post_update_to_api(status, exit_code)` +Update installation status via PocketBase PATCH. + +Maps status values: +- `"done"` β†’ PocketBase status `"sucess"` +- `"failed"` β†’ PocketBase status `"failed"` + +### `explain_exit_code(code)` Get human-readable error description from exit code. **Usage**: ```bash -ERROR_DESC=$(get_error_description EXIT_CODE) +ERROR_DESC=$(explain_exit_code 137) +# β†’ "Killed (SIGKILL / Out of memory?)" ``` +## PocketBase Collection Schema + +| Field | Type | Required | Description | +|-------|------|----------|-------------| +| `id` | text (auto) | yes | PocketBase record ID | +| `random_id` | text | yes | Session UUID (unique) | +| `type` | select | yes | `lxc`, `vm`, `addon`, `pve` | +| `ct_type` | number | yes | 1=LXC, 2=VM | +| `nsapp` | text | yes | Application name | +| `status` | select | yes | `installing`, `sucess`, `failed`, `unknown` | +| `disk_size` | number | no | Disk size in GB | +| `core_count` | number | no | CPU cores | +| `ram_size` | number | no | RAM in MB | +| `os_type` | text | no | OS type (debian, ubuntu, etc.) 
| +| `os_version` | text | no | OS version | +| `pve_version` | text | no | Proxmox VE version | +| `method` | text | no | Installation method | +| `error` | text | no | Error description | +| `exit_code` | number | no | Numeric exit code | +| `created` | autodate | auto | Record creation timestamp | +| `updated` | autodate | auto | Last update timestamp | + +## API Endpoints (PocketBase REST) + +**Base URL**: `http://db.community-scripts.org` + +| Method | Endpoint | Description | +|--------|----------|-------------| +| `POST` | `/api/collections/_dev_telemetry_data/records` | Create telemetry record | +| `PATCH` | `/api/collections/_dev_telemetry_data/records/{id}` | Update record status | +| `GET` | `/api/collections/_dev_telemetry_data/records` | List/search records | + +### Query Parameters (GET) +- `filter` – PocketBase filter syntax, e.g. `(nsapp='debian' && status='failed')` +- `sort` – Sort fields, e.g. `-created,nsapp` +- `page` / `perPage` – Pagination +- `fields` – Limit returned fields + ## API Integration Points ### In Container Creation (`ct/AppName.sh`) -- Called by build.func to report container creation -- Sends initial container setup data -- Reports success or failure +- Called by `build.func` to report container creation via `post_to_api` +- Sends initial container setup data with status `installing` +- Reports success or failure via `post_update_to_api` -### In Installation Scripts (`install/appname-install.sh`) -- Called at start of installation -- Called on installation completion -- Called on error conditions +### In VM Creation (`vm/AppName.sh`) +- Calls `post_to_api_vm` after VM creation +- Status updates via `post_update_to_api` -### Data Collected -- Container/VM ID -- Application name and version -- Installation duration -- Success/failure status -- Error codes (if failure) -- Anonymous usage metrics +### Data Flow +``` +Installation Scripts + β”‚ + β”œβ”€ Call: api.func functions + β”‚ + β”œβ”€ POST β†’ PocketBase (create record, 
status=installing) + β”‚ └─ Returns record ID (stored in PB_RECORD_ID) + β”‚ + └─ PATCH β†’ PocketBase (update record with final status) + └─ status=sucess/failed + exit_code + error +``` ## Privacy @@ -92,55 +128,18 @@ All API data: - βœ… Aggregated for statistics - βœ… Used only for project improvement - βœ… No tracking of user identities -- βœ… Can be disabled if desired - -## API Architecture - -``` -Installation Scripts - β”‚ - β”œβ”€ Call: api.func functions - β”‚ - └─ POST to: https://api.community-scripts.org - β”‚ - β”œβ”€ Receives data - β”œβ”€ Validates format - β”œβ”€ Stores metrics - └─ Aggregates statistics - β”‚ - └─ Used for: - β”œβ”€ Download tracking - β”œβ”€ Error trending - β”œβ”€ Feature usage stats - └─ Project health monitoring -``` - -## Common API Tasks - -- **Enable API reporting** β†’ Built-in by default, no configuration needed -- **Disable API** β†’ Set `api_disable="yes"` before running -- **View API data** β†’ Visit https://community-scripts.org/stats -- **Report API errors** β†’ [GitHub Issues](https://github.com/community-scripts/ProxmoxVED/issues) +- βœ… Can be disabled via diagnostics settings ## Debugging API Issues If API calls fail: 1. Check internet connectivity -2. Verify API endpoint availability +2. Verify PocketBase endpoint: `curl -s http://db.community-scripts.org/api/health` 3. Review error codes in [EXIT_CODES.md](../EXIT_CODES.md) -4. Check API function logs -5. Report issues on GitHub - -## API Endpoint - -**Base URL**: `https://api.community-scripts.org` - -**Endpoints**: -- `POST /install` - Report container installation -- `POST /update` - Report application update -- `GET /stats` - Public statistics +4. Check that `DIAGNOSTICS=yes` in `/usr/local/community-scripts/diagnostics` +5. 
Report issues on [GitHub](https://git.community-scripts.org/community-scripts/ProxmoxVED/issues) --- -**Last Updated**: December 2025 +**Last Updated**: February 2026 **Maintainers**: community-scripts team diff --git a/docs/misc/api.func/API_FLOWCHART.md b/docs/misc/api.func/API_FLOWCHART.md index a46cd56e9..bc01fce9f 100644 --- a/docs/misc/api.func/API_FLOWCHART.md +++ b/docs/misc/api.func/API_FLOWCHART.md @@ -1,5 +1,9 @@ # api.func Execution Flowchart +## Overview + +This document illustrates the execution flow of `api.func` functions. The backend is **PocketBase** at `http://db.community-scripts.org`, collection `_dev_telemetry_data`. + ## Main API Communication Flow ``` @@ -10,333 +14,321 @@ β”‚ β–Ό β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ Prerequisites Check β”‚ -β”‚ β”‚ -β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ -β”‚ β”‚ Prerequisites Validation β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ β”‚ -β”‚ β”‚ β”‚ Check curl β”‚ β”‚ Check β”‚ β”‚ Check β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ Availability β”‚ β”‚ Diagnostics β”‚ β”‚ Random UUID β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ Setting β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ command -v β”‚ β”‚ β€’ DIAGNOSTICS β”‚ β”‚ β€’ RANDOM_UUID β”‚ β”‚ -β”‚ β”‚ β”‚ curl β”‚ β”‚ = "yes" β”‚ β”‚ not empty β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Return if β”‚ β”‚ β€’ Return if β”‚ β”‚ β€’ Return if β”‚ β”‚ -β”‚ β”‚ β”‚ not found β”‚ β”‚ disabled β”‚ β”‚ not set β”‚ β”‚ -β”‚ β”‚ β”‚ 
β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β”‚ -β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β”‚ Prerequisites Check β”‚ +β”‚ β”‚ +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β”‚ Prerequisites Validation β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ +β”‚ β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ β”‚ +β”‚ β”‚ β”‚ Check curl β”‚ β”‚ Check β”‚ β”‚ Check β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ Availability β”‚ β”‚ DIAGNOSTICS β”‚ β”‚ RANDOM_UUID β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ β€’ command -v β”‚ β”‚ β€’ Must be "yes" β”‚ β”‚ β€’ Must not be β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ curl β”‚ β”‚ β€’ Return if β”‚ β”‚ empty β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ β€’ Return if β”‚ β”‚ "no" or unset β”‚ β”‚ β€’ Return if β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ not found β”‚ β”‚ β”‚ β”‚ not set β”‚ β”‚ β”‚ +β”‚ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ 
β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β–Ό β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ Data Collection β”‚ -β”‚ β”‚ -β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ -β”‚ β”‚ System Information Gathering β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ β”‚ -β”‚ β”‚ β”‚ Get PVE β”‚ β”‚ Collect β”‚ β”‚ Prepare JSON β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ Version β”‚ β”‚ Environment β”‚ β”‚ Payload β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ Variables β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ pveversion β”‚ β”‚ β€’ CT_TYPE β”‚ β”‚ β€’ Create JSON β”‚ β”‚ -β”‚ β”‚ β”‚ command β”‚ β”‚ β€’ DISK_SIZE β”‚ β”‚ structure β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Parse version β”‚ β”‚ β€’ CORE_COUNT β”‚ β”‚ β€’ Include all β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Extract β”‚ β”‚ β€’ RAM_SIZE β”‚ β”‚ variables β”‚ β”‚ -β”‚ β”‚ β”‚ major.minor β”‚ β”‚ β€’ var_os β”‚ β”‚ β€’ Format for API β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β€’ var_version β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β€’ NSAPP β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β€’ METHOD β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β”‚ -β”‚ 
β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β”‚ Data Collection β”‚ +β”‚ β”‚ +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β”‚ System Information Gathering β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ +β”‚ β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ β”‚ +β”‚ β”‚ β”‚ Get PVE β”‚ β”‚ Collect Env β”‚ β”‚ Build JSON β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ Version β”‚ β”‚ Variables β”‚ β”‚ Payload β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ β€’ pveversion β”‚ β”‚ β€’ CT_TYPE β”‚ β”‚ β€’ Heredoc JSON β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ command β”‚ β”‚ β€’ DISK_SIZE β”‚ β”‚ β€’ Include all β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ β€’ Parse version β”‚ β”‚ β€’ CORE_COUNT β”‚ β”‚ fields β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ β€’ Fallback: β”‚ β”‚ β€’ RAM_SIZE β”‚ β”‚ β€’ status = β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ "not found" β”‚ β”‚ β€’ var_os β”‚ β”‚ "installing" β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ β”‚ β€’ NSAPP, METHOD β”‚ β”‚ β”‚ β”‚ β”‚ +β”‚ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ 
β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β–Ό β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ API Request Execution β”‚ -β”‚ β”‚ -β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ -β”‚ β”‚ HTTP Request Processing β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ β”‚ -β”‚ β”‚ β”‚ Prepare β”‚ β”‚ Execute β”‚ β”‚ Handle β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ Request β”‚ β”‚ HTTP Request β”‚ β”‚ Response β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Set API URL β”‚ β”‚ β€’ curl -s -w β”‚ β”‚ β€’ Capture HTTP β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Set headers β”‚ β”‚ "%{http_code}" β”‚ β”‚ status code β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Set payload β”‚ β”‚ β€’ POST request β”‚ β”‚ β€’ Store response β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Content-Type β”‚ β”‚ β€’ JSON data β”‚ β”‚ β€’ Handle errors β”‚ β”‚ -β”‚ β”‚ β”‚ application/ β”‚ β”‚ β€’ Follow β”‚ β”‚ gracefully β”‚ β”‚ -β”‚ β”‚ β”‚ json β”‚ β”‚ redirects β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β”‚ -β”‚ 
β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β”‚ PocketBase API Request β”‚ +β”‚ β”‚ +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β”‚ HTTP Request Processing β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ +β”‚ β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ β”‚ +β”‚ β”‚ β”‚ Prepare β”‚ β”‚ Execute β”‚ β”‚ Handle β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ Request β”‚ β”‚ HTTP POST β”‚ β”‚ Response β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ β€’ URL: β”‚ β”‚ β€’ curl -s -w β”‚ β”‚ β€’ Check HTTP β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ PB_API_URL β”‚ β”‚ "%{http_code}"β”‚ β”‚ 200/201 β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ β€’ Method: POST β”‚ β”‚ β€’ -X POST β”‚ β”‚ β€’ Extract "id" β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ β€’ Content-Type: β”‚ β”‚ β€’ -L (follow β”‚ β”‚ from response β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ application/ β”‚ β”‚ redirects) β”‚ β”‚ β€’ Store in β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ json β”‚ β”‚ β€’ JSON body β”‚ β”‚ PB_RECORD_ID β”‚ β”‚ β”‚ +β”‚ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ 
β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ ``` -## LXC API Reporting Flow +## LXC API Reporting Flow β€” `post_to_api()` ``` β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ POST_TO_API() Flow β”‚ -β”‚ Send LXC container installation data to API β”‚ +β”‚ post_to_api() Flow β”‚ +β”‚ POST β†’ Create LXC telemetry record in PocketBase β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β–Ό β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ LXC Data Preparation β”‚ -β”‚ β”‚ -β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ -β”‚ β”‚ LXC-Specific Data Collection β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ β”‚ -β”‚ β”‚ β”‚ Set LXC β”‚ β”‚ Include LXC β”‚ β”‚ Set Status β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ Type β”‚ β”‚ Variables β”‚ β”‚ Information β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ 
β€’ ct_type: 1 β”‚ β”‚ β€’ DISK_SIZE β”‚ β”‚ β€’ status: β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ type: "lxc" β”‚ β”‚ β€’ CORE_COUNT β”‚ β”‚ "installing" β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Include all β”‚ β”‚ β€’ RAM_SIZE β”‚ β”‚ β€’ Include all β”‚ β”‚ -β”‚ β”‚ β”‚ LXC data β”‚ β”‚ β€’ var_os β”‚ β”‚ tracking data β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β€’ var_version β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β€’ DISABLEIP6 β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β€’ NSAPP β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β€’ METHOD β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β€’ pve_version β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β€’ random_id β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β”‚ -β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ -β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ - β”‚ - β–Ό -β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ JSON Payload Creation β”‚ -β”‚ β”‚ -β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ -β”‚ β”‚ JSON Structure Generation β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” 
β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ β”‚ -β”‚ β”‚ β”‚ Create JSON β”‚ β”‚ Validate β”‚ β”‚ Format for β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ Structure β”‚ β”‚ Data β”‚ β”‚ API Request β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Use heredoc β”‚ β”‚ β€’ Check all β”‚ β”‚ β€’ Ensure proper β”‚ β”‚ -β”‚ β”‚ β”‚ syntax β”‚ β”‚ variables β”‚ β”‚ JSON format β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Include all β”‚ β”‚ are set β”‚ β”‚ β€’ Escape special β”‚ β”‚ -β”‚ β”‚ β”‚ required β”‚ β”‚ β€’ Validate β”‚ β”‚ characters β”‚ β”‚ -β”‚ β”‚ β”‚ fields β”‚ β”‚ data types β”‚ β”‚ β€’ Set content β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Format β”‚ β”‚ β€’ Handle β”‚ β”‚ type β”‚ β”‚ -β”‚ β”‚ β”‚ properly β”‚ β”‚ missing β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ values β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β”‚ -β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ -β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ -``` - -## VM API Reporting Flow - -``` -β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ POST_TO_API_VM() Flow β”‚ -β”‚ Send VM installation data to API β”‚ +β”‚ Prerequisites: curl? ──► DIAGNOSTICS="yes"? ──► RANDOM_UUID set? 
β”‚ +β”‚ (return silently on any failure) β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β–Ό β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ VM Data Preparation β”‚ -β”‚ β”‚ -β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ -β”‚ β”‚ VM-Specific Data Collection β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ β”‚ -β”‚ β”‚ β”‚ Check β”‚ β”‚ Set VM β”‚ β”‚ Process Disk β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ Diagnostics β”‚ β”‚ Type β”‚ β”‚ Size β”‚ β”‚ -β”‚ β”‚ β”‚ File β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β€’ ct_type: 2 β”‚ β”‚ β€’ Remove 'G' β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Check file β”‚ β”‚ β€’ type: "vm" β”‚ β”‚ suffix β”‚ β”‚ -β”‚ β”‚ β”‚ existence β”‚ β”‚ β€’ Include all β”‚ β”‚ β€’ Convert to β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Read β”‚ β”‚ VM data β”‚ β”‚ numeric value β”‚ β”‚ -β”‚ β”‚ β”‚ DIAGNOSTICS β”‚ β”‚ β”‚ β”‚ β€’ Store in β”‚ β”‚ -β”‚ β”‚ β”‚ setting β”‚ β”‚ β”‚ β”‚ DISK_SIZE_API β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Parse value β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β”‚ -β”‚ 
β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ -β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ - β”‚ - β–Ό -β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ VM JSON Payload Creation β”‚ -β”‚ β”‚ -β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ -β”‚ β”‚ VM-Specific JSON Structure β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ β”‚ -β”‚ β”‚ β”‚ Include VM β”‚ β”‚ Set VM β”‚ β”‚ Format VM β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ Variables β”‚ β”‚ Status β”‚ β”‚ Data for API β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ DISK_SIZE_API β”‚ β”‚ β€’ status: β”‚ β”‚ β€’ Ensure proper β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ CORE_COUNT β”‚ β”‚ "installing" β”‚ β”‚ JSON format β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ RAM_SIZE β”‚ β”‚ β€’ Include all β”‚ β”‚ β€’ Handle VM- β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ var_os β”‚ β”‚ tracking β”‚ β”‚ specific data β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ var_version β”‚ β”‚ information β”‚ β”‚ β€’ Set appropriate β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ NSAPP β”‚ β”‚ β”‚ β”‚ content type β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ METHOD β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ 
β”‚ β€’ pve_version β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ random_id β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β”‚ -β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ -β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ -``` - -## Status Update Flow - -``` -β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ POST_UPDATE_TO_API() Flow β”‚ -β”‚ Send installation completion status to API β”‚ +β”‚ LXC Data Preparation β”‚ +β”‚ β”‚ +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β”‚ Set LXC type β”‚ β”‚ Collect variables β”‚ β”‚ Set initial β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ status β”‚ β”‚ +β”‚ β”‚ β€’ ct_type: 1 β”‚ β”‚ β€’ DISK_SIZE β”‚ β”‚ β”‚ β”‚ +β”‚ β”‚ β€’ type: "lxc" β”‚ β”‚ β€’ CORE_COUNT β”‚ β”‚ β€’ status: β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ β€’ RAM_SIZE β”‚ β”‚ "installing" β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ β€’ var_os, var_versionβ”‚ β”‚ β€’ random_id: β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ β€’ NSAPP, METHOD β”‚ β”‚ RANDOM_UUID β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ β€’ pve_version β”‚ β”‚ β”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ 
β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β–Ό β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ Update Prevention Check β”‚ -β”‚ β”‚ -β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ -β”‚ β”‚ Duplicate Update Prevention β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ β”‚ -β”‚ β”‚ β”‚ Check β”‚ β”‚ Set Flag β”‚ β”‚ Return Early β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ POST_UPDATE_ β”‚ β”‚ if First β”‚ β”‚ if Already β”‚ β”‚ -β”‚ β”‚ β”‚ DONE β”‚ β”‚ Update β”‚ β”‚ Updated β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Check if β”‚ β”‚ β€’ Set β”‚ β”‚ β€’ Return 0 β”‚ β”‚ -β”‚ β”‚ β”‚ already β”‚ β”‚ POST_UPDATE_ β”‚ β”‚ β€’ Skip API call β”‚ β”‚ -β”‚ β”‚ β”‚ updated β”‚ β”‚ DONE=true β”‚ β”‚ β€’ Prevent β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Prevent β”‚ β”‚ β€’ Continue β”‚ β”‚ duplicate β”‚ β”‚ -β”‚ β”‚ β”‚ duplicate β”‚ β”‚ with update β”‚ β”‚ requests β”‚ β”‚ -β”‚ β”‚ β”‚ requests β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ 
β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β”‚ -β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ -β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ - β”‚ - β–Ό -β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ Status and Error Processing β”‚ -β”‚ β”‚ -β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ -β”‚ β”‚ Status Determination β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ β”‚ -β”‚ β”‚ β”‚ Determine β”‚ β”‚ Get Error β”‚ β”‚ Prepare Status β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ Status β”‚ β”‚ Description β”‚ β”‚ Data β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ status: β”‚ β”‚ β€’ Call β”‚ β”‚ β€’ Include status β”‚ β”‚ -β”‚ β”‚ β”‚ "success" or β”‚ β”‚ get_error_ β”‚ β”‚ β€’ Include error β”‚ β”‚ -β”‚ β”‚ β”‚ "failed" β”‚ β”‚ description() β”‚ β”‚ description β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Set exit β”‚ β”‚ β€’ Get human- β”‚ β”‚ β€’ Include random β”‚ β”‚ -β”‚ β”‚ β”‚ code based β”‚ β”‚ readable β”‚ β”‚ ID for tracking β”‚ β”‚ -β”‚ β”‚ β”‚ on status β”‚ β”‚ error message β”‚ 
β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Default to β”‚ β”‚ β€’ Handle β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ error if β”‚ β”‚ unknown β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ not set β”‚ β”‚ errors β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β”‚ -β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ -β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ - β”‚ - β–Ό -β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ Status Update API Request β”‚ -β”‚ β”‚ -β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ -β”‚ β”‚ Status Update Payload Creation β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ β”‚ -β”‚ β”‚ β”‚ Create β”‚ β”‚ Send Status β”‚ β”‚ Mark Update β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ Status JSON β”‚ β”‚ Update β”‚ β”‚ Complete β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Include β”‚ β”‚ β€’ POST to β”‚ β”‚ β€’ Set β”‚ β”‚ -β”‚ β”‚ β”‚ status β”‚ β”‚ updatestatus β”‚ β”‚ 
POST_UPDATE_ β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Include β”‚ β”‚ endpoint β”‚ β”‚ DONE=true β”‚ β”‚ -β”‚ β”‚ β”‚ error β”‚ β”‚ β€’ Include JSON β”‚ β”‚ β€’ Prevent further β”‚ β”‚ -β”‚ β”‚ β”‚ description β”‚ β”‚ payload β”‚ β”‚ updates β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Include β”‚ β”‚ β€’ Handle β”‚ β”‚ β€’ Complete β”‚ β”‚ -β”‚ β”‚ β”‚ random_id β”‚ β”‚ response β”‚ β”‚ process β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ gracefully β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β”‚ -β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β”‚ POST β†’ PB_API_URL β”‚ +β”‚ http://db.community-scripts.org/api/collections/_dev_telemetry_data/records β”‚ +β”‚ β”‚ +β”‚ Response (HTTP 200/201): β”‚ +β”‚ { "id": "abc123def456789", ... 
} β”‚ +β”‚ β”‚ β”‚ +β”‚ └──► PB_RECORD_ID = "abc123def456789" β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ ``` -## Error Description Flow +## VM API Reporting Flow β€” `post_to_api_vm()` ``` β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ GET_ERROR_DESCRIPTION() Flow β”‚ -β”‚ Convert numeric exit codes to human-readable explanations β”‚ +β”‚ post_to_api_vm() Flow β”‚ +β”‚ POST β†’ Create VM telemetry record in PocketBase β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β–Ό β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ Error Code Classification β”‚ -β”‚ β”‚ -β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ -β”‚ β”‚ Error Code Categories β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ β”‚ -β”‚ β”‚ β”‚ General β”‚ β”‚ Network β”‚ β”‚ LXC-Specific β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ System β”‚ 
β”‚ Errors β”‚ β”‚ Errors β”‚ β”‚ -β”‚ β”‚ β”‚ Errors β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β€’ 18: Connectionβ”‚ β”‚ β€’ 100-101: LXC β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ 0-9: Basic β”‚ β”‚ failed β”‚ β”‚ install errors β”‚ β”‚ -β”‚ β”‚ β”‚ errors β”‚ β”‚ β€’ 22: Invalid β”‚ β”‚ β€’ 200-209: LXC β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ 126-128: β”‚ β”‚ argument β”‚ β”‚ creation errors β”‚ β”‚ -β”‚ β”‚ β”‚ Command β”‚ β”‚ β€’ 28: No space β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ errors β”‚ β”‚ β€’ 35: Timeout β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ 129-143: β”‚ β”‚ β€’ 56: TLS error β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ Signal β”‚ β”‚ β€’ 60: SSL cert β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ errors β”‚ β”‚ error β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ 152: Resource β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ limit β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ 255: Unknown β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ critical β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β”‚ -β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ -β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ +β”‚ Read /usr/local/community-scripts/diagnostics β”‚ +β”‚ Extract DIAGNOSTICS=yes/no from file β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β–Ό 
β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ Error Message Return β”‚ -β”‚ β”‚ -β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ -β”‚ β”‚ Error Message Formatting β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ β”‚ -β”‚ β”‚ β”‚ Match Error β”‚ β”‚ Return β”‚ β”‚ Default Case β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ Code β”‚ β”‚ Description β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Use case β”‚ β”‚ β€’ Return β”‚ β”‚ β€’ Return "Unknown β”‚ β”‚ -β”‚ β”‚ β”‚ statement β”‚ β”‚ human- β”‚ β”‚ error code β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Match β”‚ β”‚ readable β”‚ β”‚ (exit_code)" β”‚ β”‚ -β”‚ β”‚ β”‚ specific β”‚ β”‚ message β”‚ β”‚ β€’ Handle β”‚ β”‚ -β”‚ β”‚ β”‚ codes β”‚ β”‚ β€’ Include β”‚ β”‚ unrecognized β”‚ β”‚ -β”‚ β”‚ β”‚ β€’ Handle β”‚ β”‚ context β”‚ β”‚ codes β”‚ β”‚ -β”‚ β”‚ β”‚ ranges β”‚ β”‚ information β”‚ β”‚ β€’ Provide fallback β”‚ β”‚ -β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ message β”‚ β”‚ -β”‚ β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ β”‚ -β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β”‚ Prerequisites: curl? ──► DIAGNOSTICS="yes"? 
──► RANDOM_UUID set? β”‚ +β”‚ (return silently on any failure) β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ VM Data Preparation β”‚ +β”‚ β”‚ +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β”‚ Set VM type β”‚ β”‚ Process disk size β”‚ β”‚ Set initial β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ status β”‚ β”‚ +β”‚ β”‚ β€’ ct_type: 2 β”‚ β”‚ β€’ Strip 'G' suffix β”‚ β”‚ β”‚ β”‚ +β”‚ β”‚ β€’ type: "vm" β”‚ β”‚ "20G" β†’ 20 β”‚ β”‚ β€’ status: β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ β€’ Store in β”‚ β”‚ "installing" β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ DISK_SIZE_API β”‚ β”‚ β€’ random_id: β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ RANDOM_UUID β”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ POST β†’ PB_API_URL β”‚ +β”‚ 
http://db.community-scripts.org/api/collections/_dev_telemetry_data/records β”‚ +β”‚ β”‚ +β”‚ Response (HTTP 200/201): β”‚ +β”‚ { "id": "xyz789abc012345", ... } β”‚ +β”‚ β”‚ β”‚ +β”‚ └──► PB_RECORD_ID = "xyz789abc012345" β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ ``` +## Status Update Flow β€” `post_update_to_api()` + +``` +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ post_update_to_api(status, exit_code) Flow β”‚ +β”‚ PATCH β†’ Update existing PocketBase record with final status β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ Duplicate Prevention Check β”‚ +β”‚ β”‚ +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β”‚ Check β”‚ β”‚ POST_UPDATE_DONE == "true"? 
β”‚ β”‚ +β”‚ β”‚ POST_UPDATE_ │───►│ β”‚ β”‚ +β”‚ β”‚ DONE flag β”‚ β”‚ YES β†’ return 0 (skip PATCH) β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ NO β†’ continue β”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ (first call only) + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ Prerequisites: curl? ──► DIAGNOSTICS="yes"? ──► RANDOM_UUID set? β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ Status Mapping β”‚ +β”‚ β”‚ +β”‚ Input $1 β”‚ PocketBase status β”‚ exit_code β”‚ error β”‚ +β”‚ ─────────────────┼─────────────────────┼──────────────┼────────────────────── β”‚ +β”‚ "done"/"success" β”‚ "sucess" β”‚ 0 β”‚ "" β”‚ +β”‚ "failed" β”‚ "failed" β”‚ from $2 β”‚ explain_exit_code() β”‚ +β”‚ anything else β”‚ "unknown" β”‚ from $2 β”‚ explain_exit_code() β”‚ +β”‚ β”‚ +β”‚ Note: PocketBase schema spells it "sucess" intentionally β”‚ 
+β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ Record ID Resolution β”‚ +β”‚ β”‚ +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β”‚ PB_RECORD_ID set? β”‚ β”‚ Fallback: GET lookup β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ +β”‚ β”‚ YES β†’ use PB_RECORD_ID β”‚ β”‚ GET PB_API_URL β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ ?filter=(random_id='UUID') β”‚ β”‚ +β”‚ β”‚ NO β†’ try GET lookup ───┼───►│ &fields=id β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ &perPage=1 β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ Extract "id" from response β”‚ β”‚ +β”‚ β”‚ β”‚ β”‚ If not found β†’ set flag, return β”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ PATCH Request β”‚ +β”‚ β”‚ +β”‚ PATCH β†’ 
PB_API_URL/{record_id} β”‚ +β”‚ http://db.community-scripts.org/api/collections/_dev_telemetry_data/ β”‚ +β”‚ records/{record_id} β”‚ +β”‚ β”‚ +β”‚ Payload: β”‚ +β”‚ { β”‚ +β”‚ "status": "sucess" | "failed" | "unknown", β”‚ +β”‚ "error": "..." | "", β”‚ +β”‚ "exit_code": 0 | <code> β”‚ +β”‚ } β”‚ +β”‚ β”‚ +β”‚ ──► POST_UPDATE_DONE = true (prevents future calls) β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ +``` + +## Error Description Flow β€” `explain_exit_code()` + +``` +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ explain_exit_code(code) Flow β”‚ +β”‚ Convert numeric exit codes to human-readable descriptions β”‚ +β”‚ Canonical function β€” used by api.func AND error_handler.func β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ Exit Code Classification (non-overlapping ranges) β”‚ +β”‚ β”‚ +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β”‚ Generic/Shell β”‚ β”‚ curl/wget β”‚ β”‚ APT/DPKG β”‚ β”‚ +β”‚ β”‚ 1–2 β”‚ β”‚ 6, 7, 22,
28, 35β”‚ β”‚ 100–102, 255 β”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β”‚ β”‚ +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β”‚ System/Signals β”‚ β”‚ Systemd/Service β”‚ β”‚ Python/pip/uv β”‚ β”‚ +β”‚ β”‚ 124–143 β”‚ β”‚ 150–154 β”‚ β”‚ 160–162 β”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β”‚ β”‚ +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β”‚ PostgreSQL β”‚ β”‚ MySQL/MariaDB β”‚ β”‚ MongoDB β”‚ β”‚ +β”‚ β”‚ 170–173 β”‚ β”‚ 180–183 β”‚ β”‚ 190–193 β”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β”‚ β”‚ +β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ +β”‚ β”‚ Proxmox β”‚ β”‚ Node.js/npm β”‚ β”‚ +β”‚ β”‚ 200–231 β”‚ β”‚ 243–249 β”‚ β”‚ +β”‚ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό 
+β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ case "$code" in β”‚ +β”‚ <code>) echo "<description>" ;; β”‚ +β”‚ *) echo "Unknown error" ;; β”‚ +β”‚ esac β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ +``` + +## Complete Installation Lifecycle + +``` +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ Installation Script (e.g. build.func / vm-core.func) β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β”‚ 1. source api.func + β”‚ 2. Set DIAGNOSTICS, RANDOM_UUID, NSAPP, etc. + β”‚ + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ post_to_api() / post_to_api_vm() β”‚ +β”‚ β”‚ +β”‚ POST β†’ PB_API_URL β”‚ +β”‚ Body: { ..., "status": "installing", "random_id": "..." } β”‚ +β”‚ β”‚ +β”‚ Response β†’ PB_RECORD_ID = "abc123def456789" β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β”‚ 3. Installation proceeds... + β”‚ (container/VM creation, package install, etc.)
+ β”‚ + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ post_update_to_api("done", 0) β”‚ +β”‚ or β”‚ +β”‚ post_update_to_api("failed", $exit_code) β”‚ +β”‚ β”‚ +β”‚ PATCH β†’ PB_API_URL/{PB_RECORD_ID} β”‚ +β”‚ Body: { "status": "sucess", "error": "", "exit_code": 0 } β”‚ +β”‚ or { "status": "failed", "error": "...", "exit_code": N }β”‚ +β”‚ β”‚ +β”‚ POST_UPDATE_DONE = true β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ +``` + ## Integration Points ### With Installation Scripts -- **build.func**: Sends LXC installation data -- **vm-core.func**: Sends VM installation data -- **install.func**: Reports installation status -- **alpine-install.func**: Reports Alpine installation data +- **build.func**: Calls `post_to_api()` for LXC creation, then `post_update_to_api()` on completion +- **vm-core.func**: Calls `post_to_api_vm()` for VM creation, then `post_update_to_api()` on completion +- **install.func / alpine-install.func**: Reports installation status via `post_update_to_api()` ### With Error Handling -- **error_handler.func**: Provides error explanations -- **core.func**: Uses error descriptions in silent execution -- **Diagnostic reporting**: Tracks error patterns +- **error_handler.func**: Uses `explain_exit_code()` for human-readable error messages +- **Diagnostic reporting**: PocketBase records track error patterns anonymously ### External Dependencies -- **curl**: HTTP client for API communication -- **Community Scripts API**: External API endpoint -- **Network connectivity**: Required for API communication +- **curl**: HTTP client for PocketBase API communication +- **PocketBase**: Backend at `http://db.community-scripts.org` +- **Network connectivity**: 
Required for API communication (failures are silently ignored) diff --git a/docs/misc/api.func/API_FUNCTIONS_REFERENCE.md b/docs/misc/api.func/API_FUNCTIONS_REFERENCE.md index 732261f49..87c23a8bc 100644 --- a/docs/misc/api.func/API_FUNCTIONS_REFERENCE.md +++ b/docs/misc/api.func/API_FUNCTIONS_REFERENCE.md @@ -2,63 +2,88 @@ ## Overview -This document provides a comprehensive alphabetical reference of all functions in `api.func`, including parameters, dependencies, usage examples, and error handling. +This document provides a comprehensive reference of all functions in `api.func`, including parameters, dependencies, usage examples, and error handling. The backend is **PocketBase** hosted at `http://db.community-scripts.org`. + +## Configuration Variables + +| Variable | Value | Description | +|----------|-------|-------------| +| `PB_URL` | `http://db.community-scripts.org` | PocketBase server URL | +| `PB_COLLECTION` | `_dev_telemetry_data` | PocketBase collection name | +| `PB_API_URL` | `${PB_URL}/api/collections/${PB_COLLECTION}/records` | Full API endpoint | +| `PB_RECORD_ID` | *(runtime)* | Stores the PocketBase record ID returned by POST for later PATCH calls | ## Function Categories ### Error Description Functions -#### `get_error_description()` +#### `explain_exit_code()` + **Purpose**: Convert numeric exit codes to human-readable explanations **Parameters**: -- `$1` - Exit code to explain +- `$1` β€” Exit code to explain **Returns**: Human-readable error explanation string **Side Effects**: None **Dependencies**: None **Environment Variables Used**: None -**Supported Exit Codes**: -- **General System**: 0-9, 18, 22, 28, 35, 56, 60, 125-128, 129-143, 152, 255 -- **LXC-Specific**: 100-101, 200-209 -- **Docker**: 125 +> **Note**: `explain_exit_code()` is the **canonical** function for exit-code mapping. It is used by both `api.func` (telemetry) and `error_handler.func` (error display). 
+ +**Supported Exit Code Ranges** (non-overlapping): + +| Range | Category | +|-------|----------| +| 1–2 | Generic / Shell | +| 6–35 | curl / wget | +| 100–102 | APT / Package manager | +| 124–143 | System / Signals | +| 150–154 | Systemd / Service | +| 160–162 | Python / pip / uv | +| 170–173 | PostgreSQL | +| 180–183 | MySQL / MariaDB | +| 190–193 | MongoDB | +| 200–231 | Proxmox custom codes | +| 243–249 | Node.js / npm | +| 255 | DPKG fatal | **Usage Example**: ```bash -error_msg=$(get_error_description 127) +error_msg=$(explain_exit_code 127) echo "Error 127: $error_msg" -# Output: Error 127: Command not found: Incorrect path or missing dependency. +# Output: Error 127: Command not found ``` **Error Code Examples**: ```bash -get_error_description 0 # " " (space) -get_error_description 1 # "General error: An unspecified error occurred." -get_error_description 127 # "Command not found: Incorrect path or missing dependency." -get_error_description 200 # "LXC creation failed." -get_error_description 255 # "Unknown critical error, often due to missing permissions or broken scripts." 
+explain_exit_code 1 # "General error / Operation not permitted" +explain_exit_code 22 # "curl: HTTP error returned (404, 429, 500+)" +explain_exit_code 127 # "Command not found" +explain_exit_code 200 # "Proxmox: Failed to create lock file" +explain_exit_code 255 # "DPKG: Fatal internal error" +explain_exit_code 999 # "Unknown error" ``` ### API Communication Functions #### `post_to_api()` -**Purpose**: Send LXC container installation data to community-scripts.org API + +**Purpose**: Create an LXC container telemetry record in PocketBase **Parameters**: None (uses environment variables) **Returns**: None **Side Effects**: -- Sends HTTP POST request to API -- Stores response in RESPONSE variable -- Requires curl command and network connectivity +- Sends HTTP **POST** to `PB_API_URL` +- Stores the returned PocketBase record `id` in `PB_RECORD_ID` for later PATCH updates **Dependencies**: `curl` command -**Environment Variables Used**: `DIAGNOSTICS`, `RANDOM_UUID`, `CT_TYPE`, `DISK_SIZE`, `CORE_COUNT`, `RAM_SIZE`, `var_os`, `var_version`, `DISABLEIP6`, `NSAPP`, `METHOD` +**Environment Variables Used**: `DIAGNOSTICS`, `RANDOM_UUID`, `CT_TYPE`, `DISK_SIZE`, `CORE_COUNT`, `RAM_SIZE`, `var_os`, `var_version`, `NSAPP`, `METHOD` **Prerequisites**: -- `curl` command must be available -- `DIAGNOSTICS` must be set to "yes" +- `curl` must be available +- `DIAGNOSTICS` must be `"yes"` - `RANDOM_UUID` must be set and not empty -**API Endpoint**: `http://api.community-scripts.org/dev/upload` +**API Endpoint**: `POST http://db.community-scripts.org/api/collections/_dev_telemetry_data/records` -**JSON Payload Structure**: +**JSON Payload**: ```json { "ct_type": 1, @@ -68,7 +93,6 @@ get_error_description 255 # "Unknown critical error, often due to missing perm "ram_size": 2048, "os_type": "debian", "os_version": "12", - "disableip6": "true", "nsapp": "plex", "method": "install", "pve_version": "8.0", @@ -77,6 +101,10 @@ get_error_description 255 # "Unknown critical error, often due 
to missing perm } ``` +**Response Handling**: +- On HTTP 200/201, `PB_RECORD_ID` is extracted from the response JSON (`"id"` field) +- On failure, the function returns silently without blocking the installation + **Usage Example**: ```bash export DIAGNOSTICS="yes" @@ -91,39 +119,39 @@ export NSAPP="plex" export METHOD="install" post_to_api +# PB_RECORD_ID is now set (e.g. "abc123def456789") ``` #### `post_to_api_vm()` -**Purpose**: Send VM installation data to community-scripts.org API + +**Purpose**: Create a VM telemetry record in PocketBase **Parameters**: None (uses environment variables) **Returns**: None **Side Effects**: -- Sends HTTP POST request to API -- Stores response in RESPONSE variable -- Requires curl command and network connectivity +- Sends HTTP **POST** to `PB_API_URL` +- Stores the returned PocketBase record `id` in `PB_RECORD_ID` **Dependencies**: `curl` command, diagnostics file -**Environment Variables Used**: `DIAGNOSTICS`, `RANDOM_UUID`, `DISK_SIZE`, `CORE_COUNT`, `RAM_SIZE`, `var_os`, `var_version`, `NSAPP`, `METHOD` +**Environment Variables Used**: `RANDOM_UUID`, `DISK_SIZE`, `CORE_COUNT`, `RAM_SIZE`, `var_os`, `var_version`, `NSAPP`, `METHOD` **Prerequisites**: - `/usr/local/community-scripts/diagnostics` file must exist -- `DIAGNOSTICS` must be set to "yes" in diagnostics file -- `curl` command must be available +- `DIAGNOSTICS` must be `"yes"` in that file (read at runtime) +- `curl` must be available - `RANDOM_UUID` must be set and not empty -**API Endpoint**: `http://api.community-scripts.org/dev/upload` +**API Endpoint**: `POST http://db.community-scripts.org/api/collections/_dev_telemetry_data/records` -**JSON Payload Structure**: +**JSON Payload**: ```json { "ct_type": 2, "type": "vm", - "disk_size": 8, - "core_count": 2, - "ram_size": 2048, - "os_type": "debian", - "os_version": "12", - "disableip6": "", - "nsapp": "plex", + "disk_size": 20, + "core_count": 4, + "ram_size": 4096, + "os_type": "ubuntu", + "os_version": "22.04", + 
"nsapp": "nextcloud", "method": "install", "pve_version": "8.0", "status": "installing", @@ -131,50 +159,81 @@ post_to_api } ``` +> **Note**: `DISK_SIZE` is stripped of its `G` suffix before sending (e.g. `"20G"` β†’ `20`). + **Usage Example**: ```bash # Create diagnostics file +mkdir -p /usr/local/community-scripts echo "DIAGNOSTICS=yes" > /usr/local/community-scripts/diagnostics export RANDOM_UUID="$(uuidgen)" -export DISK_SIZE="8G" -export CORE_COUNT=2 -export RAM_SIZE=2048 -export var_os="debian" -export var_version="12" -export NSAPP="plex" +export DISK_SIZE="20G" +export CORE_COUNT=4 +export RAM_SIZE=4096 +export var_os="ubuntu" +export var_version="22.04" +export NSAPP="nextcloud" export METHOD="install" post_to_api_vm +# PB_RECORD_ID is now set ``` #### `post_update_to_api()` -**Purpose**: Send installation completion status to community-scripts.org API + +**Purpose**: Update an existing PocketBase record with installation completion status via PATCH **Parameters**: -- `$1` - Status ("success" or "failed", default: "failed") -- `$2` - Exit code (default: 1) +- `$1` β€” Status (`"done"`, `"success"`, or `"failed"`; default: `"failed"`) +- `$2` β€” Exit code (numeric, default: `1`) **Returns**: None **Side Effects**: -- Sends HTTP POST request to API -- Sets POST_UPDATE_DONE=true to prevent duplicates -- Stores response in RESPONSE variable -**Dependencies**: `curl` command, `get_error_description()` -**Environment Variables Used**: `DIAGNOSTICS`, `RANDOM_UUID` +- Sends HTTP **PATCH** to `PB_API_URL/{record_id}` +- Sets `POST_UPDATE_DONE=true` to prevent duplicate calls +**Dependencies**: `curl`, `explain_exit_code()` +**Environment Variables Used**: `DIAGNOSTICS`, `RANDOM_UUID`, `PB_RECORD_ID` **Prerequisites**: -- `curl` command must be available -- `DIAGNOSTICS` must be set to "yes" +- `curl` must be available +- `DIAGNOSTICS` must be `"yes"` - `RANDOM_UUID` must be set and not empty -- POST_UPDATE_DONE must be false (prevents duplicates) +- 
`POST_UPDATE_DONE` must not be `"true"` (prevents duplicate updates) -**API Endpoint**: `http://api.community-scripts.org/dev/upload/updatestatus` +**Record Lookup**: +1. If `PB_RECORD_ID` is already set (from a prior `post_to_api` / `post_to_api_vm` call), it is used directly. +2. Otherwise, the function performs a **GET** lookup: + ``` + GET PB_API_URL?filter=(random_id='<RANDOM_UUID>')&fields=id&perPage=1 + ``` +3. If no record is found, the function sets `POST_UPDATE_DONE=true` and returns. -**JSON Payload Structure**: +**Status Mapping** (PocketBase select field values: `installing`, `sucess`, `failed`, `unknown`): + +| Input Status | PocketBase `status` | `exit_code` | `error` | +|---|---|---|---| +| `"done"` / `"success"` / `"sucess"` | `"sucess"` | `0` | `""` | +| `"failed"` | `"failed"` | *from $2* | *from `explain_exit_code()`* | +| anything else | `"unknown"` | *from $2* | *from `explain_exit_code()`* | + +> **Note**: The PocketBase schema intentionally spells success as `"sucess"`. + +**API Endpoint**: `PATCH http://db.community-scripts.org/api/collections/_dev_telemetry_data/records/{record_id}` + +**JSON Payload**: ```json { - "status": "success", - "error": "Error description from get_error_description()", - "random_id": "uuid-string" + "status": "sucess", + "error": "", + "exit_code": 0 +} +``` + +or for failures: +```json +{ + "status": "failed", + "error": "Command not found", + "exit_code": 127 } ``` @@ -183,10 +242,10 @@ post_to_api_vm export DIAGNOSTICS="yes" export RANDOM_UUID="$(uuidgen)" -# Report successful installation -post_update_to_api "success" 0 +# After a successful installation +post_update_to_api "done" 0 -# Report failed installation +# After a failed installation post_update_to_api "failed" 127 ``` @@ -196,198 +255,250 @@ post_update_to_api "failed" 127 ``` post_to_api() β”œβ”€β”€ Check curl availability -β”œβ”€β”€ Check DIAGNOSTICS setting -β”œβ”€β”€ Check RANDOM_UUID +β”œβ”€β”€ Check DIAGNOSTICS == "yes" +β”œβ”€β”€ Check RANDOM_UUID is set
β”œβ”€β”€ Get PVE version -β”œβ”€β”€ Create JSON payload -└── Send HTTP POST request +β”œβ”€β”€ Create JSON payload (ct_type=1, type="lxc", status="installing") +β”œβ”€β”€ POST to PB_API_URL +└── Extract PB_RECORD_ID from response post_to_api_vm() -β”œβ”€β”€ Check diagnostics file +β”œβ”€β”€ Read DIAGNOSTICS from /usr/local/community-scripts/diagnostics β”œβ”€β”€ Check curl availability -β”œβ”€β”€ Check DIAGNOSTICS setting -β”œβ”€β”€ Check RANDOM_UUID -β”œβ”€β”€ Process disk size +β”œβ”€β”€ Check DIAGNOSTICS == "yes" +β”œβ”€β”€ Check RANDOM_UUID is set +β”œβ”€β”€ Strip 'G' suffix from DISK_SIZE β”œβ”€β”€ Get PVE version -β”œβ”€β”€ Create JSON payload -└── Send HTTP POST request +β”œβ”€β”€ Create JSON payload (ct_type=2, type="vm", status="installing") +β”œβ”€β”€ POST to PB_API_URL +└── Extract PB_RECORD_ID from response -post_update_to_api() -β”œβ”€β”€ Check POST_UPDATE_DONE flag +post_update_to_api(status, exit_code) β”œβ”€β”€ Check curl availability -β”œβ”€β”€ Check DIAGNOSTICS setting -β”œβ”€β”€ Check RANDOM_UUID -β”œβ”€β”€ Determine status and exit code -β”œβ”€β”€ Get error description -β”œβ”€β”€ Create JSON payload -β”œβ”€β”€ Send HTTP POST request +β”œβ”€β”€ Check POST_UPDATE_DONE flag +β”œβ”€β”€ Check DIAGNOSTICS == "yes" +β”œβ”€β”€ Check RANDOM_UUID is set +β”œβ”€β”€ Map status β†’ pb_status ("done"β†’"sucess", "failed"β†’"failed", *β†’"unknown") +β”œβ”€β”€ For failed/unknown: call explain_exit_code(exit_code) +β”œβ”€β”€ Resolve record_id (PB_RECORD_ID or GET lookup by random_id) +β”œβ”€β”€ PATCH to PB_API_URL/{record_id} └── Set POST_UPDATE_DONE=true ``` ### Error Description Flow ``` -get_error_description() -β”œβ”€β”€ Match exit code -β”œβ”€β”€ Return appropriate description -└── Handle unknown codes +explain_exit_code(code) +β”œβ”€β”€ Match code against case statement (non-overlapping ranges) +β”œβ”€β”€ Return description string +└── Default: "Unknown error" ``` ## Error Code Reference -### General System Errors +### Generic / Shell (1–2) | Code | 
Description | |------|-------------| -| 0 | (space) | -| 1 | General error: An unspecified error occurred. | -| 2 | Incorrect shell usage or invalid command arguments. | -| 3 | Unexecuted function or invalid shell condition. | -| 4 | Error opening a file or invalid path. | -| 5 | I/O error: An input/output failure occurred. | -| 6 | No such device or address. | -| 7 | Insufficient memory or resource exhaustion. | -| 8 | Non-executable file or invalid file format. | -| 9 | Failed child process execution. | -| 18 | Connection to a remote server failed. | -| 22 | Invalid argument or faulty network connection. | -| 28 | No space left on device. | -| 35 | Timeout while establishing a connection. | -| 56 | Faulty TLS connection. | -| 60 | SSL certificate error. | +| 1 | General error / Operation not permitted | +| 2 | Misuse of shell builtins (e.g. syntax error) | -### Command Execution Errors +### curl / wget (6–35) | Code | Description | |------|-------------| -| 125 | Docker error: Container could not start. | -| 126 | Command not executable: Incorrect permissions or missing dependencies. | -| 127 | Command not found: Incorrect path or missing dependency. | -| 128 | Invalid exit signal, e.g., incorrect Git command. | +| 6 | curl: DNS resolution failed (could not resolve host) | +| 7 | curl: Failed to connect (network unreachable / host down) | +| 22 | curl: HTTP error returned (404, 429, 500+) | +| 28 | curl: Operation timeout (network slow or server not responding) | +| 35 | curl: SSL/TLS handshake failed (certificate error) | -### Signal Errors +### APT / Package Manager (100–102) | Code | Description | |------|-------------| -| 129 | Signal 1 (SIGHUP): Process terminated due to hangup. | -| 130 | Signal 2 (SIGINT): Manual termination via Ctrl+C. | -| 132 | Signal 4 (SIGILL): Illegal machine instruction. | -| 133 | Signal 5 (SIGTRAP): Debugging error or invalid breakpoint signal. | -| 134 | Signal 6 (SIGABRT): Program aborted itself. 
| -| 135 | Signal 7 (SIGBUS): Memory error, invalid memory address. | -| 137 | Signal 9 (SIGKILL): Process forcibly terminated (OOM-killer or 'kill -9'). | -| 139 | Signal 11 (SIGSEGV): Segmentation fault, possibly due to invalid pointer access. | -| 141 | Signal 13 (SIGPIPE): Pipe closed unexpectedly. | -| 143 | Signal 15 (SIGTERM): Process terminated normally. | -| 152 | Signal 24 (SIGXCPU): CPU time limit exceeded. | +| 100 | APT: Package manager error (broken packages / dependency problems) | +| 101 | APT: Configuration error (bad sources.list, malformed config) | +| 102 | APT: Lock held by another process (dpkg/apt still running) | -### LXC-Specific Errors +### System / Signals (124–143) | Code | Description | |------|-------------| -| 100 | LXC install error: Unexpected error in create_lxc.sh. | -| 101 | LXC install error: No network connection detected. | -| 200 | LXC creation failed. | -| 201 | LXC error: Invalid Storage class. | -| 202 | User aborted menu in create_lxc.sh. | -| 203 | CTID not set in create_lxc.sh. | -| 204 | PCT_OSTYPE not set in create_lxc.sh. | -| 205 | CTID cannot be less than 100 in create_lxc.sh. | -| 206 | CTID already in use in create_lxc.sh. | -| 207 | Template not found in create_lxc.sh. | -| 208 | Error downloading template in create_lxc.sh. | -| 209 | Container creation failed, but template is intact in create_lxc.sh. | +| 124 | Command timed out (timeout command) | +| 126 | Command invoked cannot execute (permission problem?) | +| 127 | Command not found | +| 128 | Invalid argument to exit | +| 130 | Terminated by Ctrl+C (SIGINT) | +| 134 | Process aborted (SIGABRT β€” possibly Node.js heap overflow) | +| 137 | Killed (SIGKILL / Out of memory?) 
| +| 139 | Segmentation fault (core dumped) | +| 141 | Broken pipe (SIGPIPE β€” output closed prematurely) | +| 143 | Terminated (SIGTERM) | -### Other Errors +### Systemd / Service (150–154) | Code | Description | |------|-------------| -| 255 | Unknown critical error, often due to missing permissions or broken scripts. | -| * | Unknown error code (exit_code). | +| 150 | Systemd: Service failed to start | +| 151 | Systemd: Service unit not found | +| 152 | Permission denied (EACCES) | +| 153 | Build/compile failed (make/gcc/cmake) | +| 154 | Node.js: Native addon build failed (node-gyp) | + +### Python / pip / uv (160–162) +| Code | Description | +|------|-------------| +| 160 | Python: Virtualenv / uv environment missing or broken | +| 161 | Python: Dependency resolution failed | +| 162 | Python: Installation aborted (permissions or EXTERNALLY-MANAGED) | + +### PostgreSQL (170–173) +| Code | Description | +|------|-------------| +| 170 | PostgreSQL: Connection failed (server not running / wrong socket) | +| 171 | PostgreSQL: Authentication failed (bad user/password) | +| 172 | PostgreSQL: Database does not exist | +| 173 | PostgreSQL: Fatal error in query / syntax | + +### MySQL / MariaDB (180–183) +| Code | Description | +|------|-------------| +| 180 | MySQL/MariaDB: Connection failed (server not running / wrong socket) | +| 181 | MySQL/MariaDB: Authentication failed (bad user/password) | +| 182 | MySQL/MariaDB: Database does not exist | +| 183 | MySQL/MariaDB: Fatal error in query / syntax | + +### MongoDB (190–193) +| Code | Description | +|------|-------------| +| 190 | MongoDB: Connection failed (server not running) | +| 191 | MongoDB: Authentication failed (bad user/password) | +| 192 | MongoDB: Database not found | +| 193 | MongoDB: Fatal query error | + +### Proxmox Custom Codes (200–231) +| Code | Description | +|------|-------------| +| 200 | Proxmox: Failed to create lock file | +| 203 | Proxmox: Missing CTID variable | +| 204 | Proxmox: Missing 
PCT_OSTYPE variable | +| 205 | Proxmox: Invalid CTID (<100) | +| 206 | Proxmox: CTID already in use | +| 207 | Proxmox: Password contains unescaped special characters | +| 208 | Proxmox: Invalid configuration (DNS/MAC/Network format) | +| 209 | Proxmox: Container creation failed | +| 210 | Proxmox: Cluster not quorate | +| 211 | Proxmox: Timeout waiting for template lock | +| 212 | Proxmox: Storage type 'iscsidirect' does not support containers (VMs only) | +| 213 | Proxmox: Storage type does not support 'rootdir' content | +| 214 | Proxmox: Not enough storage space | +| 215 | Proxmox: Container created but not listed (ghost state) | +| 216 | Proxmox: RootFS entry missing in config | +| 217 | Proxmox: Storage not accessible | +| 218 | Proxmox: Template file corrupted or incomplete | +| 219 | Proxmox: CephFS does not support containers β€” use RBD | +| 220 | Proxmox: Unable to resolve template path | +| 221 | Proxmox: Template file not readable | +| 222 | Proxmox: Template download failed | +| 223 | Proxmox: Template not available after download | +| 224 | Proxmox: PBS storage is for backups only | +| 225 | Proxmox: No template available for OS/Version | +| 231 | Proxmox: LXC stack upgrade failed | + +### Node.js / npm (243–249) +| Code | Description | +|------|-------------| +| 243 | Node.js: Out of memory (JavaScript heap out of memory) | +| 245 | Node.js: Invalid command-line option | +| 246 | Node.js: Internal JavaScript Parse Error | +| 247 | Node.js: Fatal internal error | +| 248 | Node.js: Invalid C++ addon / N-API failure | +| 249 | npm/pnpm/yarn: Unknown fatal error | + +### DPKG (255) +| Code | Description | +|------|-------------| +| 255 | DPKG: Fatal internal error | + +### Default +| Code | Description | +|------|-------------| +| * | Unknown error | ## Environment Variable Dependencies ### Required Variables -- **`DIAGNOSTICS`**: Enable/disable diagnostic reporting ("yes"/"no") -- **`RANDOM_UUID`**: Unique identifier for tracking +- **`DIAGNOSTICS`**: 
Enable/disable diagnostic reporting (`"yes"` / `"no"`) +- **`RANDOM_UUID`**: Unique identifier for session tracking -### Optional Variables -- **`CT_TYPE`**: Container type (1 for LXC, 2 for VM) -- **`DISK_SIZE`**: Disk size in GB (or GB with 'G' suffix for VM) +### Container / VM Variables +- **`CT_TYPE`**: Container type (`1` for LXC, `2` for VM) +- **`DISK_SIZE`**: Disk size in GB (VMs may include `G` suffix) - **`CORE_COUNT`**: Number of CPU cores - **`RAM_SIZE`**: RAM size in MB - **`var_os`**: Operating system type - **`var_version`**: OS version -- **`DISABLEIP6`**: IPv6 disable setting -- **`NSAPP`**: Namespace application name +- **`NSAPP`**: Application name - **`METHOD`**: Installation method ### Internal Variables -- **`POST_UPDATE_DONE`**: Prevents duplicate status updates -- **`API_URL`**: Community scripts API endpoint -- **`JSON_PAYLOAD`**: API request payload -- **`RESPONSE`**: API response -- **`DISK_SIZE_API`**: Processed disk size for VM API +- **`PB_URL`**: PocketBase server URL +- **`PB_COLLECTION`**: PocketBase collection name +- **`PB_API_URL`**: Full PocketBase API endpoint +- **`PB_RECORD_ID`**: PocketBase record ID (set after POST, used for PATCH) +- **`POST_UPDATE_DONE`**: Flag to prevent duplicate status updates +- **`JSON_PAYLOAD`**: API request payload (local to each function) +- **`RESPONSE`**: API response (local to each function) ## Error Handling Patterns ### API Communication Errors -- All API functions handle curl failures gracefully -- Network errors don't block installation process -- Missing prerequisites cause early return -- Duplicate updates are prevented +- All API functions return silently on failure β€” network errors never block installation +- Missing prerequisites (no curl, diagnostics disabled, no UUID) cause early return +- `POST_UPDATE_DONE` flag prevents duplicate PATCH updates +- PocketBase record lookup falls back to `GET ?filter=(random_id='...')` if `PB_RECORD_ID` is unset ### Error Description Errors -- 
Unknown error codes return generic message -- All error codes are handled with case statement -- Fallback message includes the actual error code - -### Prerequisites Validation -- Check curl availability before API calls -- Validate DIAGNOSTICS setting -- Ensure RANDOM_UUID is set -- Check for duplicate updates +- Unknown error codes return `"Unknown error"` +- All recognized codes are handled via a `case` statement with non-overlapping ranges +- The fallback message is generic (no error code is embedded) ## Integration Examples -### With build.func +### With build.func (LXC) ```bash #!/usr/bin/env bash source core.func source api.func source build.func -# Set up API reporting export DIAGNOSTICS="yes" export RANDOM_UUID="$(uuidgen)" -# Report installation start +# Report LXC installation start β†’ POST creates record post_to_api -# Container creation... -# ... build.func code ... +# ... container creation via build.func ... -# Report completion +# Report completion β†’ PATCH updates record if [[ $? -eq 0 ]]; then - post_update_to_api "success" 0 + post_update_to_api "done" 0 else post_update_to_api "failed" $? fi ``` -### With vm-core.func +### With vm-core.func (VM) ```bash #!/usr/bin/env bash source core.func source api.func source vm-core.func -# Set up API reporting -export DIAGNOSTICS="yes" export RANDOM_UUID="$(uuidgen)" -# Report VM installation start +# Report VM installation start β†’ POST creates record post_to_api_vm -# VM creation... -# ... vm-core.func code ... +# ... VM creation via vm-core.func ... 
-# Report completion -post_update_to_api "success" 0 +# Report completion β†’ PATCH updates record +post_update_to_api "done" 0 ``` ### With error_handler.func @@ -397,37 +508,30 @@ source core.func source error_handler.func source api.func -# Use error descriptions error_code=127 -error_msg=$(get_error_description $error_code) +error_msg=$(explain_exit_code $error_code) echo "Error $error_code: $error_msg" -# Report error to API +# Report error to PocketBase post_update_to_api "failed" $error_code ``` ## Best Practices ### API Usage -1. Always check prerequisites before API calls -2. Use unique identifiers for tracking -3. Handle API failures gracefully -4. Don't block installation on API failures +1. Always check prerequisites before API calls (handled internally by each function) +2. Call `post_to_api` / `post_to_api_vm` **once** at installation start to get a `PB_RECORD_ID` +3. Call `post_update_to_api` **once** at the end to finalize the record via PATCH +4. Never block the installation on API failures ### Error Reporting -1. Use appropriate error codes -2. Provide meaningful error descriptions -3. Report both success and failure cases -4. Prevent duplicate status updates +1. Use `explain_exit_code()` for human-readable error messages +2. Pass the actual numeric exit code to `post_update_to_api` +3. Report both success (`"done"`) and failure (`"failed"`) cases +4. The `POST_UPDATE_DONE` flag automatically prevents duplicate updates ### Diagnostic Reporting -1. Respect user privacy settings -2. Only send data when diagnostics enabled -3. Use anonymous tracking identifiers -4. Include relevant system information - -### Error Handling -1. Handle unknown error codes gracefully -2. Provide fallback error messages -3. Include error code in unknown error messages -4. Use consistent error message format +1. Respect user privacy β€” only send data when `DIAGNOSTICS="yes"` +2. Use anonymous random UUIDs for session tracking (no personal data) +3. 
Include relevant system information (PVE version, OS, app name) +4. The diagnostics file at `/usr/local/community-scripts/diagnostics` controls VM reporting diff --git a/docs/misc/api.func/API_INTEGRATION.md b/docs/misc/api.func/API_INTEGRATION.md index f325dace2..f3e3ba2c9 100644 --- a/docs/misc/api.func/API_INTEGRATION.md +++ b/docs/misc/api.func/API_INTEGRATION.md @@ -2,26 +2,42 @@ ## Overview -This document describes how `api.func` integrates with other components in the Proxmox Community Scripts project, including dependencies, data flow, and API surface. +This document describes how `api.func` integrates with other components in the Proxmox Community Scripts project. The telemetry backend is **PocketBase** at `http://db.community-scripts.org`, using the `_dev_telemetry_data` collection. + +## Architecture + +``` +Installation Scripts ──► api.func ──► PocketBase (db.community-scripts.org) + β”‚ + β”œβ”€ POST β†’ create record (status: "installing") + β”œβ”€ PATCH β†’ update record (status: "sucess"/"failed") + └─ GET β†’ lookup record by random_id (fallback) +``` + +### Key Design Points +- **POST** creates a new telemetry record and returns a PocketBase `id` +- **PATCH** updates the existing record using that `id` (or a GET lookup by `random_id`) +- All communication is fire-and-forget β€” failures never block the installation +- `explain_exit_code()` is the canonical function for exit-code-to-description mapping ## Dependencies ### External Dependencies #### Required Commands -- **`curl`**: HTTP client for API communication -- **`uuidgen`**: Generate unique identifiers (optional, can use other methods) +- **`curl`**: HTTP client for PocketBase API communication #### Optional Commands -- **None**: No other external command dependencies +- **`uuidgen`**: Generate unique identifiers (any UUID source works) +- **`pveversion`**: Retrieve Proxmox VE version (gracefully skipped if missing) ### Internal Dependencies #### Environment Variables from Other Scripts -- 
**build.func**: Provides container creation variables +- **build.func**: Provides container creation variables (`CT_TYPE`, `DISK_SIZE`, etc.) - **vm-core.func**: Provides VM creation variables -- **core.func**: Provides system information variables -- **Installation scripts**: Provide application-specific variables +- **core.func**: Provides system information +- **Installation scripts**: Provide application-specific variables (`NSAPP`, `METHOD`) ## Integration Points @@ -29,48 +45,41 @@ This document describes how `api.func` integrates with other components in the P #### LXC Container Reporting ```bash -# build.func uses api.func for container reporting source core.func source api.func source build.func -# Set up API reporting export DIAGNOSTICS="yes" export RANDOM_UUID="$(uuidgen)" -# Container creation with API reporting -create_container() { - # Set container parameters - export CT_TYPE=1 - export DISK_SIZE="$var_disk" - export CORE_COUNT="$var_cpu" - export RAM_SIZE="$var_ram" - export var_os="$var_os" - export var_version="$var_version" - export NSAPP="$APP" - export METHOD="install" +# Set container parameters +export CT_TYPE=1 +export DISK_SIZE="$var_disk" +export CORE_COUNT="$var_cpu" +export RAM_SIZE="$var_ram" +export var_os="$var_os" +export var_version="$var_version" +export NSAPP="$APP" +export METHOD="install" - # Report installation start - post_to_api +# POST β†’ creates record in PocketBase, saves PB_RECORD_ID +post_to_api - # Container creation using build.func - # ... build.func container creation logic ... +# ... container creation via build.func ... - # Report completion - if [[ $? -eq 0 ]]; then - post_update_to_api "success" 0 - else - post_update_to_api "failed" $? - fi -} +# PATCH β†’ updates the record with final status +if [[ $? -eq 0 ]]; then + post_update_to_api "done" 0 +else + post_update_to_api "failed" $? 
+fi ``` #### Error Reporting Integration ```bash -# build.func uses api.func for error reporting handle_container_error() { local exit_code=$1 - local error_msg=$(get_error_description $exit_code) + local error_msg=$(explain_exit_code $exit_code) echo "Container creation failed: $error_msg" post_update_to_api "failed" $exit_code @@ -81,93 +90,54 @@ handle_container_error() { #### VM Installation Reporting ```bash -# vm-core.func uses api.func for VM reporting source core.func source api.func source vm-core.func -# Set up VM API reporting +# VM reads DIAGNOSTICS from file mkdir -p /usr/local/community-scripts echo "DIAGNOSTICS=yes" > /usr/local/community-scripts/diagnostics export RANDOM_UUID="$(uuidgen)" -# VM creation with API reporting -create_vm() { - # Set VM parameters - export DISK_SIZE="${var_disk}G" - export CORE_COUNT="$var_cpu" - export RAM_SIZE="$var_ram" - export var_os="$var_os" - export var_version="$var_version" - export NSAPP="$APP" - export METHOD="install" +# Set VM parameters +export DISK_SIZE="${var_disk}G" +export CORE_COUNT="$var_cpu" +export RAM_SIZE="$var_ram" +export var_os="$var_os" +export var_version="$var_version" +export NSAPP="$APP" +export METHOD="install" - # Report VM installation start - post_to_api_vm +# POST β†’ creates record in PocketBase (ct_type=2, type="vm") +post_to_api_vm - # VM creation using vm-core.func - # ... vm-core.func VM creation logic ... +# ... VM creation via vm-core.func ... 
- # Report completion - post_update_to_api "success" 0 -} -``` - -### With core.func - -#### System Information Integration -```bash -# core.func provides system information for api.func -source core.func -source api.func - -# Get system information for API reporting -get_system_info_for_api() { - # Get PVE version using core.func utilities - local pve_version=$(pveversion | awk -F'[/ ]' '{print $2}') - - # Set API parameters - export var_os="$var_os" - export var_version="$var_version" - - # Use core.func error handling with api.func reporting - if silent apt-get update; then - post_update_to_api "success" 0 - else - post_update_to_api "failed" $? - fi -} +# PATCH β†’ finalizes record +post_update_to_api "done" 0 ``` ### With error_handler.func #### Error Description Integration ```bash -# error_handler.func uses api.func for error descriptions source core.func source error_handler.func source api.func -# Enhanced error handler with API reporting enhanced_error_handler() { local exit_code=${1:-$?} local command=${2:-${BASH_COMMAND:-unknown}} - # Get error description from api.func - local error_msg=$(get_error_description $exit_code) + # explain_exit_code() is the canonical error description function + local error_msg=$(explain_exit_code $exit_code) - # Display error information echo "Error $exit_code: $error_msg" echo "Command: $command" - # Report error to API - export DIAGNOSTICS="yes" - export RANDOM_UUID="$(uuidgen)" + # PATCH the telemetry record with failure details post_update_to_api "failed" $exit_code - - # Use standard error handler - error_handler $exit_code $command } ``` @@ -175,32 +145,28 @@ enhanced_error_handler() { #### Installation Process Reporting ```bash -# install.func uses api.func for installation reporting source core.func source api.func source install.func -# Installation with API reporting install_package_with_reporting() { local package="$1" - # Set up API reporting export DIAGNOSTICS="yes" export RANDOM_UUID="$(uuidgen)" export 
NSAPP="$package" export METHOD="install" - # Report installation start + # POST β†’ create telemetry record post_to_api - # Package installation using install.func if install_package "$package"; then echo "$package installed successfully" - post_update_to_api "success" 0 + post_update_to_api "done" 0 return 0 else local exit_code=$? - local error_msg=$(get_error_description $exit_code) + local error_msg=$(explain_exit_code $exit_code) echo "$package installation failed: $error_msg" post_update_to_api "failed" $exit_code return $exit_code @@ -208,270 +174,105 @@ install_package_with_reporting() { } ``` -### With alpine-install.func - -#### Alpine Installation Reporting -```bash -# alpine-install.func uses api.func for Alpine reporting -source core.func -source api.func -source alpine-install.func - -# Alpine installation with API reporting -install_alpine_with_reporting() { - local app="$1" - - # Set up API reporting - export DIAGNOSTICS="yes" - export RANDOM_UUID="$(uuidgen)" - export NSAPP="$app" - export METHOD="install" - export var_os="alpine" - - # Report Alpine installation start - post_to_api - - # Alpine installation using alpine-install.func - if install_alpine_app "$app"; then - echo "Alpine $app installed successfully" - post_update_to_api "success" 0 - return 0 - else - local exit_code=$? 
- local error_msg=$(get_error_description $exit_code) - echo "Alpine $app installation failed: $error_msg" - post_update_to_api "failed" $exit_code - return $exit_code - fi -} -``` - -### With alpine-tools.func - -#### Alpine Tools Reporting -```bash -# alpine-tools.func uses api.func for Alpine tools reporting -source core.func -source api.func -source alpine-tools.func - -# Alpine tools with API reporting -run_alpine_tool_with_reporting() { - local tool="$1" - - # Set up API reporting - export DIAGNOSTICS="yes" - export RANDOM_UUID="$(uuidgen)" - export NSAPP="alpine-tools" - export METHOD="tool" - - # Report tool execution start - post_to_api - - # Run Alpine tool using alpine-tools.func - if run_alpine_tool "$tool"; then - echo "Alpine tool $tool executed successfully" - post_update_to_api "success" 0 - return 0 - else - local exit_code=$? - local error_msg=$(get_error_description $exit_code) - echo "Alpine tool $tool failed: $error_msg" - post_update_to_api "failed" $exit_code - return $exit_code - fi -} -``` - -### With passthrough.func - -#### Hardware Passthrough Reporting -```bash -# passthrough.func uses api.func for hardware reporting -source core.func -source api.func -source passthrough.func - -# Hardware passthrough with API reporting -configure_passthrough_with_reporting() { - local hardware_type="$1" - - # Set up API reporting - export DIAGNOSTICS="yes" - export RANDOM_UUID="$(uuidgen)" - export NSAPP="passthrough" - export METHOD="hardware" - - # Report passthrough configuration start - post_to_api - - # Configure passthrough using passthrough.func - if configure_passthrough "$hardware_type"; then - echo "Hardware passthrough configured successfully" - post_update_to_api "success" 0 - return 0 - else - local exit_code=$? 
- local error_msg=$(get_error_description $exit_code) - echo "Hardware passthrough failed: $error_msg" - post_update_to_api "failed" $exit_code - return $exit_code - fi -} -``` - -### With tools.func - -#### Maintenance Operations Reporting -```bash -# tools.func uses api.func for maintenance reporting -source core.func -source api.func -source tools.func - -# Maintenance operations with API reporting -run_maintenance_with_reporting() { - local operation="$1" - - # Set up API reporting - export DIAGNOSTICS="yes" - export RANDOM_UUID="$(uuidgen)" - export NSAPP="maintenance" - export METHOD="tool" - - # Report maintenance start - post_to_api - - # Run maintenance using tools.func - if run_maintenance_operation "$operation"; then - echo "Maintenance operation $operation completed successfully" - post_update_to_api "success" 0 - return 0 - else - local exit_code=$? - local error_msg=$(get_error_description $exit_code) - echo "Maintenance operation $operation failed: $error_msg" - post_update_to_api "failed" $exit_code - return $exit_code - fi -} -``` - ## Data Flow ### Input Data -#### Environment Variables from Other Scripts -- **`CT_TYPE`**: Container type (1 for LXC, 2 for VM) -- **`DISK_SIZE`**: Disk size in GB -- **`CORE_COUNT`**: Number of CPU cores -- **`RAM_SIZE`**: RAM size in MB -- **`var_os`**: Operating system type -- **`var_version`**: OS version -- **`DISABLEIP6`**: IPv6 disable setting -- **`NSAPP`**: Namespace application name -- **`METHOD`**: Installation method -- **`DIAGNOSTICS`**: Enable/disable diagnostic reporting -- **`RANDOM_UUID`**: Unique identifier for tracking +#### Environment Variables +| Variable | Source | Description | +|----------|--------|-------------| +| `CT_TYPE` | build.func | Container type (1=LXC, 2=VM) | +| `DISK_SIZE` | build.func / vm-core.func | Disk size in GB (VMs may have `G` suffix) | +| `CORE_COUNT` | build.func / vm-core.func | CPU core count | +| `RAM_SIZE` | build.func / vm-core.func | RAM in MB | +| `var_os` | 
core.func | Operating system type | +| `var_version` | core.func | OS version | +| `NSAPP` | Installation scripts | Application name | +| `METHOD` | Installation scripts | Installation method | +| `DIAGNOSTICS` | User config / diagnostics file | Enable/disable telemetry | +| `RANDOM_UUID` | Caller | Session tracking UUID | #### Function Parameters -- **Exit codes**: Passed to `get_error_description()` and `post_update_to_api()` -- **Status information**: Passed to `post_update_to_api()` -- **API endpoints**: Hardcoded in functions +- **Exit codes**: Passed to `explain_exit_code()` and `post_update_to_api()` +- **Status strings**: Passed to `post_update_to_api()` (`"done"`, `"failed"`) #### System Information -- **PVE version**: Retrieved from `pveversion` command -- **Disk size processing**: Processed for VM API (removes 'G' suffix) -- **Error codes**: Retrieved from command exit codes +- **PVE version**: Retrieved from `pveversion` command at runtime +- **Disk size**: VM disk size is stripped of `G` suffix before sending -### Processing Data +### Processing -#### API Request Preparation -- **JSON payload creation**: Format data for API consumption -- **Data validation**: Ensure required fields are present -- **Error handling**: Handle missing or invalid data -- **Content type setting**: Set appropriate HTTP headers +#### Record Creation (POST) +1. Validate prerequisites (curl, DIAGNOSTICS, RANDOM_UUID) +2. Gather PVE version +3. Build JSON payload with all telemetry fields +4. `POST` to `PB_API_URL` +5. 
Extract `PB_RECORD_ID` from PocketBase response (HTTP 200/201) -#### Error Processing -- **Error code mapping**: Map numeric codes to descriptions -- **Error message formatting**: Format error descriptions -- **Unknown error handling**: Handle unrecognized error codes -- **Fallback messages**: Provide default error messages - -#### API Communication -- **HTTP request preparation**: Prepare curl commands -- **Response handling**: Capture HTTP response codes -- **Error handling**: Handle network and API errors -- **Duplicate prevention**: Prevent duplicate status updates +#### Record Update (PATCH) +1. Validate prerequisites + check `POST_UPDATE_DONE` flag +2. Map status string β†’ PocketBase select value (`"done"` β†’ `"sucess"`) +3. For failures: call `explain_exit_code()` to get error description +4. Resolve record ID: use `PB_RECORD_ID` or fall back to GET lookup +5. `PATCH` to `PB_API_URL/{record_id}` with status, error, exit_code +6. Set `POST_UPDATE_DONE=true` ### Output Data -#### API Communication -- **HTTP requests**: Sent to community-scripts.org API -- **Response codes**: Captured from API responses -- **Error information**: Reported to API -- **Status updates**: Sent to API +#### PocketBase Records +- **POST response**: Returns record with `id` field β†’ stored in `PB_RECORD_ID` +- **PATCH response**: Updates record fields (status, error, exit_code) +- **GET response**: Used for record ID lookup by `random_id` filter -#### Error Information -- **Error descriptions**: Human-readable error messages -- **Error codes**: Mapped to descriptions -- **Context information**: Error context and details -- **Fallback messages**: Default error messages - -#### System State -- **POST_UPDATE_DONE**: Prevents duplicate updates -- **RESPONSE**: Stores API response -- **JSON_PAYLOAD**: Stores formatted API data -- **API_URL**: Stores API endpoint +#### Internal State +| Variable | Description | +|----------|-------------| +| `PB_RECORD_ID` | PocketBase record ID for PATCH 
calls | +| `POST_UPDATE_DONE` | Flag preventing duplicate updates | ## API Surface ### Public Functions -#### Error Description -- **`get_error_description()`**: Convert exit codes to explanations -- **Parameters**: Exit code to explain -- **Returns**: Human-readable explanation string -- **Usage**: Called by other functions and scripts +| Function | Purpose | HTTP Method | +|----------|---------|-------------| +| `explain_exit_code(code)` | Map exit code to description | β€” | +| `post_to_api()` | Create LXC telemetry record | POST | +| `post_to_api_vm()` | Create VM telemetry record | POST | +| `post_update_to_api(status, exit_code)` | Update record with final status | PATCH | -#### API Communication -- **`post_to_api()`**: Send LXC installation data -- **`post_to_api_vm()`**: Send VM installation data -- **`post_update_to_api()`**: Send status updates -- **Parameters**: Status and exit code (for updates) -- **Returns**: None -- **Usage**: Called by installation scripts +### PocketBase Collection Schema -### Internal Functions +Collection: `_dev_telemetry_data` -#### None -- All functions in api.func are public -- No internal helper functions -- Direct implementation of all functionality +| Field | Type | Required | Description | +|-------|------|----------|-------------| +| `id` | text (auto) | yes | PocketBase record ID (15 chars) | +| `random_id` | text | yes | Session UUID (min 8 chars, unique) | +| `type` | select | yes | `"lxc"`, `"vm"`, `"addon"`, `"pve"` | +| `ct_type` | number | yes | 1 (LXC) or 2 (VM) | +| `nsapp` | text | yes | Application name | +| `status` | select | yes | `"installing"`, `"sucess"`, `"failed"`, `"unknown"` | +| `disk_size` | number | no | Disk size in GB | +| `core_count` | number | no | CPU cores | +| `ram_size` | number | no | RAM in MB | +| `os_type` | text | no | OS type | +| `os_version` | text | no | OS version | +| `pve_version` | text | no | Proxmox VE version | +| `method` | text | no | Installation method | +| `error` | 
text | no | Error description | +| `exit_code` | number | no | Numeric exit code | +| `created` | autodate | auto | Record creation timestamp | +| `updated` | autodate | auto | Last update timestamp | -### Global Variables +> **Note**: The `status` field intentionally uses the spelling `"sucess"` (not `"success"`). -#### Configuration Variables -- **`DIAGNOSTICS`**: Diagnostic reporting setting -- **`RANDOM_UUID`**: Unique tracking identifier -- **`POST_UPDATE_DONE`**: Duplicate update prevention - -#### Data Variables -- **`CT_TYPE`**: Container type -- **`DISK_SIZE`**: Disk size -- **`CORE_COUNT`**: CPU core count -- **`RAM_SIZE`**: RAM size -- **`var_os`**: Operating system -- **`var_version`**: OS version -- **`DISABLEIP6`**: IPv6 setting -- **`NSAPP`**: Application namespace -- **`METHOD`**: Installation method - -#### Internal Variables -- **`API_URL`**: API endpoint URL -- **`JSON_PAYLOAD`**: API request payload -- **`RESPONSE`**: API response -- **`DISK_SIZE_API`**: Processed disk size for VM API +### Configuration Variables +| Variable | Value | +|----------|-------| +| `PB_URL` | `http://db.community-scripts.org` | +| `PB_COLLECTION` | `_dev_telemetry_data` | +| `PB_API_URL` | `${PB_URL}/api/collections/${PB_COLLECTION}/records` | ## Integration Patterns @@ -479,45 +280,39 @@ run_maintenance_with_reporting() { ```bash #!/usr/bin/env bash -# Standard integration pattern -# 1. Source core.func first +# 1. Source dependencies source core.func - -# 2. Source api.func source api.func -# 3. Set up API reporting +# 2. Enable telemetry export DIAGNOSTICS="yes" export RANDOM_UUID="$(uuidgen)" -# 4. Set application parameters +# 3. Set application parameters export NSAPP="$APP" export METHOD="install" -# 5. Report installation start +# 4. POST β†’ create telemetry record in PocketBase post_to_api -# 6. Perform installation +# 5. Perform installation # ... installation logic ... -# 7. Report completion -post_update_to_api "success" 0 +# 6. 
PATCH β†’ update record with final status +post_update_to_api "done" 0 ``` ### Minimal Integration Pattern ```bash #!/usr/bin/env bash -# Minimal integration pattern - source api.func -# Basic error reporting export DIAGNOSTICS="yes" export RANDOM_UUID="$(uuidgen)" -# Report failure +# Report failure (PATCH via record lookup) post_update_to_api "failed" 127 ``` @@ -525,13 +320,10 @@ post_update_to_api "failed" 127 ```bash #!/usr/bin/env bash -# Advanced integration pattern - source core.func source api.func source error_handler.func -# Set up comprehensive API reporting export DIAGNOSTICS="yes" export RANDOM_UUID="$(uuidgen)" export CT_TYPE=1 @@ -542,12 +334,12 @@ export var_os="debian" export var_version="12" export METHOD="install" -# Enhanced error handling with API reporting +# Enhanced error handler with PocketBase reporting enhanced_error_handler() { local exit_code=${1:-$?} local command=${2:-${BASH_COMMAND:-unknown}} - local error_msg=$(get_error_description $exit_code) + local error_msg=$(explain_exit_code $exit_code) echo "Error $exit_code: $error_msg" post_update_to_api "failed" $exit_code @@ -556,88 +348,39 @@ enhanced_error_handler() { trap 'enhanced_error_handler' ERR -# Advanced operations with API reporting +# POST β†’ create record post_to_api + # ... operations ... 
-post_update_to_api "success" 0 + +# PATCH β†’ finalize +post_update_to_api "done" 0 ``` ## Error Handling Integration ### Automatic Error Reporting -- **Error Descriptions**: Provides human-readable error messages -- **API Integration**: Reports errors to community-scripts.org API -- **Error Tracking**: Tracks error patterns for project improvement -- **Diagnostic Data**: Contributes to anonymous usage analytics - -### Manual Error Reporting -- **Custom Error Codes**: Use appropriate error codes for different scenarios -- **Error Context**: Provide context information for errors -- **Status Updates**: Report both success and failure cases -- **Error Analysis**: Analyze error patterns and trends +- **Error Descriptions**: `explain_exit_code()` provides human-readable messages for all recognized exit codes +- **PocketBase Integration**: Errors are recorded via PATCH with `status`, `error`, and `exit_code` fields +- **Error Tracking**: Anonymous telemetry helps track common failure patterns +- **Diagnostic Data**: Contributes to project-wide analytics without PII ### API Communication Errors -- **Network Failures**: Handle API communication failures gracefully -- **Missing Prerequisites**: Check prerequisites before API calls -- **Duplicate Prevention**: Prevent duplicate status updates -- **Error Recovery**: Handle API errors without blocking installation +- **Network Failures**: All API calls use `|| true` β€” failures are swallowed silently +- **Missing Prerequisites**: Functions return early if curl, DIAGNOSTICS, or UUID are missing +- **Duplicate Prevention**: `POST_UPDATE_DONE` flag ensures only one PATCH per session +- **Record Lookup Fallback**: If `PB_RECORD_ID` is unset, a GET filter query resolves the record ## Performance Considerations ### API Communication Overhead -- **Minimal Impact**: API calls add minimal overhead -- **Asynchronous**: API calls don't block installation process -- **Error Handling**: API failures don't affect installation -- 
**Optional**: API reporting is optional and can be disabled +- **Minimal Impact**: Only 2 HTTP calls per installation (1 POST + 1 PATCH) +- **Non-blocking**: API failures never block the installation process +- **Fire-and-forget**: curl stderr is suppressed (`2>/dev/null`) +- **Optional**: Telemetry is entirely opt-in via `DIAGNOSTICS` setting -### Memory Usage -- **Minimal Footprint**: API functions use minimal memory -- **Variable Reuse**: Global variables reused across functions -- **No Memory Leaks**: Proper cleanup prevents memory leaks -- **Efficient Processing**: Efficient JSON payload creation - -### Execution Speed -- **Fast API Calls**: Quick API communication -- **Efficient Error Processing**: Fast error code processing -- **Minimal Delay**: Minimal delay in API operations -- **Non-blocking**: API calls don't block installation - -## Security Considerations - -### Data Privacy -- **Anonymous Reporting**: Only anonymous data is sent -- **No Sensitive Data**: No sensitive information is transmitted -- **User Control**: Users can disable diagnostic reporting -- **Data Minimization**: Only necessary data is sent - -### API Security -- **HTTPS**: API communication uses secure protocols -- **Data Validation**: API data is validated before sending -- **Error Handling**: API errors are handled securely -- **No Credentials**: No authentication credentials are sent - -### Network Security -- **Secure Communication**: Uses secure HTTP protocols -- **Error Handling**: Network errors are handled gracefully -- **No Data Leakage**: No sensitive data is leaked -- **Secure Endpoints**: Uses trusted API endpoints - -## Future Integration Considerations - -### Extensibility -- **New API Endpoints**: Easy to add new API endpoints -- **Additional Data**: Easy to add new data fields -- **Error Codes**: Easy to add new error code descriptions -- **API Versions**: Easy to support new API versions - -### Compatibility -- **API Versioning**: Compatible with different API versions 
-- **Data Format**: Compatible with different data formats -- **Error Codes**: Compatible with different error code systems -- **Network Protocols**: Compatible with different network protocols - -### Performance -- **Optimization**: API communication can be optimized -- **Caching**: API responses can be cached -- **Batch Operations**: Multiple operations can be batched -- **Async Processing**: API calls can be made asynchronous +### Security Considerations +- **Anonymous**: No personal data is transmitted — only system specs and app names +- **No Auth Required**: PocketBase collection rules allow anonymous create/update +- **User Control**: Users can disable telemetry by setting `DIAGNOSTICS=no` +- **HTTP**: API uses HTTP (not HTTPS) for compatibility with minimal containers diff --git a/docs/misc/api.func/API_USAGE_EXAMPLES.md b/docs/misc/api.func/API_USAGE_EXAMPLES.md deleted file mode 100644 index 616ebc927..000000000 --- a/docs/misc/api.func/API_USAGE_EXAMPLES.md +++ /dev/null @@ -1,794 +0,0 @@ -# api.func Usage Examples - -## Overview - -This document provides practical usage examples for `api.func` functions, covering common scenarios, integration patterns, and best practices. - -## Basic API Setup - -### Standard API Initialization - -```bash -#!/usr/bin/env bash -# Standard API setup for LXC containers - -source api.func - -# Set up diagnostic reporting -export DIAGNOSTICS="yes" -export RANDOM_UUID="$(uuidgen)" - -# Set container parameters -export CT_TYPE=1 -export DISK_SIZE=8 -export CORE_COUNT=2 -export RAM_SIZE=2048 -export var_os="debian" -export var_version="12" -export NSAPP="plex" -export METHOD="install" - -# Report installation start -post_to_api - -# Your installation code here -# ... installation logic ... - -# Report completion -if [[ $? -eq 0 ]]; then - post_update_to_api "success" 0 -else - post_update_to_api "failed" $?
-fi -``` - -### VM API Setup - -```bash -#!/usr/bin/env bash -# API setup for VMs - -source api.func - -# Create diagnostics file for VM -mkdir -p /usr/local/community-scripts -echo "DIAGNOSTICS=yes" > /usr/local/community-scripts/diagnostics - -# Set up VM parameters -export RANDOM_UUID="$(uuidgen)" -export DISK_SIZE="20G" -export CORE_COUNT=4 -export RAM_SIZE=4096 -export var_os="ubuntu" -export var_version="22.04" -export NSAPP="nextcloud" -export METHOD="install" - -# Report VM installation start -post_to_api_vm - -# Your VM installation code here -# ... VM creation logic ... - -# Report completion -post_update_to_api "success" 0 -``` - -## Error Description Examples - -### Basic Error Explanation - -```bash -#!/usr/bin/env bash -source api.func - -# Explain common error codes -echo "Error 0: '$(get_error_description 0)'" -echo "Error 1: $(get_error_description 1)" -echo "Error 127: $(get_error_description 127)" -echo "Error 200: $(get_error_description 200)" -echo "Error 255: $(get_error_description 255)" -``` - -### Error Code Testing - -```bash -#!/usr/bin/env bash -source api.func - -# Test all error codes -test_error_codes() { - local codes=(0 1 2 127 128 130 137 139 143 200 203 205 255) - - for code in "${codes[@]}"; do - echo "Code $code: $(get_error_description $code)" - done -} - -test_error_codes -``` - -### Error Handling with Descriptions - -```bash -#!/usr/bin/env bash -source api.func - -# Function with error handling -run_command_with_error_handling() { - local command="$1" - local description="$2" - - echo "Running: $description" - - if $command; then - echo "Success: $description" - return 0 - else - local exit_code=$? 
- local error_msg=$(get_error_description $exit_code) - echo "Error $exit_code: $error_msg" - return $exit_code - fi -} - -# Usage -run_command_with_error_handling "apt-get update" "Package list update" -run_command_with_error_handling "nonexistent_command" "Test command" -``` - -## API Communication Examples - -### LXC Installation Reporting - -```bash -#!/usr/bin/env bash -source api.func - -# Complete LXC installation with API reporting -install_lxc_with_reporting() { - local app="$1" - local ctid="$2" - - # Set up API reporting - export DIAGNOSTICS="yes" - export RANDOM_UUID="$(uuidgen)" - export CT_TYPE=1 - export DISK_SIZE=10 - export CORE_COUNT=2 - export RAM_SIZE=2048 - export var_os="debian" - export var_version="12" - export NSAPP="$app" - export METHOD="install" - - # Report installation start - post_to_api - - # Installation process - echo "Installing $app container (ID: $ctid)..." - - # Simulate installation - sleep 2 - - # Check if installation succeeded - if [[ $? -eq 0 ]]; then - echo "Installation completed successfully" - post_update_to_api "success" 0 - return 0 - else - echo "Installation failed" - post_update_to_api "failed" $? 
- return 1 - fi -} - -# Install multiple containers -install_lxc_with_reporting "plex" "100" -install_lxc_with_reporting "nextcloud" "101" -install_lxc_with_reporting "nginx" "102" -``` - -### VM Installation Reporting - -```bash -#!/usr/bin/env bash -source api.func - -# Complete VM installation with API reporting -install_vm_with_reporting() { - local app="$1" - local vmid="$2" - - # Create diagnostics file - mkdir -p /usr/local/community-scripts - echo "DIAGNOSTICS=yes" > /usr/local/community-scripts/diagnostics - - # Set up API reporting - export RANDOM_UUID="$(uuidgen)" - export DISK_SIZE="20G" - export CORE_COUNT=4 - export RAM_SIZE=4096 - export var_os="ubuntu" - export var_version="22.04" - export NSAPP="$app" - export METHOD="install" - - # Report VM installation start - post_to_api_vm - - # VM installation process - echo "Installing $app VM (ID: $vmid)..." - - # Simulate VM creation - sleep 3 - - # Check if VM creation succeeded - if [[ $? -eq 0 ]]; then - echo "VM installation completed successfully" - post_update_to_api "success" 0 - return 0 - else - echo "VM installation failed" - post_update_to_api "failed" $? 
- return 1 - fi -} - -# Install multiple VMs -install_vm_with_reporting "nextcloud" "200" -install_vm_with_reporting "wordpress" "201" -``` - -## Status Update Examples - -### Success Reporting - -```bash -#!/usr/bin/env bash -source api.func - -# Report successful installation -report_success() { - local operation="$1" - - export DIAGNOSTICS="yes" - export RANDOM_UUID="$(uuidgen)" - - echo "Reporting successful $operation" - post_update_to_api "success" 0 -} - -# Usage -report_success "container installation" -report_success "package installation" -report_success "service configuration" -``` - -### Failure Reporting - -```bash -#!/usr/bin/env bash -source api.func - -# Report failed installation -report_failure() { - local operation="$1" - local exit_code="$2" - - export DIAGNOSTICS="yes" - export RANDOM_UUID="$(uuidgen)" - - local error_msg=$(get_error_description $exit_code) - echo "Reporting failed $operation: $error_msg" - post_update_to_api "failed" $exit_code -} - -# Usage -report_failure "container creation" 200 -report_failure "package installation" 127 -report_failure "service start" 1 -``` - -### Conditional Status Reporting - -```bash -#!/usr/bin/env bash -source api.func - -# Conditional status reporting -report_installation_status() { - local operation="$1" - local exit_code="$2" - - export DIAGNOSTICS="yes" - export RANDOM_UUID="$(uuidgen)" - - if [[ $exit_code -eq 0 ]]; then - echo "Reporting successful $operation" - post_update_to_api "success" 0 - else - local error_msg=$(get_error_description $exit_code) - echo "Reporting failed $operation: $error_msg" - post_update_to_api "failed" $exit_code - fi -} - -# Usage -report_installation_status "container creation" 0 -report_installation_status "package installation" 127 -``` - -## Advanced Usage Examples - -### Batch Installation with API Reporting - -```bash -#!/usr/bin/env bash -source api.func - -# Batch installation with comprehensive API reporting -batch_install_with_reporting() { - local 
apps=("plex" "nextcloud" "nginx" "mysql") - local ctids=(100 101 102 103) - - # Set up API reporting - export DIAGNOSTICS="yes" - export RANDOM_UUID="$(uuidgen)" - export CT_TYPE=1 - export DISK_SIZE=8 - export CORE_COUNT=2 - export RAM_SIZE=2048 - export var_os="debian" - export var_version="12" - export METHOD="install" - - local success_count=0 - local failure_count=0 - - for i in "${!apps[@]}"; do - local app="${apps[$i]}" - local ctid="${ctids[$i]}" - - echo "Installing $app (ID: $ctid)..." - - # Set app-specific parameters - export NSAPP="$app" - - # Report installation start - post_to_api - - # Simulate installation - if install_app "$app" "$ctid"; then - echo "$app installed successfully" - post_update_to_api "success" 0 - ((success_count++)) - else - echo "$app installation failed" - post_update_to_api "failed" $? - ((failure_count++)) - fi - - echo "---" - done - - echo "Batch installation completed: $success_count successful, $failure_count failed" -} - -# Mock installation function -install_app() { - local app="$1" - local ctid="$2" - - # Simulate installation - sleep 1 - - # Simulate occasional failures - if [[ $((RANDOM % 10)) -eq 0 ]]; then - return 1 - fi - - return 0 -} - -batch_install_with_reporting -``` - -### Error Analysis and Reporting - -```bash -#!/usr/bin/env bash -source api.func - -# Analyze and report errors -analyze_and_report_errors() { - local log_file="$1" - - export DIAGNOSTICS="yes" - export RANDOM_UUID="$(uuidgen)" - - if [[ ! 
-f "$log_file" ]]; then - echo "Log file not found: $log_file" - return 1 - fi - - # Extract error codes from log - local error_codes=$(grep -o 'exit code [0-9]\+' "$log_file" | grep -o '[0-9]\+' | sort -u) - - if [[ -z "$error_codes" ]]; then - echo "No errors found in log" - post_update_to_api "success" 0 - return 0 - fi - - echo "Found error codes: $error_codes" - - # Report each unique error - for code in $error_codes; do - local error_msg=$(get_error_description $code) - echo "Error $code: $error_msg" - post_update_to_api "failed" $code - done -} - -# Usage -analyze_and_report_errors "/var/log/installation.log" -``` - -### API Health Check - -```bash -#!/usr/bin/env bash -source api.func - -# Check API connectivity and functionality -check_api_health() { - echo "Checking API health..." - - # Test prerequisites - if ! command -v curl >/dev/null 2>&1; then - echo "ERROR: curl not available" - return 1 - fi - - # Test error description function - local test_error=$(get_error_description 127) - if [[ -z "$test_error" ]]; then - echo "ERROR: Error description function not working" - return 1 - fi - - echo "Error description test: $test_error" - - # Test API connectivity (without sending data) - local api_url="http://api.community-scripts.org/dev/upload" - if curl -s --head "$api_url" >/dev/null 2>&1; then - echo "API endpoint is reachable" - else - echo "WARNING: API endpoint not reachable" - fi - - echo "API health check completed" -} - -check_api_health -``` - -## Integration Examples - -### With build.func - -```bash -#!/usr/bin/env bash -# Integration with build.func - -source core.func -source api.func -source build.func - -# Set up API reporting -export DIAGNOSTICS="yes" -export RANDOM_UUID="$(uuidgen)" - -# Container creation with API reporting -create_container_with_reporting() { - local app="$1" - local ctid="$2" - - # Set container parameters - export APP="$app" - export CTID="$ctid" - export var_hostname="${app}-server" - export var_os="debian" - export 
var_version="12" - export var_cpu="2" - export var_ram="2048" - export var_disk="10" - export var_net="vmbr0" - export var_gateway="192.168.1.1" - export var_ip="192.168.1.$ctid" - export var_template_storage="local" - export var_container_storage="local" - - # Report installation start - post_to_api - - # Create container using build.func - if source build.func; then - echo "Container $app created successfully" - post_update_to_api "success" 0 - return 0 - else - echo "Container $app creation failed" - post_update_to_api "failed" $? - return 1 - fi -} - -# Create containers -create_container_with_reporting "plex" "100" -create_container_with_reporting "nextcloud" "101" -``` - -### With vm-core.func - -```bash -#!/usr/bin/env bash -# Integration with vm-core.func - -source core.func -source api.func -source vm-core.func - -# Set up VM API reporting -mkdir -p /usr/local/community-scripts -echo "DIAGNOSTICS=yes" > /usr/local/community-scripts/diagnostics - -export RANDOM_UUID="$(uuidgen)" - -# VM creation with API reporting -create_vm_with_reporting() { - local app="$1" - local vmid="$2" - - # Set VM parameters - export APP="$app" - export VMID="$vmid" - export var_hostname="${app}-vm" - export var_os="ubuntu" - export var_version="22.04" - export var_cpu="4" - export var_ram="4096" - export var_disk="20" - - # Report VM installation start - post_to_api_vm - - # Create VM using vm-core.func - if source vm-core.func; then - echo "VM $app created successfully" - post_update_to_api "success" 0 - return 0 - else - echo "VM $app creation failed" - post_update_to_api "failed" $? 
- return 1 - fi -} - -# Create VMs -create_vm_with_reporting "nextcloud" "200" -create_vm_with_reporting "wordpress" "201" -``` - -### With error_handler.func - -```bash -#!/usr/bin/env bash -# Integration with error_handler.func - -source core.func -source error_handler.func -source api.func - -# Enhanced error handling with API reporting -enhanced_error_handler() { - local exit_code=${1:-$?} - local command=${2:-${BASH_COMMAND:-unknown}} - - # Get error description from api.func - local error_msg=$(get_error_description $exit_code) - - # Display error information - echo "Error $exit_code: $error_msg" - echo "Command: $command" - - # Report error to API - export DIAGNOSTICS="yes" - export RANDOM_UUID="$(uuidgen)" - post_update_to_api "failed" $exit_code - - # Use standard error handler - error_handler $exit_code $command -} - -# Set up enhanced error handling -trap 'enhanced_error_handler' ERR - -# Test enhanced error handling -nonexistent_command -``` - -## Best Practices Examples - -### Comprehensive API Integration - -```bash -#!/usr/bin/env bash -# Comprehensive API integration example - -source core.func -source api.func - -# Set up comprehensive API reporting -setup_api_reporting() { - # Enable diagnostics - export DIAGNOSTICS="yes" - export RANDOM_UUID="$(uuidgen)" - - # Set common parameters - export CT_TYPE=1 - export DISK_SIZE=8 - export CORE_COUNT=2 - export RAM_SIZE=2048 - export var_os="debian" - export var_version="12" - export METHOD="install" - - echo "API reporting configured" -} - -# Installation with comprehensive reporting -install_with_comprehensive_reporting() { - local app="$1" - local ctid="$2" - - # Set up API reporting - setup_api_reporting - export NSAPP="$app" - - # Report installation start - post_to_api - - # Installation process - echo "Installing $app..." - - # Simulate installation steps - local steps=("Downloading" "Installing" "Configuring" "Starting") - for step in "${steps[@]}"; do - echo "$step $app..." 
- sleep 1 - done - - # Check installation result - if [[ $? -eq 0 ]]; then - echo "$app installation completed successfully" - post_update_to_api "success" 0 - return 0 - else - echo "$app installation failed" - post_update_to_api "failed" $? - return 1 - fi -} - -# Install multiple applications -apps=("plex" "nextcloud" "nginx" "mysql") -ctids=(100 101 102 103) - -for i in "${!apps[@]}"; do - install_with_comprehensive_reporting "${apps[$i]}" "${ctids[$i]}" - echo "---" -done -``` - -### Error Recovery with API Reporting - -```bash -#!/usr/bin/env bash -source api.func - -# Error recovery with API reporting -retry_with_api_reporting() { - local operation="$1" - local max_attempts=3 - local attempt=1 - - export DIAGNOSTICS="yes" - export RANDOM_UUID="$(uuidgen)" - - while [[ $attempt -le $max_attempts ]]; do - echo "Attempt $attempt of $max_attempts: $operation" - - if $operation; then - echo "Operation succeeded on attempt $attempt" - post_update_to_api "success" 0 - return 0 - else - local exit_code=$? - local error_msg=$(get_error_description $exit_code) - echo "Attempt $attempt failed: $error_msg" - - post_update_to_api "failed" $exit_code - - ((attempt++)) - - if [[ $attempt -le $max_attempts ]]; then - echo "Retrying in 5 seconds..." 
- sleep 5 - fi - fi - done - - echo "Operation failed after $max_attempts attempts" - return 1 -} - -# Usage -retry_with_api_reporting "apt-get update" -retry_with_api_reporting "apt-get install -y package" -``` - -### API Reporting with Logging - -```bash -#!/usr/bin/env bash -source api.func - -# API reporting with detailed logging -install_with_logging_and_api() { - local app="$1" - local log_file="/var/log/${app}_installation.log" - - # Set up API reporting - export DIAGNOSTICS="yes" - export RANDOM_UUID="$(uuidgen)" - export NSAPP="$app" - - # Start logging - exec > >(tee -a "$log_file") - exec 2>&1 - - echo "Starting $app installation at $(date)" - - # Report installation start - post_to_api - - # Installation process - echo "Installing $app..." - - # Simulate installation - if install_app "$app"; then - echo "$app installation completed successfully at $(date)" - post_update_to_api "success" 0 - return 0 - else - local exit_code=$? - local error_msg=$(get_error_description $exit_code) - echo "$app installation failed at $(date): $error_msg" - post_update_to_api "failed" $exit_code - return $exit_code - fi -} - -# Mock installation function -install_app() { - local app="$1" - echo "Installing $app..." - sleep 2 - return 0 -} - -# Install with logging and API reporting -install_with_logging_and_api "plex" -``` diff --git a/docs/misc/api.func/README.md b/docs/misc/api.func/README.md index 6cf90d23d..7bd39218a 100644 --- a/docs/misc/api.func/README.md +++ b/docs/misc/api.func/README.md @@ -2,22 +2,27 @@ ## Overview -The `api.func` file provides Proxmox API integration and diagnostic reporting functionality for the Community Scripts project. It handles API communication, error reporting, and status updates to the community-scripts.org API. +The `api.func` file provides PocketBase API integration and diagnostic reporting for the Community Scripts project. 
It handles telemetry communication, error reporting, and status updates to the PocketBase backend at `db.community-scripts.org`. ## Purpose and Use Cases -- **API Communication**: Send installation and status data to community-scripts.org API +- **API Communication**: Send installation and status data to PocketBase - **Diagnostic Reporting**: Report installation progress and errors for analytics -- **Error Description**: Provide detailed error code explanations +- **Error Description**: Provide detailed error code explanations (canonical source of truth) - **Status Updates**: Track installation success/failure status - **Analytics**: Contribute anonymous usage data for project improvement ## Quick Reference ### Key Function Groups -- **Error Handling**: `get_error_description()` - Convert exit codes to human-readable messages -- **API Communication**: `post_to_api()`, `post_to_api_vm()` - Send installation data -- **Status Updates**: `post_update_to_api()` - Report installation completion status +- **Error Handling**: `explain_exit_code()` - Convert exit codes to human-readable messages +- **API Communication**: `post_to_api()`, `post_to_api_vm()` - Send installation data to PocketBase +- **Status Updates**: `post_update_to_api()` - Report installation completion status via PATCH + +### PocketBase Configuration +- **URL**: `http://db.community-scripts.org` +- **Collection**: `_dev_telemetry_data` +- **API Endpoint**: `/api/collections/_dev_telemetry_data/records` ### Dependencies - **External**: `curl` command for HTTP requests @@ -26,7 +31,7 @@ The `api.func` file provides Proxmox API integration and diagnostic reporting fu ### Integration Points - Used by: All installation scripts for diagnostic reporting - Uses: Environment variables from build.func and other scripts -- Provides: API communication and error reporting services +- Provides: API communication, error reporting, and exit code descriptions ## Documentation Files @@ -44,17 +49,18 @@ How api.func 
integrates with other components and provides API services. ## Key Features -### Error Code Descriptions -- **Comprehensive Coverage**: 50+ error codes with detailed explanations -- **LXC-Specific Errors**: Container creation and management errors -- **System Errors**: General system and network errors +### Exit Code Descriptions +- **Canonical source**: Single authoritative `explain_exit_code()` for the entire project +- **Non-overlapping ranges**: Clean separation between error categories +- **Comprehensive Coverage**: 60+ error codes with detailed explanations +- **System Errors**: General system, curl, and network errors - **Signal Errors**: Process termination and signal errors -### API Communication -- **LXC Reporting**: Send LXC container installation data -- **VM Reporting**: Send VM installation data -- **Status Updates**: Report installation success/failure -- **Diagnostic Data**: Anonymous usage analytics +### PocketBase Integration +- **Record Creation**: POST to create telemetry records with status `installing` +- **Record Updates**: PATCH to update with final status, exit code, and error +- **ID Tracking**: Stores `PB_RECORD_ID` for efficient updates +- **Fallback Lookup**: Searches by `random_id` filter if record ID is lost ### Diagnostic Integration - **Optional Reporting**: Only sends data when diagnostics enabled @@ -67,15 +73,13 @@ How api.func integrates with other components and provides API services. ### Basic API Setup ```bash #!/usr/bin/env bash -# Basic API setup - source api.func # Set up diagnostic reporting export DIAGNOSTICS="yes" -export RANDOM_UUID="$(uuidgen)" +export RANDOM_UUID="$(cat /proc/sys/kernel/random/uuid)" -# Report installation start +# Report installation start (creates PocketBase record) post_to_api ``` @@ -85,9 +89,9 @@ post_to_api source api.func # Get error description -error_msg=$(get_error_description 127) -echo "Error 127: $error_msg" -# Output: Error 127: Command not found: Incorrect path or missing dependency. 
+error_msg=$(explain_exit_code 137) +echo "Error 137: $error_msg" +# Output: Error 137: Killed (SIGKILL / Out of memory?) ``` ### Status Updates @@ -96,9 +100,9 @@ echo "Error 127: $error_msg" source api.func # Report successful installation -post_update_to_api "success" 0 +post_update_to_api "done" 0 -# Report failed installation +# Report failed installation with exit code post_update_to_api "failed" 127 ``` @@ -106,7 +110,7 @@ post_update_to_api "failed" 127 ### Required Variables - `DIAGNOSTICS`: Enable/disable diagnostic reporting ("yes"/"no") -- `RANDOM_UUID`: Unique identifier for tracking +- `RANDOM_UUID`: Unique identifier for session tracking ### Optional Variables - `CT_TYPE`: Container type (1 for LXC, 2 for VM) @@ -115,33 +119,31 @@ post_update_to_api "failed" 127 - `RAM_SIZE`: RAM size in MB - `var_os`: Operating system type - `var_version`: OS version -- `DISABLEIP6`: IPv6 disable setting -- `NSAPP`: Namespace application name +- `NSAPP`: Application name - `METHOD`: Installation method ### Internal Variables - `POST_UPDATE_DONE`: Prevents duplicate status updates -- `API_URL`: Community scripts API endpoint -- `JSON_PAYLOAD`: API request payload -- `RESPONSE`: API response +- `PB_URL`: PocketBase base URL +- `PB_API_URL`: Full API endpoint URL +- `PB_RECORD_ID`: Stored PocketBase record ID for updates -## Error Code Categories +## Error Code Categories (Non-Overlapping Ranges) -### General System Errors -- **0-9**: Basic system errors -- **18, 22, 28, 35**: Network and I/O errors -- **56, 60**: TLS/SSL errors -- **125-128**: Command execution errors -- **129-143**: Signal errors -- **152**: Resource limit errors -- **255**: Unknown critical errors - -### LXC-Specific Errors -- **100-101**: LXC installation errors -- **200-209**: LXC creation and management errors - -### Docker Errors -- **125**: Docker container start errors +| Range | Category | +|-------|----------| +| 1-2 | Generic shell errors | +| 6-35 | curl/wget network errors | +| 100-102 | 
APT/DPKG package errors | +| 124-143 | Command execution & signal errors | +| 150-154 | Systemd/service errors | +| 160-162 | Python/pip/uv errors | +| 170-173 | PostgreSQL errors | +| 180-183 | MySQL/MariaDB errors | +| 190-193 | MongoDB errors | +| 200-231 | Proxmox custom codes | +| 243-249 | Node.js/npm errors | +| 255 | DPKG fatal error | ## Best Practices @@ -152,48 +154,56 @@ post_update_to_api "failed" 127 4. Report both success and failure cases ### Error Handling -1. Use appropriate error codes -2. Provide meaningful error descriptions +1. Use the correct non-overlapping exit code ranges +2. Use `explain_exit_code()` from api.func (canonical source) 3. Handle API communication failures gracefully 4. Don't block installation on API failures ### API Usage -1. Check for curl availability -2. Handle network failures gracefully -3. Use appropriate HTTP methods -4. Include all required data +1. Check for curl availability before API calls +2. Handle network failures gracefully (all calls use `|| true`) +3. Store and reuse PB_RECORD_ID for updates +4. Use proper PocketBase REST methods (POST for create, PATCH for update) ## Troubleshooting ### Common Issues 1. **API Communication Fails**: Check network connectivity and curl availability -2. **Diagnostics Not Working**: Verify DIAGNOSTICS setting and RANDOM_UUID -3. **Missing Error Descriptions**: Check error code coverage -4. **Duplicate Updates**: POST_UPDATE_DONE prevents duplicates +2. **Diagnostics Not Working**: Verify `DIAGNOSTICS=yes` in `/usr/local/community-scripts/diagnostics` +3. **Status Update Fails**: Check that `PB_RECORD_ID` was captured or `random_id` filter works +4. 
**Duplicate Updates**: `POST_UPDATE_DONE` flag prevents duplicates ### Debug Mode Enable diagnostic reporting for debugging: ```bash export DIAGNOSTICS="yes" -export RANDOM_UUID="$(uuidgen)" +export RANDOM_UUID="$(cat /proc/sys/kernel/random/uuid)" ``` ### API Testing -Test API communication: +Test PocketBase connectivity: +```bash +curl -s http://db.community-scripts.org/api/health +``` + +Test record creation: ```bash source api.func export DIAGNOSTICS="yes" export RANDOM_UUID="test-$(date +%s)" +export NSAPP="test" +export CT_TYPE=1 post_to_api +echo "Record ID: $PB_RECORD_ID" ``` ## Related Documentation -- [core.func](../core.func/) - Core utilities and error handling -- [error_handler.func](../error_handler.func/) - Error handling utilities +- [core.func](../core.func/) - Core utilities +- [error_handler.func](../error_handler.func/) - Error handling (fallback `explain_exit_code`) - [build.func](../build.func/) - Container creation with API integration -- [tools.func](../tools.func/) - Extended utilities with API integration +- [tools.func](../tools.func/) - Extended utilities --- -*This documentation covers the api.func file which provides API communication and diagnostic reporting for all Proxmox Community Scripts.* +*This documentation covers the api.func file which provides PocketBase communication and diagnostic reporting for all Proxmox Community Scripts.* diff --git a/misc/api.func b/misc/api.func index 657d786ba..4c323a954 100644 --- a/misc/api.func +++ b/misc/api.func @@ -3,11 +3,11 @@ # License: MIT | https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/LICENSE # ============================================================================== -# API.FUNC - TELEMETRY & DIAGNOSTICS API +# API.FUNC - TELEMETRY & DIAGNOSTICS API (PocketBase) # ============================================================================== # -# Provides functions for sending anonymous telemetry data to Community-Scripts -# API for analytics and diagnostics 
purposes. +# Provides functions for sending anonymous telemetry data to PocketBase +# backend at db.community-scripts.org for analytics and diagnostics. # # Features: # - Container/VM creation statistics @@ -18,6 +18,7 @@ # Usage: # source <(curl -fsSL .../api.func) # post_to_api # Report container creation +# post_to_api_vm # Report VM creation # post_update_to_api # Report installation status # # Privacy: @@ -27,6 +28,16 @@ # # ============================================================================== +# ============================================================================== +# PocketBase Configuration +# ============================================================================== +PB_URL="http://db.community-scripts.org" +PB_COLLECTION="_dev_telemetry_data" +PB_API_URL="${PB_URL}/api/collections/${PB_COLLECTION}/records" + +# Store PocketBase record ID for update operations +PB_RECORD_ID="" + # ============================================================================== # SECTION 1: ERROR CODE DESCRIPTIONS # ============================================================================== @@ -35,6 +46,8 @@ # explain_exit_code() # # - Maps numeric exit codes to human-readable error descriptions +# - Canonical source of truth for ALL exit code mappings +# - Used by both api.func (telemetry) and error_handler.func (error display) # - Supports: # * Generic/Shell errors (1, 2, 124, 126-130, 134, 137, 139, 141, 143) # * curl/wget errors (6, 7, 22, 28, 35) @@ -47,7 +60,6 @@ # * Proxmox custom codes (200-231) # * Node.js/npm errors (243, 245-249) # - Returns description string for given exit code -# - Shared function with error_handler.func for consistency # ------------------------------------------------------------------------------ explain_exit_code() { local code="$1" @@ -160,7 +172,8 @@ explain_exit_code() { # ------------------------------------------------------------------------------ # post_to_api() # -# - Sends LXC container creation statistics to 
Community-Scripts API +# - Sends LXC container creation statistics to PocketBase +# - Creates a new record in the _dev_telemetry_data collection # - Only executes if: # * curl is available # * DIAGNOSTICS=yes @@ -168,62 +181,71 @@ explain_exit_code() { # - Payload includes: # * Container type, disk size, CPU cores, RAM # * OS type and version -# * IPv6 disable status # * Application name (NSAPP) # * Installation method # * PVE version # * Status: "installing" # * Random UUID for session tracking +# - Stores PB_RECORD_ID for later updates # - Anonymous telemetry (no personal data) # ------------------------------------------------------------------------------ post_to_api() { - if ! command -v curl &>/dev/null; then return fi - if [ "$DIAGNOSTICS" = "no" ]; then + if [[ "${DIAGNOSTICS:-no}" == "no" ]]; then return fi - if [ -z "$RANDOM_UUID" ]; then + if [[ -z "${RANDOM_UUID:-}" ]]; then return fi - local API_URL="http://api.community-scripts.org/dev/upload" local pve_version="not found" - pve_version=$(pveversion | awk -F'[/ ]' '{print $2}') + if command -v pveversion &>/dev/null; then + pve_version=$(pveversion | awk -F'[/ ]' '{print $2}') + fi + local JSON_PAYLOAD JSON_PAYLOAD=$( cat </dev/null) || true + + # Extract PocketBase record ID from response for later updates + local http_code body + http_code=$(echo "$RESPONSE" | tail -n1) + body=$(echo "$RESPONSE" | sed '$d') + + if [[ "$http_code" == "200" ]] || [[ "$http_code" == "201" ]]; then + PB_RECORD_ID=$(echo "$body" | grep -o '"id":"[^"]*"' | head -1 | cut -d'"' -f4) || true + fi } # ------------------------------------------------------------------------------ # post_to_api_vm() # -# - Sends VM creation statistics to Community-Scripts API +# - Sends VM creation statistics to PocketBase # - Similar to post_to_api() but for virtual machines (not containers) # - Reads DIAGNOSTICS from /usr/local/community-scripts/diagnostics file # - Payload differences: @@ -233,66 +255,78 @@ EOF # - Only executes if 
DIAGNOSTICS=yes and RANDOM_UUID is set # ------------------------------------------------------------------------------ post_to_api_vm() { - if [[ ! -f /usr/local/community-scripts/diagnostics ]]; then return fi DIAGNOSTICS=$(grep -i "^DIAGNOSTICS=" /usr/local/community-scripts/diagnostics | awk -F'=' '{print $2}') + if ! command -v curl &>/dev/null; then return fi - if [ "$DIAGNOSTICS" = "no" ]; then + if [[ "${DIAGNOSTICS:-no}" == "no" ]]; then return fi - if [ -z "$RANDOM_UUID" ]; then + if [[ -z "${RANDOM_UUID:-}" ]]; then return fi - local API_URL="http://api.community-scripts.org/dev/upload" local pve_version="not found" - pve_version=$(pveversion | awk -F'[/ ]' '{print $2}') + if command -v pveversion &>/dev/null; then + pve_version=$(pveversion | awk -F'[/ ]' '{print $2}') + fi - DISK_SIZE_API=${DISK_SIZE%G} + local DISK_SIZE_API="${DISK_SIZE%G}" + local JSON_PAYLOAD JSON_PAYLOAD=$( cat </dev/null) || true + + # Extract PocketBase record ID from response for later updates + local http_code body + http_code=$(echo "$RESPONSE" | tail -n1) + body=$(echo "$RESPONSE" | sed '$d') + + if [[ "$http_code" == "200" ]] || [[ "$http_code" == "201" ]]; then + PB_RECORD_ID=$(echo "$body" | grep -o '"id":"[^"]*"' | head -1 | cut -d'"' -f4) || true fi } # ------------------------------------------------------------------------------ # post_update_to_api() # -# - Reports installation completion status to API +# - Reports installation completion status to PocketBase via PATCH # - Prevents duplicate submissions via POST_UPDATE_DONE flag # - Arguments: -# * $1: status ("success" or "failed") -# * $2: exit_code (default: 1 for failed, 0 for success) +# * $1: status ("done" or "failed") +# * $2: exit_code (numeric, default: 1 for failed, 0 for done) +# - Uses PB_RECORD_ID if available, otherwise looks up by random_id # - Payload includes: -# * Final status (success/failed) -# * Error description via get_error_description() -# * Random UUID for session correlation +# * Final 
status (mapped: "done"β†’"sucess", "failed"β†’"failed") +# * Error description via explain_exit_code() +# * Numeric exit code # - Only executes once per session # - Silently returns if: # * curl not available @@ -300,7 +334,6 @@ EOF # * DIAGNOSTICS=no # ------------------------------------------------------------------------------ post_update_to_api() { - if ! command -v curl &>/dev/null; then return fi @@ -308,42 +341,79 @@ post_update_to_api() { # Initialize flag if not set (prevents 'unbound variable' error with set -u) POST_UPDATE_DONE=${POST_UPDATE_DONE:-false} - if [ "$POST_UPDATE_DONE" = true ]; then + if [[ "$POST_UPDATE_DONE" == "true" ]]; then return 0 fi - exit_code=${2:-1} - local API_URL="http://api.community-scripts.org/dev/upload/updatestatus" + + if [[ "${DIAGNOSTICS:-no}" == "no" ]]; then + return + fi + + if [[ -z "${RANDOM_UUID:-}" ]]; then + return + fi + local status="${1:-failed}" - if [[ "$status" == "failed" ]]; then - local exit_code="${2:-1}" - elif [[ "$status" == "success" ]]; then - local exit_code="${2:-0}" + local raw_exit_code="${2:-1}" + local exit_code error pb_status + + # Map status to PocketBase select values: installing, sucess, failed, unknown + case "$status" in + done | success | sucess) + pb_status="sucess" + exit_code=0 + error="" + ;; + failed) pb_status="failed" ;; + *) pb_status="unknown" ;; + esac + + # For failed status, resolve exit code and error description + if [[ "$pb_status" == "failed" ]] || [[ "$pb_status" == "unknown" ]]; then + # If exit_code is numeric, use it; otherwise default to 1 + if [[ "$raw_exit_code" =~ ^[0-9]+$ ]]; then + exit_code="$raw_exit_code" + else + exit_code=1 + fi + error=$(explain_exit_code "$exit_code") + if [[ -z "$error" ]]; then + error="Unknown error" + fi fi - if [[ -z "$exit_code" ]]; then - exit_code=1 - fi - - error=$(explain_exit_code "$exit_code") - - if [ -z "$error" ]; then - error="Unknown error" + # Resolve PocketBase record ID if not already known + local 
record_id="${PB_RECORD_ID:-}" + + if [[ -z "$record_id" ]]; then + # Look up record by random_id filter + local lookup_url="${PB_API_URL}?filter=(random_id='${RANDOM_UUID}')&fields=id&perPage=1" + local lookup_response + lookup_response=$(curl -s -L "${lookup_url}" 2>/dev/null) || true + + record_id=$(echo "$lookup_response" | grep -o '"id":"[^"]*"' | head -1 | cut -d'"' -f4) || true + + if [[ -z "$record_id" ]]; then + POST_UPDATE_DONE=true + return + fi fi + local JSON_PAYLOAD JSON_PAYLOAD=$( cat </dev/null || true POST_UPDATE_DONE=true } diff --git a/misc/build.func b/misc/build.func index 6a73e688c..ff6908d3a 100644 --- a/misc/build.func +++ b/misc/build.func @@ -5157,9 +5157,9 @@ EOF # api_exit_script() # # - Exit trap handler for reporting to API telemetry -# - Captures exit code and reports to API using centralized error descriptions -# - Uses explain_exit_code() from error_handler.func for consistent error messages -# - Posts failure status with exit code to API (error description added automatically) +# - Captures exit code and reports to PocketBase using centralized error descriptions +# - Uses explain_exit_code() from api.func for consistent error messages +# - Posts failure status with exit code to API (error description resolved automatically) # - Only executes on non-zero exit codes # ------------------------------------------------------------------------------ api_exit_script() { @@ -5172,6 +5172,6 @@ api_exit_script() { if command -v pveversion >/dev/null 2>&1; then trap 'api_exit_script' EXIT fi -trap 'post_update_to_api "failed" "$BASH_COMMAND"' ERR -trap 'post_update_to_api "failed" "INTERRUPTED"' SIGINT -trap 'post_update_to_api "failed" "TERMINATED"' SIGTERM +trap 'post_update_to_api "failed" "$?"' ERR +trap 'post_update_to_api "failed" "130"' SIGINT +trap 'post_update_to_api "failed" "143"' SIGTERM diff --git a/misc/error_handler.func b/misc/error_handler.func index 9af40f0fa..7fec71817 100644 --- a/misc/error_handler.func +++ 
b/misc/error_handler.func @@ -27,100 +27,90 @@ # ------------------------------------------------------------------------------ # explain_exit_code() # -# - Maps numeric exit codes to human-readable error descriptions -# - Supports: -# * Generic/Shell errors (1, 2, 126, 127, 128, 130, 137, 139, 143) -# * Package manager errors (APT, DPKG: 100, 101, 255) -# * Node.js/npm errors (243-249, 254) -# * Python/pip/uv errors (210-212) -# * PostgreSQL errors (231-234) -# * MySQL/MariaDB errors (241-244) -# * MongoDB errors (251-254) -# * Proxmox custom codes (200-231) -# - Returns description string for given exit code +# - Canonical version is defined in api.func (sourced before this file) +# - This section only provides a fallback if api.func was not loaded +# - See api.func SECTION 1 for the authoritative exit code mappings # ------------------------------------------------------------------------------ -explain_exit_code() { - local code="$1" - case "$code" in - # --- Generic / Shell --- - 1) echo "General error / Operation not permitted" ;; - 2) echo "Misuse of shell builtins (e.g. 
syntax error)" ;; - 126) echo "Command invoked cannot execute (permission problem?)" ;; - 127) echo "Command not found" ;; - 128) echo "Invalid argument to exit" ;; - 130) echo "Terminated by Ctrl+C (SIGINT)" ;; - 137) echo "Killed (SIGKILL / Out of memory?)" ;; - 139) echo "Segmentation fault (core dumped)" ;; - 143) echo "Terminated (SIGTERM)" ;; - - # --- Package manager / APT / DPKG --- - 100) echo "APT: Package manager error (broken packages / dependency problems)" ;; - 101) echo "APT: Configuration error (bad sources.list, malformed config)" ;; - 255) echo "DPKG: Fatal internal error" ;; - - # --- Node.js / npm / pnpm / yarn --- - 243) echo "Node.js: Out of memory (JavaScript heap out of memory)" ;; - 245) echo "Node.js: Invalid command-line option" ;; - 246) echo "Node.js: Internal JavaScript Parse Error" ;; - 247) echo "Node.js: Fatal internal error" ;; - 248) echo "Node.js: Invalid C++ addon / N-API failure" ;; - 249) echo "Node.js: Inspector error" ;; - 254) echo "npm/pnpm/yarn: Unknown fatal error" ;; - - # --- Python / pip / uv --- - 210) echo "Python: Virtualenv / uv environment missing or broken" ;; - 211) echo "Python: Dependency resolution failed" ;; - 212) echo "Python: Installation aborted (permissions or EXTERNALLY-MANAGED)" ;; - - # --- PostgreSQL --- - 231) echo "PostgreSQL: Connection failed (server not running / wrong socket)" ;; - 232) echo "PostgreSQL: Authentication failed (bad user/password)" ;; - 233) echo "PostgreSQL: Database does not exist" ;; - 234) echo "PostgreSQL: Fatal error in query / syntax" ;; - - # --- MySQL / MariaDB --- - 241) echo "MySQL/MariaDB: Connection failed (server not running / wrong socket)" ;; - 242) echo "MySQL/MariaDB: Authentication failed (bad user/password)" ;; - 243) echo "MySQL/MariaDB: Database does not exist" ;; - 244) echo "MySQL/MariaDB: Fatal error in query / syntax" ;; - - # --- MongoDB --- - 251) echo "MongoDB: Connection failed (server not running)" ;; - 252) echo "MongoDB: Authentication failed 
(bad user/password)" ;; - 253) echo "MongoDB: Database not found" ;; - 254) echo "MongoDB: Fatal query error" ;; - - # --- Proxmox Custom Codes --- - 200) echo "Proxmox: Failed to create lock file" ;; - 203) echo "Proxmox: Missing CTID variable" ;; - 204) echo "Proxmox: Missing PCT_OSTYPE variable" ;; - 205) echo "Proxmox: Invalid CTID (<100)" ;; - 206) echo "Proxmox: CTID already in use" ;; - 207) echo "Proxmox: Password contains unescaped special characters" ;; - 208) echo "Proxmox: Invalid configuration (DNS/MAC/Network format)" ;; - 209) echo "Proxmox: Container creation failed" ;; - 210) echo "Proxmox: Cluster not quorate" ;; - 211) echo "Proxmox: Timeout waiting for template lock" ;; - 212) echo "Proxmox: Storage type 'iscsidirect' does not support containers (VMs only)" ;; - 213) echo "Proxmox: Storage type does not support 'rootdir' content" ;; - 214) echo "Proxmox: Not enough storage space" ;; - 215) echo "Proxmox: Container created but not listed (ghost state)" ;; - 216) echo "Proxmox: RootFS entry missing in config" ;; - 217) echo "Proxmox: Storage not accessible" ;; - 219) echo "Proxmox: CephFS does not support containers - use RBD" ;; - 224) echo "Proxmox: PBS storage is for backups only" ;; - 218) echo "Proxmox: Template file corrupted or incomplete" ;; - 220) echo "Proxmox: Unable to resolve template path" ;; - 221) echo "Proxmox: Template file not readable" ;; - 222) echo "Proxmox: Template download failed" ;; - 223) echo "Proxmox: Template not available after download" ;; - 225) echo "Proxmox: No template available for OS/Version" ;; - 231) echo "Proxmox: LXC stack upgrade failed" ;; - - # --- Default --- - *) echo "Unknown error" ;; - esac -} +if ! declare -f explain_exit_code &>/dev/null; then + explain_exit_code() { + local code="$1" + case "$code" in + 1) echo "General error / Operation not permitted" ;; + 2) echo "Misuse of shell builtins (e.g. 
syntax error)" ;; + 6) echo "curl: DNS resolution failed (could not resolve host)" ;; + 7) echo "curl: Failed to connect (network unreachable / host down)" ;; + 22) echo "curl: HTTP error returned (404, 429, 500+)" ;; + 28) echo "curl: Operation timeout (network slow or server not responding)" ;; + 35) echo "curl: SSL/TLS handshake failed (certificate error)" ;; + 100) echo "APT: Package manager error (broken packages / dependency problems)" ;; + 101) echo "APT: Configuration error (bad sources.list, malformed config)" ;; + 102) echo "APT: Lock held by another process (dpkg/apt still running)" ;; + 124) echo "Command timed out (timeout command)" ;; + 126) echo "Command invoked cannot execute (permission problem?)" ;; + 127) echo "Command not found" ;; + 128) echo "Invalid argument to exit" ;; + 130) echo "Terminated by Ctrl+C (SIGINT)" ;; + 134) echo "Process aborted (SIGABRT - possibly Node.js heap overflow)" ;; + 137) echo "Killed (SIGKILL / Out of memory?)" ;; + 139) echo "Segmentation fault (core dumped)" ;; + 141) echo "Broken pipe (SIGPIPE - output closed prematurely)" ;; + 143) echo "Terminated (SIGTERM)" ;; + 150) echo "Systemd: Service failed to start" ;; + 151) echo "Systemd: Service unit not found" ;; + 152) echo "Permission denied (EACCES)" ;; + 153) echo "Build/compile failed (make/gcc/cmake)" ;; + 154) echo "Node.js: Native addon build failed (node-gyp)" ;; + 160) echo "Python: Virtualenv / uv environment missing or broken" ;; + 161) echo "Python: Dependency resolution failed" ;; + 162) echo "Python: Installation aborted (permissions or EXTERNALLY-MANAGED)" ;; + 170) echo "PostgreSQL: Connection failed (server not running / wrong socket)" ;; + 171) echo "PostgreSQL: Authentication failed (bad user/password)" ;; + 172) echo "PostgreSQL: Database does not exist" ;; + 173) echo "PostgreSQL: Fatal error in query / syntax" ;; + 180) echo "MySQL/MariaDB: Connection failed (server not running / wrong socket)" ;; + 181) echo "MySQL/MariaDB: Authentication 
failed (bad user/password)" ;; + 182) echo "MySQL/MariaDB: Database does not exist" ;; + 183) echo "MySQL/MariaDB: Fatal error in query / syntax" ;; + 190) echo "MongoDB: Connection failed (server not running)" ;; + 191) echo "MongoDB: Authentication failed (bad user/password)" ;; + 192) echo "MongoDB: Database not found" ;; + 193) echo "MongoDB: Fatal query error" ;; + 200) echo "Proxmox: Failed to create lock file" ;; + 203) echo "Proxmox: Missing CTID variable" ;; + 204) echo "Proxmox: Missing PCT_OSTYPE variable" ;; + 205) echo "Proxmox: Invalid CTID (<100)" ;; + 206) echo "Proxmox: CTID already in use" ;; + 207) echo "Proxmox: Password contains unescaped special characters" ;; + 208) echo "Proxmox: Invalid configuration (DNS/MAC/Network format)" ;; + 209) echo "Proxmox: Container creation failed" ;; + 210) echo "Proxmox: Cluster not quorate" ;; + 211) echo "Proxmox: Timeout waiting for template lock" ;; + 212) echo "Proxmox: Storage type 'iscsidirect' does not support containers (VMs only)" ;; + 213) echo "Proxmox: Storage type does not support 'rootdir' content" ;; + 214) echo "Proxmox: Not enough storage space" ;; + 215) echo "Proxmox: Container created but not listed (ghost state)" ;; + 216) echo "Proxmox: RootFS entry missing in config" ;; + 217) echo "Proxmox: Storage not accessible" ;; + 218) echo "Proxmox: Template file corrupted or incomplete" ;; + 219) echo "Proxmox: CephFS does not support containers - use RBD" ;; + 220) echo "Proxmox: Unable to resolve template path" ;; + 221) echo "Proxmox: Template file not readable" ;; + 222) echo "Proxmox: Template download failed" ;; + 223) echo "Proxmox: Template not available after download" ;; + 224) echo "Proxmox: PBS storage is for backups only" ;; + 225) echo "Proxmox: No template available for OS/Version" ;; + 231) echo "Proxmox: LXC stack upgrade failed" ;; + 243) echo "Node.js: Out of memory (JavaScript heap out of memory)" ;; + 245) echo "Node.js: Invalid command-line option" ;; + 246) echo "Node.js: 
Internal JavaScript Parse Error" ;; + 247) echo "Node.js: Fatal internal error" ;; + 248) echo "Node.js: Invalid C++ addon / N-API failure" ;; + 249) echo "npm/pnpm/yarn: Unknown fatal error" ;; + 255) echo "DPKG: Fatal internal error" ;; + *) echo "Unknown error" ;; + esac + } +fi # ============================================================================== # SECTION 2: ERROR HANDLERS diff --git a/misc/ingest.go b/misc/ingest.go new file mode 100644 index 000000000..e69de29bb From 0e16e3fd633e1e63c49e301ac4ec0bfec3e6a169 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 15:34:22 +0100 Subject: [PATCH 30/87] Update ingest.go --- misc/ingest.go | 566 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 566 insertions(+) diff --git a/misc/ingest.go b/misc/ingest.go index e69de29bb..99f322121 100644 --- a/misc/ingest.go +++ b/misc/ingest.go @@ -0,0 +1,566 @@ +package main + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "io" + "log" + "net" + "net/http" + "os" + "strings" + "sync" + "time" +) + +type Config struct { + ListenAddr string + TrustedProxiesCIDR []string + + // PocketBase + PBBaseURL string + PBAuthCollection string // "_dev_telemetry_service" + PBIdentity string // email + PBPassword string + PBTargetColl string // "_dev_telemetry_data" + + // Limits + MaxBodyBytes int64 + RateLimitRPM int // requests per minute per key + RateBurst int // burst tokens + RateKeyMode string // "ip" or "header" + RateKeyHeader string // e.g. 
"X-Telemetry-Key" + RequestTimeout time.Duration // upstream timeout + EnableReqLogging bool // default false (GDPR-friendly) +} + +type TelemetryIn struct { + Script string `json:"script"` + Version string `json:"version"` + Event string `json:"event"` + OsType string `json:"os_type"` + OsVersion string `json:"os_version,omitempty"` + PveVer string `json:"pve_version,omitempty"` + Arch string `json:"arch"` + Method string `json:"method,omitempty"` + Status string `json:"status,omitempty"` + ExitCode int `json:"exit_code,omitempty"` + Error string `json:"error,omitempty"` // must be sanitized/short +} + +type TelemetryOut struct { + Script string `json:"script"` + Version string `json:"version"` + Event string `json:"event"` + OsType string `json:"os_type"` + OsVersion string `json:"os_version,omitempty"` + PveVer string `json:"pve_version,omitempty"` + Arch string `json:"arch"` + Method string `json:"method,omitempty"` + Status string `json:"status,omitempty"` + ExitCode int `json:"exit_code,omitempty"` + Error string `json:"error,omitempty"` + + TS int64 `json:"ts"` + IngestDay string `json:"ingest_day"` + Hash string `json:"hash"` +} + +type PBClient struct { + baseURL string + authCollection string + identity string + password string + targetColl string + + mu sync.Mutex + token string + exp time.Time + http *http.Client +} + +func NewPBClient(cfg Config) *PBClient { + return &PBClient{ + baseURL: strings.TrimRight(cfg.PBBaseURL, "/"), + authCollection: cfg.PBAuthCollection, + identity: cfg.PBIdentity, + password: cfg.PBPassword, + targetColl: cfg.PBTargetColl, + http: &http.Client{ + Timeout: cfg.RequestTimeout, + }, + } +} + +func (p *PBClient) ensureAuth(ctx context.Context) error { + p.mu.Lock() + defer p.mu.Unlock() + + // refresh if token missing or expiring soon + if p.token != "" && time.Until(p.exp) > 60*time.Second { + return nil + } + + body := map[string]string{ + "identity": p.identity, + "password": p.password, + } + b, _ := json.Marshal(body) + 
req, err := http.NewRequestWithContext(ctx, http.MethodPost, + fmt.Sprintf("%s/api/collections/%s/auth-with-password", p.baseURL, p.authCollection), + bytes.NewReader(b), + ) + if err != nil { + return err + } + req.Header.Set("Content-Type", "application/json") + + resp, err := p.http.Do(req) + if err != nil { + return err + } + defer resp.Body.Close() + + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + rb, _ := io.ReadAll(io.LimitReader(resp.Body, 4<<10)) + return fmt.Errorf("pocketbase auth failed: %s: %s", resp.Status, strings.TrimSpace(string(rb))) + } + + var out struct { + Token string `json:"token"` + // record omitted + } + if err := json.NewDecoder(resp.Body).Decode(&out); err != nil { + return err + } + if out.Token == "" { + return errors.New("pocketbase auth token missing") + } + + // PocketBase JWT exp can be parsed, but keep it simple: set 50 min + p.token = out.Token + p.exp = time.Now().Add(50 * time.Minute) + return nil +} + +func (p *PBClient) CreateTelemetry(ctx context.Context, payload TelemetryOut) error { + if err := p.ensureAuth(ctx); err != nil { + return err + } + + b, _ := json.Marshal(payload) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, + fmt.Sprintf("%s/api/collections/%s/records", p.baseURL, p.targetColl), + bytes.NewReader(b), + ) + if err != nil { + return err + } + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Authorization", "Bearer "+p.token) + + resp, err := p.http.Do(req) + if err != nil { + return err + } + defer resp.Body.Close() + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + rb, _ := io.ReadAll(io.LimitReader(resp.Body, 8<<10)) + return fmt.Errorf("pocketbase create failed: %s: %s", resp.Status, strings.TrimSpace(string(rb))) + } + return nil +} + +// -------- Rate limiter (token bucket / minute window, simple) -------- + +type bucket struct { + tokens int + reset time.Time +} + +type RateLimiter struct { + mu sync.Mutex + buckets map[string]*bucket + rpm int + burst 
int + window time.Duration + cleanInt time.Duration +} + +func NewRateLimiter(rpm, burst int) *RateLimiter { + rl := &RateLimiter{ + buckets: make(map[string]*bucket), + rpm: rpm, + burst: burst, + window: time.Minute, + cleanInt: 5 * time.Minute, + } + go rl.cleanupLoop() + return rl +} + +func (r *RateLimiter) cleanupLoop() { + t := time.NewTicker(r.cleanInt) + defer t.Stop() + for range t.C { + now := time.Now() + r.mu.Lock() + for k, b := range r.buckets { + if now.After(b.reset.Add(2 * r.window)) { + delete(r.buckets, k) + } + } + r.mu.Unlock() + } +} + +func (r *RateLimiter) Allow(key string) bool { + if r.rpm <= 0 { + return true + } + now := time.Now() + r.mu.Lock() + defer r.mu.Unlock() + + b, ok := r.buckets[key] + if !ok || now.After(b.reset) { + r.buckets[key] = &bucket{tokens: min(r.burst, r.rpm), reset: now.Add(r.window)} + b = r.buckets[key] + } + if b.tokens <= 0 { + return false + } + b.tokens-- + return true +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +// -------- Utility: GDPR-safe key extraction -------- + +type ProxyTrust struct { + nets []*net.IPNet +} + +func NewProxyTrust(cidrs []string) (*ProxyTrust, error) { + var nets []*net.IPNet + for _, c := range cidrs { + _, n, err := net.ParseCIDR(strings.TrimSpace(c)) + if err != nil { + return nil, err + } + nets = append(nets, n) + } + return &ProxyTrust{nets: nets}, nil +} + +func (pt *ProxyTrust) isTrusted(ip net.IP) bool { + for _, n := range pt.nets { + if n.Contains(ip) { + return true + } + } + return false +} + +func getClientIP(r *http.Request, pt *ProxyTrust) net.IP { + // If behind reverse proxy, trust X-Forwarded-For only if remote is trusted proxy. 
+ host, _, _ := net.SplitHostPort(r.RemoteAddr) + remote := net.ParseIP(host) + if remote == nil { + return nil + } + + if pt != nil && pt.isTrusted(remote) { + xff := r.Header.Get("X-Forwarded-For") + if xff != "" { + parts := strings.Split(xff, ",") + ip := net.ParseIP(strings.TrimSpace(parts[0])) + if ip != nil { + return ip + } + } + } + return remote +} + +// -------- Validation (strict allowlist) -------- + +var ( + allowedEvents = map[string]bool{"install": true, "update": true, "error": true} + allowedOsType = map[string]bool{"pve": true, "lxc": true, "vm": true, "debian": true, "ubuntu": true, "alpine": true} + allowedArch = map[string]bool{"amd64": true, "arm64": true} +) + +func sanitizeShort(s string, max int) string { + s = strings.TrimSpace(s) + if s == "" { + return "" + } + // remove line breaks and high-risk chars + s = strings.ReplaceAll(s, "\n", " ") + s = strings.ReplaceAll(s, "\r", " ") + if len(s) > max { + s = s[:max] + } + return s +} + +func validate(in *TelemetryIn) error { + in.Script = sanitizeShort(in.Script, 64) + in.Version = sanitizeShort(in.Version, 32) + in.Event = sanitizeShort(in.Event, 16) + in.OsType = sanitizeShort(in.OsType, 16) + in.Arch = sanitizeShort(in.Arch, 16) + in.Method = sanitizeShort(in.Method, 32) + in.Status = sanitizeShort(in.Status, 16) + in.OsVersion = sanitizeShort(in.OsVersion, 32) + in.PveVer = sanitizeShort(in.PveVer, 32) + + // IMPORTANT: "error" must be short and not contain identifiers/logs + in.Error = sanitizeShort(in.Error, 120) + + if in.Script == "" || in.Version == "" || in.Event == "" || in.OsType == "" || in.Arch == "" { + return errors.New("missing required fields") + } + if !allowedEvents[in.Event] { + return errors.New("invalid event") + } + if !allowedOsType[in.OsType] { + return errors.New("invalid os_type") + } + if !allowedArch[in.Arch] { + return errors.New("invalid arch") + } + // exit_code only relevant for error, but allow 0..255 + if in.ExitCode < 0 || in.ExitCode > 255 { + return 
errors.New("invalid exit_code") + } + return nil +} + +func computeHash(out TelemetryOut) string { + // hash over non-identifying fields (no IP) to enable dedupe if needed + key := fmt.Sprintf("%s|%s|%s|%s|%s|%s|%d", + out.Script, out.Version, out.Event, out.OsType, out.Arch, out.IngestDay, out.ExitCode, + ) + sum := sha256.Sum256([]byte(key)) + return hex.EncodeToString(sum[:]) +} + +// -------- HTTP server -------- + +func main() { + cfg := Config{ + ListenAddr: env("LISTEN_ADDR", ":8080"), + TrustedProxiesCIDR: splitCSV(env("TRUSTED_PROXIES_CIDR", "")), + + PBBaseURL: mustEnv("PB_URL"), + PBAuthCollection: env("PB_AUTH_COLLECTION", "_dev_telemetry_service"), + PBIdentity: mustEnv("PB_IDENTITY"), + PBPassword: mustEnv("PB_PASSWORD"), + PBTargetColl: env("PB_TARGET_COLLECTION", "_dev_telemetry_data"), + + MaxBodyBytes: envInt64("MAX_BODY_BYTES", 1024), + RateLimitRPM: envInt("RATE_LIMIT_RPM", 60), + RateBurst: envInt("RATE_BURST", 20), + RateKeyMode: env("RATE_KEY_MODE", "ip"), // "ip" or "header" + RateKeyHeader: env("RATE_KEY_HEADER", "X-Telemetry-Key"), + RequestTimeout: time.Duration(envInt("UPSTREAM_TIMEOUT_MS", 4000)) * time.Millisecond, + EnableReqLogging: envBool("ENABLE_REQUEST_LOGGING", false), + } + + var pt *ProxyTrust + if strings.TrimSpace(env("TRUSTED_PROXIES_CIDR", "")) != "" { + p, err := NewProxyTrust(cfg.TrustedProxiesCIDR) + if err != nil { + log.Fatalf("invalid TRUSTED_PROXIES_CIDR: %v", err) + } + pt = p + } + + pb := NewPBClient(cfg) + rl := NewRateLimiter(cfg.RateLimitRPM, cfg.RateBurst) + + mux := http.NewServeMux() + + mux.HandleFunc("/healthz", func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(200) + _, _ = w.Write([]byte("ok")) + }) + + mux.HandleFunc("/telemetry", func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "method not allowed", http.StatusMethodNotAllowed) + return + } + + // rate key: IP or header (header allows non-identifying keys, but header can be abused too) + 
var key string + switch cfg.RateKeyMode { + case "header": + key = strings.TrimSpace(r.Header.Get(cfg.RateKeyHeader)) + if key == "" { + key = "missing" + } + default: + ip := getClientIP(r, pt) + if ip == nil { + key = "unknown" + } else { + // GDPR: do NOT store IP anywhere permanent; use it only in-memory for RL key + key = ip.String() + } + } + if !rl.Allow(key) { + http.Error(w, "rate limited", http.StatusTooManyRequests) + return + } + + r.Body = http.MaxBytesReader(w, r.Body, cfg.MaxBodyBytes) + raw, err := io.ReadAll(r.Body) + if err != nil { + http.Error(w, "invalid body", http.StatusBadRequest) + return + } + + // strict JSON decode (no unknown fields) + var in TelemetryIn + dec := json.NewDecoder(bytes.NewReader(raw)) + dec.DisallowUnknownFields() + if err := dec.Decode(&in); err != nil { + http.Error(w, "invalid json", http.StatusBadRequest) + return + } + if err := validate(&in); err != nil { + http.Error(w, "invalid payload", http.StatusBadRequest) + return + } + + now := time.Now().UTC() + out := TelemetryOut{ + Script: in.Script, + Version: in.Version, + Event: in.Event, + OsType: in.OsType, + OsVersion: in.OsVersion, + PveVer: in.PveVer, + Arch: in.Arch, + Method: in.Method, + Status: in.Status, + ExitCode: in.ExitCode, + Error: in.Error, + + TS: now.Unix(), + IngestDay: now.Format("2006-01-02"), + } + out.Hash = computeHash(out) + + ctx, cancel := context.WithTimeout(r.Context(), cfg.RequestTimeout) + defer cancel() + + if err := pb.CreateTelemetry(ctx, out); err != nil { + // GDPR: don't log raw payload, don't log IPs; log only generic error + log.Printf("pocketbase write failed: %v", err) + http.Error(w, "upstream error", http.StatusBadGateway) + return + } + + if cfg.EnableReqLogging { + log.Printf("telemetry accepted script=%s event=%s", out.Script, out.Event) + } + + w.WriteHeader(http.StatusAccepted) + _, _ = w.Write([]byte("accepted")) + }) + + srv := &http.Server{ + Addr: cfg.ListenAddr, + Handler: securityHeaders(mux), + 
ReadHeaderTimeout: 3 * time.Second, + } + + log.Printf("telemetry-ingest listening on %s", cfg.ListenAddr) + log.Fatal(srv.ListenAndServe()) +} + +func securityHeaders(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + // Minimal security headers (no cookies anyway) + w.Header().Set("X-Content-Type-Options", "nosniff") + w.Header().Set("X-Frame-Options", "DENY") + w.Header().Set("Referrer-Policy", "no-referrer") + next.ServeHTTP(w, r) + }) +} + +func env(k, def string) string { + v := os.Getenv(k) + if v == "" { + return def + } + return v +} +func mustEnv(k string) string { + v := os.Getenv(k) + if v == "" { + log.Fatalf("missing env %s", k) + } + return v +} +func envInt(k string, def int) int { + v := os.Getenv(k) + if v == "" { + return def + } + var i int + _, _ = fmt.Sscanf(v, "%d", &i) + if i == 0 && v != "0" { + return def + } + return i +} +func envInt64(k string, def int64) int64 { + v := os.Getenv(k) + if v == "" { + return def + } + var i int64 + _, _ = fmt.Sscanf(v, "%d", &i) + if i == 0 && v != "0" { + return def + } + return i +} +func envBool(k string, def bool) bool { + v := strings.ToLower(strings.TrimSpace(os.Getenv(k))) + if v == "" { + return def + } + return v == "1" || v == "true" || v == "yes" || v == "on" +} +func splitCSV(s string) []string { + s = strings.TrimSpace(s) + if s == "" { + return nil + } + parts := strings.Split(s, ",") + var out []string + for _, p := range parts { + p = strings.TrimSpace(p) + if p != "" { + out = append(out, p) + } + } + return out +} From 7f0ca0f9d06377ba445eef7791272d8010a8bc97 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 15:38:29 +0100 Subject: [PATCH 31/87] Add Dockerfile and move ingest service Containerize the telemetry ingest service and reorganize source layout. 
Added misc/data/Dockerfile with a multi-stage build (golang:1.23-alpine -> alpine:3.23) to produce /app/telemetry-ingest, run as a non-root user, expose :8080, and provide default env vars for configuration (LISTEN_ADDR, MAX_BODY_BYTES, RATE_LIMIT_RPM, RATE_BURST, RATE_KEY_MODE, ENABLE_REQUEST_LOGGING, UPSTREAM_TIMEOUT_MS). Renamed misc/ingest.go to misc/data/service.go to reflect the new directory structure. --- misc/data/Dockerfile | 21 +++++++++++++++++++++ misc/{ingest.go => data/service.go} | 0 2 files changed, 21 insertions(+) create mode 100644 misc/data/Dockerfile rename misc/{ingest.go => data/service.go} (100%) diff --git a/misc/data/Dockerfile b/misc/data/Dockerfile new file mode 100644 index 000000000..f947b3a2d --- /dev/null +++ b/misc/data/Dockerfile @@ -0,0 +1,21 @@ +# build stage +FROM golang:1.23-alpine AS build +WORKDIR /src +COPY . . +RUN go build -trimpath -ldflags "-s -w" -o /out/telemetry-ingest ./main.go + +# runtime stage +FROM alpine:3.23 +RUN adduser -D -H -s /sbin/nologin app +USER app +WORKDIR /app +COPY --from=build /out/telemetry-ingest /app/telemetry-ingest +EXPOSE 8080 +ENV LISTEN_ADDR=":8080" \ + MAX_BODY_BYTES="1024" \ + RATE_LIMIT_RPM="60" \ + RATE_BURST="20" \ + RATE_KEY_MODE="ip" \ + ENABLE_REQUEST_LOGGING="false" \ + UPSTREAM_TIMEOUT_MS="4000" +CMD ["/app/telemetry-ingest"] diff --git a/misc/ingest.go b/misc/data/service.go similarity index 100% rename from misc/ingest.go rename to misc/data/service.go From 7bd2ba7b5417ad4ee47e5dadfba0d1d062feb667 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 15:53:11 +0100 Subject: [PATCH 32/87] Add telemetry-ingest service and Docker updates Introduce a new telemetry ingestion service (misc/data/service.go) that implements an HTTP server to accept telemetry payloads, validate and sanitize inputs, apply rate limiting, compute dedupe hashes, and forward records to PocketBase with token-based auth. 
Add module file (misc/data/go.mod) setting module telemetry-ingest and Go version 1.25.5. Update Dockerfile to use golang:1.25-alpine and remove baked-in environment defaults (so runtime envs are required), keeping the build stage and final CMD. These changes add the core ingestion logic, dependency module, and align the build image/version. --- misc/data/Dockerfile | 9 +- misc/data/go.mod | 3 + misc/data/service.go | 566 +++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 570 insertions(+), 8 deletions(-) create mode 100644 misc/data/go.mod diff --git a/misc/data/Dockerfile b/misc/data/Dockerfile index f947b3a2d..9967acd25 100644 --- a/misc/data/Dockerfile +++ b/misc/data/Dockerfile @@ -1,5 +1,5 @@ # build stage -FROM golang:1.23-alpine AS build +FROM golang:1.25-alpine AS build WORKDIR /src COPY . . RUN go build -trimpath -ldflags "-s -w" -o /out/telemetry-ingest ./main.go @@ -11,11 +11,4 @@ USER app WORKDIR /app COPY --from=build /out/telemetry-ingest /app/telemetry-ingest EXPOSE 8080 -ENV LISTEN_ADDR=":8080" \ - MAX_BODY_BYTES="1024" \ - RATE_LIMIT_RPM="60" \ - RATE_BURST="20" \ - RATE_KEY_MODE="ip" \ - ENABLE_REQUEST_LOGGING="false" \ - UPSTREAM_TIMEOUT_MS="4000" CMD ["/app/telemetry-ingest"] diff --git a/misc/data/go.mod b/misc/data/go.mod new file mode 100644 index 000000000..ec6e8f2dd --- /dev/null +++ b/misc/data/go.mod @@ -0,0 +1,3 @@ +module telemetry-ingest + +go 1.25.5 diff --git a/misc/data/service.go b/misc/data/service.go index e69de29bb..99f322121 100644 --- a/misc/data/service.go +++ b/misc/data/service.go @@ -0,0 +1,566 @@ +package main + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "io" + "log" + "net" + "net/http" + "os" + "strings" + "sync" + "time" +) + +type Config struct { + ListenAddr string + TrustedProxiesCIDR []string + + // PocketBase + PBBaseURL string + PBAuthCollection string // "_dev_telemetry_service" + PBIdentity string // email + PBPassword string + 
PBTargetColl string // "_dev_telemetry_data" + + // Limits + MaxBodyBytes int64 + RateLimitRPM int // requests per minute per key + RateBurst int // burst tokens + RateKeyMode string // "ip" or "header" + RateKeyHeader string // e.g. "X-Telemetry-Key" + RequestTimeout time.Duration // upstream timeout + EnableReqLogging bool // default false (GDPR-friendly) +} + +type TelemetryIn struct { + Script string `json:"script"` + Version string `json:"version"` + Event string `json:"event"` + OsType string `json:"os_type"` + OsVersion string `json:"os_version,omitempty"` + PveVer string `json:"pve_version,omitempty"` + Arch string `json:"arch"` + Method string `json:"method,omitempty"` + Status string `json:"status,omitempty"` + ExitCode int `json:"exit_code,omitempty"` + Error string `json:"error,omitempty"` // must be sanitized/short +} + +type TelemetryOut struct { + Script string `json:"script"` + Version string `json:"version"` + Event string `json:"event"` + OsType string `json:"os_type"` + OsVersion string `json:"os_version,omitempty"` + PveVer string `json:"pve_version,omitempty"` + Arch string `json:"arch"` + Method string `json:"method,omitempty"` + Status string `json:"status,omitempty"` + ExitCode int `json:"exit_code,omitempty"` + Error string `json:"error,omitempty"` + + TS int64 `json:"ts"` + IngestDay string `json:"ingest_day"` + Hash string `json:"hash"` +} + +type PBClient struct { + baseURL string + authCollection string + identity string + password string + targetColl string + + mu sync.Mutex + token string + exp time.Time + http *http.Client +} + +func NewPBClient(cfg Config) *PBClient { + return &PBClient{ + baseURL: strings.TrimRight(cfg.PBBaseURL, "/"), + authCollection: cfg.PBAuthCollection, + identity: cfg.PBIdentity, + password: cfg.PBPassword, + targetColl: cfg.PBTargetColl, + http: &http.Client{ + Timeout: cfg.RequestTimeout, + }, + } +} + +func (p *PBClient) ensureAuth(ctx context.Context) error { + p.mu.Lock() + defer p.mu.Unlock() + + // 
refresh if token missing or expiring soon + if p.token != "" && time.Until(p.exp) > 60*time.Second { + return nil + } + + body := map[string]string{ + "identity": p.identity, + "password": p.password, + } + b, _ := json.Marshal(body) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, + fmt.Sprintf("%s/api/collections/%s/auth-with-password", p.baseURL, p.authCollection), + bytes.NewReader(b), + ) + if err != nil { + return err + } + req.Header.Set("Content-Type", "application/json") + + resp, err := p.http.Do(req) + if err != nil { + return err + } + defer resp.Body.Close() + + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + rb, _ := io.ReadAll(io.LimitReader(resp.Body, 4<<10)) + return fmt.Errorf("pocketbase auth failed: %s: %s", resp.Status, strings.TrimSpace(string(rb))) + } + + var out struct { + Token string `json:"token"` + // record omitted + } + if err := json.NewDecoder(resp.Body).Decode(&out); err != nil { + return err + } + if out.Token == "" { + return errors.New("pocketbase auth token missing") + } + + // PocketBase JWT exp can be parsed, but keep it simple: set 50 min + p.token = out.Token + p.exp = time.Now().Add(50 * time.Minute) + return nil +} + +func (p *PBClient) CreateTelemetry(ctx context.Context, payload TelemetryOut) error { + if err := p.ensureAuth(ctx); err != nil { + return err + } + + b, _ := json.Marshal(payload) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, + fmt.Sprintf("%s/api/collections/%s/records", p.baseURL, p.targetColl), + bytes.NewReader(b), + ) + if err != nil { + return err + } + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Authorization", "Bearer "+p.token) + + resp, err := p.http.Do(req) + if err != nil { + return err + } + defer resp.Body.Close() + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + rb, _ := io.ReadAll(io.LimitReader(resp.Body, 8<<10)) + return fmt.Errorf("pocketbase create failed: %s: %s", resp.Status, strings.TrimSpace(string(rb))) + } + 
return nil +} + +// -------- Rate limiter (token bucket / minute window, simple) -------- + +type bucket struct { + tokens int + reset time.Time +} + +type RateLimiter struct { + mu sync.Mutex + buckets map[string]*bucket + rpm int + burst int + window time.Duration + cleanInt time.Duration +} + +func NewRateLimiter(rpm, burst int) *RateLimiter { + rl := &RateLimiter{ + buckets: make(map[string]*bucket), + rpm: rpm, + burst: burst, + window: time.Minute, + cleanInt: 5 * time.Minute, + } + go rl.cleanupLoop() + return rl +} + +func (r *RateLimiter) cleanupLoop() { + t := time.NewTicker(r.cleanInt) + defer t.Stop() + for range t.C { + now := time.Now() + r.mu.Lock() + for k, b := range r.buckets { + if now.After(b.reset.Add(2 * r.window)) { + delete(r.buckets, k) + } + } + r.mu.Unlock() + } +} + +func (r *RateLimiter) Allow(key string) bool { + if r.rpm <= 0 { + return true + } + now := time.Now() + r.mu.Lock() + defer r.mu.Unlock() + + b, ok := r.buckets[key] + if !ok || now.After(b.reset) { + r.buckets[key] = &bucket{tokens: min(r.burst, r.rpm), reset: now.Add(r.window)} + b = r.buckets[key] + } + if b.tokens <= 0 { + return false + } + b.tokens-- + return true +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +// -------- Utility: GDPR-safe key extraction -------- + +type ProxyTrust struct { + nets []*net.IPNet +} + +func NewProxyTrust(cidrs []string) (*ProxyTrust, error) { + var nets []*net.IPNet + for _, c := range cidrs { + _, n, err := net.ParseCIDR(strings.TrimSpace(c)) + if err != nil { + return nil, err + } + nets = append(nets, n) + } + return &ProxyTrust{nets: nets}, nil +} + +func (pt *ProxyTrust) isTrusted(ip net.IP) bool { + for _, n := range pt.nets { + if n.Contains(ip) { + return true + } + } + return false +} + +func getClientIP(r *http.Request, pt *ProxyTrust) net.IP { + // If behind reverse proxy, trust X-Forwarded-For only if remote is trusted proxy. 
+ host, _, _ := net.SplitHostPort(r.RemoteAddr) + remote := net.ParseIP(host) + if remote == nil { + return nil + } + + if pt != nil && pt.isTrusted(remote) { + xff := r.Header.Get("X-Forwarded-For") + if xff != "" { + parts := strings.Split(xff, ",") + ip := net.ParseIP(strings.TrimSpace(parts[0])) + if ip != nil { + return ip + } + } + } + return remote +} + +// -------- Validation (strict allowlist) -------- + +var ( + allowedEvents = map[string]bool{"install": true, "update": true, "error": true} + allowedOsType = map[string]bool{"pve": true, "lxc": true, "vm": true, "debian": true, "ubuntu": true, "alpine": true} + allowedArch = map[string]bool{"amd64": true, "arm64": true} +) + +func sanitizeShort(s string, max int) string { + s = strings.TrimSpace(s) + if s == "" { + return "" + } + // remove line breaks and high-risk chars + s = strings.ReplaceAll(s, "\n", " ") + s = strings.ReplaceAll(s, "\r", " ") + if len(s) > max { + s = s[:max] + } + return s +} + +func validate(in *TelemetryIn) error { + in.Script = sanitizeShort(in.Script, 64) + in.Version = sanitizeShort(in.Version, 32) + in.Event = sanitizeShort(in.Event, 16) + in.OsType = sanitizeShort(in.OsType, 16) + in.Arch = sanitizeShort(in.Arch, 16) + in.Method = sanitizeShort(in.Method, 32) + in.Status = sanitizeShort(in.Status, 16) + in.OsVersion = sanitizeShort(in.OsVersion, 32) + in.PveVer = sanitizeShort(in.PveVer, 32) + + // IMPORTANT: "error" must be short and not contain identifiers/logs + in.Error = sanitizeShort(in.Error, 120) + + if in.Script == "" || in.Version == "" || in.Event == "" || in.OsType == "" || in.Arch == "" { + return errors.New("missing required fields") + } + if !allowedEvents[in.Event] { + return errors.New("invalid event") + } + if !allowedOsType[in.OsType] { + return errors.New("invalid os_type") + } + if !allowedArch[in.Arch] { + return errors.New("invalid arch") + } + // exit_code only relevant for error, but allow 0..255 + if in.ExitCode < 0 || in.ExitCode > 255 { + return 
errors.New("invalid exit_code") + } + return nil +} + +func computeHash(out TelemetryOut) string { + // hash over non-identifying fields (no IP) to enable dedupe if needed + key := fmt.Sprintf("%s|%s|%s|%s|%s|%s|%d", + out.Script, out.Version, out.Event, out.OsType, out.Arch, out.IngestDay, out.ExitCode, + ) + sum := sha256.Sum256([]byte(key)) + return hex.EncodeToString(sum[:]) +} + +// -------- HTTP server -------- + +func main() { + cfg := Config{ + ListenAddr: env("LISTEN_ADDR", ":8080"), + TrustedProxiesCIDR: splitCSV(env("TRUSTED_PROXIES_CIDR", "")), + + PBBaseURL: mustEnv("PB_URL"), + PBAuthCollection: env("PB_AUTH_COLLECTION", "_dev_telemetry_service"), + PBIdentity: mustEnv("PB_IDENTITY"), + PBPassword: mustEnv("PB_PASSWORD"), + PBTargetColl: env("PB_TARGET_COLLECTION", "_dev_telemetry_data"), + + MaxBodyBytes: envInt64("MAX_BODY_BYTES", 1024), + RateLimitRPM: envInt("RATE_LIMIT_RPM", 60), + RateBurst: envInt("RATE_BURST", 20), + RateKeyMode: env("RATE_KEY_MODE", "ip"), // "ip" or "header" + RateKeyHeader: env("RATE_KEY_HEADER", "X-Telemetry-Key"), + RequestTimeout: time.Duration(envInt("UPSTREAM_TIMEOUT_MS", 4000)) * time.Millisecond, + EnableReqLogging: envBool("ENABLE_REQUEST_LOGGING", false), + } + + var pt *ProxyTrust + if strings.TrimSpace(env("TRUSTED_PROXIES_CIDR", "")) != "" { + p, err := NewProxyTrust(cfg.TrustedProxiesCIDR) + if err != nil { + log.Fatalf("invalid TRUSTED_PROXIES_CIDR: %v", err) + } + pt = p + } + + pb := NewPBClient(cfg) + rl := NewRateLimiter(cfg.RateLimitRPM, cfg.RateBurst) + + mux := http.NewServeMux() + + mux.HandleFunc("/healthz", func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(200) + _, _ = w.Write([]byte("ok")) + }) + + mux.HandleFunc("/telemetry", func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "method not allowed", http.StatusMethodNotAllowed) + return + } + + // rate key: IP or header (header allows non-identifying keys, but header can be abused too) + 
var key string + switch cfg.RateKeyMode { + case "header": + key = strings.TrimSpace(r.Header.Get(cfg.RateKeyHeader)) + if key == "" { + key = "missing" + } + default: + ip := getClientIP(r, pt) + if ip == nil { + key = "unknown" + } else { + // GDPR: do NOT store IP anywhere permanent; use it only in-memory for RL key + key = ip.String() + } + } + if !rl.Allow(key) { + http.Error(w, "rate limited", http.StatusTooManyRequests) + return + } + + r.Body = http.MaxBytesReader(w, r.Body, cfg.MaxBodyBytes) + raw, err := io.ReadAll(r.Body) + if err != nil { + http.Error(w, "invalid body", http.StatusBadRequest) + return + } + + // strict JSON decode (no unknown fields) + var in TelemetryIn + dec := json.NewDecoder(bytes.NewReader(raw)) + dec.DisallowUnknownFields() + if err := dec.Decode(&in); err != nil { + http.Error(w, "invalid json", http.StatusBadRequest) + return + } + if err := validate(&in); err != nil { + http.Error(w, "invalid payload", http.StatusBadRequest) + return + } + + now := time.Now().UTC() + out := TelemetryOut{ + Script: in.Script, + Version: in.Version, + Event: in.Event, + OsType: in.OsType, + OsVersion: in.OsVersion, + PveVer: in.PveVer, + Arch: in.Arch, + Method: in.Method, + Status: in.Status, + ExitCode: in.ExitCode, + Error: in.Error, + + TS: now.Unix(), + IngestDay: now.Format("2006-01-02"), + } + out.Hash = computeHash(out) + + ctx, cancel := context.WithTimeout(r.Context(), cfg.RequestTimeout) + defer cancel() + + if err := pb.CreateTelemetry(ctx, out); err != nil { + // GDPR: don't log raw payload, don't log IPs; log only generic error + log.Printf("pocketbase write failed: %v", err) + http.Error(w, "upstream error", http.StatusBadGateway) + return + } + + if cfg.EnableReqLogging { + log.Printf("telemetry accepted script=%s event=%s", out.Script, out.Event) + } + + w.WriteHeader(http.StatusAccepted) + _, _ = w.Write([]byte("accepted")) + }) + + srv := &http.Server{ + Addr: cfg.ListenAddr, + Handler: securityHeaders(mux), + 
ReadHeaderTimeout: 3 * time.Second, + } + + log.Printf("telemetry-ingest listening on %s", cfg.ListenAddr) + log.Fatal(srv.ListenAndServe()) +} + +func securityHeaders(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + // Minimal security headers (no cookies anyway) + w.Header().Set("X-Content-Type-Options", "nosniff") + w.Header().Set("X-Frame-Options", "DENY") + w.Header().Set("Referrer-Policy", "no-referrer") + next.ServeHTTP(w, r) + }) +} + +func env(k, def string) string { + v := os.Getenv(k) + if v == "" { + return def + } + return v +} +func mustEnv(k string) string { + v := os.Getenv(k) + if v == "" { + log.Fatalf("missing env %s", k) + } + return v +} +func envInt(k string, def int) int { + v := os.Getenv(k) + if v == "" { + return def + } + var i int + _, _ = fmt.Sscanf(v, "%d", &i) + if i == 0 && v != "0" { + return def + } + return i +} +func envInt64(k string, def int64) int64 { + v := os.Getenv(k) + if v == "" { + return def + } + var i int64 + _, _ = fmt.Sscanf(v, "%d", &i) + if i == 0 && v != "0" { + return def + } + return i +} +func envBool(k string, def bool) bool { + v := strings.ToLower(strings.TrimSpace(os.Getenv(k))) + if v == "" { + return def + } + return v == "1" || v == "true" || v == "yes" || v == "on" +} +func splitCSV(s string) []string { + s = strings.TrimSpace(s) + if s == "" { + return nil + } + parts := strings.Split(s, ",") + var out []string + for _, p := range parts { + p = strings.TrimSpace(p) + if p != "" { + out = append(out, p) + } + } + return out +} From 7759b5329732d9d0a3428cac1b36b315b8e2a1d6 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 15:53:43 +0100 Subject: [PATCH 33/87] Update Dockerfile --- misc/data/Dockerfile | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/misc/data/Dockerfile b/misc/data/Dockerfile index 9967acd25..6fd4377bc 100644 --- a/misc/data/Dockerfile +++ 
b/misc/data/Dockerfile @@ -1,13 +1,9 @@ -# build stage FROM golang:1.25-alpine AS build WORKDIR /src COPY . . -RUN go build -trimpath -ldflags "-s -w" -o /out/telemetry-ingest ./main.go +RUN go build -trimpath -ldflags "-s -w" -o /out/telemetry-ingest . -# runtime stage FROM alpine:3.23 -RUN adduser -D -H -s /sbin/nologin app -USER app WORKDIR /app COPY --from=build /out/telemetry-ingest /app/telemetry-ingest EXPOSE 8080 From 313da7c00cfbb4ba4f1224376300f103fe5175c8 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 16:06:44 +0100 Subject: [PATCH 34/87] Switch telemetry to ingest service Replace direct PocketBase integration with a fire-and-forget telemetry ingest endpoint and tighten validation. misc/api.func: point to telemetry.community-scripts.org, add TELEMETRY_TIMEOUT, use DIAGNOSTICS=no opt-out, include random_id/NSAPP/status in payloads, unify LXC/VM POSTs, avoid blocking or failing scripts, remove PocketBase record lookup/patch logic. misc/data/service.go: update TelemetryIn/TelemetryOut schemas to match new payload, add stricter sanitization and enum/range validation, adjust hashing/deduplication usage, and update request logging to reflect nsapp/status. Overall: safer, non-blocking telemetry with improved schema validation and GDPR-friendly behavior. 
--- misc/api.func | 207 ++++++++++++++++--------------------------- misc/data/service.go | 153 +++++++++++++++++++++----------- 2 files changed, 177 insertions(+), 183 deletions(-) diff --git a/misc/api.func b/misc/api.func index 4c323a954..6e6224427 100644 --- a/misc/api.func +++ b/misc/api.func @@ -3,11 +3,11 @@ # License: MIT | https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/LICENSE # ============================================================================== -# API.FUNC - TELEMETRY & DIAGNOSTICS API (PocketBase) +# API.FUNC - TELEMETRY & DIAGNOSTICS API # ============================================================================== # -# Provides functions for sending anonymous telemetry data to PocketBase -# backend at db.community-scripts.org for analytics and diagnostics. +# Provides functions for sending anonymous telemetry data via the community +# telemetry ingest service at telemetry.community-scripts.org. # # Features: # - Container/VM creation statistics @@ -17,26 +17,25 @@ # # Usage: # source <(curl -fsSL .../api.func) -# post_to_api # Report container creation +# post_to_api # Report LXC container creation # post_to_api_vm # Report VM creation # post_update_to_api # Report installation status # # Privacy: # - Only anonymous statistics (no personal data) -# - User can opt-out via diagnostics settings +# - User can opt-out via DIAGNOSTICS=no # - Random UUID for session tracking only +# - Data retention: 30 days # # ============================================================================== # ============================================================================== -# PocketBase Configuration +# Telemetry Configuration # ============================================================================== -PB_URL="http://db.community-scripts.org" -PB_COLLECTION="_dev_telemetry_data" -PB_API_URL="${PB_URL}/api/collections/${PB_COLLECTION}/records" +TELEMETRY_URL="http://telemetry.community-scripts.org/telemetry" -# Store 
PocketBase record ID for update operations -PB_RECORD_ID="" +# Timeout for telemetry requests (seconds) +TELEMETRY_TIMEOUT=5 # ============================================================================== # SECTION 1: ERROR CODE DESCRIPTIONS @@ -172,8 +171,7 @@ explain_exit_code() { # ------------------------------------------------------------------------------ # post_to_api() # -# - Sends LXC container creation statistics to PocketBase -# - Creates a new record in the _dev_telemetry_data collection +# - Sends LXC container creation statistics to telemetry ingest service # - Only executes if: # * curl is available # * DIAGNOSTICS=yes @@ -186,232 +184,181 @@ explain_exit_code() { # * PVE version # * Status: "installing" # * Random UUID for session tracking -# - Stores PB_RECORD_ID for later updates # - Anonymous telemetry (no personal data) +# - Never blocks or fails script execution # ------------------------------------------------------------------------------ post_to_api() { - if ! 
command -v curl &>/dev/null; then - return - fi + # Silent fail - telemetry should never break scripts + command -v curl &>/dev/null || return 0 + [[ "${DIAGNOSTICS:-no}" == "no" ]] && return 0 + [[ -z "${RANDOM_UUID:-}" ]] && return 0 - if [[ "${DIAGNOSTICS:-no}" == "no" ]]; then - return - fi + # Set type for later status updates + TELEMETRY_TYPE="lxc" - if [[ -z "${RANDOM_UUID:-}" ]]; then - return - fi - - local pve_version="not found" + local pve_version="" if command -v pveversion &>/dev/null; then - pve_version=$(pveversion | awk -F'[/ ]' '{print $2}') + pve_version=$(pveversion 2>/dev/null | awk -F'[/ ]' '{print $2}') || true fi local JSON_PAYLOAD JSON_PAYLOAD=$( cat </dev/null) || true - - # Extract PocketBase record ID from response for later updates - local http_code body - http_code=$(echo "$RESPONSE" | tail -n1) - body=$(echo "$RESPONSE" | sed '$d') - - if [[ "$http_code" == "200" ]] || [[ "$http_code" == "201" ]]; then - PB_RECORD_ID=$(echo "$body" | grep -o '"id":"[^"]*"' | head -1 | cut -d'"' -f4) || true - fi + -d "$JSON_PAYLOAD" &>/dev/null || true } # ------------------------------------------------------------------------------ # post_to_api_vm() # -# - Sends VM creation statistics to PocketBase -# - Similar to post_to_api() but for virtual machines (not containers) +# - Sends VM creation statistics to telemetry ingest service # - Reads DIAGNOSTICS from /usr/local/community-scripts/diagnostics file -# - Payload differences: +# - Payload differences from LXC: # * ct_type=2 (VM instead of LXC) # * type="vm" -# * Disk size without 'G' suffix (parsed from DISK_SIZE variable) +# * Disk size without 'G' suffix # - Only executes if DIAGNOSTICS=yes and RANDOM_UUID is set +# - Never blocks or fails script execution # ------------------------------------------------------------------------------ post_to_api_vm() { - if [[ ! 
-f /usr/local/community-scripts/diagnostics ]]; then - return - fi - DIAGNOSTICS=$(grep -i "^DIAGNOSTICS=" /usr/local/community-scripts/diagnostics | awk -F'=' '{print $2}') - - if ! command -v curl &>/dev/null; then - return + # Read diagnostics setting from file + if [[ -f /usr/local/community-scripts/diagnostics ]]; then + DIAGNOSTICS=$(grep -i "^DIAGNOSTICS=" /usr/local/community-scripts/diagnostics 2>/dev/null | awk -F'=' '{print $2}') || true fi - if [[ "${DIAGNOSTICS:-no}" == "no" ]]; then - return - fi + # Silent fail - telemetry should never break scripts + command -v curl &>/dev/null || return 0 + [[ "${DIAGNOSTICS:-no}" == "no" ]] && return 0 + [[ -z "${RANDOM_UUID:-}" ]] && return 0 - if [[ -z "${RANDOM_UUID:-}" ]]; then - return - fi + # Set type for later status updates + TELEMETRY_TYPE="vm" - local pve_version="not found" + local pve_version="" if command -v pveversion &>/dev/null; then - pve_version=$(pveversion | awk -F'[/ ]' '{print $2}') + pve_version=$(pveversion 2>/dev/null | awk -F'[/ ]' '{print $2}') || true fi + # Remove 'G' suffix from disk size local DISK_SIZE_API="${DISK_SIZE%G}" local JSON_PAYLOAD JSON_PAYLOAD=$( cat </dev/null) || true - - # Extract PocketBase record ID from response for later updates - local http_code body - http_code=$(echo "$RESPONSE" | tail -n1) - body=$(echo "$RESPONSE" | sed '$d') - - if [[ "$http_code" == "200" ]] || [[ "$http_code" == "201" ]]; then - PB_RECORD_ID=$(echo "$body" | grep -o '"id":"[^"]*"' | head -1 | cut -d'"' -f4) || true - fi + -d "$JSON_PAYLOAD" &>/dev/null || true } # ------------------------------------------------------------------------------ # post_update_to_api() # -# - Reports installation completion status to PocketBase via PATCH +# - Reports installation completion status to telemetry ingest service # - Prevents duplicate submissions via POST_UPDATE_DONE flag # - Arguments: # * $1: status ("done" or "failed") # * $2: exit_code (numeric, default: 1 for failed, 0 for done) -# - Uses 
PB_RECORD_ID if available, otherwise looks up by random_id # - Payload includes: # * Final status (mapped: "done"β†’"sucess", "failed"β†’"failed") # * Error description via explain_exit_code() # * Numeric exit code # - Only executes once per session -# - Silently returns if: -# * curl not available -# * Already reported (POST_UPDATE_DONE=true) -# * DIAGNOSTICS=no +# - Never blocks or fails script execution # ------------------------------------------------------------------------------ post_update_to_api() { - if ! command -v curl &>/dev/null; then - return - fi + # Silent fail - telemetry should never break scripts + command -v curl &>/dev/null || return 0 - # Initialize flag if not set (prevents 'unbound variable' error with set -u) + # Prevent duplicate submissions POST_UPDATE_DONE=${POST_UPDATE_DONE:-false} + [[ "$POST_UPDATE_DONE" == "true" ]] && return 0 - if [[ "$POST_UPDATE_DONE" == "true" ]]; then - return 0 - fi - - if [[ "${DIAGNOSTICS:-no}" == "no" ]]; then - return - fi - - if [[ -z "${RANDOM_UUID:-}" ]]; then - return - fi + [[ "${DIAGNOSTICS:-no}" == "no" ]] && return 0 + [[ -z "${RANDOM_UUID:-}" ]] && return 0 local status="${1:-failed}" local raw_exit_code="${2:-1}" - local exit_code error pb_status + local exit_code=0 error="" pb_status - # Map status to PocketBase select values: installing, sucess, failed, unknown + # Map status to telemetry values: installing, sucess, failed, unknown case "$status" in done | success | sucess) pb_status="sucess" exit_code=0 error="" ;; - failed) pb_status="failed" ;; - *) pb_status="unknown" ;; + failed) + pb_status="failed" + ;; + *) + pb_status="unknown" + ;; esac - # For failed status, resolve exit code and error description + # For failed/unknown status, resolve exit code and error description if [[ "$pb_status" == "failed" ]] || [[ "$pb_status" == "unknown" ]]; then - # If exit_code is numeric, use it; otherwise default to 1 if [[ "$raw_exit_code" =~ ^[0-9]+$ ]]; then exit_code="$raw_exit_code" else 
exit_code=1 fi error=$(explain_exit_code "$exit_code") - if [[ -z "$error" ]]; then - error="Unknown error" - fi - fi - - # Resolve PocketBase record ID if not already known - local record_id="${PB_RECORD_ID:-}" - - if [[ -z "$record_id" ]]; then - # Look up record by random_id filter - local lookup_url="${PB_API_URL}?filter=(random_id='${RANDOM_UUID}')&fields=id&perPage=1" - local lookup_response - lookup_response=$(curl -s -L "${lookup_url}" 2>/dev/null) || true - - record_id=$(echo "$lookup_response" | grep -o '"id":"[^"]*"' | head -1 | cut -d'"' -f4) || true - - if [[ -z "$record_id" ]]; then - POST_UPDATE_DONE=true - return - fi + [[ -z "$error" ]] && error="Unknown error" fi local JSON_PAYLOAD JSON_PAYLOAD=$( cat </dev/null || true diff --git a/misc/data/service.go b/misc/data/service.go index 99f322121..08a1cfad9 100644 --- a/misc/data/service.go +++ b/misc/data/service.go @@ -39,36 +39,47 @@ type Config struct { EnableReqLogging bool // default false (GDPR-friendly) } +// TelemetryIn matches payload from api.func (bash client) type TelemetryIn struct { - Script string `json:"script"` - Version string `json:"version"` - Event string `json:"event"` - OsType string `json:"os_type"` - OsVersion string `json:"os_version,omitempty"` + // Required + RandomID string `json:"random_id"` // Session UUID + Type string `json:"type"` // "lxc" or "vm" + NSAPP string `json:"nsapp"` // Application name (e.g., "jellyfin") + Status string `json:"status"` // "installing", "sucess", "failed", "unknown" + + // Container/VM specs + CTType int `json:"ct_type,omitempty"` // 1=unprivileged, 2=privileged/VM + DiskSize int `json:"disk_size,omitempty"` // GB + CoreCount int `json:"core_count,omitempty"` // CPU cores + RAMSize int `json:"ram_size,omitempty"` // MB + + // System info + OsType string `json:"os_type,omitempty"` // "debian", "ubuntu", "alpine", etc. + OsVersion string `json:"os_version,omitempty"` // "12", "24.04", etc. 
PveVer string `json:"pve_version,omitempty"` - Arch string `json:"arch"` - Method string `json:"method,omitempty"` - Status string `json:"status,omitempty"` - ExitCode int `json:"exit_code,omitempty"` - Error string `json:"error,omitempty"` // must be sanitized/short + + // Optional + Method string `json:"method,omitempty"` // "default", "advanced" + Error string `json:"error,omitempty"` // Error description (max 120 chars) + ExitCode int `json:"exit_code,omitempty"` // 0-255 } +// TelemetryOut is sent to PocketBase (matches _dev_telemetry_data collection) type TelemetryOut struct { - Script string `json:"script"` - Version string `json:"version"` - Event string `json:"event"` - OsType string `json:"os_type"` + RandomID string `json:"random_id"` + Type string `json:"type"` + NSAPP string `json:"nsapp"` + Status string `json:"status"` + CTType int `json:"ct_type,omitempty"` + DiskSize int `json:"disk_size,omitempty"` + CoreCount int `json:"core_count,omitempty"` + RAMSize int `json:"ram_size,omitempty"` + OsType string `json:"os_type,omitempty"` OsVersion string `json:"os_version,omitempty"` PveVer string `json:"pve_version,omitempty"` - Arch string `json:"arch"` Method string `json:"method,omitempty"` - Status string `json:"status,omitempty"` - ExitCode int `json:"exit_code,omitempty"` Error string `json:"error,omitempty"` - - TS int64 `json:"ts"` - IngestDay string `json:"ingest_day"` - Hash string `json:"hash"` + ExitCode int `json:"exit_code,omitempty"` } type PBClient struct { @@ -297,9 +308,21 @@ func getClientIP(r *http.Request, pt *ProxyTrust) net.IP { // -------- Validation (strict allowlist) -------- var ( - allowedEvents = map[string]bool{"install": true, "update": true, "error": true} - allowedOsType = map[string]bool{"pve": true, "lxc": true, "vm": true, "debian": true, "ubuntu": true, "alpine": true} - allowedArch = map[string]bool{"amd64": true, "arm64": true} + // Allowed values for 'type' field + allowedType = map[string]bool{"lxc": true, "vm": 
true} + + // Allowed values for 'status' field (note: "sucess" is intentional, matches PB schema) + allowedStatus = map[string]bool{"installing": true, "sucess": true, "failed": true, "unknown": true} + + // Allowed values for 'os_type' field + allowedOsType = map[string]bool{ + "debian": true, "ubuntu": true, "alpine": true, "devuan": true, + "fedora": true, "rocky": true, "alma": true, "centos": true, + "opensuse": true, "gentoo": true, "openeuler": true, + } + + // Allowed values for 'method' field + allowedMethod = map[string]bool{"default": true, "advanced": true, "": true} ) func sanitizeShort(s string, max int) string { @@ -317,42 +340,66 @@ func sanitizeShort(s string, max int) string { } func validate(in *TelemetryIn) error { - in.Script = sanitizeShort(in.Script, 64) - in.Version = sanitizeShort(in.Version, 32) - in.Event = sanitizeShort(in.Event, 16) - in.OsType = sanitizeShort(in.OsType, 16) - in.Arch = sanitizeShort(in.Arch, 16) - in.Method = sanitizeShort(in.Method, 32) + // Sanitize all string fields + in.RandomID = sanitizeShort(in.RandomID, 64) + in.Type = sanitizeShort(in.Type, 8) + in.NSAPP = sanitizeShort(in.NSAPP, 64) in.Status = sanitizeShort(in.Status, 16) + in.OsType = sanitizeShort(in.OsType, 32) in.OsVersion = sanitizeShort(in.OsVersion, 32) in.PveVer = sanitizeShort(in.PveVer, 32) + in.Method = sanitizeShort(in.Method, 32) // IMPORTANT: "error" must be short and not contain identifiers/logs in.Error = sanitizeShort(in.Error, 120) - if in.Script == "" || in.Version == "" || in.Event == "" || in.OsType == "" || in.Arch == "" { - return errors.New("missing required fields") + // Required fields + if in.RandomID == "" || in.Type == "" || in.NSAPP == "" || in.Status == "" { + return errors.New("missing required fields: random_id, type, nsapp, status") } - if !allowedEvents[in.Event] { - return errors.New("invalid event") + + // Validate enums + if !allowedType[in.Type] { + return errors.New("invalid type (must be 'lxc' or 'vm')") } - if 
!allowedOsType[in.OsType] { + if !allowedStatus[in.Status] { + return errors.New("invalid status") + } + + // os_type is optional but if provided must be valid + if in.OsType != "" && !allowedOsType[in.OsType] { return errors.New("invalid os_type") } - if !allowedArch[in.Arch] { - return errors.New("invalid arch") + + // method is optional but if provided must be valid + if !allowedMethod[in.Method] { + return errors.New("invalid method") + } + + // Validate numeric ranges + if in.CTType < 0 || in.CTType > 2 { + return errors.New("invalid ct_type (must be 0, 1, or 2)") + } + if in.DiskSize < 0 || in.DiskSize > 100000 { + return errors.New("invalid disk_size") + } + if in.CoreCount < 0 || in.CoreCount > 256 { + return errors.New("invalid core_count") + } + if in.RAMSize < 0 || in.RAMSize > 1048576 { + return errors.New("invalid ram_size") } - // exit_code only relevant for error, but allow 0..255 if in.ExitCode < 0 || in.ExitCode > 255 { return errors.New("invalid exit_code") } + return nil } +// computeHash generates a hash for deduplication (GDPR-safe, no IP) func computeHash(out TelemetryOut) string { - // hash over non-identifying fields (no IP) to enable dedupe if needed - key := fmt.Sprintf("%s|%s|%s|%s|%s|%s|%d", - out.Script, out.Version, out.Event, out.OsType, out.Arch, out.IngestDay, out.ExitCode, + key := fmt.Sprintf("%s|%s|%s|%s|%d", + out.RandomID, out.NSAPP, out.Type, out.Status, out.ExitCode, ) sum := sha256.Sum256([]byte(key)) return hex.EncodeToString(sum[:]) @@ -447,24 +494,24 @@ func main() { return } - now := time.Now().UTC() + // Map input to PocketBase schema out := TelemetryOut{ - Script: in.Script, - Version: in.Version, - Event: in.Event, + RandomID: in.RandomID, + Type: in.Type, + NSAPP: in.NSAPP, + Status: in.Status, + CTType: in.CTType, + DiskSize: in.DiskSize, + CoreCount: in.CoreCount, + RAMSize: in.RAMSize, OsType: in.OsType, OsVersion: in.OsVersion, PveVer: in.PveVer, - Arch: in.Arch, Method: in.Method, - Status: in.Status, - 
ExitCode: in.ExitCode, Error: in.Error, - - TS: now.Unix(), - IngestDay: now.Format("2006-01-02"), + ExitCode: in.ExitCode, } - out.Hash = computeHash(out) + _ = computeHash(out) // For future deduplication ctx, cancel := context.WithTimeout(r.Context(), cfg.RequestTimeout) defer cancel() @@ -477,7 +524,7 @@ func main() { } if cfg.EnableReqLogging { - log.Printf("telemetry accepted script=%s event=%s", out.Script, out.Event) + log.Printf("telemetry accepted nsapp=%s status=%s", out.NSAPP, out.Status) } w.WriteHeader(http.StatusAccepted) From fafdf88e6a23b2e2514cbaebbe88254be1634cb2 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 16:12:35 +0100 Subject: [PATCH 35/87] Update build.func --- misc/build.func | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/misc/build.func b/misc/build.func index ff6908d3a..a9b1dd491 100644 --- a/misc/build.func +++ b/misc/build.func @@ -1882,7 +1882,7 @@ advanced_settings() { fi ;; - # ═══════════════════════════════════════════════════════════════════════════ + # ══════════════��════════════════════════════════════════════════════════════ # STEP 3: Container ID # ═══════════════════════════════════════════════════════════════════════════ 3) @@ -2810,16 +2810,16 @@ EOF dev_mode_menu() { local motd=OFF keep=OFF trace=OFF pause=OFF breakpoint=OFF logs=OFF dryrun=OFF verbose=OFF - IFS=',' read -r -a _modes <<< "$dev_mode" + IFS=',' read -r -a _modes <<<"$dev_mode" for m in "${_modes[@]}"; do case "$m" in - motd) motd=ON ;; - keep) keep=ON ;; - trace) trace=ON ;; - pause) pause=ON ;; - breakpoint) breakpoint=ON ;; - logs) logs=ON ;; - dryrun) dryrun=ON ;; + motd) motd=ON ;; + keep) keep=ON ;; + trace) trace=ON ;; + pause) pause=ON ;; + breakpoint) breakpoint=ON ;; + logs) logs=ON ;; + dryrun) dryrun=ON ;; esac done @@ -2853,7 +2853,10 @@ dev_mode_menu() { fi done - dev_mode=$(IFS=,; echo "${modes_out[*]}") + dev_mode=$( + IFS=, + 
echo "${modes_out[*]}" + ) unset DEV_MODE_MOTD DEV_MODE_KEEP DEV_MODE_TRACE DEV_MODE_PAUSE DEV_MODE_BREAKPOINT DEV_MODE_LOGS DEV_MODE_DRYRUN parse_dev_mode if [[ "${DEV_MODE_LOGS:-false}" == "true" ]]; then From 4a8a4180d9dfd225a859b0163aa8ef55c81c73dc Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 16:20:43 +0100 Subject: [PATCH 36/87] use https --- misc/api.func | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/misc/api.func b/misc/api.func index 6e6224427..3893efa3c 100644 --- a/misc/api.func +++ b/misc/api.func @@ -32,7 +32,7 @@ # ============================================================================== # Telemetry Configuration # ============================================================================== -TELEMETRY_URL="http://telemetry.community-scripts.org/telemetry" +TELEMETRY_URL="https://telemetry.community-scripts.org/telemetry" # Timeout for telemetry requests (seconds) TELEMETRY_TIMEOUT=5 From bcc6bb9f5fd8c151251ff6d722f92dad5cd5327e Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 16:46:25 +0100 Subject: [PATCH 37/87] Update build.func --- misc/build.func | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/misc/build.func b/misc/build.func index a9b1dd491..44958bfe4 100644 --- a/misc/build.func +++ b/misc/build.func @@ -3606,6 +3606,9 @@ $PCT_OPTIONS_STRING" exit 214 fi msg_ok "Storage space validated" + + # Report installation start to API (early - captures failed installs too) + post_to_api fi create_lxc_container || exit $? @@ -5088,9 +5091,6 @@ create_lxc_container() { } msg_ok "LXC Container ${BL}$CTID${CL} ${GN}was successfully created." 
- - # Report container creation to API - post_to_api } # ============================================================================== From 7c3688cd0af8ee989bb26a8ee748ab834a3f867f Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 16:48:47 +0100 Subject: [PATCH 38/87] debug --- misc/api.func | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/misc/api.func b/misc/api.func index 3893efa3c..a0dd71e16 100644 --- a/misc/api.func +++ b/misc/api.func @@ -188,10 +188,15 @@ explain_exit_code() { # - Never blocks or fails script execution # ------------------------------------------------------------------------------ post_to_api() { + # DEBUG: Show function entry + echo "[DEBUG] post_to_api() called" >&2 + # Silent fail - telemetry should never break scripts - command -v curl &>/dev/null || return 0 - [[ "${DIAGNOSTICS:-no}" == "no" ]] && return 0 - [[ -z "${RANDOM_UUID:-}" ]] && return 0 + command -v curl &>/dev/null || { echo "[DEBUG] curl not found, skipping" >&2; return 0; } + [[ "${DIAGNOSTICS:-no}" == "no" ]] && { echo "[DEBUG] DIAGNOSTICS=no, skipping" >&2; return 0; } + [[ -z "${RANDOM_UUID:-}" ]] && { echo "[DEBUG] RANDOM_UUID empty, skipping" >&2; return 0; } + + echo "[DEBUG] Checks passed: DIAGNOSTICS=$DIAGNOSTICS RANDOM_UUID=$RANDOM_UUID NSAPP=$NSAPP" >&2 # Set type for later status updates TELEMETRY_TYPE="lxc" @@ -221,10 +226,16 @@ post_to_api() { EOF ) + echo "[DEBUG] Sending to: $TELEMETRY_URL" >&2 + echo "[DEBUG] Payload: $JSON_PAYLOAD" >&2 + # Fire-and-forget: never block, never fail - curl -fsS -m "${TELEMETRY_TIMEOUT}" -X POST "${TELEMETRY_URL}" \ + local http_code + http_code=$(curl -sS -w "%{http_code}" -m "${TELEMETRY_TIMEOUT}" -X POST "${TELEMETRY_URL}" \ -H "Content-Type: application/json" \ - -d "$JSON_PAYLOAD" &>/dev/null || true + -d "$JSON_PAYLOAD" -o /dev/stderr 2>&1) || true + + echo "[DEBUG] HTTP response code: $http_code" >&2 } # 
------------------------------------------------------------------------------ From ce375b02aaed5a6062e8725fc5b848e3677de8e1 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 16:51:51 +0100 Subject: [PATCH 39/87] Refactor telemetry checks, relax method validation Reformat telemetry pre-checks in misc/api.func for clearer multi-line condition handling and remove stray whitespace around the curl response logging. Tweak misc/build.func comment line (minor encoding/visual cleanup). In misc/data/service.go remove the strict allowedMethod map and its validation so the 'method' field is treated as optional/flexible; keep sanitization and numeric validations unchanged. These changes improve readability and allow custom method values without breaking behavior. --- misc/api.func | 17 +++++++++++++---- misc/build.func | 2 +- misc/data/service.go | 9 ++------- 3 files changed, 16 insertions(+), 12 deletions(-) diff --git a/misc/api.func b/misc/api.func index a0dd71e16..fe01924c8 100644 --- a/misc/api.func +++ b/misc/api.func @@ -192,9 +192,18 @@ post_to_api() { echo "[DEBUG] post_to_api() called" >&2 # Silent fail - telemetry should never break scripts - command -v curl &>/dev/null || { echo "[DEBUG] curl not found, skipping" >&2; return 0; } - [[ "${DIAGNOSTICS:-no}" == "no" ]] && { echo "[DEBUG] DIAGNOSTICS=no, skipping" >&2; return 0; } - [[ -z "${RANDOM_UUID:-}" ]] && { echo "[DEBUG] RANDOM_UUID empty, skipping" >&2; return 0; } + command -v curl &>/dev/null || { + echo "[DEBUG] curl not found, skipping" >&2 + return 0 + } + [[ "${DIAGNOSTICS:-no}" == "no" ]] && { + echo "[DEBUG] DIAGNOSTICS=no, skipping" >&2 + return 0 + } + [[ -z "${RANDOM_UUID:-}" ]] && { + echo "[DEBUG] RANDOM_UUID empty, skipping" >&2 + return 0 + } echo "[DEBUG] Checks passed: DIAGNOSTICS=$DIAGNOSTICS RANDOM_UUID=$RANDOM_UUID NSAPP=$NSAPP" >&2 @@ -234,7 +243,7 @@ EOF http_code=$(curl -sS -w "%{http_code}" -m "${TELEMETRY_TIMEOUT}" -X 
POST "${TELEMETRY_URL}" \ -H "Content-Type: application/json" \ -d "$JSON_PAYLOAD" -o /dev/stderr 2>&1) || true - + echo "[DEBUG] HTTP response code: $http_code" >&2 } diff --git a/misc/build.func b/misc/build.func index 44958bfe4..c719e1fdb 100644 --- a/misc/build.func +++ b/misc/build.func @@ -1882,7 +1882,7 @@ advanced_settings() { fi ;; - # ══════════════��════════════════════════════════════════════════════════════ + # ══════════════���════════════════════════════════════════════════════════════ # STEP 3: Container ID # ═══════════════════════════════════════════════════════════════════════════ 3) diff --git a/misc/data/service.go b/misc/data/service.go index 08a1cfad9..94f4de9a5 100644 --- a/misc/data/service.go +++ b/misc/data/service.go @@ -320,9 +320,6 @@ var ( "fedora": true, "rocky": true, "alma": true, "centos": true, "opensuse": true, "gentoo": true, "openeuler": true, } - - // Allowed values for 'method' field - allowedMethod = map[string]bool{"default": true, "advanced": true, "": true} ) func sanitizeShort(s string, max int) string { @@ -371,10 +368,8 @@ func validate(in *TelemetryIn) error { return errors.New("invalid os_type") } - // method is optional but if provided must be valid - if !allowedMethod[in.Method] { - return errors.New("invalid method") - } + // method is optional and flexible - just sanitized, no strict validation + // Values like "default", "advanced", "mydefaults-global", "mydefaults-app" are all valid // Validate numeric ranges if in.CTType < 0 || in.CTType > 2 { From 5aa85ace6aa9560d2a6d847b940443e78b559e1b Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 16:53:25 +0100 Subject: [PATCH 40/87] Add telemetry debug/logging and failure report Enhance post_update_to_api robustness and observability: add debug traces for entry, missing curl, duplicate submissions, DIAGNOSTICS/RANDOM_UUID checks, payload/URL output, and HTTP response code capture; make curl 
non-blocking and tolerant of failures. Also invoke post_update_to_api on installation failure so build/install errors are reported to telemetry. Includes a small comment glyph fix in build.func. Changes keep telemetry as a silent, best-effort path that won't break script execution. --- misc/api.func | 33 +++++++++++++++++++++++++++------ misc/build.func | 5 ++++- 2 files changed, 31 insertions(+), 7 deletions(-) diff --git a/misc/api.func b/misc/api.func index fe01924c8..55a4d6889 100644 --- a/misc/api.func +++ b/misc/api.func @@ -323,15 +323,30 @@ EOF # - Never blocks or fails script execution # ------------------------------------------------------------------------------ post_update_to_api() { + # DEBUG: Show function entry + echo "[DEBUG] post_update_to_api() called with status=$1 exit_code=$2" >&2 + # Silent fail - telemetry should never break scripts - command -v curl &>/dev/null || return 0 + command -v curl &>/dev/null || { + echo "[DEBUG] curl not found, skipping" >&2 + return 0 + } # Prevent duplicate submissions POST_UPDATE_DONE=${POST_UPDATE_DONE:-false} - [[ "$POST_UPDATE_DONE" == "true" ]] && return 0 + [[ "$POST_UPDATE_DONE" == "true" ]] && { + echo "[DEBUG] Already sent update, skipping" >&2 + return 0 + } - [[ "${DIAGNOSTICS:-no}" == "no" ]] && return 0 - [[ -z "${RANDOM_UUID:-}" ]] && return 0 + [[ "${DIAGNOSTICS:-no}" == "no" ]] && { + echo "[DEBUG] DIAGNOSTICS=no, skipping" >&2 + return 0 + } + [[ -z "${RANDOM_UUID:-}" ]] && { + echo "[DEBUG] RANDOM_UUID empty, skipping" >&2 + return 0 + } local status="${1:-failed}" local raw_exit_code="${2:-1}" @@ -377,10 +392,16 @@ post_update_to_api() { EOF ) + echo "[DEBUG] Sending update to: $TELEMETRY_URL" >&2 + echo "[DEBUG] Update payload: $JSON_PAYLOAD" >&2 + # Fire-and-forget: never block, never fail - curl -fsS -m "${TELEMETRY_TIMEOUT}" -X POST "${TELEMETRY_URL}" \ + local http_code + http_code=$(curl -sS -w "%{http_code}" -m "${TELEMETRY_TIMEOUT}" -X POST "${TELEMETRY_URL}" \ -H "Content-Type: 
application/json" \ - -d "$JSON_PAYLOAD" &>/dev/null || true + -d "$JSON_PAYLOAD" -o /dev/stderr 2>&1) || true + + echo "[DEBUG] HTTP response code: $http_code" >&2 POST_UPDATE_DONE=true } diff --git a/misc/build.func b/misc/build.func index c719e1fdb..6649b7e3d 100644 --- a/misc/build.func +++ b/misc/build.func @@ -1882,7 +1882,7 @@ advanced_settings() { fi ;; - # ══════════════���════════════════════════════════════════════════════════════ + # ══════════════����════════════════════════════════════════════════════════════ # STEP 3: Container ID # ═══════════════════════════════════════════════════════════════════════════ 3) @@ -4014,6 +4014,9 @@ EOF' if [[ $install_exit_code -ne 0 ]]; then msg_error "Installation failed in container ${CTID} (exit code: ${install_exit_code})" + # Report failure to telemetry API + post_update_to_api "failed" "$install_exit_code" + # Copy both logs from container before potential deletion local build_log_copied=false local install_log_copied=false From 878672a8df9ec2b9946ea2a44011ed8cbe5000b8 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 17:04:15 +0100 Subject: [PATCH 41/87] Update api.func --- misc/api.func | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/misc/api.func b/misc/api.func index 55a4d6889..516400904 100644 --- a/misc/api.func +++ b/misc/api.func @@ -378,6 +378,12 @@ post_update_to_api() { [[ -z "$error" ]] && error="Unknown error" fi + # Get PVE version for complete record + local pve_version="" + if command -v pveversion &>/dev/null; then + pve_version=$(pveversion 2>/dev/null | awk -F'[/ ]' '{print $2}') || true + fi + local JSON_PAYLOAD JSON_PAYLOAD=$( cat < Date: Mon, 9 Feb 2026 17:07:30 +0100 Subject: [PATCH 42/87] Support PATCH updates for telemetry status Send only changing fields for status updates and add server-side update flow. 
- Trimmed telemetry JSON payload in misc/api.func to include only status, error, and exit_code (removed static fields and pve_version) so updates are minimal. - Added TelemetryStatusUpdate type and new PBClient methods: FindRecordByRandomID, UpdateTelemetryStatus, and UpsertTelemetry in misc/data/service.go. UpsertTelemetry creates a record for status="installing", otherwise finds the record by random_id and PATCHes only status/error/exit_code (fallbacks to create if not found). - Relaxed validation logic in validate(): detect updates (status != "installing") and skip certain strict numeric checks for update requests while keeping required fields and other validations. - Main handler now calls UpsertTelemetry instead of CreateTelemetry and logs generic errors. These changes allow idempotent, minimal updates to existing telemetry records and avoid repeatedly sending/storing unchanged metadata. --- misc/api.func | 16 +----- misc/data/service.go | 127 +++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 124 insertions(+), 19 deletions(-) diff --git a/misc/api.func b/misc/api.func index 516400904..73d809346 100644 --- a/misc/api.func +++ b/misc/api.func @@ -378,12 +378,8 @@ post_update_to_api() { [[ -z "$error" ]] && error="Unknown error" fi - # Get PVE version for complete record - local pve_version="" - if command -v pveversion &>/dev/null; then - pve_version=$(pveversion 2>/dev/null | awk -F'[/ ]' '{print $2}') || true - fi - + # Update payload: only fields that change (status, error, exit_code) + # The Go service will find the record by random_id and PATCH only these fields local JSON_PAYLOAD JSON_PAYLOAD=$( cat <= 300 { + return "", fmt.Errorf("pocketbase search failed: %s", resp.Status) + } + + var result struct { + Items []struct { + ID string `json:"id"` + } `json:"items"` + } + if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { + return "", err + } + + if len(result.Items) == 0 { + return "", nil // Not found + } + return 
result.Items[0].ID, nil +} + +// UpdateTelemetryStatus updates only status, error, and exit_code of an existing record +func (p *PBClient) UpdateTelemetryStatus(ctx context.Context, recordID string, update TelemetryStatusUpdate) error { + if err := p.ensureAuth(ctx); err != nil { + return err + } + + b, _ := json.Marshal(update) + req, err := http.NewRequestWithContext(ctx, http.MethodPatch, + fmt.Sprintf("%s/api/collections/%s/records/%s", p.baseURL, p.targetColl, recordID), + bytes.NewReader(b), + ) + if err != nil { + return err + } + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Authorization", "Bearer "+p.token) + + resp, err := p.http.Do(req) + if err != nil { + return err + } + defer resp.Body.Close() + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + rb, _ := io.ReadAll(io.LimitReader(resp.Body, 8<<10)) + return fmt.Errorf("pocketbase update failed: %s: %s", resp.Status, strings.TrimSpace(string(rb))) + } + return nil +} + +// UpsertTelemetry handles both creation and updates intelligently +// - status="installing": Always creates a new record +// - status!="installing": Updates existing record (found by random_id) with status/error/exit_code only +func (p *PBClient) UpsertTelemetry(ctx context.Context, payload TelemetryOut) error { + // For "installing" status, always create new record + if payload.Status == "installing" { + return p.CreateTelemetry(ctx, payload) + } + + // For status updates (sucess/failed/unknown), find and update existing record + recordID, err := p.FindRecordByRandomID(ctx, payload.RandomID) + if err != nil { + // Search failed, log and return error + return fmt.Errorf("cannot find record to update: %w", err) + } + + if recordID == "" { + // Record not found - this shouldn't happen normally + // Create a full record as fallback + return p.CreateTelemetry(ctx, payload) + } + + // Update only status, error, and exit_code + update := TelemetryStatusUpdate{ + Status: payload.Status, + Error: payload.Error, + 
ExitCode: payload.ExitCode, + } + return p.UpdateTelemetryStatus(ctx, recordID, update) +} + func (p *PBClient) CreateTelemetry(ctx context.Context, payload TelemetryOut) error { if err := p.ensureAuth(ctx); err != nil { return err @@ -350,7 +460,7 @@ func validate(in *TelemetryIn) error { // IMPORTANT: "error" must be short and not contain identifiers/logs in.Error = sanitizeShort(in.Error, 120) - // Required fields + // Required fields for all requests if in.RandomID == "" || in.Type == "" || in.NSAPP == "" || in.Status == "" { return errors.New("missing required fields: random_id, type, nsapp, status") } @@ -363,6 +473,10 @@ func validate(in *TelemetryIn) error { return errors.New("invalid status") } + // For status updates (not installing), skip numeric field validation + // These are only required for initial creation + isUpdate := in.Status != "installing" + // os_type is optional but if provided must be valid if in.OsType != "" && !allowedOsType[in.OsType] { return errors.New("invalid os_type") @@ -371,9 +485,11 @@ func validate(in *TelemetryIn) error { // method is optional and flexible - just sanitized, no strict validation // Values like "default", "advanced", "mydefaults-global", "mydefaults-app" are all valid - // Validate numeric ranges - if in.CTType < 0 || in.CTType > 2 { - return errors.New("invalid ct_type (must be 0, 1, or 2)") + // Validate numeric ranges (only strict for new records) + if !isUpdate { + if in.CTType < 0 || in.CTType > 2 { + return errors.New("invalid ct_type (must be 0, 1, or 2)") + } } if in.DiskSize < 0 || in.DiskSize > 100000 { return errors.New("invalid disk_size") @@ -511,7 +627,8 @@ func main() { ctx, cancel := context.WithTimeout(r.Context(), cfg.RequestTimeout) defer cancel() - if err := pb.CreateTelemetry(ctx, out); err != nil { + // Upsert: Creates new record if random_id doesn't exist, updates if it does + if err := pb.UpsertTelemetry(ctx, out); err != nil { // GDPR: don't log raw payload, don't log IPs; log only 
generic error log.Printf("pocketbase write failed: %v", err) http.Error(w, "upstream error", http.StatusBadGateway) From 897707645f149505629b1351c4a9f3730e6754cb Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 17:13:08 +0100 Subject: [PATCH 43/87] add static go dashboard --- misc/data/dashboard.go | 851 +++++++++++++++++++++++++++++++++++++++++ misc/data/service.go | 33 ++ 2 files changed, 884 insertions(+) create mode 100644 misc/data/dashboard.go diff --git a/misc/data/dashboard.go b/misc/data/dashboard.go new file mode 100644 index 000000000..34edbbf40 --- /dev/null +++ b/misc/data/dashboard.go @@ -0,0 +1,851 @@ +package main + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + "time" +) + +// DashboardData holds aggregated statistics for the dashboard +type DashboardData struct { + TotalInstalls int `json:"total_installs"` + SuccessCount int `json:"success_count"` + FailedCount int `json:"failed_count"` + InstallingCount int `json:"installing_count"` + SuccessRate float64 `json:"success_rate"` + TopApps []AppCount `json:"top_apps"` + OsDistribution []OsCount `json:"os_distribution"` + MethodStats []MethodCount `json:"method_stats"` + RecentRecords []TelemetryRecord `json:"recent_records"` + DailyStats []DailyStat `json:"daily_stats"` +} + +type AppCount struct { + App string `json:"app"` + Count int `json:"count"` +} + +type OsCount struct { + Os string `json:"os"` + Count int `json:"count"` +} + +type MethodCount struct { + Method string `json:"method"` + Count int `json:"count"` +} + +type DailyStat struct { + Date string `json:"date"` + Success int `json:"success"` + Failed int `json:"failed"` +} + +// FetchDashboardData retrieves aggregated data from PocketBase +func (p *PBClient) FetchDashboardData(ctx context.Context, days int) (*DashboardData, error) { + if err := p.ensureAuth(ctx); err != nil { + return nil, err + } + + data := &DashboardData{} + + // Calculate 
date filter + since := time.Now().AddDate(0, 0, -days).Format("2006-01-02 00:00:00") + filter := url.QueryEscape(fmt.Sprintf("created >= '%s'", since)) + + // Fetch all records for the period + records, err := p.fetchRecords(ctx, filter, 500) + if err != nil { + return nil, err + } + + // Aggregate statistics + appCounts := make(map[string]int) + osCounts := make(map[string]int) + methodCounts := make(map[string]int) + dailySuccess := make(map[string]int) + dailyFailed := make(map[string]int) + + for _, r := range records { + data.TotalInstalls++ + + switch r.Status { + case "sucess": + data.SuccessCount++ + case "failed": + data.FailedCount++ + case "installing": + data.InstallingCount++ + } + + // Count apps + if r.NSAPP != "" { + appCounts[r.NSAPP]++ + } + + // Count OS + if r.OsType != "" { + osCounts[r.OsType]++ + } + + // Count methods + if r.Method != "" { + methodCounts[r.Method]++ + } + + // Daily stats (use Created field if available) + if r.Created != "" { + date := r.Created[:10] // "2026-02-09" + if r.Status == "sucess" { + dailySuccess[date]++ + } else if r.Status == "failed" { + dailyFailed[date]++ + } + } + } + + // Calculate success rate + completed := data.SuccessCount + data.FailedCount + if completed > 0 { + data.SuccessRate = float64(data.SuccessCount) / float64(completed) * 100 + } + + // Convert maps to sorted slices (top 10) + data.TopApps = topN(appCounts, 10) + data.OsDistribution = topNOs(osCounts, 10) + data.MethodStats = topNMethod(methodCounts, 10) + + // Daily stats for chart + data.DailyStats = buildDailyStats(dailySuccess, dailyFailed, days) + + // Recent records (last 20) + if len(records) > 20 { + data.RecentRecords = records[:20] + } else { + data.RecentRecords = records + } + + return data, nil +} + +// TelemetryRecord includes Created timestamp +type TelemetryRecord struct { + TelemetryOut + Created string `json:"created"` +} + +func (p *PBClient) fetchRecords(ctx context.Context, filter string, limit int) ([]TelemetryRecord, 
error) { + var allRecords []TelemetryRecord + page := 1 + perPage := 100 + + for { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, + fmt.Sprintf("%s/api/collections/%s/records?filter=%s&sort=-created&page=%d&perPage=%d", + p.baseURL, p.targetColl, filter, page, perPage), + nil, + ) + if err != nil { + return nil, err + } + req.Header.Set("Authorization", "Bearer "+p.token) + + resp, err := p.http.Do(req) + if err != nil { + return nil, err + } + + var result struct { + Items []TelemetryRecord `json:"items"` + TotalItems int `json:"totalItems"` + } + if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { + resp.Body.Close() + return nil, err + } + resp.Body.Close() + + allRecords = append(allRecords, result.Items...) + + if len(allRecords) >= limit || len(allRecords) >= result.TotalItems { + break + } + page++ + } + + return allRecords, nil +} + +func topN(m map[string]int, n int) []AppCount { + result := make([]AppCount, 0, len(m)) + for k, v := range m { + result = append(result, AppCount{App: k, Count: v}) + } + // Simple bubble sort for small datasets + for i := 0; i < len(result)-1; i++ { + for j := i + 1; j < len(result); j++ { + if result[j].Count > result[i].Count { + result[i], result[j] = result[j], result[i] + } + } + } + if len(result) > n { + return result[:n] + } + return result +} + +func topNOs(m map[string]int, n int) []OsCount { + result := make([]OsCount, 0, len(m)) + for k, v := range m { + result = append(result, OsCount{Os: k, Count: v}) + } + for i := 0; i < len(result)-1; i++ { + for j := i + 1; j < len(result); j++ { + if result[j].Count > result[i].Count { + result[i], result[j] = result[j], result[i] + } + } + } + if len(result) > n { + return result[:n] + } + return result +} + +func topNMethod(m map[string]int, n int) []MethodCount { + result := make([]MethodCount, 0, len(m)) + for k, v := range m { + result = append(result, MethodCount{Method: k, Count: v}) + } + for i := 0; i < len(result)-1; i++ { + for j := 
i + 1; j < len(result); j++ { + if result[j].Count > result[i].Count { + result[i], result[j] = result[j], result[i] + } + } + } + if len(result) > n { + return result[:n] + } + return result +} + +func buildDailyStats(success, failed map[string]int, days int) []DailyStat { + result := make([]DailyStat, 0, days) + for i := days - 1; i >= 0; i-- { + date := time.Now().AddDate(0, 0, -i).Format("2006-01-02") + result = append(result, DailyStat{ + Date: date, + Success: success[date], + Failed: failed[date], + }) + } + return result +} + +// DashboardHTML returns the embedded dashboard HTML +func DashboardHTML() string { + return ` + + + + + Telemetry Dashboard - Community Scripts + + + + +
+

+ + + + + Telemetry Dashboard +

+
+ + + +
+
+ + + +
+
+
Total Installations
+
-
+
+
+
Successful
+
-
+
+
+
Failed
+
-
+
+
+
In Progress
+
-
+
+
+
Success Rate
+
-
+
+
+ +
+
+

Installations Over Time

+
+ +
+
+
+

Status Distribution

+
+ +
+
+
+ +
+
+

Top Applications

+
+ +
+
+
+

OS Distribution

+
+ +
+
+
+

Installation Method

+
+ +
+
+
+ +
+

Recent Installations

+
+ + + +
+ + + + + + + + + + + + + + + +
AppStatusOSTypeMethodExit CodeError
Loading...
+
+ + + +` +} diff --git a/misc/data/service.go b/misc/data/service.go index 294dc0c5d..18741251d 100644 --- a/misc/data/service.go +++ b/misc/data/service.go @@ -557,6 +557,39 @@ func main() { _, _ = w.Write([]byte("ok")) }) + // Dashboard HTML page + mux.HandleFunc("/dashboard", func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "text/html; charset=utf-8") + _, _ = w.Write([]byte(DashboardHTML())) + }) + + // Dashboard API endpoint + mux.HandleFunc("/api/dashboard", func(w http.ResponseWriter, r *http.Request) { + days := 30 + if d := r.URL.Query().Get("days"); d != "" { + fmt.Sscanf(d, "%d", &days) + if days < 1 { + days = 1 + } + if days > 365 { + days = 365 + } + } + + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + data, err := pb.FetchDashboardData(ctx, days) + if err != nil { + log.Printf("dashboard fetch failed: %v", err) + http.Error(w, "failed to fetch data", http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(data) + }) + mux.HandleFunc("/telemetry", func(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodPost { http.Error(w, "method not allowed", http.StatusMethodNotAllowed) From 389708a33c844a3624d9bf012039dccb45672af7 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Mon, 9 Feb 2026 17:25:07 +0100 Subject: [PATCH 44/87] Update linkding-install.sh --- install/linkding-install.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/install/linkding-install.sh b/install/linkding-install.sh index e2080a082..e893daeda 100644 --- a/install/linkding-install.sh +++ b/install/linkding-install.sh @@ -14,7 +14,7 @@ network_check update_os msg_info "Installing Dependencies" -$STD apt-get install -y \ +$STD apt install -y \ build-essential \ pkg-config \ python3-dev \ @@ -22,7 +22,6 @@ $STD apt-get install -y \ libpq-dev \ libicu-dev \ 
libsqlite3-dev \ - libsqlite3-mod-icu \ libffi-dev msg_ok "Installed Dependencies" From 0226a043b8fa2c4a4d259275f4e99cb1f7d5fa17 Mon Sep 17 00:00:00 2001 From: MickLesk Date: Mon, 9 Feb 2026 18:33:33 +0100 Subject: [PATCH 45/87] feat(telemetry): add caching, alerts, migration & dashboard improvements - Add Redis/in-memory caching layer (cache.go) - Add SMTP alerting for high failure rates (alerts.go) - Add data migration script from old API (migrate.go) - Add docker-compose.yml for easy deployment - Move dashboard to / with redirect from /dashboard - Add dark/light mode toggle - Add error analysis and failed apps statistics - Add PVE version and LXC/VM type stats - Add /metrics Prometheus endpoint - Add /api/records pagination endpoint - Add CSV export functionality - Enhanced healthcheck with PB connection status New ENV vars: - Cache: ENABLE_CACHE, CACHE_TTL_SECONDS, ENABLE_REDIS, REDIS_URL - Alerts: ALERT_ENABLED, SMTP_*, ALERT_FAILURE_THRESHOLD, etc. - Migration: RUN_MIGRATION, MIGRATION_REQUIRED, MIGRATION_SOURCE_URL --- misc/data/Dockerfile | 44 ++- misc/data/alerts.go | 267 ++++++++++++++ misc/data/cache.go | 158 ++++++++ misc/data/dashboard.go | 682 +++++++++++++++++++++++++++++++++-- misc/data/docker-compose.yml | 74 ++++ misc/data/entrypoint.sh | 50 +++ misc/data/go.mod | 7 + misc/data/go.sum | 10 + misc/data/migrate.go | 265 ++++++++++++++ misc/data/migrate.sh | 67 ++++ misc/data/service.go | 279 +++++++++++++- misc/data/telemetry-service | Bin 0 -> 10448005 bytes 12 files changed, 1874 insertions(+), 29 deletions(-) create mode 100644 misc/data/alerts.go create mode 100644 misc/data/cache.go create mode 100644 misc/data/docker-compose.yml create mode 100644 misc/data/entrypoint.sh create mode 100644 misc/data/go.sum create mode 100644 misc/data/migrate.go create mode 100755 misc/data/migrate.sh create mode 100755 misc/data/telemetry-service diff --git a/misc/data/Dockerfile b/misc/data/Dockerfile index 6fd4377bc..d9228dafe 100644 --- 
a/misc/data/Dockerfile +++ b/misc/data/Dockerfile @@ -1,10 +1,52 @@ FROM golang:1.25-alpine AS build WORKDIR /src +COPY go.mod go.sum* ./ +RUN go mod download 2>/dev/null || true COPY . . RUN go build -trimpath -ldflags "-s -w" -o /out/telemetry-ingest . +RUN go build -trimpath -ldflags "-s -w" -o /out/migrate migrate.go FROM alpine:3.23 +RUN apk add --no-cache ca-certificates tzdata WORKDIR /app COPY --from=build /out/telemetry-ingest /app/telemetry-ingest +COPY --from=build /out/migrate /app/migrate +COPY entrypoint.sh /app/entrypoint.sh +RUN chmod +x /app/entrypoint.sh /app/migrate + +# Service config +ENV LISTEN_ADDR=":8080" +ENV MAX_BODY_BYTES="1024" +ENV RATE_LIMIT_RPM="60" +ENV RATE_BURST="20" +ENV UPSTREAM_TIMEOUT_MS="4000" +ENV ENABLE_REQUEST_LOGGING="false" + +# Cache config (optional) +ENV ENABLE_CACHE="true" +ENV CACHE_TTL_SECONDS="60" +ENV ENABLE_REDIS="false" +# ENV REDIS_URL="redis://localhost:6379" + +# Alert config (optional) +ENV ALERT_ENABLED="false" +# ENV SMTP_HOST="" +# ENV SMTP_PORT="587" +# ENV SMTP_USER="" +# ENV SMTP_PASSWORD="" +# ENV SMTP_FROM="telemetry@proxmoxved.local" +# ENV SMTP_TO="" +# ENV SMTP_USE_TLS="false" +ENV ALERT_FAILURE_THRESHOLD="20.0" +ENV ALERT_CHECK_INTERVAL_MIN="15" +ENV ALERT_COOLDOWN_MIN="60" + +# Migration config (optional) +ENV RUN_MIGRATION="false" +ENV MIGRATION_REQUIRED="false" +ENV MIGRATION_SOURCE_URL="https://api.htl-braunau.at/dev/data" + EXPOSE 8080 -CMD ["/app/telemetry-ingest"] +HEALTHCHECK --interval=30s --timeout=3s --start-period=5s \ + CMD wget -q --spider http://localhost:8080/healthz || exit 1 +ENTRYPOINT ["/app/entrypoint.sh"] diff --git a/misc/data/alerts.go b/misc/data/alerts.go new file mode 100644 index 000000000..dccbbd6b6 --- /dev/null +++ b/misc/data/alerts.go @@ -0,0 +1,267 @@ +package main + +import ( + "bytes" + "context" + "crypto/tls" + "fmt" + "log" + "net/smtp" + "strings" + "sync" + "time" +) + +// AlertConfig holds SMTP alert configuration +type AlertConfig struct { + Enabled bool 
+ SMTPHost string + SMTPPort int + SMTPUser string + SMTPPassword string + SMTPFrom string + SMTPTo []string + UseTLS bool + FailureThreshold float64 // Alert when failure rate exceeds this (e.g., 20.0 = 20%) + CheckInterval time.Duration // How often to check + Cooldown time.Duration // Minimum time between alerts +} + +// Alerter handles alerting functionality +type Alerter struct { + cfg AlertConfig + lastAlertAt time.Time + mu sync.Mutex + pb *PBClient + lastStats alertStats + alertHistory []AlertEvent +} + +type alertStats struct { + successCount int + failedCount int + checkedAt time.Time +} + +// AlertEvent records an alert that was sent +type AlertEvent struct { + Timestamp time.Time `json:"timestamp"` + Type string `json:"type"` + Message string `json:"message"` + FailureRate float64 `json:"failure_rate,omitempty"` +} + +// NewAlerter creates a new alerter instance +func NewAlerter(cfg AlertConfig, pb *PBClient) *Alerter { + return &Alerter{ + cfg: cfg, + pb: pb, + alertHistory: make([]AlertEvent, 0), + } +} + +// Start begins the alert monitoring loop +func (a *Alerter) Start() { + if !a.cfg.Enabled { + log.Println("INFO: alerting disabled") + return + } + + if a.cfg.SMTPHost == "" || len(a.cfg.SMTPTo) == 0 { + log.Println("WARN: alerting enabled but SMTP not configured") + return + } + + go a.monitorLoop() + log.Printf("INFO: alert monitoring started (threshold: %.1f%%, interval: %v)", a.cfg.FailureThreshold, a.cfg.CheckInterval) +} + +func (a *Alerter) monitorLoop() { + ticker := time.NewTicker(a.cfg.CheckInterval) + defer ticker.Stop() + + for range ticker.C { + a.checkAndAlert() + } +} + +func (a *Alerter) checkAndAlert() { + ctx, cancel := newTimeoutContext(10 * time.Second) + defer cancel() + + // Fetch last hour's data + data, err := a.pb.FetchDashboardData(ctx, 1) + if err != nil { + log.Printf("WARN: alert check failed: %v", err) + return + } + + // Calculate current failure rate + total := data.SuccessCount + data.FailedCount + if total < 10 { + 
// Not enough data to determine rate + return + } + + failureRate := float64(data.FailedCount) / float64(total) * 100 + + // Check if we should alert + if failureRate >= a.cfg.FailureThreshold { + a.maybeSendAlert(failureRate, data.FailedCount, total) + } +} + +func (a *Alerter) maybeSendAlert(rate float64, failed, total int) { + a.mu.Lock() + defer a.mu.Unlock() + + // Check cooldown + if time.Since(a.lastAlertAt) < a.cfg.Cooldown { + return + } + + // Send alert + subject := fmt.Sprintf("[ProxmoxVED Alert] High Failure Rate: %.1f%%", rate) + body := fmt.Sprintf(`ProxmoxVE Helper Scripts - Telemetry Alert + +⚠️ High installation failure rate detected! + +Current Statistics (last 24h): +- Failure Rate: %.1f%% +- Failed Installations: %d +- Total Installations: %d +- Threshold: %.1f%% + +Time: %s + +Please check the dashboard for more details. + +--- +This is an automated alert from the telemetry service. +`, rate, failed, total, a.cfg.FailureThreshold, time.Now().Format(time.RFC1123)) + + if err := a.sendEmail(subject, body); err != nil { + log.Printf("ERROR: failed to send alert email: %v", err) + return + } + + a.lastAlertAt = time.Now() + a.alertHistory = append(a.alertHistory, AlertEvent{ + Timestamp: time.Now(), + Type: "high_failure_rate", + Message: fmt.Sprintf("Failure rate %.1f%% exceeded threshold %.1f%%", rate, a.cfg.FailureThreshold), + FailureRate: rate, + }) + + // Keep only last 100 alerts + if len(a.alertHistory) > 100 { + a.alertHistory = a.alertHistory[len(a.alertHistory)-100:] + } + + log.Printf("ALERT: sent high failure rate alert (%.1f%%)", rate) +} + +func (a *Alerter) sendEmail(subject, body string) error { + // Build message + var msg bytes.Buffer + msg.WriteString(fmt.Sprintf("From: %s\r\n", a.cfg.SMTPFrom)) + msg.WriteString(fmt.Sprintf("To: %s\r\n", strings.Join(a.cfg.SMTPTo, ", "))) + msg.WriteString(fmt.Sprintf("Subject: %s\r\n", subject)) + msg.WriteString("MIME-Version: 1.0\r\n") + msg.WriteString("Content-Type: text/plain; 
charset=UTF-8\r\n") + msg.WriteString("\r\n") + msg.WriteString(body) + + addr := fmt.Sprintf("%s:%d", a.cfg.SMTPHost, a.cfg.SMTPPort) + + var auth smtp.Auth + if a.cfg.SMTPUser != "" && a.cfg.SMTPPassword != "" { + auth = smtp.PlainAuth("", a.cfg.SMTPUser, a.cfg.SMTPPassword, a.cfg.SMTPHost) + } + + if a.cfg.UseTLS { + // TLS connection + tlsConfig := &tls.Config{ + ServerName: a.cfg.SMTPHost, + } + + conn, err := tls.Dial("tcp", addr, tlsConfig) + if err != nil { + return fmt.Errorf("TLS dial failed: %w", err) + } + defer conn.Close() + + client, err := smtp.NewClient(conn, a.cfg.SMTPHost) + if err != nil { + return fmt.Errorf("SMTP client failed: %w", err) + } + defer client.Close() + + if auth != nil { + if err := client.Auth(auth); err != nil { + return fmt.Errorf("SMTP auth failed: %w", err) + } + } + + if err := client.Mail(a.cfg.SMTPFrom); err != nil { + return fmt.Errorf("SMTP MAIL failed: %w", err) + } + + for _, to := range a.cfg.SMTPTo { + if err := client.Rcpt(to); err != nil { + return fmt.Errorf("SMTP RCPT failed: %w", err) + } + } + + w, err := client.Data() + if err != nil { + return fmt.Errorf("SMTP DATA failed: %w", err) + } + + _, err = w.Write(msg.Bytes()) + if err != nil { + return fmt.Errorf("SMTP write failed: %w", err) + } + + return w.Close() + } + + // Non-TLS (STARTTLS) + return smtp.SendMail(addr, auth, a.cfg.SMTPFrom, a.cfg.SMTPTo, msg.Bytes()) +} + +// GetAlertHistory returns recent alert events +func (a *Alerter) GetAlertHistory() []AlertEvent { + a.mu.Lock() + defer a.mu.Unlock() + result := make([]AlertEvent, len(a.alertHistory)) + copy(result, a.alertHistory) + return result +} + +// TestAlert sends a test alert email +func (a *Alerter) TestAlert() error { + if !a.cfg.Enabled || a.cfg.SMTPHost == "" { + return fmt.Errorf("alerting not configured") + } + + subject := "[ProxmoxVED] Test Alert" + body := fmt.Sprintf(`This is a test alert from ProxmoxVE Helper Scripts telemetry service. 
+ +If you received this email, your alert configuration is working correctly. + +Time: %s +SMTP Host: %s +Recipients: %s + +--- +This is an automated test message. +`, time.Now().Format(time.RFC1123), a.cfg.SMTPHost, strings.Join(a.cfg.SMTPTo, ", ")) + + return a.sendEmail(subject, body) +} + +// Helper for timeout context +func newTimeoutContext(d time.Duration) (context.Context, context.CancelFunc) { + return context.WithTimeout(context.Background(), d) +} diff --git a/misc/data/cache.go b/misc/data/cache.go new file mode 100644 index 000000000..54cc5f5c5 --- /dev/null +++ b/misc/data/cache.go @@ -0,0 +1,158 @@ +package main + +import ( + "context" + "encoding/json" + "log" + "sync" + "time" + + "github.com/redis/go-redis/v9" +) + +// CacheConfig holds cache configuration +type CacheConfig struct { + RedisURL string + EnableRedis bool + DefaultTTL time.Duration +} + +// Cache provides caching functionality with Redis or in-memory fallback +type Cache struct { + redis *redis.Client + useRedis bool + defaultTTL time.Duration + + // In-memory fallback + mu sync.RWMutex + memData map[string]cacheEntry +} + +type cacheEntry struct { + data []byte + expiresAt time.Time +} + +// NewCache creates a new cache instance +func NewCache(cfg CacheConfig) *Cache { + c := &Cache{ + defaultTTL: cfg.DefaultTTL, + memData: make(map[string]cacheEntry), + } + + if cfg.EnableRedis && cfg.RedisURL != "" { + opts, err := redis.ParseURL(cfg.RedisURL) + if err != nil { + log.Printf("WARN: invalid redis URL, using in-memory cache: %v", err) + return c + } + + client := redis.NewClient(opts) + ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second) + defer cancel() + + if err := client.Ping(ctx).Err(); err != nil { + log.Printf("WARN: redis connection failed, using in-memory cache: %v", err) + return c + } + + c.redis = client + c.useRedis = true + log.Printf("INFO: connected to Redis for caching") + } + + // Start cleanup goroutine for in-memory cache + if !c.useRedis { + 
go c.cleanupLoop() + } + + return c +} + +func (c *Cache) cleanupLoop() { + ticker := time.NewTicker(5 * time.Minute) + defer ticker.Stop() + + for range ticker.C { + c.mu.Lock() + now := time.Now() + for k, v := range c.memData { + if now.After(v.expiresAt) { + delete(c.memData, k) + } + } + c.mu.Unlock() + } +} + +// Get retrieves a value from cache +func (c *Cache) Get(ctx context.Context, key string, dest interface{}) bool { + if c.useRedis { + data, err := c.redis.Get(ctx, key).Bytes() + if err != nil { + return false + } + return json.Unmarshal(data, dest) == nil + } + + // In-memory fallback + c.mu.RLock() + entry, ok := c.memData[key] + c.mu.RUnlock() + + if !ok || time.Now().After(entry.expiresAt) { + return false + } + + return json.Unmarshal(entry.data, dest) == nil +} + +// Set stores a value in cache +func (c *Cache) Set(ctx context.Context, key string, value interface{}, ttl time.Duration) error { + if ttl == 0 { + ttl = c.defaultTTL + } + + data, err := json.Marshal(value) + if err != nil { + return err + } + + if c.useRedis { + return c.redis.Set(ctx, key, data, ttl).Err() + } + + // In-memory fallback + c.mu.Lock() + c.memData[key] = cacheEntry{ + data: data, + expiresAt: time.Now().Add(ttl), + } + c.mu.Unlock() + + return nil +} + +// Delete removes a key from cache +func (c *Cache) Delete(ctx context.Context, key string) error { + if c.useRedis { + return c.redis.Del(ctx, key).Err() + } + + c.mu.Lock() + delete(c.memData, key) + c.mu.Unlock() + return nil +} + +// InvalidateDashboard clears dashboard cache +func (c *Cache) InvalidateDashboard(ctx context.Context) { + // Delete all dashboard cache keys + for days := 1; days <= 365; days++ { + _ = c.Delete(ctx, dashboardCacheKey(days)) + } +} + +func dashboardCacheKey(days int) string { + return "dashboard:" + string(rune(days)) +} diff --git a/misc/data/dashboard.go b/misc/data/dashboard.go index 34edbbf40..fb6b28955 100644 --- a/misc/data/dashboard.go +++ b/misc/data/dashboard.go @@ -6,6 +6,7 @@ 
import ( "fmt" "net/http" "net/url" + "strings" "time" ) @@ -19,6 +20,10 @@ type DashboardData struct { TopApps []AppCount `json:"top_apps"` OsDistribution []OsCount `json:"os_distribution"` MethodStats []MethodCount `json:"method_stats"` + PveVersions []PveCount `json:"pve_versions"` + TypeStats []TypeCount `json:"type_stats"` + ErrorAnalysis []ErrorGroup `json:"error_analysis"` + FailedApps []AppFailure `json:"failed_apps"` RecentRecords []TelemetryRecord `json:"recent_records"` DailyStats []DailyStat `json:"daily_stats"` } @@ -38,6 +43,29 @@ type MethodCount struct { Count int `json:"count"` } +type PveCount struct { + Version string `json:"version"` + Count int `json:"count"` +} + +type TypeCount struct { + Type string `json:"type"` + Count int `json:"count"` +} + +type ErrorGroup struct { + Pattern string `json:"pattern"` + Count int `json:"count"` + Apps string `json:"apps"` // Comma-separated list of affected apps +} + +type AppFailure struct { + App string `json:"app"` + TotalCount int `json:"total_count"` + FailedCount int `json:"failed_count"` + FailureRate float64 `json:"failure_rate"` +} + type DailyStat struct { Date string `json:"date"` Success int `json:"success"` @@ -64,8 +92,12 @@ func (p *PBClient) FetchDashboardData(ctx context.Context, days int) (*Dashboard // Aggregate statistics appCounts := make(map[string]int) + appFailures := make(map[string]int) osCounts := make(map[string]int) methodCounts := make(map[string]int) + pveCounts := make(map[string]int) + typeCounts := make(map[string]int) + errorPatterns := make(map[string]map[string]bool) // pattern -> set of apps dailySuccess := make(map[string]int) dailyFailed := make(map[string]int) @@ -77,6 +109,20 @@ func (p *PBClient) FetchDashboardData(ctx context.Context, days int) (*Dashboard data.SuccessCount++ case "failed": data.FailedCount++ + // Track failed apps + if r.NSAPP != "" { + appFailures[r.NSAPP]++ + } + // Group errors by pattern + if r.Error != "" { + pattern := 
normalizeError(r.Error) + if errorPatterns[pattern] == nil { + errorPatterns[pattern] = make(map[string]bool) + } + if r.NSAPP != "" { + errorPatterns[pattern][r.NSAPP] = true + } + } case "installing": data.InstallingCount++ } @@ -96,6 +142,16 @@ func (p *PBClient) FetchDashboardData(ctx context.Context, days int) (*Dashboard methodCounts[r.Method]++ } + // Count PVE versions + if r.PveVer != "" { + pveCounts[r.PveVer]++ + } + + // Count types (LXC vs VM) + if r.Type != "" { + typeCounts[r.Type]++ + } + // Daily stats (use Created field if available) if r.Created != "" { date := r.Created[:10] // "2026-02-09" @@ -117,6 +173,14 @@ func (p *PBClient) FetchDashboardData(ctx context.Context, days int) (*Dashboard data.TopApps = topN(appCounts, 10) data.OsDistribution = topNOs(osCounts, 10) data.MethodStats = topNMethod(methodCounts, 10) + data.PveVersions = topNPve(pveCounts, 10) + data.TypeStats = topNType(typeCounts, 10) + + // Error analysis + data.ErrorAnalysis = buildErrorAnalysis(errorPatterns, 10) + + // Failed apps with failure rates + data.FailedApps = buildFailedApps(appCounts, appFailures, 10) // Daily stats for chart data.DailyStats = buildDailyStats(dailySuccess, dailyFailed, days) @@ -234,6 +298,158 @@ func topNMethod(m map[string]int, n int) []MethodCount { return result } +func topNPve(m map[string]int, n int) []PveCount { + result := make([]PveCount, 0, len(m)) + for k, v := range m { + result = append(result, PveCount{Version: k, Count: v}) + } + for i := 0; i < len(result)-1; i++ { + for j := i + 1; j < len(result); j++ { + if result[j].Count > result[i].Count { + result[i], result[j] = result[j], result[i] + } + } + } + if len(result) > n { + return result[:n] + } + return result +} + +func topNType(m map[string]int, n int) []TypeCount { + result := make([]TypeCount, 0, len(m)) + for k, v := range m { + result = append(result, TypeCount{Type: k, Count: v}) + } + for i := 0; i < len(result)-1; i++ { + for j := i + 1; j < len(result); j++ { + if 
result[j].Count > result[i].Count { + result[i], result[j] = result[j], result[i] + } + } + } + if len(result) > n { + return result[:n] + } + return result +} + +// normalizeError simplifies error messages into patterns for grouping +func normalizeError(err string) string { + err = strings.TrimSpace(err) + if err == "" { + return "unknown" + } + + // Normalize common patterns + err = strings.ToLower(err) + + // Remove specific numbers, IPs, paths that vary + // Keep it simple for now - just truncate and normalize + if len(err) > 60 { + err = err[:60] + } + + // Common error pattern replacements + patterns := map[string]string{ + "connection refused": "connection refused", + "timeout": "timeout", + "no space left": "disk full", + "permission denied": "permission denied", + "not found": "not found", + "failed to download": "download failed", + "apt": "apt error", + "dpkg": "dpkg error", + "curl": "network error", + "wget": "network error", + "docker": "docker error", + "systemctl": "systemd error", + "service": "service error", + } + + for pattern, label := range patterns { + if strings.Contains(err, pattern) { + return label + } + } + + // If no pattern matches, return first 40 chars + if len(err) > 40 { + return err[:40] + "..." + } + return err +} + +func buildErrorAnalysis(patterns map[string]map[string]bool, n int) []ErrorGroup { + result := make([]ErrorGroup, 0, len(patterns)) + + for pattern, apps := range patterns { + appList := make([]string, 0, len(apps)) + for app := range apps { + appList = append(appList, app) + } + + // Limit app list display + appsStr := strings.Join(appList, ", ") + if len(appsStr) > 50 { + appsStr = appsStr[:47] + "..." 
+ } + + result = append(result, ErrorGroup{ + Pattern: pattern, + Count: len(apps), // Number of unique apps with this error + Apps: appsStr, + }) + } + + // Sort by count descending + for i := 0; i < len(result)-1; i++ { + for j := i + 1; j < len(result); j++ { + if result[j].Count > result[i].Count { + result[i], result[j] = result[j], result[i] + } + } + } + + if len(result) > n { + return result[:n] + } + return result +} + +func buildFailedApps(total, failed map[string]int, n int) []AppFailure { + result := make([]AppFailure, 0) + + for app, failCount := range failed { + totalCount := total[app] + if totalCount == 0 { + continue + } + + rate := float64(failCount) / float64(totalCount) * 100 + result = append(result, AppFailure{ + App: app, + TotalCount: totalCount, + FailedCount: failCount, + FailureRate: rate, + }) + } + + // Sort by failure rate descending + for i := 0; i < len(result)-1; i++ { + for j := i + 1; j < len(result); j++ { + if result[j].FailureRate > result[i].FailureRate { + result[i], result[j] = result[j], result[i] + } + } + } + + if len(result) > n { + return result[:n] + } + return result +} + func buildDailyStats(success, failed map[string]int, days int) []DailyStat { result := make([]DailyStat, 0, days) for i := days - 1; i >= 0; i-- { @@ -254,7 +470,9 @@ func DashboardHTML() string { - Telemetry Dashboard - Community Scripts + Telemetry Dashboard - ProxmoxVE Helper Scripts + + @@ -510,8 +926,13 @@ func DashboardHTML() string { + + + @@ -539,6 +960,17 @@ func DashboardHTML() string {
Success Rate
-
+
+
LXC / VM
+
-
+
+ + +
+

Proxmox VE Versions

+
+ Loading... +
@@ -577,6 +1009,34 @@ func DashboardHTML() string {
+
+

+ + + + + + Error Analysis +

+
+ Loading... +
+
+ +
+

+ + + + + + Apps with Highest Failure Rates +

+
+ Loading... +
+
+

Recent Installations

@@ -599,19 +1059,63 @@ func DashboardHTML() string { OS Type Method + Resources Exit Code Error - Loading... + Loading... + +
+ + ` -} +} \ No newline at end of file diff --git a/misc/data/service.go b/misc/data/service.go index 578297965..3a9034ba2 100644 --- a/misc/data/service.go +++ b/misc/data/service.go @@ -1024,4 +1024,4 @@ func splitCSV(s string) []string { } } return out -} +} \ No newline at end of file From 3e20781f88fc3e8b692421465cacbf404608a566 Mon Sep 17 00:00:00 2001 From: Tobias <96661824+CrazyWolf13@users.noreply.github.com> Date: Tue, 10 Feb 2026 08:10:54 +0100 Subject: [PATCH 55/87] Use APPLICATION_VERSION for version in fetch functions --- misc/tools.func | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/misc/tools.func b/misc/tools.func index 40acc1abb..82895bce0 100644 --- a/misc/tools.func +++ b/misc/tools.func @@ -1778,7 +1778,7 @@ function fetch_and_deploy_gh_release() { local app="$1" local repo="$2" local mode="${3:-tarball}" # tarball | binary | prebuild | singlefile | tag - local version="${4:-latest}" + local version="${APPLICATION_VERSION:-${4:-latest}}" local target="${5:-/opt/$app}" local asset_pattern="${6:-}" @@ -2230,7 +2230,7 @@ function fetch_and_deploy_codeberg_release() { local app="$1" local repo="$2" local mode="${3:-tarball}" # tarball | binary | prebuild | singlefile | tag - local version="${4:-latest}" + local version="${APPLICATION_VERSION:-${4:-latest}}" local target="${5:-/opt/$app}" local asset_pattern="${6:-}" From 887a899f2448f851d6405568d580c3ebcf57069f Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Tue, 10 Feb 2026 08:14:45 +0100 Subject: [PATCH 56/87] Extend telemetry reporting and ingestion Add extended telemetry functions and server-side support: misc/api.func gains helpers (categorize_error, install timer, detect_gpu) and new reporters for tools, addons, and an extended post_update_to_api with duration, GPU and error_category. 
misc/data/service.go updated to accept and validate new fields (type: tool/addon, tool_name, parent_ct, gpu_vendor, gpu_passthrough, install_duration, error_category), expand allowed enums, include new fields in UpsertTelemetry and mapping, and add input sanitization. Also add telemetry-ingest.exe binary. These changes enable richer telemetry (tool/addon events, GPU info, durations and categorized errors) and server ingestion/validation for them. --- misc/api.func | 316 +++++++++++++++++++++++++++++++++++++++++ misc/data/dashboard.go | 39 +++++ misc/data/service.go | 132 +++++++++++++---- 3 files changed, 460 insertions(+), 27 deletions(-) diff --git a/misc/api.func b/misc/api.func index 1d3d1bcd7..a1f6802dd 100644 --- a/misc/api.func +++ b/misc/api.func @@ -407,3 +407,319 @@ EOF POST_UPDATE_DONE=true } + +# ============================================================================== +# SECTION 3: EXTENDED TELEMETRY FUNCTIONS +# ============================================================================== + +# ------------------------------------------------------------------------------ +# categorize_error() +# +# - Maps exit codes to error categories for better analytics +# - Categories: network, storage, dependency, permission, timeout, config, resource, unknown +# - Used to group errors in dashboard +# ------------------------------------------------------------------------------ +categorize_error() { + local code="$1" + case "$code" in + # Network errors + 6|7|22|28|35) echo "network" ;; + + # Storage errors + 214|217|219) echo "storage" ;; + + # Dependency/Package errors + 100|101|102|127|160|161|162) echo "dependency" ;; + + # Permission errors + 126|152) echo "permission" ;; + + # Timeout errors + 124|28|211) echo "timeout" ;; + + # Configuration errors + 203|204|205|206|207|208) echo "config" ;; + + # Resource errors (OOM, etc) + 137|134) echo "resource" ;; + + # Default + *) echo "unknown" ;; + esac +} + +# 
------------------------------------------------------------------------------ +# start_install_timer() +# +# - Captures start time for installation duration tracking +# - Call at the beginning of installation +# - Sets INSTALL_START_TIME global variable +# ------------------------------------------------------------------------------ +start_install_timer() { + INSTALL_START_TIME=$(date +%s) + export INSTALL_START_TIME +} + +# ------------------------------------------------------------------------------ +# get_install_duration() +# +# - Returns elapsed seconds since start_install_timer() was called +# - Returns 0 if timer was not started +# ------------------------------------------------------------------------------ +get_install_duration() { + if [[ -z "${INSTALL_START_TIME:-}" ]]; then + echo "0" + return + fi + local now=$(date +%s) + echo $((now - INSTALL_START_TIME)) +} + +# ------------------------------------------------------------------------------ +# detect_gpu() +# +# - Detects GPU vendor and passthrough type +# - Sets GPU_VENDOR and GPU_PASSTHROUGH globals +# - Used for GPU analytics +# ------------------------------------------------------------------------------ +detect_gpu() { + GPU_VENDOR="" + GPU_PASSTHROUGH="none" + + # Detect Intel GPU + if lspci 2>/dev/null | grep -qi "VGA.*Intel"; then + GPU_VENDOR="intel" + GPU_PASSTHROUGH="igpu" + fi + + # Detect AMD GPU + if lspci 2>/dev/null | grep -qi "VGA.*AMD\|VGA.*ATI"; then + GPU_VENDOR="amd" + # Check if discrete + if lspci 2>/dev/null | grep -qi "AMD.*Radeon"; then + GPU_PASSTHROUGH="dgpu" + else + GPU_PASSTHROUGH="igpu" + fi + fi + + # Detect NVIDIA GPU + if lspci 2>/dev/null | grep -qi "VGA.*NVIDIA\|3D.*NVIDIA"; then + GPU_VENDOR="nvidia" + GPU_PASSTHROUGH="dgpu" + fi + + export GPU_VENDOR GPU_PASSTHROUGH +} + +# ------------------------------------------------------------------------------ +# post_tool_to_api() +# +# - Reports tool usage to telemetry +# - Arguments: +# * $1: tool_name (e.g., 
"microcode", "lxc-update", "post-pve-install") +# * $2: status ("success" or "failed") +# * $3: exit_code (optional, default: 0 for success, 1 for failed) +# - For PVE host tools, not container installations +# ------------------------------------------------------------------------------ +post_tool_to_api() { + command -v curl &>/dev/null || return 0 + [[ "${DIAGNOSTICS:-no}" == "no" ]] && return 0 + + local tool_name="${1:-unknown}" + local status="${2:-success}" + local exit_code="${3:-0}" + local error="" error_category="" + local uuid duration + + # Generate UUID for this tool execution + uuid=$(cat /proc/sys/kernel/random/uuid 2>/dev/null || uuidgen 2>/dev/null || echo "tool-$(date +%s)") + duration=$(get_install_duration) + + # Map status + [[ "$status" == "done" ]] && status="success" + + if [[ "$status" == "failed" ]]; then + [[ ! "$exit_code" =~ ^[0-9]+$ ]] && exit_code=1 + error=$(explain_exit_code "$exit_code") + error_category=$(categorize_error "$exit_code") + fi + + local pve_version="" + if command -v pveversion &>/dev/null; then + pve_version=$(pveversion 2>/dev/null | awk -F'[/ ]' '{print $2}') || true + fi + + local JSON_PAYLOAD + JSON_PAYLOAD=$(cat </dev/null || true +} + +# ------------------------------------------------------------------------------ +# post_addon_to_api() +# +# - Reports addon installation to telemetry +# - Arguments: +# * $1: addon_name (e.g., "filebrowser", "netdata") +# * $2: status ("success" or "failed") +# * $3: parent_ct (optional, name of parent container) +# * $4: exit_code (optional) +# - For addons installed inside containers +# ------------------------------------------------------------------------------ +post_addon_to_api() { + command -v curl &>/dev/null || return 0 + [[ "${DIAGNOSTICS:-no}" == "no" ]] && return 0 + + local addon_name="${1:-unknown}" + local status="${2:-success}" + local parent_ct="${3:-}" + local exit_code="${4:-0}" + local error="" error_category="" + local uuid duration + + # Generate UUID 
for this addon installation + uuid=$(cat /proc/sys/kernel/random/uuid 2>/dev/null || uuidgen 2>/dev/null || echo "addon-$(date +%s)") + duration=$(get_install_duration) + + # Map status + [[ "$status" == "done" ]] && status="success" + + if [[ "$status" == "failed" ]]; then + [[ ! "$exit_code" =~ ^[0-9]+$ ]] && exit_code=1 + error=$(explain_exit_code "$exit_code") + error_category=$(categorize_error "$exit_code") + fi + + # Detect OS info + local os_type="" os_version="" + if [[ -f /etc/os-release ]]; then + os_type=$(grep "^ID=" /etc/os-release | cut -d= -f2 | tr -d '"') + os_version=$(grep "^VERSION_ID=" /etc/os-release | cut -d= -f2 | tr -d '"') + fi + + local JSON_PAYLOAD + JSON_PAYLOAD=$(cat </dev/null || true +} + +# ------------------------------------------------------------------------------ +# post_update_to_api_extended() +# +# - Extended version of post_update_to_api with duration, GPU, and error category +# - Same arguments as post_update_to_api: +# * $1: status ("done" or "failed") +# * $2: exit_code (numeric) +# - Automatically includes: +# * Install duration (if start_install_timer was called) +# * Error category (for failed status) +# * GPU info (if detect_gpu was called) +# ------------------------------------------------------------------------------ +post_update_to_api_extended() { + # Silent fail - telemetry should never break scripts + command -v curl &>/dev/null || return 0 + + # Prevent duplicate submissions + POST_UPDATE_DONE=${POST_UPDATE_DONE:-false} + [[ "$POST_UPDATE_DONE" == "true" ]] && return 0 + + [[ "${DIAGNOSTICS:-no}" == "no" ]] && return 0 + [[ -z "${RANDOM_UUID:-}" ]] && return 0 + + local status="${1:-failed}" + local raw_exit_code="${2:-1}" + local exit_code=0 error="" pb_status error_category="" + local duration gpu_vendor gpu_passthrough + + # Get duration + duration=$(get_install_duration) + + # Get GPU info (if detected) + gpu_vendor="${GPU_VENDOR:-}" + gpu_passthrough="${GPU_PASSTHROUGH:-}" + + # Map status to telemetry 
values + case "$status" in + done | success) + pb_status="success" + exit_code=0 + error="" + error_category="" + ;; + failed) + pb_status="failed" + ;; + *) + pb_status="unknown" + ;; + esac + + # For failed/unknown status, resolve exit code and error description + if [[ "$pb_status" == "failed" ]] || [[ "$pb_status" == "unknown" ]]; then + if [[ "$raw_exit_code" =~ ^[0-9]+$ ]]; then + exit_code="$raw_exit_code" + else + exit_code=1 + fi + error=$(explain_exit_code "$exit_code") + error_category=$(categorize_error "$exit_code") + [[ -z "$error" ]] && error="Unknown error" + fi + + local JSON_PAYLOAD + JSON_PAYLOAD=$(cat </dev/null || true + + POST_UPDATE_DONE=true +} \ No newline at end of file diff --git a/misc/data/dashboard.go b/misc/data/dashboard.go index 16746e81b..acba2a89d 100644 --- a/misc/data/dashboard.go +++ b/misc/data/dashboard.go @@ -26,6 +26,15 @@ type DashboardData struct { FailedApps []AppFailure `json:"failed_apps"` RecentRecords []TelemetryRecord `json:"recent_records"` DailyStats []DailyStat `json:"daily_stats"` + + // Extended metrics + GPUStats []GPUCount `json:"gpu_stats"` + ErrorCategories []ErrorCatCount `json:"error_categories"` + TopTools []ToolCount `json:"top_tools"` + TopAddons []AddonCount `json:"top_addons"` + AvgInstallDuration float64 `json:"avg_install_duration"` // seconds + TotalTools int `json:"total_tools"` + TotalAddons int `json:"total_addons"` } type AppCount struct { @@ -72,6 +81,29 @@ type DailyStat struct { Failed int `json:"failed"` } +// Extended metric types +type GPUCount struct { + Vendor string `json:"vendor"` + Passthrough string `json:"passthrough"` + Count int `json:"count"` +} + +type ErrorCatCount struct { + Category string `json:"category"` + Count int `json:"count"` +} + +type ToolCount struct { + Tool string `json:"tool"` + Count int `json:"count"` +} + +type AddonCount struct { + Addon string `json:"addon"` + ParentCT string `json:"parent_ct"` + Count int `json:"count"` +} + // FetchDashboardData 
retrieves aggregated data from PocketBase func (p *PBClient) FetchDashboardData(ctx context.Context, days int) (*DashboardData, error) { if err := p.ensureAuth(ctx); err != nil { @@ -101,6 +133,13 @@ func (p *PBClient) FetchDashboardData(ctx context.Context, days int) (*Dashboard dailySuccess := make(map[string]int) dailyFailed := make(map[string]int) + // Extended metrics maps + gpuCounts := make(map[string]int) // "vendor|passthrough" -> count + errorCatCounts := make(map[string]int) // category -> count + toolCounts := make(map[string]int) // tool_name -> count + addonCounts := make(map[string]int) // addon_name -> count + var totalDuration, durationCount int + for _, r := range records { data.TotalInstalls++ diff --git a/misc/data/service.go b/misc/data/service.go index 3a9034ba2..95ea0f390 100644 --- a/misc/data/service.go +++ b/misc/data/service.go @@ -62,7 +62,7 @@ type Config struct { type TelemetryIn struct { // Required RandomID string `json:"random_id"` // Session UUID - Type string `json:"type"` // "lxc" or "vm" + Type string `json:"type"` // "lxc", "vm", "tool", "addon" NSAPP string `json:"nsapp"` // Application name (e.g., "jellyfin") Status string `json:"status"` // "installing", "success", "failed", "unknown" @@ -81,6 +81,24 @@ type TelemetryIn struct { Method string `json:"method,omitempty"` // "default", "advanced" Error string `json:"error,omitempty"` // Error description (max 120 chars) ExitCode int `json:"exit_code,omitempty"` // 0-255 + + // === NEW FIELDS === + + // Tool telemetry (type="tool") + ToolName string `json:"tool_name,omitempty"` // "microcode", "lxc-update", "post-pve-install", etc. 
+ + // Addon telemetry (type="addon") + ParentCT string `json:"parent_ct,omitempty"` // Parent container name (e.g., "jellyfin") + + // GPU Passthrough stats + GPUVendor string `json:"gpu_vendor,omitempty"` // "intel", "amd", "nvidia" + GPUPassthrough string `json:"gpu_passthrough,omitempty"` // "igpu", "dgpu", "vgpu", "none" + + // Performance metrics + InstallDuration int `json:"install_duration,omitempty"` // Seconds + + // Error categorization + ErrorCategory string `json:"error_category,omitempty"` // "network", "storage", "dependency", "permission", "timeout", "unknown" } // TelemetryOut is sent to PocketBase (matches _dev_telemetry_data collection) @@ -99,13 +117,25 @@ type TelemetryOut struct { Method string `json:"method,omitempty"` Error string `json:"error,omitempty"` ExitCode int `json:"exit_code,omitempty"` + + // Extended fields + ToolName string `json:"tool_name,omitempty"` + ParentCT string `json:"parent_ct,omitempty"` + GPUVendor string `json:"gpu_vendor,omitempty"` + GPUPassthrough string `json:"gpu_passthrough,omitempty"` + InstallDuration int `json:"install_duration,omitempty"` + ErrorCategory string `json:"error_category,omitempty"` } // TelemetryStatusUpdate contains only fields needed for status updates type TelemetryStatusUpdate struct { - Status string `json:"status"` - Error string `json:"error,omitempty"` - ExitCode int `json:"exit_code"` + Status string `json:"status"` + Error string `json:"error,omitempty"` + ExitCode int `json:"exit_code"` + InstallDuration int `json:"install_duration,omitempty"` + ErrorCategory string `json:"error_category,omitempty"` + GPUVendor string `json:"gpu_vendor,omitempty"` + GPUPassthrough string `json:"gpu_passthrough,omitempty"` } type PBClient struct { @@ -332,11 +362,15 @@ func (p *PBClient) UpsertTelemetry(ctx context.Context, payload TelemetryOut) er return p.CreateTelemetry(ctx, payload) } - // Update only status, error, and exit_code + // Update only status, error, exit_code, and new metrics fields 
update := TelemetryStatusUpdate{ - Status: payload.Status, - Error: payload.Error, - ExitCode: payload.ExitCode, + Status: payload.Status, + Error: payload.Error, + ExitCode: payload.ExitCode, + InstallDuration: payload.InstallDuration, + ErrorCategory: payload.ErrorCategory, + GPUVendor: payload.GPUVendor, + GPUPassthrough: payload.GPUPassthrough, } return p.UpdateTelemetryStatus(ctx, recordID, update) } @@ -491,7 +525,7 @@ func getClientIP(r *http.Request, pt *ProxyTrust) net.IP { var ( // Allowed values for 'type' field - allowedType = map[string]bool{"lxc": true, "vm": true} + allowedType = map[string]bool{"lxc": true, "vm": true, "tool": true, "addon": true} // Allowed values for 'status' field allowedStatus = map[string]bool{"installing": true, "success": true, "failed": true, "unknown": true} @@ -502,6 +536,18 @@ var ( "fedora": true, "rocky": true, "alma": true, "centos": true, "opensuse": true, "gentoo": true, "openeuler": true, } + + // Allowed values for 'gpu_vendor' field + allowedGPUVendor = map[string]bool{"intel": true, "amd": true, "nvidia": true, "": true} + + // Allowed values for 'gpu_passthrough' field + allowedGPUPassthrough = map[string]bool{"igpu": true, "dgpu": true, "vgpu": true, "none": true, "": true} + + // Allowed values for 'error_category' field + allowedErrorCategory = map[string]bool{ + "network": true, "storage": true, "dependency": true, "permission": true, + "timeout": true, "config": true, "resource": true, "unknown": true, "": true, + } ) func sanitizeShort(s string, max int) string { @@ -529,6 +575,13 @@ func validate(in *TelemetryIn) error { in.PveVer = sanitizeShort(in.PveVer, 32) in.Method = sanitizeShort(in.Method, 32) + // Sanitize new fields + in.ToolName = sanitizeShort(in.ToolName, 64) + in.ParentCT = sanitizeShort(in.ParentCT, 64) + in.GPUVendor = strings.ToLower(sanitizeShort(in.GPUVendor, 16)) + in.GPUPassthrough = strings.ToLower(sanitizeShort(in.GPUPassthrough, 16)) + in.ErrorCategory = 
strings.ToLower(sanitizeShort(in.ErrorCategory, 32)) + // IMPORTANT: "error" must be short and not contain identifiers/logs in.Error = sanitizeShort(in.Error, 120) @@ -537,20 +590,36 @@ func validate(in *TelemetryIn) error { return errors.New("missing required fields: random_id, type, nsapp, status") } + // Normalize common typos for backwards compatibility + if in.Status == "sucess" { + in.Status = "success" + } + // Validate enums if !allowedType[in.Type] { - return errors.New("invalid type (must be 'lxc' or 'vm')") + return errors.New("invalid type (must be 'lxc', 'vm', 'tool', or 'addon')") } if !allowedStatus[in.Status] { return errors.New("invalid status") } + // Validate new enum fields + if !allowedGPUVendor[in.GPUVendor] { + return errors.New("invalid gpu_vendor (must be 'intel', 'amd', 'nvidia', or empty)") + } + if !allowedGPUPassthrough[in.GPUPassthrough] { + return errors.New("invalid gpu_passthrough (must be 'igpu', 'dgpu', 'vgpu', 'none', or empty)") + } + if !allowedErrorCategory[in.ErrorCategory] { + return errors.New("invalid error_category") + } + // For status updates (not installing), skip numeric field validation // These are only required for initial creation isUpdate := in.Status != "installing" - // os_type is optional but if provided must be valid - if in.OsType != "" && !allowedOsType[in.OsType] { + // os_type is optional but if provided must be valid (only for lxc/vm) + if (in.Type == "lxc" || in.Type == "vm") && in.OsType != "" && !allowedOsType[in.OsType] { return errors.New("invalid os_type") } @@ -558,7 +627,7 @@ func validate(in *TelemetryIn) error { // Values like "default", "advanced", "mydefaults-global", "mydefaults-app" are all valid // Validate numeric ranges (only strict for new records) - if !isUpdate { + if !isUpdate && (in.Type == "lxc" || in.Type == "vm") { if in.CTType < 0 || in.CTType > 2 { return errors.New("invalid ct_type (must be 0, 1, or 2)") } @@ -575,6 +644,9 @@ func validate(in *TelemetryIn) error { if 
in.ExitCode < 0 || in.ExitCode > 255 { return errors.New("invalid exit_code") } + if in.InstallDuration < 0 || in.InstallDuration > 86400 { + return errors.New("invalid install_duration (max 24h)") + } return nil } @@ -897,20 +969,26 @@ func main() { // Map input to PocketBase schema out := TelemetryOut{ - RandomID: in.RandomID, - Type: in.Type, - NSAPP: in.NSAPP, - Status: in.Status, - CTType: in.CTType, - DiskSize: in.DiskSize, - CoreCount: in.CoreCount, - RAMSize: in.RAMSize, - OsType: in.OsType, - OsVersion: in.OsVersion, - PveVer: in.PveVer, - Method: in.Method, - Error: in.Error, - ExitCode: in.ExitCode, + RandomID: in.RandomID, + Type: in.Type, + NSAPP: in.NSAPP, + Status: in.Status, + CTType: in.CTType, + DiskSize: in.DiskSize, + CoreCount: in.CoreCount, + RAMSize: in.RAMSize, + OsType: in.OsType, + OsVersion: in.OsVersion, + PveVer: in.PveVer, + Method: in.Method, + Error: in.Error, + ExitCode: in.ExitCode, + ToolName: in.ToolName, + ParentCT: in.ParentCT, + GPUVendor: in.GPUVendor, + GPUPassthrough: in.GPUPassthrough, + InstallDuration: in.InstallDuration, + ErrorCategory: in.ErrorCategory, } _ = computeHash(out) // For future deduplication From ed3af965859efb13486a7deb4d8d8c583eba3195 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Tue, 10 Feb 2026 08:16:39 +0100 Subject: [PATCH 57/87] Add extended dashboard metrics and helpers Extend FetchDashboardData to collect additional metrics: tool executions, addon installations, GPU usage (including passthrough), error categories, and install durations for averaging. Populate new Dashboard fields (GPUStats, ErrorCategories, TopTools, TopAddons, AvgInstallDuration) and add helper builders (buildGPUStats, buildErrorCategories, buildToolStats, buildAddonStats) that sort results and trim to top-N where appropriate. Keeps existing daily stats and recent records logic unchanged. 
--- misc/data/dashboard.go | 144 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 144 insertions(+) diff --git a/misc/data/dashboard.go b/misc/data/dashboard.go index acba2a89d..80f06e465 100644 --- a/misc/data/dashboard.go +++ b/misc/data/dashboard.go @@ -191,6 +191,40 @@ func (p *PBClient) FetchDashboardData(ctx context.Context, days int) (*Dashboard typeCounts[r.Type]++ } + // === Extended metrics tracking === + + // Track tool executions + if r.Type == "tool" && r.ToolName != "" { + toolCounts[r.ToolName]++ + data.TotalTools++ + } + + // Track addon installations + if r.Type == "addon" { + addonCounts[r.NSAPP]++ + data.TotalAddons++ + } + + // Track GPU usage + if r.GPUVendor != "" { + key := r.GPUVendor + if r.GPUPassthrough != "" { + key += "|" + r.GPUPassthrough + } + gpuCounts[key]++ + } + + // Track error categories + if r.Status == "failed" && r.ErrorCategory != "" { + errorCatCounts[r.ErrorCategory]++ + } + + // Track install duration (for averaging) + if r.InstallDuration > 0 { + totalDuration += r.InstallDuration + durationCount++ + } + // Daily stats (use Created field if available) if r.Created != "" { date := r.Created[:10] // "2026-02-09" @@ -224,6 +258,25 @@ func (p *PBClient) FetchDashboardData(ctx context.Context, days int) (*Dashboard // Daily stats for chart data.DailyStats = buildDailyStats(dailySuccess, dailyFailed, days) + // === Extended metrics === + + // GPU stats + data.GPUStats = buildGPUStats(gpuCounts) + + // Error categories + data.ErrorCategories = buildErrorCategories(errorCatCounts) + + // Top tools + data.TopTools = buildToolStats(toolCounts, 10) + + // Top addons + data.TopAddons = buildAddonStats(addonCounts, 10) + + // Average install duration + if durationCount > 0 { + data.AvgInstallDuration = float64(totalDuration) / float64(durationCount) + } + // Recent records (last 20) if len(records) > 20 { data.RecentRecords = records[:20] @@ -502,6 +555,97 @@ func buildDailyStats(success, failed map[string]int, days int) 
[]DailyStat { return result } +// === Extended metrics helper functions === + +func buildGPUStats(gpuCounts map[string]int) []GPUCount { + result := make([]GPUCount, 0, len(gpuCounts)) + for key, count := range gpuCounts { + parts := strings.Split(key, "|") + vendor := parts[0] + passthrough := "" + if len(parts) > 1 { + passthrough = parts[1] + } + result = append(result, GPUCount{ + Vendor: vendor, + Passthrough: passthrough, + Count: count, + }) + } + // Sort by count descending + for i := 0; i < len(result)-1; i++ { + for j := i + 1; j < len(result); j++ { + if result[j].Count > result[i].Count { + result[i], result[j] = result[j], result[i] + } + } + } + return result +} + +func buildErrorCategories(catCounts map[string]int) []ErrorCatCount { + result := make([]ErrorCatCount, 0, len(catCounts)) + for cat, count := range catCounts { + result = append(result, ErrorCatCount{ + Category: cat, + Count: count, + }) + } + // Sort by count descending + for i := 0; i < len(result)-1; i++ { + for j := i + 1; j < len(result); j++ { + if result[j].Count > result[i].Count { + result[i], result[j] = result[j], result[i] + } + } + } + return result +} + +func buildToolStats(toolCounts map[string]int, n int) []ToolCount { + result := make([]ToolCount, 0, len(toolCounts)) + for tool, count := range toolCounts { + result = append(result, ToolCount{ + Tool: tool, + Count: count, + }) + } + // Sort by count descending + for i := 0; i < len(result)-1; i++ { + for j := i + 1; j < len(result); j++ { + if result[j].Count > result[i].Count { + result[i], result[j] = result[j], result[i] + } + } + } + if len(result) > n { + return result[:n] + } + return result +} + +func buildAddonStats(addonCounts map[string]int, n int) []AddonCount { + result := make([]AddonCount, 0, len(addonCounts)) + for addon, count := range addonCounts { + result = append(result, AddonCount{ + Addon: addon, + Count: count, + }) + } + // Sort by count descending + for i := 0; i < len(result)-1; i++ { + for j := 
i + 1; j < len(result); j++ { + if result[j].Count > result[i].Count { + result[i], result[j] = result[j], result[i] + } + } + } + if len(result) > n { + return result[:n] + } + return result +} + // DashboardHTML returns the embedded dashboard HTML func DashboardHTML() string { return ` From 53cf705799c4bf6a959c0a75ab3b2e85542274cd Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Tue, 10 Feb 2026 08:17:50 +0100 Subject: [PATCH 58/87] Format misc/api.func: spacing and heredocs Clean up formatting in misc/api.func for readability. Normalized spacing in the categorize_error case patterns, removed trailing blank-space lines, and standardized detect_gpu blank-line spacing. Converted heredoc assignments to use multiline $() style for JSON_PAYLOAD in post_tool_to_api, post_addon_to_api, and post_update_to_api_extended, and added a final newline at end of file. No functional changes intended; purely whitespace/formatting cleanup. --- misc/api.func | 81 ++++++++++++++++++++++++++------------------------- 1 file changed, 42 insertions(+), 39 deletions(-) diff --git a/misc/api.func b/misc/api.func index a1f6802dd..a2a178245 100644 --- a/misc/api.func +++ b/misc/api.func @@ -422,29 +422,29 @@ EOF categorize_error() { local code="$1" case "$code" in - # Network errors - 6|7|22|28|35) echo "network" ;; - - # Storage errors - 214|217|219) echo "storage" ;; - - # Dependency/Package errors - 100|101|102|127|160|161|162) echo "dependency" ;; - - # Permission errors - 126|152) echo "permission" ;; - - # Timeout errors - 124|28|211) echo "timeout" ;; - - # Configuration errors - 203|204|205|206|207|208) echo "config" ;; - - # Resource errors (OOM, etc) - 137|134) echo "resource" ;; - - # Default - *) echo "unknown" ;; + # Network errors + 6 | 7 | 22 | 28 | 35) echo "network" ;; + + # Storage errors + 214 | 217 | 219) echo "storage" ;; + + # Dependency/Package errors + 100 | 101 | 102 | 127 | 160 | 161 | 162) echo "dependency" ;; + 
+ # Permission errors + 126 | 152) echo "permission" ;; + + # Timeout errors + 124 | 28 | 211) echo "timeout" ;; + + # Configuration errors + 203 | 204 | 205 | 206 | 207 | 208) echo "config" ;; + + # Resource errors (OOM, etc) + 137 | 134) echo "resource" ;; + + # Default + *) echo "unknown" ;; esac } @@ -485,13 +485,13 @@ get_install_duration() { detect_gpu() { GPU_VENDOR="" GPU_PASSTHROUGH="none" - + # Detect Intel GPU if lspci 2>/dev/null | grep -qi "VGA.*Intel"; then GPU_VENDOR="intel" GPU_PASSTHROUGH="igpu" fi - + # Detect AMD GPU if lspci 2>/dev/null | grep -qi "VGA.*AMD\|VGA.*ATI"; then GPU_VENDOR="amd" @@ -502,13 +502,13 @@ detect_gpu() { GPU_PASSTHROUGH="igpu" fi fi - + # Detect NVIDIA GPU if lspci 2>/dev/null | grep -qi "VGA.*NVIDIA\|3D.*NVIDIA"; then GPU_VENDOR="nvidia" GPU_PASSTHROUGH="dgpu" fi - + export GPU_VENDOR GPU_PASSTHROUGH } @@ -535,10 +535,10 @@ post_tool_to_api() { # Generate UUID for this tool execution uuid=$(cat /proc/sys/kernel/random/uuid 2>/dev/null || uuidgen 2>/dev/null || echo "tool-$(date +%s)") duration=$(get_install_duration) - + # Map status [[ "$status" == "done" ]] && status="success" - + if [[ "$status" == "failed" ]]; then [[ ! "$exit_code" =~ ^[0-9]+$ ]] && exit_code=1 error=$(explain_exit_code "$exit_code") @@ -551,7 +551,8 @@ post_tool_to_api() { fi local JSON_PAYLOAD - JSON_PAYLOAD=$(cat </dev/null || uuidgen 2>/dev/null || echo "addon-$(date +%s)") duration=$(get_install_duration) - + # Map status [[ "$status" == "done" ]] && status="success" - + if [[ "$status" == "failed" ]]; then [[ ! 
"$exit_code" =~ ^[0-9]+$ ]] && exit_code=1 error=$(explain_exit_code "$exit_code") @@ -615,7 +616,8 @@ post_addon_to_api() { fi local JSON_PAYLOAD - JSON_PAYLOAD=$(cat </dev/null || true POST_UPDATE_DONE=true -} \ No newline at end of file +} From a0ba6fbbd1aaa0c282cad055b930c09d09188af8 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Tue, 10 Feb 2026 08:23:04 +0100 Subject: [PATCH 59/87] - --- misc/api.func | 8 ++------ misc/data/dashboard.go | 11 +++++------ misc/data/service.go | 16 ++-------------- 3 files changed, 9 insertions(+), 26 deletions(-) diff --git a/misc/api.func b/misc/api.func index a2a178245..87d8891da 100644 --- a/misc/api.func +++ b/misc/api.func @@ -557,7 +557,6 @@ post_tool_to_api() { "random_id": "${uuid}", "type": "tool", "nsapp": "${tool_name}", - "tool_name": "${tool_name}", "status": "${status}", "exit_code": ${exit_code}, "error": "${error}", @@ -580,8 +579,7 @@ EOF # - Arguments: # * $1: addon_name (e.g., "filebrowser", "netdata") # * $2: status ("success" or "failed") -# * $3: parent_ct (optional, name of parent container) -# * $4: exit_code (optional) +# * $3: exit_code (optional) # - For addons installed inside containers # ------------------------------------------------------------------------------ post_addon_to_api() { @@ -590,8 +588,7 @@ post_addon_to_api() { local addon_name="${1:-unknown}" local status="${2:-success}" - local parent_ct="${3:-}" - local exit_code="${4:-0}" + local exit_code="${3:-0}" local error="" error_category="" local uuid duration @@ -623,7 +620,6 @@ post_addon_to_api() { "type": "addon", "nsapp": "${addon_name}", "status": "${status}", - "parent_ct": "${parent_ct}", "exit_code": ${exit_code}, "error": "${error}", "error_category": "${error_category}", diff --git a/misc/data/dashboard.go b/misc/data/dashboard.go index 80f06e465..fb1395a96 100644 --- a/misc/data/dashboard.go +++ b/misc/data/dashboard.go @@ -99,9 +99,8 @@ type ToolCount struct { } type 
AddonCount struct { - Addon string `json:"addon"` - ParentCT string `json:"parent_ct"` - Count int `json:"count"` + Addon string `json:"addon"` + Count int `json:"count"` } // FetchDashboardData retrieves aggregated data from PocketBase @@ -193,9 +192,9 @@ func (p *PBClient) FetchDashboardData(ctx context.Context, days int) (*Dashboard // === Extended metrics tracking === - // Track tool executions - if r.Type == "tool" && r.ToolName != "" { - toolCounts[r.ToolName]++ + // Track tool executions (type="tool", tool name is in nsapp) + if r.Type == "tool" && r.NSAPP != "" { + toolCounts[r.NSAPP]++ data.TotalTools++ } diff --git a/misc/data/service.go b/misc/data/service.go index 95ea0f390..99ed18135 100644 --- a/misc/data/service.go +++ b/misc/data/service.go @@ -82,13 +82,7 @@ type TelemetryIn struct { Error string `json:"error,omitempty"` // Error description (max 120 chars) ExitCode int `json:"exit_code,omitempty"` // 0-255 - // === NEW FIELDS === - - // Tool telemetry (type="tool") - ToolName string `json:"tool_name,omitempty"` // "microcode", "lxc-update", "post-pve-install", etc. 
- - // Addon telemetry (type="addon") - ParentCT string `json:"parent_ct,omitempty"` // Parent container name (e.g., "jellyfin") + // === EXTENDED FIELDS === // GPU Passthrough stats GPUVendor string `json:"gpu_vendor,omitempty"` // "intel", "amd", "nvidia" @@ -119,8 +113,6 @@ type TelemetryOut struct { ExitCode int `json:"exit_code,omitempty"` // Extended fields - ToolName string `json:"tool_name,omitempty"` - ParentCT string `json:"parent_ct,omitempty"` GPUVendor string `json:"gpu_vendor,omitempty"` GPUPassthrough string `json:"gpu_passthrough,omitempty"` InstallDuration int `json:"install_duration,omitempty"` @@ -575,9 +567,7 @@ func validate(in *TelemetryIn) error { in.PveVer = sanitizeShort(in.PveVer, 32) in.Method = sanitizeShort(in.Method, 32) - // Sanitize new fields - in.ToolName = sanitizeShort(in.ToolName, 64) - in.ParentCT = sanitizeShort(in.ParentCT, 64) + // Sanitize extended fields in.GPUVendor = strings.ToLower(sanitizeShort(in.GPUVendor, 16)) in.GPUPassthrough = strings.ToLower(sanitizeShort(in.GPUPassthrough, 16)) in.ErrorCategory = strings.ToLower(sanitizeShort(in.ErrorCategory, 32)) @@ -983,8 +973,6 @@ func main() { Method: in.Method, Error: in.Error, ExitCode: in.ExitCode, - ToolName: in.ToolName, - ParentCT: in.ParentCT, GPUVendor: in.GPUVendor, GPUPassthrough: in.GPUPassthrough, InstallDuration: in.InstallDuration, From 0bfa6c3e547a47da0f47a3aabff145bfa601a497 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 10 Feb 2026 07:24:02 +0000 Subject: [PATCH 60/87] Delete paperless-exporter (addon) after migration to ProxmoxVE (#1438) Co-authored-by: github-actions[bot] --- frontend/public/json/paperless-exporter.json | 35 ---- tools/addon/paperless-exporter.sh | 188 ------------------- 2 files changed, 223 deletions(-) delete mode 100644 frontend/public/json/paperless-exporter.json delete mode 100644 tools/addon/paperless-exporter.sh diff --git 
a/frontend/public/json/paperless-exporter.json b/frontend/public/json/paperless-exporter.json deleted file mode 100644 index 2b10c7c5e..000000000 --- a/frontend/public/json/paperless-exporter.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "name": "Prometheus Paperless NGX Exporter", - "slug": "prometheus-paperless-ngx-exporter", - "categories": [ - 9 - ], - "date_created": "2025-02-07", - "type": "addon", - "updateable": true, - "privileged": false, - "interface_port": 8081, - "documentation": "https://github.com/hansmi/prometheus-paperless-exporter", - "website": "https://github.com/hansmi/prometheus-paperless-exporter", - "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/paperless-ngx.webp", - "config_path": "/etc/prometheus-paperless-ngx-exporter/config.env", - "description": "Prometheus metrics exporter for Paperless-NGX, a document management system transforming physical documents into a searchable online archive. The exporter relies on Paperless' REST API.", - "install_methods": [ - { - "type": "default", - "script": "tools/addon/paperless-exporter.sh", - "resources": { - "cpu": null, - "ram": null, - "hdd": null, - "os": null, - "version": null - } - } - ], - "default_credentials": { - "username": null, - "password": null - }, - "notes": [] -} diff --git a/tools/addon/paperless-exporter.sh b/tools/addon/paperless-exporter.sh deleted file mode 100644 index 6f320c92b..000000000 --- a/tools/addon/paperless-exporter.sh +++ /dev/null @@ -1,188 +0,0 @@ -#!/usr/bin/env bash - -# Copyright (c) 2021-2026 community-scripts ORG -# Author: Andy Grunwald (andygrunwald) -# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE -# Source: https://github.com/hansmi/prometheus-paperless-exporter - -source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/core.func) -source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/tools.func) -source <(curl -fsSL 
https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/error_handler.func) - -# Enable error handling -set -Eeuo pipefail -trap 'error_handler' ERR -load_functions - -# ============================================================================== -# CONFIGURATION -# ============================================================================== -VERBOSE=${var_verbose:-no} -APP="prometheus-paperless-ngx-exporter" -APP_TYPE="tools" -BINARY_PATH="/usr/bin/prometheus-paperless-exporter" -CONFIG_PATH="/etc/prometheus-paperless-ngx-exporter/config.env" -SERVICE_PATH="/etc/systemd/system/prometheus-paperless-ngx-exporter.service" -AUTH_TOKEN_FILE="/etc/prometheus-paperless-ngx-exporter/paperless_auth_token_file" - -# ============================================================================== -# OS DETECTION -# ============================================================================== -if ! grep -qE 'ID=debian|ID=ubuntu' /etc/os-release 2>/dev/null; then - echo -e "${CROSS} Unsupported OS detected. This script only supports Debian and Ubuntu." 
- exit 1 -fi - -# ============================================================================== -# UNINSTALL -# ============================================================================== -function uninstall() { - msg_info "Uninstalling Prometheus-Paperless-NGX-Exporter" - systemctl disable -q --now prometheus-paperless-ngx-exporter - - if dpkg -l | grep -q prometheus-paperless-exporter; then - $STD apt-get remove -y prometheus-paperless-exporter || $STD dpkg -r prometheus-paperless-exporter - fi - - rm -f "$SERVICE_PATH" - rm -rf /etc/prometheus-paperless-ngx-exporter - rm -f "/usr/local/bin/update_prometheus-paperless-ngx-exporter" - rm -f "$HOME/.prometheus-paperless-ngx-exporter" - msg_ok "Prometheus-Paperless-NGX-Exporter has been uninstalled" -} - -# ============================================================================== -# UPDATE -# ============================================================================== -function update() { - if check_for_gh_release "prom-paperless-exp" "hansmi/prometheus-paperless-exporter"; then - msg_info "Stopping service" - systemctl stop prometheus-paperless-ngx-exporter - msg_ok "Stopped service" - - fetch_and_deploy_gh_release "prom-paperless-exp" "hansmi/prometheus-paperless-exporter" "binary" "latest" - - msg_info "Starting service" - systemctl start prometheus-paperless-ngx-exporter - msg_ok "Started service" - msg_ok "Updated successfully!" 
- exit - fi -} - -# ============================================================================== -# INSTALL -# ============================================================================== -function install() { - read -erp "Enter URL of Paperless-NGX, example: (http://127.0.0.1:8000): " PAPERLESS_URL - read -rsp "Enter Paperless-NGX authentication token: " PAPERLESS_AUTH_TOKEN - printf "\n" - - fetch_and_deploy_gh_release "prom-paperless-exp" "hansmi/prometheus-paperless-exporter" "binary" "latest" - - msg_info "Creating configuration" - mkdir -p /etc/prometheus-paperless-ngx-exporter - cat <"$CONFIG_PATH" -# https://github.com/hansmi/prometheus-paperless-exporter -PAPERLESS_URL="${PAPERLESS_URL}" -EOF - echo "${PAPERLESS_AUTH_TOKEN}" >"$AUTH_TOKEN_FILE" - chmod 600 "$AUTH_TOKEN_FILE" - msg_ok "Created configuration" - - msg_info "Creating service" - cat <"$SERVICE_PATH" -[Unit] -Description=Prometheus Paperless NGX Exporter -Wants=network-online.target -After=network-online.target - -[Service] -User=root -EnvironmentFile=$CONFIG_PATH -ExecStart=$BINARY_PATH \\ - --paperless_url=\${PAPERLESS_URL} \\ - --paperless_auth_token_file=$AUTH_TOKEN_FILE -Restart=always - -[Install] -WantedBy=multi-user.target -EOF - systemctl daemon-reload - systemctl enable -q --now prometheus-paperless-ngx-exporter - msg_ok "Created and started service" - - # Create update script - msg_info "Creating update script" - ensure_usr_local_bin_persist - cat <<'UPDATEEOF' >/usr/local/bin/update_prometheus-paperless-ngx-exporter -#!/usr/bin/env bash -# prometheus-paperless-ngx-exporter Update Script -type=update bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/tools/addon/prometheus-paperless-ngx-exporter.sh)" -UPDATEEOF - chmod +x /usr/local/bin/update_prometheus-paperless-ngx-exporter - msg_ok "Created update script (/usr/local/bin/update_prometheus-paperless-ngx-exporter)" - - echo "" - msg_ok "Prometheus-Paperless-NGX-Exporter installed 
successfully" - msg_ok "Metrics: ${BL}http://${LOCAL_IP}:8081/metrics${CL}" - msg_ok "Config: ${BL}${CONFIG_PATH}${CL}" -} - -# ============================================================================== -# MAIN -# ============================================================================== -header_info -ensure_usr_local_bin_persist -get_lxc_ip - -# Handle type=update (called from update script) -if [[ "${type:-}" == "update" ]]; then - if [[ -f "$BINARY_PATH" ]]; then - update - else - msg_error "Prometheus-Paperless-NGX-Exporter is not installed. Nothing to update." - exit 1 - fi - exit 0 -fi - -# Check if already installed -if [[ -f "$BINARY_PATH" ]]; then - msg_warn "Prometheus-Paperless-NGX-Exporter is already installed." - echo "" - - echo -n "${TAB}Uninstall Prometheus-Paperless-NGX-Exporter? (y/N): " - read -r uninstall_prompt - if [[ "${uninstall_prompt,,}" =~ ^(y|yes)$ ]]; then - uninstall - exit 0 - fi - - echo -n "${TAB}Update Prometheus-Paperless-NGX-Exporter? (y/N): " - read -r update_prompt - if [[ "${update_prompt,,}" =~ ^(y|yes)$ ]]; then - update - exit 0 - fi - - msg_warn "No action selected. Exiting." - exit 0 -fi - -# Fresh installation -msg_warn "Prometheus-Paperless-NGX-Exporter is not installed." -echo "" -echo -e "${TAB}${INFO} This will install:" -echo -e "${TAB} - Prometheus Paperless NGX Exporter (binary)" -echo -e "${TAB} - Systemd service" -echo "" - -echo -n "${TAB}Install Prometheus-Paperless-NGX-Exporter? (y/N): " -read -r install_prompt -if [[ "${install_prompt,,}" =~ ^(y|yes)$ ]]; then - install -else - msg_warn "Installation cancelled. Exiting." 
- exit 0 -fi From 2c0c72b0e5d325971d6527a52b546ebe449302dc Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Tue, 10 Feb 2026 13:59:35 +0100 Subject: [PATCH 61/87] unified logging --- misc/build.func | 76 +++++++++++++++++++++--- misc/core.func | 152 +++++++++++++++++++++++++++++++++++------------- 2 files changed, 179 insertions(+), 49 deletions(-) diff --git a/misc/build.func b/misc/build.func index 4efabe06e..2e20142e7 100644 --- a/misc/build.func +++ b/misc/build.func @@ -1882,7 +1882,7 @@ advanced_settings() { fi ;; - # ══════════════����════════════════════════════════════════════════════════════ + # ══════════════��════════════════════════════════════════════════════════════ # STEP 3: Container ID # ═══════════════════════════════════════════════════════════════════════════ 3) @@ -2728,6 +2728,26 @@ Advanced: [[ "$APT_CACHER" == "yes" ]] && echo -e "${INFO}${BOLD}${DGN}APT Cacher: ${BGN}$APT_CACHER_IP${CL}" echo -e "${SEARCH}${BOLD}${DGN}Verbose Mode: ${BGN}$VERBOSE${CL}" echo -e "${CREATING}${BOLD}${RD}Creating a ${APP} LXC using the above advanced settings${CL}" + + # Log settings to file + log_section "CONTAINER SETTINGS (ADVANCED) - ${APP}" + log_msg "Application: ${APP}" + log_msg "PVE Version: ${PVEVERSION} (Kernel: ${KERNEL_VERSION})" + log_msg "Operating System: $var_os ($var_version)" + log_msg "Container Type: $([ "$CT_TYPE" == "1" ] && echo "Unprivileged" || echo "Privileged")" + log_msg "Container ID: $CT_ID" + log_msg "Hostname: $HN" + log_msg "Disk Size: ${DISK_SIZE} GB" + log_msg "CPU Cores: $CORE_COUNT" + log_msg "RAM Size: ${RAM_SIZE} MiB" + log_msg "Bridge: $BRG" + log_msg "IPv4: $NET" + log_msg "IPv6: $IPV6_METHOD" + log_msg "FUSE Support: ${ENABLE_FUSE:-no}" + log_msg "Nesting: $([ "${ENABLE_NESTING:-1}" == "1" ] && echo "Enabled" || echo "Disabled")" + log_msg "GPU Passthrough: ${ENABLE_GPU:-no}" + log_msg "Verbose Mode: $VERBOSE" + log_msg "Session ID: ${SESSION_ID}" } # 
============================================================================== @@ -2914,6 +2934,20 @@ echo_default() { fi echo -e "${CREATING}${BOLD}${BL}Creating a ${APP} LXC using the above default settings${CL}" echo -e " " + + # Log settings to file + log_section "CONTAINER SETTINGS - ${APP}" + log_msg "Application: ${APP}" + log_msg "PVE Version: ${PVEVERSION} (Kernel: ${KERNEL_VERSION})" + log_msg "Container ID: ${CT_ID}" + log_msg "Operating System: $var_os ($var_version)" + log_msg "Container Type: $CT_TYPE_DESC" + log_msg "Disk Size: ${DISK_SIZE} GB" + log_msg "CPU Cores: ${CORE_COUNT}" + log_msg "RAM Size: ${RAM_SIZE} MiB" + [[ -n "${var_gpu:-}" && "${var_gpu}" == "yes" ]] && log_msg "GPU Passthrough: Enabled" + [[ "$VERBOSE" == "yes" ]] && log_msg "Verbose Mode: Enabled" + log_msg "Session ID: ${SESSION_ID}" } # ------------------------------------------------------------------------------ @@ -4023,20 +4057,46 @@ EOF' local install_log_copied=false if [[ -n "$CTID" && -n "${SESSION_ID:-}" ]]; then - # Copy BUILD_LOG (creation log) if it exists + # Create combined log with header + { + echo "================================================================================" + echo "COMBINED INSTALLATION LOG - ${APP:-LXC}" + echo "Container ID: ${CTID}" + echo "Session ID: ${SESSION_ID}" + echo "Timestamp: $(date '+%Y-%m-%d %H:%M:%S')" + echo "================================================================================" + echo "" + } >"$combined_log" + + # Append BUILD_LOG (host-side creation log) if it exists if [[ -f "${BUILD_LOG}" ]]; then - cp "${BUILD_LOG}" "/tmp/create-lxc-${CTID}-${SESSION_ID}.log" 2>/dev/null && build_log_copied=true + { + echo "================================================================================" + echo "PHASE 1: CONTAINER CREATION (Host)" + echo "================================================================================" + cat "${BUILD_LOG}" + echo "" + } >>"$combined_log" + build_log_copied=true fi - # Copy 
INSTALL_LOG from container - if pct pull "$CTID" "/root/.install-${SESSION_ID}.log" "/tmp/install-lxc-${CTID}-${SESSION_ID}.log" 2>/dev/null; then + # Copy and append INSTALL_LOG from container + local temp_install_log="/tmp/.install-temp-${SESSION_ID}.log" + if pct pull "$CTID" "/root/.install-${SESSION_ID}.log" "$temp_install_log" 2>/dev/null; then + { + echo "================================================================================" + echo "PHASE 2: APPLICATION INSTALLATION (Container)" + echo "================================================================================" + cat "$temp_install_log" + echo "" + } >>"$combined_log" + rm -f "$temp_install_log" install_log_copied=true fi - # Show available logs + # Show combined log echo "" - [[ "$build_log_copied" == true ]] && echo -e "${GN}βœ”${CL} Container creation log: ${BL}/tmp/create-lxc-${CTID}-${SESSION_ID}.log${CL}" - [[ "$install_log_copied" == true ]] && echo -e "${GN}βœ”${CL} Installation log: ${BL}/tmp/install-lxc-${CTID}-${SESSION_ID}.log${CL}" + echo -e "${GN}βœ”${CL} Installation log: ${BL}${combined_log}${CL}" fi # Dev mode: Keep container or open breakpoint shell diff --git a/misc/core.func b/misc/core.func index e14ba3c22..e4c7efcc6 100644 --- a/misc/core.func +++ b/misc/core.func @@ -413,6 +413,69 @@ get_active_logfile() { # Legacy compatibility: SILENT_LOGFILE points to active log SILENT_LOGFILE="$(get_active_logfile)" +# ------------------------------------------------------------------------------ +# strip_ansi() +# +# - Removes ANSI escape sequences from input text +# - Used to clean colored output for log files +# - Handles both piped input and arguments +# ------------------------------------------------------------------------------ +strip_ansi() { + if [[ $# -gt 0 ]]; then + echo -e "$*" | sed 's/\x1b\[[0-9;]*m//g; s/\x1b\[[0-9;]*[a-zA-Z]//g' + else + sed 's/\x1b\[[0-9;]*m//g; s/\x1b\[[0-9;]*[a-zA-Z]//g' + fi +} + +# 
------------------------------------------------------------------------------ +# log_msg() +# +# - Writes message to active log file without ANSI codes +# - Adds timestamp prefix for log correlation +# - Creates log file if it doesn't exist +# - Arguments: message text (can include ANSI codes, will be stripped) +# ------------------------------------------------------------------------------ +log_msg() { + local msg="$*" + local logfile + logfile="$(get_active_logfile)" + + [[ -z "$msg" ]] && return + [[ -z "$logfile" ]] && return + + # Ensure log directory exists + mkdir -p "$(dirname "$logfile")" 2>/dev/null || true + + # Strip ANSI codes and write with timestamp + local clean_msg + clean_msg=$(strip_ansi "$msg") + echo "[$(date '+%Y-%m-%d %H:%M:%S')] $clean_msg" >>"$logfile" +} + +# ------------------------------------------------------------------------------ +# log_section() +# +# - Writes a section header to the log file +# - Used for separating different phases of installation +# - Arguments: section name +# ------------------------------------------------------------------------------ +log_section() { + local section="$1" + local logfile + logfile="$(get_active_logfile)" + + [[ -z "$logfile" ]] && return + mkdir -p "$(dirname "$logfile")" 2>/dev/null || true + + { + echo "" + echo "================================================================================" + echo "[$(date '+%Y-%m-%d %H:%M:%S')] $section" + echo "================================================================================" + } >>"$logfile" +} + # ------------------------------------------------------------------------------ # silent() # @@ -555,6 +618,9 @@ msg_info() { [[ -n "${MSG_INFO_SHOWN["$msg"]+x}" ]] && return MSG_INFO_SHOWN["$msg"]=1 + # Log to file + log_msg "[INFO] $msg" + stop_spinner SPINNER_MSG="$msg" @@ -598,6 +664,7 @@ msg_ok() { stop_spinner clear_line echo -e "$CM ${GN}${msg}${CL}" + log_msg "[OK] $msg" local sanitized_msg sanitized_msg=$(printf '%s' "$msg" | sed 
's/\x1b\[[0-9;]*m//g; s/[^a-zA-Z0-9_]/_/g') unset 'MSG_INFO_SHOWN['"$sanitized_msg"']' 2>/dev/null || true @@ -615,6 +682,7 @@ msg_error() { stop_spinner local msg="$1" echo -e "${BFR:-}${CROSS:-βœ–οΈ} ${RD}${msg}${CL}" >&2 + log_msg "[ERROR] $msg" } # ------------------------------------------------------------------------------ @@ -629,6 +697,7 @@ msg_warn() { stop_spinner local msg="$1" echo -e "${BFR:-}${INFO:-ℹ️} ${YWB}${msg}${CL}" >&2 + log_msg "[WARN] $msg" } # ------------------------------------------------------------------------------ @@ -646,6 +715,7 @@ msg_custom() { [[ -z "$msg" ]] && return stop_spinner echo -e "${BFR:-} ${symbol} ${color}${msg}${CL:-\e[0m}" + log_msg "$msg" } # ------------------------------------------------------------------------------ @@ -833,29 +903,29 @@ is_verbose_mode() { is_unattended() { # Primary: Check MODE variable (case-insensitive) local mode="${MODE:-${mode:-}}" - mode="${mode,,}" # lowercase + mode="${mode,,}" # lowercase case "$mode" in - default|1) + default | 1) + return 0 + ;; + mydefaults | userdefaults | 3) + return 0 + ;; + appdefaults | 4) + return 0 + ;; + advanced | 2) + # Advanced mode is interactive ONLY during wizard + # Inside container (install scripts), it should be unattended + # Check if we're inside a container (no pveversion command) + if ! command -v pveversion &>/dev/null; then + # We're inside the container - all values already collected return 0 - ;; - mydefaults|userdefaults|3) - return 0 - ;; - appdefaults|4) - return 0 - ;; - advanced|2) - # Advanced mode is interactive ONLY during wizard - # Inside container (install scripts), it should be unattended - # Check if we're inside a container (no pveversion command) - if ! 
command -v pveversion &>/dev/null; then - # We're inside the container - all values already collected - return 0 - fi - # On host during wizard - interactive - return 1 - ;; + fi + # On host during wizard - interactive + return 1 + ;; esac # Legacy fallbacks for compatibility @@ -977,29 +1047,29 @@ prompt_confirm() { # User provided input response="${response,,}" # lowercase case "$response" in - y|yes) + y | yes) + return 0 + ;; + n | no) + return 1 + ;; + "") + # Empty response, use default + if [[ "$default" == "y" ]]; then return 0 - ;; - n|no) + else return 1 - ;; - "") - # Empty response, use default - if [[ "$default" == "y" ]]; then - return 0 - else - return 1 - fi - ;; - *) - # Invalid input, use default - echo -e "${YW}Invalid response, using default: ${default}${CL}" - if [[ "$default" == "y" ]]; then - return 0 - else - return 1 - fi - ;; + fi + ;; + *) + # Invalid input, use default + echo -e "${YW}Invalid response, using default: ${default}${CL}" + if [[ "$default" == "y" ]]; then + return 0 + else + return 1 + fi + ;; esac else # Timeout occurred From 7bb9a09c6cd6a9e265ed5254cf4cc7d47f9b19ef Mon Sep 17 00:00:00 2001 From: Joerg Heinemann Date: Tue, 10 Feb 2026 14:12:36 +0100 Subject: [PATCH 62/87] Update logo URL in ebusd.json Official ebusd icon available at https://selfh.st/icons/ --- frontend/public/json/ebusd.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/public/json/ebusd.json b/frontend/public/json/ebusd.json index 862d24153..30743fd04 100644 --- a/frontend/public/json/ebusd.json +++ b/frontend/public/json/ebusd.json @@ -11,7 +11,7 @@ "interface_port": null, "documentation": "https://github.com/john30/ebusd/wiki", "website": "https://github.com/john30/ebusd", - "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/proxmox-helper-scripts.webp", + "logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/ebusd.webp", "config_path": "/etc/default/ebusd", "description": "ebusd is a daemon for handling 
communication with eBUS devices connected to a 2-wire `energy bus` used by numerous heating systems.", "install_methods": [ From c60ff2ae75ab717ea4ea43a96374f9ffae8fabea Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:18:42 +0100 Subject: [PATCH 63/87] Update build.func --- misc/build.func | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/misc/build.func b/misc/build.func index 2e20142e7..2907308f3 100644 --- a/misc/build.func +++ b/misc/build.func @@ -38,15 +38,16 @@ # - Captures app-declared resource defaults (CPU, RAM, Disk) # ------------------------------------------------------------------------------ variables() { - NSAPP=$(echo "${APP,,}" | tr -d ' ') # This function sets the NSAPP variable by converting the value of the APP variable to lowercase and removing any spaces. - var_install="${NSAPP}-install" # sets the var_install variable by appending "-install" to the value of NSAPP. - INTEGER='^[0-9]+([.][0-9]+)?$' # it defines the INTEGER regular expression pattern. - PVEHOST_NAME=$(hostname) # gets the Proxmox Hostname and sets it to Uppercase - DIAGNOSTICS="yes" # sets the DIAGNOSTICS variable to "yes", used for the API call. - METHOD="default" # sets the METHOD variable to "default", used for the API call. - RANDOM_UUID="$(cat /proc/sys/kernel/random/uuid)" # generates a random UUID and sets it to the RANDOM_UUID variable. - SESSION_ID="${RANDOM_UUID:0:8}" # Short session ID (first 8 chars of UUID) for log files - BUILD_LOG="/tmp/create-lxc-${SESSION_ID}.log" # Host-side container creation log + NSAPP=$(echo "${APP,,}" | tr -d ' ') # This function sets the NSAPP variable by converting the value of the APP variable to lowercase and removing any spaces. + var_install="${NSAPP}-install" # sets the var_install variable by appending "-install" to the value of NSAPP. 
+ INTEGER='^[0-9]+([.][0-9]+)?$' # it defines the INTEGER regular expression pattern. + PVEHOST_NAME=$(hostname) # gets the Proxmox Hostname and sets it to Uppercase + DIAGNOSTICS="yes" # sets the DIAGNOSTICS variable to "yes", used for the API call. + METHOD="default" # sets the METHOD variable to "default", used for the API call. + RANDOM_UUID="$(cat /proc/sys/kernel/random/uuid)" # generates a random UUID and sets it to the RANDOM_UUID variable. + SESSION_ID="${RANDOM_UUID:0:8}" # Short session ID (first 8 chars of UUID) for log files + BUILD_LOG="/tmp/create-lxc-${SESSION_ID}.log" # Host-side container creation log + combined_log="/tmp/install-${SESSION_ID}-combined.log" # Combined log (build + install) for failed installations CTTYPE="${CTTYPE:-${CT_TYPE:-1}}" # Parse dev_mode early @@ -56,6 +57,7 @@ variables() { if [[ "${DEV_MODE_LOGS:-false}" == "true" ]]; then mkdir -p /var/log/community-scripts BUILD_LOG="/var/log/community-scripts/create-lxc-${SESSION_ID}-$(date +%Y%m%d_%H%M%S).log" + combined_log="/var/log/community-scripts/install-${SESSION_ID}-combined-$(date +%Y%m%d_%H%M%S).log" fi # Get Proxmox VE version and kernel version @@ -1882,7 +1884,7 @@ advanced_settings() { fi ;; - # ══════════════��════════════════════════════════════════════════════════════ + # ═══════════════════════════════════════════════════════════════════════════ # STEP 3: Container ID # ═══════════════════════════════════════════════════════════════════════════ 3) @@ -2882,6 +2884,7 @@ dev_mode_menu() { if [[ "${DEV_MODE_LOGS:-false}" == "true" ]]; then mkdir -p /var/log/community-scripts BUILD_LOG="/var/log/community-scripts/create-lxc-${SESSION_ID}-$(date +%Y%m%d_%H%M%S).log" + combined_log="/var/log/community-scripts/install-${SESSION_ID}-combined-$(date +%Y%m%d_%H%M%S).log" fi } From 82de09f19fcfc81d735e95f6c023b81de573df51 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:29:28 +0100 Subject: [PATCH 
64/87] fix: remove debug output, add duration tracking, cleanup duplicate log messages --- misc/api.func | 44 ++++++++++++++--------------------------- misc/build.func | 3 +++ misc/error_handler.func | 7 +------ 3 files changed, 19 insertions(+), 35 deletions(-) diff --git a/misc/api.func b/misc/api.func index 87d8891da..b7b67261c 100644 --- a/misc/api.func +++ b/misc/api.func @@ -323,30 +323,15 @@ EOF # - Never blocks or fails script execution # ------------------------------------------------------------------------------ post_update_to_api() { - # DEBUG: Show function entry - echo "[DEBUG] post_update_to_api() called with status=$1 exit_code=$2" >&2 - # Silent fail - telemetry should never break scripts - command -v curl &>/dev/null || { - echo "[DEBUG] curl not found, skipping" >&2 - return 0 - } + command -v curl &>/dev/null || return 0 # Prevent duplicate submissions POST_UPDATE_DONE=${POST_UPDATE_DONE:-false} - [[ "$POST_UPDATE_DONE" == "true" ]] && { - echo "[DEBUG] Already sent update, skipping" >&2 - return 0 - } + [[ "$POST_UPDATE_DONE" == "true" ]] && return 0 - [[ "${DIAGNOSTICS:-no}" == "no" ]] && { - echo "[DEBUG] DIAGNOSTICS=no, skipping" >&2 - return 0 - } - [[ -z "${RANDOM_UUID:-}" ]] && { - echo "[DEBUG] RANDOM_UUID empty, skipping" >&2 - return 0 - } + [[ "${DIAGNOSTICS:-no}" == "no" ]] && return 0 + [[ -z "${RANDOM_UUID:-}" ]] && return 0 local status="${1:-failed}" local raw_exit_code="${2:-1}" @@ -378,7 +363,13 @@ post_update_to_api() { [[ -z "$error" ]] && error="Unknown error" fi - # Update payload: only fields that change (status, error, exit_code) + # Calculate duration if timer was started + local duration=0 + if [[ -n "${INSTALL_START_TIME:-}" ]]; then + duration=$(( $(date +%s) - INSTALL_START_TIME )) + fi + + # Update payload: only fields that change (status, error, exit_code, duration) # The Go service will find the record by random_id and PATCH only these fields local JSON_PAYLOAD JSON_PAYLOAD=$( @@ -389,21 +380,16 @@ 
post_update_to_api() { "nsapp": "${NSAPP:-unknown}", "status": "${pb_status}", "exit_code": ${exit_code}, - "error": "${error}" + "error": "${error}", + "install_duration": ${duration} } EOF ) - echo "[DEBUG] Sending update to: $TELEMETRY_URL" >&2 - echo "[DEBUG] Update payload: $JSON_PAYLOAD" >&2 - # Fire-and-forget: never block, never fail - local http_code - http_code=$(curl -sS -w "%{http_code}" -m "${TELEMETRY_TIMEOUT}" -X POST "${TELEMETRY_URL}" \ + curl -sS -w "%{http_code}" -m "${TELEMETRY_TIMEOUT}" -X POST "${TELEMETRY_URL}" \ -H "Content-Type: application/json" \ - -d "$JSON_PAYLOAD" -o /dev/stderr 2>&1) || true - - echo "[DEBUG] HTTP response code: $http_code" >&2 + -d "$JSON_PAYLOAD" -o /dev/null 2>&1 || true POST_UPDATE_DONE=true } diff --git a/misc/build.func b/misc/build.func index 2907308f3..a98cf2693 100644 --- a/misc/build.func +++ b/misc/build.func @@ -4023,6 +4023,9 @@ EOF' install_ssh_keys_into_ct # Run application installer + # Start timer for duration tracking + start_install_timer + # Disable error trap - container errors are handled internally via flag file set +Eeuo pipefail # Disable ALL error handling temporarily trap - ERR # Remove ERR trap completely diff --git a/misc/error_handler.func b/misc/error_handler.func index 7fec71817..87c2b4883 100644 --- a/misc/error_handler.func +++ b/misc/error_handler.func @@ -187,12 +187,7 @@ error_handler() { # Create error flag file with exit code for host detection echo "$exit_code" >"/root/.install-${SESSION_ID:-error}.failed" 2>/dev/null || true - - if declare -f msg_custom >/dev/null 2>&1; then - msg_custom "πŸ“‹" "${YW}" "Log saved to: ${container_log}" - else - echo -e "${YW}Log saved to:${CL} ${BL}${container_log}${CL}" - fi + # Log path is shown by host as combined log - no need to show container path else # HOST CONTEXT: Show local log path and offer container cleanup if declare -f msg_custom >/dev/null 2>&1; then From 754b3d4b6b097f8ae9757d82f297c115b8fba255 Mon Sep 17 00:00:00 2001 From: 
"CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:42:13 +0100 Subject: [PATCH 65/87] fix: add Debian codename matching for binary mode in fetch_and_deploy_gh_release --- misc/tools.func | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/misc/tools.func b/misc/tools.func index 82895bce0..40ad30a2b 100644 --- a/misc/tools.func +++ b/misc/tools.func @@ -1974,6 +1974,12 @@ function fetch_and_deploy_gh_release() { [[ "$arch" == "x86_64" ]] && arch="amd64" [[ "$arch" == "aarch64" ]] && arch="arm64" + # Get Debian codename for distro-specific packages + local codename="" + if [[ -f /etc/os-release ]]; then + codename=$(grep -oP '(?<=VERSION_CODENAME=).*' /etc/os-release 2>/dev/null || true) + fi + local assets url_match="" assets=$(echo "$json" | jq -r '.assets[].browser_download_url') @@ -1989,7 +1995,17 @@ function fetch_and_deploy_gh_release() { done fi - # If no match via explicit pattern, fall back to architecture heuristic + # If no match via explicit pattern, try architecture + codename match + if [[ -z "$url_match" && -n "$codename" ]]; then + for u in $assets; do + if [[ "$u" =~ $arch.*$codename.*\.deb$ ]] || [[ "$u" =~ $arch.*-$codename\.deb$ ]] || [[ "$u" =~ ${arch}-${codename}\.deb$ ]] || [[ "$u" =~ ${arch}_${codename}\.deb$ ]]; then + url_match="$u" + break + fi + done + fi + + # Fallback: architecture heuristic without codename if [[ -z "$url_match" ]]; then for u in $assets; do if [[ "$u" =~ ($arch|amd64|x86_64|aarch64|arm64).*\.deb$ ]]; then From bc8a96b3fd2042939ae7d5066f7ca61e639ec5d4 Mon Sep 17 00:00:00 2001 From: vhsdream Date: Tue, 10 Feb 2026 09:13:22 -0500 Subject: [PATCH 66/87] nextExplorer: Update build for v2.2.0 --- ct/nextexplorer.sh | 10 +++++----- install/nextexplorer-install.sh | 21 ++++++++++----------- 2 files changed, 15 insertions(+), 16 deletions(-) diff --git a/ct/nextexplorer.sh b/ct/nextexplorer.sh index 201525c8e..c3b7777a5 100644 --- a/ct/nextexplorer.sh +++ 
b/ct/nextexplorer.sh @@ -41,14 +41,14 @@ function update_script() { msg_info "Updating nextExplorer" APP_DIR="/opt/nextExplorer/app" mkdir -p "$APP_DIR" - cd /opt/nextExplorer/backend + cd /opt/nextExplorer export NODE_ENV=production - $STD npm ci - cd /opt/nextExplorer/frontend + $STD npm ci --workspace backend + cd /opt/nextExplorer unset NODE_ENV export NODE_ENV=development - $STD npm ci - $STD npm run build -- --sourcemap false + $STD npm ci --workspace frontend + $STD npm run -w frontend build -- --sourcemap false unset NODE_ENV cd /opt/nextExplorer/ mv backend/{node_modules,src,package.json} "$APP_DIR" diff --git a/install/nextexplorer-install.sh b/install/nextexplorer-install.sh index 11b8559af..eeff9f311 100644 --- a/install/nextexplorer-install.sh +++ b/install/nextexplorer-install.sh @@ -33,18 +33,16 @@ APP_DIR="/opt/nextExplorer/app" LOCAL_IP="$(hostname -I | awk '{print $1}')" mkdir -p "$APP_DIR" mkdir -p /etc/nextExplorer -cd /opt/nextExplorer/backend -export NODE_ENV=production -$STD npm ci -unset NODE_ENV - -cd /opt/nextExplorer/frontend -export NODE_ENV=development -$STD npm ci -$STD npm run build -- --sourcemap false -unset NODE_ENV - cd /opt/nextExplorer +export NODE_ENV=production +$STD npm ci --workspace backend +unset NODE_ENV + +export NODE_ENV=development +$STD npm ci --workspace frontend +$STD npm run -w frontend build -- --sourcemap false +unset NODE_ENV + mv backend/{node_modules,src,package.json} "$APP_DIR" mv frontend/dist/ "$APP_DIR"/src/public msg_ok "Built nextExplorer" @@ -84,6 +82,7 @@ SESSION_SECRET="${SECRET}" # OIDC_CLIENT_ID= # OIDC_CLIENT_SECRET= # OIDC_CALLBACK_URL= +# OIDC_LOGOUT_URL= # OIDC_SCOPES= # OIDC_AUTO_CREATE_USERS=true From f4ccccfb32917e15bd7dd4ac2f2145ccaa67ba7e Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Tue, 10 Feb 2026 16:16:35 +0100 Subject: [PATCH 67/87] Update api.func --- misc/api.func | 102 +++++++++++++++++++++++++++++--------------------- 1 file 
changed, 60 insertions(+), 42 deletions(-) diff --git a/misc/api.func b/misc/api.func index b7b67261c..f4104f833 100644 --- a/misc/api.func +++ b/misc/api.func @@ -168,6 +168,43 @@ explain_exit_code() { # SECTION 2: TELEMETRY FUNCTIONS # ============================================================================== +# ------------------------------------------------------------------------------ +# detect_gpu() +# +# - Detects GPU vendor and passthrough type +# - Sets GPU_VENDOR and GPU_PASSTHROUGH globals +# - Used for GPU analytics +# ------------------------------------------------------------------------------ +detect_gpu() { + GPU_VENDOR="" + GPU_PASSTHROUGH="none" + + # Detect Intel GPU + if lspci 2>/dev/null | grep -qi "VGA.*Intel"; then + GPU_VENDOR="intel" + GPU_PASSTHROUGH="igpu" + fi + + # Detect AMD GPU + if lspci 2>/dev/null | grep -qi "VGA.*AMD\|VGA.*ATI"; then + GPU_VENDOR="amd" + # Check if discrete + if lspci 2>/dev/null | grep -qi "AMD.*Radeon"; then + GPU_PASSTHROUGH="dgpu" + else + GPU_PASSTHROUGH="igpu" + fi + fi + + # Detect NVIDIA GPU + if lspci 2>/dev/null | grep -qi "VGA.*NVIDIA\|3D.*NVIDIA"; then + GPU_VENDOR="nvidia" + GPU_PASSTHROUGH="dgpu" + fi + + export GPU_VENDOR GPU_PASSTHROUGH +} + # ------------------------------------------------------------------------------ # post_to_api() # @@ -215,6 +252,13 @@ post_to_api() { pve_version=$(pveversion 2>/dev/null | awk -F'[/ ]' '{print $2}') || true fi + # Detect GPU if not already set + if [[ -z "${GPU_VENDOR:-}" ]]; then + detect_gpu + fi + local gpu_vendor="${GPU_VENDOR:-}" + local gpu_passthrough="${GPU_PASSTHROUGH:-none}" + local JSON_PAYLOAD JSON_PAYLOAD=$( cat </dev/null | grep -qi "VGA.*Intel"; then - GPU_VENDOR="intel" - GPU_PASSTHROUGH="igpu" - fi - - # Detect AMD GPU - if lspci 2>/dev/null | grep -qi "VGA.*AMD\|VGA.*ATI"; then - GPU_VENDOR="amd" - # Check if discrete - if lspci 2>/dev/null | grep -qi "AMD.*Radeon"; then - GPU_PASSTHROUGH="dgpu" - else - GPU_PASSTHROUGH="igpu" - fi - 
fi - - # Detect NVIDIA GPU - if lspci 2>/dev/null | grep -qi "VGA.*NVIDIA\|3D.*NVIDIA"; then - GPU_VENDOR="nvidia" - GPU_PASSTHROUGH="dgpu" - fi - - export GPU_VENDOR GPU_PASSTHROUGH -} - # ------------------------------------------------------------------------------ # post_tool_to_api() # From 1dcd83abea38f7721d0415e656c3f5bd35acd8b8 Mon Sep 17 00:00:00 2001 From: "CanbiZ (MickLesk)" <47820557+MickLesk@users.noreply.github.com> Date: Tue, 10 Feb 2026 16:20:26 +0100 Subject: [PATCH 68/87] Add sortable dashboard; extend telemetry data MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Dashboard: add sortable table UI and client-side sorting support β€” CSS for sortable headers, data-sort attributes, default sort on Created (desc), timestamp formatting, header click handling, and inclusion of sort param in paginated fetches. Records now show a formatted Created column with full timestamp in the title. Initialize sortable headers on load. Telemetry/client: switch to sending a full JSON payload (allows create if initial PATCH failed) and include extra fields (ct_type, disk_size, core_count, ram_size, os_type, os_version, pve_version, method). pve_version is detected when available. Server: extend FetchRecordsPaginated to accept a sort field, validate allowed sort fields to prevent injection, use the sort when building the PB API request (default -created), and propagate the sort query param from the HTTP handler to the fetch call. Overall this enables server-side sorted pagination from the dashboard and richer telemetry records. 
--- misc/api.func | 18 ++++++++- misc/data/dashboard.go | 90 ++++++++++++++++++++++++++++++++++++++---- misc/data/service.go | 27 +++++++++++-- 3 files changed, 121 insertions(+), 14 deletions(-) diff --git a/misc/api.func b/misc/api.func index f4104f833..12b63c5ad 100644 --- a/misc/api.func +++ b/misc/api.func @@ -421,8 +421,14 @@ post_update_to_api() { duration=$(($(date +%s) - INSTALL_START_TIME)) fi - # Update payload: only fields that change (status, error, exit_code, duration, gpu) - # The Go service will find the record by random_id and PATCH only these fields + # Get PVE version + local pve_version="" + if command -v pveversion &>/dev/null; then + pve_version=$(pveversion 2>/dev/null | awk -F'[/ ]' '{print $2}') || true + fi + + # Full payload including all fields - allows record creation if initial call failed + # The Go service will find the record by random_id and PATCH, or create if not found local JSON_PAYLOAD JSON_PAYLOAD=$( cat <All OS
- +
- - - - - + + + + + - + + - +
AppStatusOSTypeMethodAppStatusOSTypeMethod ResourcesExit CodeExit Code ErrorCreated β–Ό
Loading...
Loading...
+ + + + + +