Compare commits


1 Commit

Author SHA1 Message Date
530c6a463b Change all links to gite 2025-05-14 10:17:14 +02:00
477 changed files with 17855 additions and 106633 deletions

View File

@ -5,10 +5,10 @@
1) [adguard.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/ct/adguard.sh): This script collects system parameters. (Also holds the function to update the application.)
2) [build.func](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/build.func): Adds user settings and integrates collected information.
3) [create_lxc.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/create_lxc.sh): Constructs the LXC container.
3) [create_lxc.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/ct/create_lxc.sh): Constructs the LXC container.
4) [adguard-install.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/install/adguard-install.sh): Executes functions from [install.func](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/install.func), and installs the application.
5) [adguard.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/ct/adguard.sh) (again): To display the completion message.
The installation process uses reusable scripts: [build.func](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/build.func), [create_lxc.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/create_lxc.sh), and [install.func](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/install.func), which are not specific to any particular application.
The installation process uses reusable scripts: [build.func](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/build.func), [create_lxc.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/ct/create_lxc.sh), and [install.func](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/install.func), which are not specific to any particular application.
To gain a better understanding, focus on reviewing [adguard-install.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/install/adguard-install.sh). This script contains the commands and configurations for installing and configuring AdGuard Home within the LXC container.
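For orientation, below is a minimal sketch of how the entry script chains these pieces together. It is not the literal ct/adguard.sh; the `start`, `build_container`, and `description` helpers are assumed to come from build.func, as in the repository's ct scripts.

```bash
#!/usr/bin/env bash
# Minimal sketch of the chain described above (not the literal ct/adguard.sh).
# build.func collects user settings, calls create_lxc.sh to build the container,
# runs install/adguard-install.sh (which sources install.func) inside it,
# and finally prints the completion message.
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)

APP="AdGuard"

start            # assumed build.func helper: collect system parameters and user settings
build_container  # assumed build.func helper: wraps create_lxc.sh and the install script
description      # assumed build.func helper: show the completion message
```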

View File

@ -1,36 +1,25 @@
---
name: 🛠️ New Script
description: New Script proposal.
labels: [task]
name: "🛠️ New Script"
description: "New Script proposal."
labels: ["task"]
body:
- type: markdown
attributes:
value: |
# 🛠️ **New Script**
Create an Issue when you want to merge a new Script. The name of the Issue must be the same as your APP.sh file. (Example: SnipeIT, snipeit.sh; Alpine-Docker, alpine-docker.sh)
Create an Issue when you want to merge a new Script. The name of the Issue must be the same as your APP.sh file. (Example: SnipeIT, snipeit.sh; Alpine-Docker, alpine-docker.sh)
- type: input
id: task_summary
attributes:
label: Name of the Script
placeholder: e.g., SnipeIT
validations:
required: true
- type: dropdown
id: script_type
attributes:
label: Script Type
description: What type of script is this?
options:
- CT (LXC Container)
- VM (Virtual Machine)
- Addon (tools/addon)
- PVE Tool (tools/pve)
label: "Name of the Script"
placeholder: "e.g., SnipeIT"
validations:
required: true
- type: textarea
id: task_details
attributes:
label: 📋 Script Details
placeholder: Explain what is needed or special about this script
label: "📋 Scritpt Details"
placeholder: "Explain what is needed or special about this script"
validations:
required: true
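The naming rule stated in the template above (issue name equals the APP.sh filename) is the same convention later workflows rely on when deriving file paths; a hedged one-liner illustrating it:

```bash
# Hedged sketch: derive the expected script filename from an app name
# (SnipeIT -> snipeit.sh, Alpine-Docker -> alpine-docker.sh).
APP="Alpine Docker"   # example app name
echo "$(echo "$APP" | tr '[:upper:]' '[:lower:]' | sed 's/ /-/g').sh"   # -> alpine-docker.sh
```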

View File

@ -1,125 +1,90 @@
{
"new script": [
{
"fileStatus": "added",
"includeGlobs": [
"ct/**",
"install/**",
"misc/**",
"turnkey/**",
"vm/**"
],
"excludeGlobs": []
}
],
"update script": [
{
"fileStatus": "modified",
"includeGlobs": [
"ct/**",
"install/**",
"misc/**",
"turnkey/**",
"vm/**"
],
"excludeGlobs": [
"misc/build.func",
"misc/install.func",
"misc/api.func"
]
}
],
"delete script": [
{
"fileStatus": "removed",
"includeGlobs": [
"ct/**",
"install/**",
"misc/**",
"turnkey/**",
"vm/**"
],
"excludeGlobs": []
}
],
"maintenance": [
{
"fileStatus": null,
"includeGlobs": [
"*.md",
".github/**",
"misc/*.func",
"misc/create_lxc.sh",
"api/**"
],
"excludeGlobs": []
}
],
"core": [
{
"fileStatus": null,
"includeGlobs": [
"misc/*.func",
"misc/create_lxc.sh"
],
"excludeGlobs": []
}
],
"website": [
{
"fileStatus": null,
"includeGlobs": [
"frontend/**"
],
"excludeGlobs": []
}
],
"api": [
{
"fileStatus": null,
"includeGlobs": [
"api/**",
"misc/api.func"
],
"excludeGlobs": []
}
],
"github": [
{
"fileStatus": null,
"includeGlobs": [
".github/**"
],
"excludeGlobs": []
}
],
"json": [
{
"fileStatus": "modified",
"includeGlobs": [
"frontend/public/json/**"
],
"excludeGlobs": []
}
],
"high risk": [
{
"fileStatus": null,
"includeGlobs": [
"misc/build.func",
"misc/install.func",
"misc/create_lxc.sh"
],
"excludeGlobs": []
}
],
"documentation": [
{
"fileStatus": null,
"includeGlobs": [
"*.md"
],
"excludeGlobs": []
}
]
"new script": [
{
"fileStatus": "added",
"includeGlobs": ["ct/**", "install/**", "misc/**", "turnkey/**", "vm/**"],
"excludeGlobs": []
}
],
"update script": [
{
"fileStatus": "modified",
"includeGlobs": ["ct/**", "install/**", "misc/**", "turnkey/**", "vm/**"],
"excludeGlobs": ["misc/build.func", "misc/install.func", "misc/api.func"]
}
],
"delete script": [
{
"fileStatus": "removed",
"includeGlobs": ["ct/**", "install/**", "misc/**", "turnkey/**", "vm/**"],
"excludeGlobs": []
}
],
"maintenance": [
{
"fileStatus": null,
"includeGlobs": [
"*.md",
".github/**",
"misc/*.func",
"ct/create_lxc.sh",
"api/**"
],
"excludeGlobs": []
}
],
"core": [
{
"fileStatus": null,
"includeGlobs": ["misc/*.func", "ct/create_lxc.sh"],
"excludeGlobs": []
}
],
"website": [
{
"fileStatus": null,
"includeGlobs": ["frontend/**"],
"excludeGlobs": []
}
],
"api": [
{
"fileStatus": null,
"includeGlobs": ["api/**", "misc/api.func"],
"excludeGlobs": []
}
],
"github": [
{
"fileStatus": null,
"includeGlobs": [".github/**"],
"excludeGlobs": []
}
],
"json": [
{
"fileStatus": "modified",
"includeGlobs": ["frontend/publuc/json/**"],
"excludeGlobs": []
}
],
"high risk": [
{
"fileStatus": null,
"includeGlobs": [
"misc/build.func",
"misc/install.func",
"ct/create_lxc.sh"
],
"excludeGlobs": []
}
],
"documentation": [
{
"fileStatus": null,
"includeGlobs": ["*.md"],
"excludeGlobs": []
}
]
}
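To make the matching rules concrete, here is a small hedged sketch (not repository tooling) of how one changed file would pick up the "new script" label: the file status must be "added" and the path must match one of the listed include globs, with no exclude globs to subtract.

```bash
#!/usr/bin/env bash
# Hedged sketch, not part of the repo: apply the "new script" rule above to one file.
FILE="ct/alpine-docker.sh"   # hypothetical changed file
STATUS="added"               # file status as reported for the diff

label=""
if [[ "$STATUS" == "added" ]]; then
  case "$FILE" in
    ct/*|install/*|misc/*|turnkey/*|vm/*) label="new script" ;;
  esac
fi
echo "${label:-no label matched}"   # -> new script
```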

View File

@ -1,4 +1,7 @@
## **Scripts which are clearly AI-generated and not further reviewed by the author of this PR (in terms of coding standards and script layout) may be closed without review.**
🛑 **New scripts must first be submitted to [ProxmoxVED](https://github.com/community-scripts/ProxmoxVED) for testing.**
PRs for new scripts that skip this process will be closed.
---
## ✍️ Description
<!-- Briefly describe your changes. -->

View File

@ -20,22 +20,10 @@ jobs:
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@v2
uses: actions/create-github-app-token@v1
with:
app-id: ${{ vars.APP_ID }}
private-key: ${{ secrets.APP_PRIVATE_KEY }}
owner: community-scripts
repositories: ProxmoxVED
- name: Generate a token for PR approval and merge
id: generate-token-merge
uses: actions/create-github-app-token@v2
with:
app-id: ${{ vars.APP_ID }}
private-key: ${{ secrets.APP_PRIVATE_KEY }}
owner: community-scripts
repositories: ProxmoxVED
# Step 1: Checkout repository
- name: Checkout repository
@ -103,17 +91,14 @@ jobs:
gh pr review $PR_NUMBER --approve
fi
- name: Approve pull request and merge
- name: Re-approve pull request after update
if: env.changed == 'true'
env:
GH_TOKEN: ${{ steps.generate-token-merge.outputs.token }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
git config --global user.name "github-actions-automege[bot]"
git config --global user.email "github-actions-automege[bot]@users.noreply.github.com"
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[0].number')
PR_NUMBER=$(gh pr list --head "pr-update-app-files" --json number --jq '.[].number')
if [ -n "$PR_NUMBER" ]; then
gh pr review "$PR_NUMBER" --approve
gh pr merge "$PR_NUMBER" --squash --admin
gh pr review $PR_NUMBER --approve
fi
# Step 8: Output success message when no changes

View File

@ -0,0 +1,60 @@
name: Update date_created in JSON files
on:
# This trigger runs when PRs are opened and when open PRs are updated
pull_request:
types: [opened, synchronize]
schedule:
# This trigger fires four times a day to make sure the date stays up to date
- cron: "0 0,6,12,18 * * *" # Runs every 6 hours
workflow_dispatch: # Allows manual runs of the workflow
jobs:
update-date:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install yq
run: |
sudo apt-get update
sudo apt-get install -y yq
- name: Set the current date
id: set_date
run: echo "TODAY=$(date -u +%Y-%m-%d)" >> $GITHUB_ENV
- name: Check for changes in PR
run: |
# Fetch the PR branch
PR_BRANCH="refs/pull/${{ github.event.pull_request.number }}/merge"
git fetch origin $PR_BRANCH
# List all JSON files changed in the PR
CHANGED_JSON_FILES=$(git diff --name-only origin/main...$PR_BRANCH | grep '.json')
if [ -z "$CHANGED_JSON_FILES" ]; then
echo "No JSON files changed in this PR."
exit 0
fi
# Walk through all changed JSON files and update the date
for file in $CHANGED_JSON_FILES; do
echo "Updating date_created in $file"
# Set the current date
yq eval ".date_created = \"${{ env.TODAY }}\"" -i "$file"
git add "$file"
done
- name: Commit and push changes
run: |
# Check for changes and commit them
git config user.name "json-updater-bot"
git config user.email "github-actions[bot]@users.noreply.github.com"
git commit -m "Update date_created to ${{ env.TODAY }}" || echo "No changes to commit"
# Push back to the PR branch
git push origin $PR_BRANCH
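For reference, the date update reduces to a single yq call per file; a hedged local equivalent, mirroring the workflow's yq syntax (the file path is only an example):

```bash
# Hedged local equivalent of the update step above (example path, not repo policy).
TODAY=$(date -u +%Y-%m-%d)
yq eval ".date_created = \"$TODAY\"" -i frontend/public/json/adguard.json
```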

.github/workflows/backup/shellcheck.yml
View File

@ -0,0 +1,60 @@
name: Shellcheck
on:
push:
branches:
- main
pull_request:
workflow_dispatch:
schedule:
- cron: "5 1 * * *"
jobs:
shellcheck:
name: Shellcheck
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@v45
with:
files: |
**.sh
- name: Download ShellCheck
shell: bash
env:
INPUT_VERSION: "v0.10.0"
run: |
set -euo pipefail
if [[ "${{ runner.os }}" == "macOS" ]]; then
osvariant="darwin"
else
osvariant="linux"
fi
baseurl="https://github.com/koalaman/shellcheck/releases/download"
curl -Lso "${{ github.workspace }}/sc.tar.xz" \
"${baseurl}/${INPUT_VERSION}/shellcheck-${INPUT_VERSION}.${osvariant}.x86_64.tar.xz"
tar -xf "${{ github.workspace }}/sc.tar.xz" -C "${{ github.workspace }}"
mv "${{ github.workspace }}/shellcheck-${INPUT_VERSION}/shellcheck" \
"${{ github.workspace }}/shellcheck"
- name: Verify ShellCheck binary
run: |
ls -l "${{ github.workspace }}/shellcheck"
- name: Display ShellCheck version
run: |
"${{ github.workspace }}/shellcheck" --version
- name: Run ShellCheck
if: steps.changed-files.outputs.any_changed == 'true'
env:
ALL_CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }}
run: |
echo "${ALL_CHANGED_FILES}" | xargs "${{ github.workspace }}/shellcheck"

View File

@ -0,0 +1,88 @@
name: Auto Update JSON-Date
on:
push:
branches:
- main
workflow_dispatch:
jobs:
update-json-dates:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@v1
with:
app-id: ${{ vars.APP_ID }}
private-key: ${{ secrets.APP_PRIVATE_KEY }}
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0 # Full history for proper detection
- name: Set up Git
run: |
git config --global user.name "GitHub Actions"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
- name: Find JSON files with incorrect date_created
id: find_wrong_json
run: |
TODAY=$(date -u +"%Y-%m-%d")
> incorrect_json_files.txt
for FILE in json/*.json; do
if [[ -f "$FILE" ]]; then
DATE_IN_JSON=$(jq -r '.date_created' "$FILE" 2>/dev/null || echo "")
if [[ "$DATE_IN_JSON" != "$TODAY" ]]; then
echo "$FILE" >> incorrect_json_files.txt
fi
fi
done
if [[ -s incorrect_json_files.txt ]]; then
echo "CHANGED=true" >> $GITHUB_ENV
else
echo "CHANGED=false" >> $GITHUB_ENV
fi
- name: Run update script
if: env.CHANGED == 'true'
run: |
chmod +x .github/workflows/scripts/update-json.sh
while read -r FILE; do
.github/workflows/scripts/update-json.sh "$FILE"
done < incorrect_json_files.txt
- name: Commit and create PR if changes exist
if: env.CHANGED == 'true'
run: |
git add json/*.json
git commit -m "Auto-update date_created in incorrect JSON files"
git checkout -b pr-fix-json-dates
git push origin pr-fix-json-dates --force
gh pr create --title "[core] Fix incorrect JSON date_created fields" \
--body "This PR is auto-generated to fix incorrect `date_created` fields in JSON files." \
--head pr-fix-json-dates \
--base main \
--label "automated pr"
env:
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
- name: Approve pull request
if: env.CHANGED == 'true'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PR_NUMBER=$(gh pr list --head "pr-fix-json-dates" --json number --jq '.[].number')
if [ -n "$PR_NUMBER" ]; then
gh pr review $PR_NUMBER --approve
fi

View File

@ -0,0 +1,133 @@
name: Validate script formatting
on:
push:
branches:
- main
pull_request_target:
paths:
- "**/*.sh"
- "**/*.func"
jobs:
shfmt:
name: Check changed files
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- name: Get pull request information
if: github.event_name == 'pull_request_target'
uses: actions/github-script@v7
id: pr
with:
script: |
const { data: pullRequest } = await github.rest.pulls.get({
...context.repo,
pull_number: context.payload.pull_request.number,
});
return pullRequest;
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0 # Ensure the full history is fetched for accurate diffing
ref: ${{ github.event_name == 'pull_request_target' && fromJSON(steps.pr.outputs.result).merge_commit_sha || '' }}
- name: Get changed files
id: changed-files
run: |
if ${{ github.event_name == 'pull_request_target' }}; then
echo "files=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ steps.pr.outputs.result && fromJSON(steps.pr.outputs.result).merge_commit_sha }} | grep -E '\.(sh|func)$' | xargs)" >> $GITHUB_OUTPUT
else
echo "files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | grep -E '\.(sh|func)$' | xargs)" >> $GITHUB_OUTPUT
fi
- name: Set up Go
if: steps.changed-files.outputs.files != ''
uses: actions/setup-go@v5
- name: Install shfmt
if: steps.changed-files.outputs.files != ''
run: |
go install mvdan.cc/sh/v3/cmd/shfmt@latest
echo "$GOPATH/bin" >> $GITHUB_PATH
- name: Run shfmt
if: steps.changed-files.outputs.files != ''
id: shfmt
run: |
set +e
shfmt_output=$(shfmt -d ${{ steps.changed-files.outputs.files }})
if [[ $? -eq 0 ]]; then
exit 0
else
echo "diff=\"$(echo -n "$shfmt_output" | base64 -w 0)\"" >> $GITHUB_OUTPUT
printf "%s" "$shfmt_output"
exit 1
fi
- name: Post comment with results
if: always() && steps.changed-files.outputs.files != '' && github.event_name == 'pull_request_target'
uses: actions/github-script@v7
with:
script: |
const result = "${{ job.status }}" === "success" ? "success" : "failure";
const diff = Buffer.from(
${{ steps.shfmt.outputs.diff }},
"base64",
).toString();
const issueNumber = context.payload.pull_request
? context.payload.pull_request.number
: null;
const commentIdentifier = "validate-formatting";
let newCommentBody = `<!-- ${commentIdentifier}-start -->\n### Script formatting\n\n`;
if (result === "failure") {
newCommentBody +=
`:x: We found issues in the formatting of the following changed files:\n\n\`\`\`diff\n${diff}\n\`\`\`\n`;
} else {
newCommentBody += `:rocket: All changed shell scripts are formatted correctly!\n`;
}
newCommentBody += `\n\n<!-- ${commentIdentifier}-end -->`;
if (issueNumber) {
const { data: comments } = await github.rest.issues.listComments({
...context.repo,
issue_number: issueNumber,
});
const existingComment = comments.find(
(comment) => comment.user.login === "github-actions[bot]",
);
if (existingComment) {
if (existingComment.body.includes(commentIdentifier)) {
const re = new RegExp(
String.raw`<!-- ${commentIdentifier}-start -->[\s\S]*?<!-- ${commentIdentifier}-end -->`,
"",
);
newCommentBody = existingComment.body.replace(re, newCommentBody);
} else {
newCommentBody = existingComment.body + "\n\n---\n\n" + newCommentBody;
}
await github.rest.issues.updateComment({
...context.repo,
comment_id: existingComment.id,
body: newCommentBody,
});
} else {
await github.rest.issues.createComment({
...context.repo,
issue_number: issueNumber,
body: newCommentBody,
});
}
}
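To reproduce the formatting check locally, a hedged sketch (the file list is only an example):

```bash
# Hedged local equivalent of the shfmt step above.
go install mvdan.cc/sh/v3/cmd/shfmt@latest
shfmt -d ct/adguard.sh install/adguard-install.sh   # non-zero exit means formatting diffs
```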

View File

@ -0,0 +1,234 @@
name: Validate scripts
on:
push:
branches:
- main
pull_request_target:
paths:
- "ct/*.sh"
- "install/*.sh"
jobs:
check-scripts:
name: Check changed files
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- name: Debug event payload
run: |
echo "Event name: ${{ github.event_name }}"
echo "Payload: $(cat $GITHUB_EVENT_PATH)"
- name: Get pull request information
if: github.event_name == 'pull_request_target'
uses: actions/github-script@v7
id: pr
with:
script: |
const { data: pullRequest } = await github.rest.pulls.get({
...context.repo,
pull_number: context.payload.pull_request.number,
});
return pullRequest;
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ github.event_name == 'pull_request_target' && fromJSON(steps.pr.outputs.result).merge_commit_sha || '' }}
- name: Get changed files
id: changed-files
run: |
if [ "${{ github.event_name }}" == "pull_request_target" ]; then
echo "files=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ steps.pr.outputs.result && fromJSON(steps.pr.outputs.result).merge_commit_sha }} | grep -E '\.(sh|func)$' | xargs)" >> $GITHUB_OUTPUT
else
echo "files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | grep -E '\.(sh|func)$' | xargs)" >> $GITHUB_OUTPUT
fi
- name: Check build.func line
if: always() && steps.changed-files.outputs.files != ''
id: build-func
run: |
NON_COMPLIANT_FILES=""
for FILE in ${{ steps.changed-files.outputs.files }}; do
if [[ "$FILE" == ct/* ]] && [[ $(sed -n '2p' "$FILE") != "source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)" ]]; then
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
fi
done
if [ -n "$NON_COMPLIANT_FILES" ]; then
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
echo "Build.func line missing or incorrect in files:"
for FILE in $NON_COMPLIANT_FILES; do
echo "$FILE"
done
exit 1
fi
- name: Check executable permissions
if: always() && steps.changed-files.outputs.files != ''
id: check-executable
run: |
NON_COMPLIANT_FILES=""
for FILE in ${{ steps.changed-files.outputs.files }}; do
if [[ ! -x "$FILE" ]]; then
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
fi
done
if [ -n "$NON_COMPLIANT_FILES" ]; then
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
echo "Files not executable:"
for FILE in $NON_COMPLIANT_FILES; do
echo "$FILE"
done
exit 1
fi
- name: Check copyright
if: always() && steps.changed-files.outputs.files != ''
id: check-copyright
run: |
NON_COMPLIANT_FILES=""
for FILE in ${{ steps.changed-files.outputs.files }}; do
if ! sed -n '3p' "$FILE" | grep -qE "^# Copyright \(c\) [0-9]{4}(-[0-9]{4})? (tteck \| community-scripts ORG|community-scripts ORG|tteck)$"; then
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
fi
done
if [ -n "$NON_COMPLIANT_FILES" ]; then
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
echo "Copyright header missing or not on line 3 in files:"
for FILE in $NON_COMPLIANT_FILES; do
echo "$FILE"
done
exit 1
fi
- name: Check author
if: always() && steps.changed-files.outputs.files != ''
id: check-author
run: |
NON_COMPLIANT_FILES=""
for FILE in ${{ steps.changed-files.outputs.files }}; do
if ! sed -n '4p' "$FILE" | grep -qE "^# Author: .+"; then
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
fi
done
if [ -n "$NON_COMPLIANT_FILES" ]; then
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
echo "Author header missing or invalid on line 4 in files:"
for FILE in $NON_COMPLIANT_FILES; do
echo "$FILE"
done
exit 1
fi
- name: Check license
if: always() && steps.changed-files.outputs.files != ''
id: check-license
run: |
NON_COMPLIANT_FILES=""
for FILE in ${{ steps.changed-files.outputs.files }}; do
if [[ "$(sed -n '5p' "$FILE")" != "# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE" ]]; then
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
fi
done
if [ -n "$NON_COMPLIANT_FILES" ]; then
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
echo "License header missing or not on line 5 in files:"
for FILE in $NON_COMPLIANT_FILES; do
echo "$FILE"
done
exit 1
fi
- name: Check source
if: always() && steps.changed-files.outputs.files != ''
id: check-source
run: |
NON_COMPLIANT_FILES=""
for FILE in ${{ steps.changed-files.outputs.files }}; do
if ! sed -n '6p' "$FILE" | grep -qE "^# Source: .+"; then
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
fi
done
if [ -n "$NON_COMPLIANT_FILES" ]; then
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
echo "Source header missing or not on line 6 in files:"
for FILE in $NON_COMPLIANT_FILES; do
echo "$FILE"
done
exit 1
fi
- name: Post results and comment
if: always() && steps.changed-files.outputs.files != '' && github.event_name == 'pull_request_target'
uses: actions/github-script@v7
with:
script: |
const result = '${{ job.status }}' === 'success' ? 'success' : 'failure';
const nonCompliantFiles = {
'Invalid build.func source': "${{ steps.build-func.outputs.files || '' }}",
'Not executable': "${{ steps.check-executable.outputs.files || '' }}",
'Copyright header line missing or invalid': "${{ steps.check-copyright.outputs.files || '' }}",
'Author header line missing or invalid': "${{ steps.check-author.outputs.files || '' }}",
'License header line missing or invalid': "${{ steps.check-license.outputs.files || '' }}",
'Source header line missing or invalid': "${{ steps.check-source.outputs.files || '' }}"
};
const issueNumber = context.payload.pull_request ? context.payload.pull_request.number : null;
const commentIdentifier = 'validate-scripts';
let newCommentBody = `<!-- ${commentIdentifier}-start -->\n### Script validation\n\n`;
if (result === 'failure') {
newCommentBody += ':x: We found issues in the following changed files:\n\n';
for (const [check, files] of Object.entries(nonCompliantFiles)) {
if (files) {
newCommentBody += `**${check}:**\n`;
files.trim().split(' ').forEach(file => {
newCommentBody += `- ${file}: ${check}\n`;
});
newCommentBody += `\n`;
}
}
} else {
newCommentBody += `:rocket: All changed shell scripts passed validation!\n`;
}
newCommentBody += `\n\n<!-- ${commentIdentifier}-end -->`;
if (issueNumber) {
const { data: comments } = await github.rest.issues.listComments({
...context.repo,
issue_number: issueNumber
});
const existingComment = comments.find(comment =>
comment.body.includes(`<!-- ${commentIdentifier}-start -->`) &&
comment.user.login === 'github-actions[bot]'
);
if (existingComment) {
const re = new RegExp(String.raw`<!-- ${commentIdentifier}-start -->[\s\S]*?<!-- ${commentIdentifier}-end -->`, "m");
newCommentBody = existingComment.body.replace(re, newCommentBody);
await github.rest.issues.updateComment({
...context.repo,
comment_id: existingComment.id,
body: newCommentBody
});
} else {
await github.rest.issues.createComment({
...context.repo,
issue_number: issueNumber,
body: newCommentBody
});
}
}
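Taken together, these checks pin down lines 2-6 of every ct script (and the file must also be executable). A hedged example header that would pass them, with year range, author, and source URL as placeholders:

```bash
#!/usr/bin/env bash
source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: your-github-handle
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://example.com/upstream-project
```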

.github/workflows/changelog-pr.yaml
View File

@ -0,0 +1,272 @@
name: Create Changelog Pull Request
on:
push:
branches: ["main"]
workflow_dispatch:
jobs:
update-changelog-pull-request:
if: github.repository == 'community-scripts/ProxmoxVED'
runs-on: ubuntu-latest
env:
CONFIG_PATH: .github/changelog-pr-config.json
BRANCH_NAME: github-action-update-changelog
AUTOMATED_PR_LABEL: "automated pr"
permissions:
contents: write
pull-requests: write
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@v1
with:
app-id: ${{ vars.APP_ID }}
private-key: ${{ secrets.APP_PRIVATE_KEY }}
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get latest dates in changelog
run: |
DATES=$(grep -E '^## [0-9]{4}-[0-9]{2}-[0-9]{2}' CHANGELOG.md | head -n 2 | awk '{print $2}')
LATEST_DATE=$(echo "$DATES" | sed -n '1p')
SECOND_LATEST_DATE=$(echo "$DATES" | sed -n '2p')
TODAY=$(date -u +%Y-%m-%d)
echo "TODAY=$TODAY" >> $GITHUB_ENV
if [[ "$LATEST_DATE" == "$TODAY" ]]; then
echo "LATEST_DATE=$SECOND_LATEST_DATE" >> $GITHUB_ENV
else
echo "LATEST_DATE=$LATEST_DATE" >> $GITHUB_ENV
fi
- name: Get categorized pull requests
id: get-categorized-prs
uses: actions/github-script@v7
with:
script: |
async function main() {
const fs = require('fs').promises;
const path = require('path');
const configPath = path.resolve(process.env.CONFIG_PATH);
const fileContent = await fs.readFile(configPath, 'utf-8');
const changelogConfig = JSON.parse(fileContent);
const categorizedPRs = changelogConfig.map(obj => ({
...obj,
notes: [],
subCategories: obj.subCategories ?? (
obj.labels.includes("update script") ? [
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
{ title: "✨ New Features", labels: ["feature"], notes: [] },
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
{ title: "🔧 Refactor", labels: ["refactor"], notes: [] },
] :
obj.labels.includes("maintenance") ? [
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
{ title: "✨ New Features", labels: ["feature"], notes: [] },
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
{ title: "📡 API", labels: ["api"], notes: [] },
{ title: "Github", labels: ["github"], notes: [] },
{ title: "📝 Documentation", labels: ["documentation"], notes: [] },
{ title: "🔧 Refactor", labels: ["refactor"], notes: [] }
] :
obj.labels.includes("website") ? [
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
{ title: "✨ New Features", labels: ["feature"], notes: [] },
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
{ title: "Script Information", labels: ["json"], notes: [] }
] : []
)
}));
const latestDateInChangelog = new Date(process.env.LATEST_DATE);
latestDateInChangelog.setUTCHours(23, 59, 59, 999);
const { data: pulls } = await github.rest.pulls.list({
owner: context.repo.owner,
repo: "ProxmoxVE",
base: "main",
state: "closed",
sort: "updated",
direction: "desc",
per_page: 100,
});
const filteredPRs = pulls.filter(pr =>
pr.merged_at &&
new Date(pr.merged_at) > latestDateInChangelog &&
!pr.labels.some(label =>
["invalid", "wontdo", process.env.AUTOMATED_PR_LABEL].includes(label.name.toLowerCase())
)
);
for (const pr of filteredPRs) {
const prLabels = pr.labels.map(label => label.name.toLowerCase());
if (pr.user.login.includes("push-app-to-main[bot]")) {
const scriptName = pr.title;
try {
const { data: relatedIssues } = await github.rest.issues.listForRepo({
owner: context.repo.owner,
repo: "ProxmoxVED",
state: "all",
labels: ["Started Migration To ProxmoxVE"]
});
const matchingIssue = relatedIssues.find(issue =>
issue.title.toLowerCase().includes(scriptName.toLowerCase())
);
if (matchingIssue) {
const issueAuthor = matchingIssue.user.login;
const issueAuthorUrl = `https://github.com/${issueAuthor}`;
prNote = `- ${pr.title} [@${issueAuthor}](${issueAuthorUrl}) ([#${pr.number}](${pr.html_url}))`;
}
else {
prNote = `- ${pr.title} ([#${pr.number}](${pr.html_url}))`;
}
} catch (error) {
console.error(`Error fetching related issues: ${error}`);
prNote = `- ${pr.title} ([#${pr.number}](${pr.html_url}))`;
}
}else{
prNote = `- ${pr.title} [@${pr.user.login}](https://github.com/${pr.user.login}) ([#${pr.number}](${pr.html_url}))`;
}
if (prLabels.includes("new script")) {
const newScriptCategory = categorizedPRs.find(category =>
category.title === "New Scripts" || category.labels.includes("new script"));
if (newScriptCategory) {
newScriptCategory.notes.push(prNote);
}
} else {
let categorized = false;
const priorityCategories = categorizedPRs.slice();
for (const category of priorityCategories) {
if (categorized) break;
if (category.labels.some(label => prLabels.includes(label))) {
if (category.subCategories && category.subCategories.length > 0) {
const subCategory = category.subCategories.find(sub =>
sub.labels.some(label => prLabels.includes(label))
);
if (subCategory) {
subCategory.notes.push(prNote);
} else {
category.notes.push(prNote);
}
} else {
category.notes.push(prNote);
}
categorized = true;
}
}
}
}
return categorizedPRs;
}
return await main();
- name: Update CHANGELOG.md
uses: actions/github-script@v7
with:
script: |
const fs = require('fs').promises;
const path = require('path');
const today = process.env.TODAY;
const latestDateInChangelog = process.env.LATEST_DATE;
const changelogPath = path.resolve('CHANGELOG.md');
const categorizedPRs = ${{ steps.get-categorized-prs.outputs.result }};
let newReleaseNotes = `## ${today}\n\n`;
for (const { title, notes, subCategories } of categorizedPRs) {
const hasSubcategories = subCategories && subCategories.length > 0;
const hasMainNotes = notes.length > 0;
const hasSubNotes = hasSubcategories && subCategories.some(sub => sub.notes && sub.notes.length > 0);
if (hasMainNotes || hasSubNotes) {
newReleaseNotes += `### ${title}\n\n`;
}
if (hasMainNotes) {
newReleaseNotes += ` ${notes.join("\n")}\n\n`;
}
if (hasSubcategories) {
for (const { title: subTitle, notes: subNotes } of subCategories) {
if (subNotes && subNotes.length > 0) {
newReleaseNotes += ` - #### ${subTitle}\n\n`;
newReleaseNotes += ` ${subNotes.join("\n ")}\n\n`;
}
}
}
}
const changelogContent = await fs.readFile(changelogPath, 'utf-8');
const changelogIncludesTodaysReleaseNotes = changelogContent.includes(`\n## ${today}`);
const regex = changelogIncludesTodaysReleaseNotes
? new RegExp(`## ${today}.*(?=## ${latestDateInChangelog})`, "gs")
: new RegExp(`(?=## ${latestDateInChangelog})`, "gs");
const newChangelogContent = changelogContent.replace(regex, newReleaseNotes);
await fs.writeFile(changelogPath, newChangelogContent);
- name: Check for changes
id: verify-diff
run: |
git diff --quiet . || echo "changed=true" >> $GITHUB_ENV
- name: Commit and push changes
if: env.changed == 'true'
run: |
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
git add CHANGELOG.md
git commit -m "Update CHANGELOG.md"
git checkout -b $BRANCH_NAME || git checkout $BRANCH_NAME
git push origin $BRANCH_NAME --force
- name: Create pull request if not exists
if: env.changed == 'true'
env:
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
PR_EXISTS=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
if [ -z "$PR_EXISTS" ]; then
gh pr create --title "[Github Action] Update CHANGELOG.md" \
--body "This PR is auto-generated by a Github Action to update the CHANGELOG.md file." \
--head $BRANCH_NAME \
--base main \
--label "$AUTOMATED_PR_LABEL"
fi
- name: Approve pull request
if: env.changed == 'true'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
if [ -n "$PR_NUMBER" ]; then
gh pr review $PR_NUMBER --approve
fi
- name: Re-approve pull request after update
if: env.changed == 'true'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
if [ -n "$PR_NUMBER" ]; then
gh pr review $PR_NUMBER --approve
fi

.github/workflows/close-discussion.yaml
View File

@ -0,0 +1,164 @@
name: Close Discussion on PR Merge
on:
push:
branches:
- main
permissions:
contents: read
discussions: write
jobs:
close-discussion:
if: github.repository == 'community-scripts/ProxmoxVED'
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Set Up Node.js
uses: actions/setup-node@v4
with:
node-version: "20"
- name: Install Dependencies
run: npm install zx @octokit/graphql
- name: Close Discussion
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_SHA: ${{ github.sha }}
GITHUB_REPOSITORY: ${{ github.repository }}
run: |
npx zx << 'EOF'
import { graphql } from "@octokit/graphql";
(async function () {
try {
const token = process.env.GITHUB_TOKEN;
const commitSha = process.env.GITHUB_SHA;
const [owner, repo] = process.env.GITHUB_REPOSITORY.split("/");
if (!token || !commitSha || !owner || !repo) {
console.log("Missing required environment variables.");
process.exit(1);
}
const graphqlWithAuth = graphql.defaults({
headers: { authorization: `Bearer ${token}` },
});
// Find PR from commit SHA
const searchQuery = `
query($owner: String!, $repo: String!, $sha: GitObjectID!) {
repository(owner: $owner, name: $repo) {
object(oid: $sha) {
... on Commit {
associatedPullRequests(first: 1) {
nodes {
number
body
}
}
}
}
}
}
`;
const prResult = await graphqlWithAuth(searchQuery, {
owner,
repo,
sha: commitSha,
});
const pr = prResult.repository.object.associatedPullRequests.nodes[0];
if (!pr) {
console.log("No PR found for this commit.");
return;
}
const prNumber = pr.number;
const prBody = pr.body;
const match = prBody.match(/#(\d+)/);
if (!match) {
console.log("No discussion ID found in PR body.");
return;
}
const discussionNumber = match[1];
console.log(`Extracted Discussion Number: ${discussionNumber}`);
// Fetch GraphQL discussion ID
const discussionQuery = `
query($owner: String!, $repo: String!, $number: Int!) {
repository(owner: $owner, name: $repo) {
discussion(number: $number) {
id
}
}
}
`;
//
try {
const discussionResponse = await graphqlWithAuth(discussionQuery, {
owner,
repo,
number: parseInt(discussionNumber, 10),
});
const discussionQLId = discussionResponse.repository.discussion.id;
if (!discussionQLId) {
console.log("Failed to fetch discussion GraphQL ID.");
return;
}
} catch (error) {
console.error("Discussion not found or error occurred while fetching discussion:", error);
return;
}
// Post comment
const commentMutation = `
mutation($discussionId: ID!, $body: String!) {
addDiscussionComment(input: { discussionId: $discussionId, body: $body }) {
comment { id body }
}
}
`;
const commentResponse = await graphqlWithAuth(commentMutation, {
discussionId: discussionQLId,
body: `Merged with PR #${prNumber}`,
});
const commentId = commentResponse.addDiscussionComment.comment.id;
if (!commentId) {
console.log("Failed to post the comment.");
return;
}
console.log(`Comment Posted Successfully! Comment ID: ${commentId}`);
// Mark comment as answer
const markAnswerMutation = `
mutation($id: ID!) {
markDiscussionCommentAsAnswer(input: { id: $id }) {
discussion { id title }
}
}
`;
await graphqlWithAuth(markAnswerMutation, { id: commentId });
console.log("Comment marked as answer successfully!");
} catch (error) {
console.error("Error:", error);
process.exit(1);
}
})();
EOF

View File

@ -1,4 +1,5 @@
name: Auto-Close Wrong Template Issues
name: Auto-Close tteck Issues
on:
issues:
types: [opened]
@ -8,7 +9,7 @@ jobs:
if: github.repository == 'community-scripts/ProxmoxVED'
runs-on: ubuntu-latest
steps:
- name: Auto-close if wrong Template issue detected
- name: Auto-close if tteck script detected
uses: actions/github-script@v7
with:
script: |
@ -17,13 +18,18 @@ jobs:
const issueNumber = issue.number;
// Check for tteck script mention
if (content.includes("Template debian-13-standard_13.1-2_amd64.tar.zst [local]") || content.includes("Container creation failed. Checking if template is corrupted or incomplete.") || content.includes("Template is valid, but container creation still failed.")){
const message = `Hello, it looks like you are referencing a container creation issue!
if (content.includes("tteck") || content.includes("tteck/Proxmox")) {
const message = `Hello, it looks like you are referencing the **old tteck repo**.
We get many similar issues on this topic, so please check this discussion: #8126.
If this did not solve your problem, please reopen this issue.
This repository is no longer used for active scripts.
**Please update your bookmarks** and use: [https://helper-scripts.com](https://helper-scripts.com)
This issue is being closed automatically.`;
Also make sure your Bash command starts with:
\`\`\`bash
bash <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/ct/...)
\`\`\`
This issue is being closed automatically.`;
await github.rest.issues.createComment({
...context.repo,

View File

@ -13,73 +13,23 @@ jobs:
runs-on: ubuntu-latest
if: contains(github.event.issue.labels.*.name, 'Ready For Testing') && github.repository == 'community-scripts/ProxmoxVED'
steps:
- name: Extract Issue Title and Script Type
id: extract_info
env:
ISSUE_BODY: ${{ github.event.issue.body }}
run: |
# Extract title (lowercase, spaces to dashes)
TITLE=$(echo '${{ github.event.issue.title }}' | tr '[:upper:]' '[:lower:]' | sed 's/ /-/g')
echo "TITLE=$TITLE" >> $GITHUB_ENV
- name: Extract Issue Title (Lowercase & Underscores)
id: extract_title
run: echo "TITLE=$(echo '${{ github.event.issue.title }}' | tr '[:upper:]' '[:lower:]' | sed 's/ /-/g')" >> $GITHUB_ENV
# Extract script type from issue body (using env var to handle special chars)
if echo "$ISSUE_BODY" | grep -qi "CT (LXC Container)"; then
SCRIPT_TYPE="ct"
elif echo "$ISSUE_BODY" | grep -qi "VM (Virtual Machine)"; then
SCRIPT_TYPE="vm"
elif echo "$ISSUE_BODY" | grep -qi "Addon (tools/addon)"; then
SCRIPT_TYPE="addon"
elif echo "$ISSUE_BODY" | grep -qi "PVE Tool (tools/pve)"; then
SCRIPT_TYPE="pve"
else
# Fallback: detect by filename pattern or default to ct
if [[ "$TITLE" == *"-vm"* ]]; then
SCRIPT_TYPE="vm"
else
SCRIPT_TYPE="ct"
fi
fi
echo "SCRIPT_TYPE=$SCRIPT_TYPE" >> $GITHUB_ENV
echo "Detected script type: $SCRIPT_TYPE for title: $TITLE"
- name: Check if Files Exist in community-scripts/ProxmoxVED
- name: Check if Files Exist in community-scripts/ProxmoxVE
id: check_files
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
REPO="community-scripts/ProxmoxVED"
API_URL="https://api.github.com/repos/$REPO/contents"
TITLE="${{ env.TITLE }}"
SCRIPT_TYPE="${{ env.SCRIPT_TYPE }}"
# Define files based on script type
case "$SCRIPT_TYPE" in
ct)
FILES=(
"ct/${TITLE}.sh"
"install/${TITLE}-install.sh"
"frontend/public/json/${TITLE}.json"
)
;;
vm)
FILES=(
"vm/${TITLE}.sh"
"frontend/public/json/${TITLE}.json"
)
;;
addon)
FILES=(
"tools/addon/${TITLE}.sh"
"frontend/public/json/${TITLE}.json"
)
;;
pve)
FILES=(
"tools/pve/${TITLE}.sh"
)
;;
esac
FILES=(
"ct/${{ env.TITLE }}.sh"
"install/${{ env.TITLE }}-install.sh"
"frontend/public/json/${{ env.TITLE }}.json"
)
EXISTING_FILES=()
@ -92,89 +42,45 @@ jobs:
echo "$FILE does NOT exist in $REPO"
fi
done
echo "EXISTING_FILES=${EXISTING_FILES[*]}" >> $GITHUB_ENV
echo "EXISTING_FILES=${EXISTING_FILES[@]}" >> $GITHUB_ENV
- name: Create message to send
id: create_message
run: |
TITLE="${{ env.TITLE }}"
SCRIPT_TYPE="${{ env.SCRIPT_TYPE }}"
VAR="The ${{ env.TITLE }} script is ready for testing:\n"
VAR+="\`\`\`bash -c \"\$(curl -fsSL https://github.com/community-scripts/ProxmoxVED/raw/main/ct/${{ env.TITLE }}.sh)\"\`\`\`\n"
if [[ " ${EXISTING_FILES[@]} " =~ " frontend/public/json/${TITLE}.json " ]]; then
JSON=$(curl -fsSL https://github.com/community-scripts/ProxmoxVED/raw/main/frontend/public/json/${{ env.TITLE }}.json)
username=$(echo "$JSON" | jq -r '.default_credentials.username')
password=$(echo "$JSON" | jq -r '.default_credentials.password')
mapfile -t notes_array < <(echo "$JSON" | jq -r '.notes[].text')
VAR="The ${TITLE} script is ready for testing:\n"
if [[ -n "$username" && "$username" != "null" || -n "$password" && "$password" != "null" ]]; then
VAR+="Default credentials:\n"
# Generate correct command based on script type
case "$SCRIPT_TYPE" in
ct)
VAR+="\`\`\`bash -c \"\$(curl -fsSL https://github.com/community-scripts/ProxmoxVED/raw/main/ct/${TITLE}.sh)\"\`\`\`\n"
;;
vm)
VAR+="\`\`\`bash -c \"\$(curl -fsSL https://github.com/community-scripts/ProxmoxVED/raw/main/vm/${TITLE}.sh)\"\`\`\`\n"
;;
addon)
VAR+="\`\`\`bash -c \"\$(curl -fsSL https://github.com/community-scripts/ProxmoxVED/raw/main/tools/addon/${TITLE}.sh)\"\`\`\`\n"
;;
pve)
VAR+="\`\`\`bash -c \"\$(curl -fsSL https://github.com/community-scripts/ProxmoxVED/raw/main/tools/pve/${TITLE}.sh)\"\`\`\`\n"
;;
esac
if [[ -n "$username" && "$username" != "null" ]]; then
VAR+="Username: $username\n"
fi
# Try to get JSON info (may not exist for all types)
JSON_FILE=""
case "$SCRIPT_TYPE" in
ct|vm|addon)
JSON_FILE="frontend/public/json/${TITLE}.json"
;;
esac
if [[ -n "$password" && "$password" != "null" ]]; then
VAR+="Password: $password\n"
fi
VAR+="\n"
fi
if [[ -n "$JSON_FILE" ]]; then
JSON=$(curl -fsSL "https://github.com/community-scripts/ProxmoxVED/raw/main/${JSON_FILE}" 2>/dev/null || echo "{}")
if [[ "$JSON" != "{}" && "$JSON" != "" ]]; then
username=$(echo "$JSON" | jq -r '.default_credentials.username // empty')
password=$(echo "$JSON" | jq -r '.default_credentials.password // empty')
if [[ -n "$username" || -n "$password" ]]; then
VAR+="Default credentials:\n"
[[ -n "$username" ]] && VAR+="Username: $username\n"
[[ -n "$password" ]] && VAR+="Password: $password\n"
VAR+="\n"
fi
# Get notes
mapfile -t notes_array < <(echo "$JSON" | jq -r '.notes[]?.text // empty' 2>/dev/null)
if [ ${#notes_array[@]} -gt 0 ]; then
for note in "${notes_array[@]}"; do
[[ -n "$note" ]] && VAR+="$note\n"
done
VAR+="\n"
fi
if [ ${#notes_array[@]} -gt 0 ]; then
for note in "${notes_array[@]}"; do
VAR+="$note\n"
done
VAR+="\n"
fi
fi
VAR+="Note: This is not in the official repo yet—it's just a dev version! After merging into ProxmoxVE, it will need to be recreated.\n\n"
VAR+="Discussion & issue tracking:\n"
VAR+="${{ github.event.issue.html_url }}"
echo "message=$VAR" >> $GITHUB_ENV
- name: Check if Discord thread exists
id: check_thread
run: |
ISSUE_TITLE="${{ github.event.issue.title }}"
THREAD_ID=$(curl -s -X GET "https://discord.com/api/v10/guilds/${{ secrets.DISCORD_GUILD_ID }}/threads/active" \
-H "Authorization: Bot ${{ secrets.DISCORD_BOT_TOKEN }}" \
-H "Content-Type: application/json" | \
jq -r --arg TITLE "$ISSUE_TITLE" --arg PARENT_ID "${{ secrets.DISCORD_CHANNEL_ID }}" \
'.threads[] | select(.parent_id == $PARENT_ID and .name == ("Wanted Tester for " + $TITLE)) | .id')
if [ -n "$THREAD_ID" ]; then
echo "thread_exists=true" >> "$GITHUB_OUTPUT"
else
echo "thread_exists=false" >> "$GITHUB_OUTPUT"
fi
- name: Create a forumpost in Discord
if: steps.check_thread.outputs.thread_exists != 'true'
id: post_to_discord
env:
DISCORD_CHANNEL_ID: ${{ secrets.DISCORD_CHANNEL_ID }}
@ -190,16 +96,8 @@ jobs:
-H "Content-Type: application/json" \
-d "$JSON_PAYLOAD")
HTTP_BODY=$(echo "$RESPONSE" | head -n -1)
HTTP_CODE=$(echo "$RESPONSE" | tail -n1)
THREAD_ID=$(echo "$HTTP_BODY" | jq -r '.id')
STATUS_CODE=$(echo "$RESPONSE" | tail -n 1)
if [[ "$HTTP_CODE" == "201" && -n "$THREAD_ID" ]]; then
LOCK_RESPONSE=$(curl -s -X PATCH "https://discord.com/api/v10/channels/$THREAD_ID" \
-H "Authorization: Bot $DISCORD_BOT_TOKEN" \
-H "Content-Type: application/json" \
-d '{"locked": true}')
if [ "$STATUS_CODE" -eq 201 ]; then
echo "Discord post created successfully!"
else
echo "Response: $RESPONSE"
@ -208,7 +106,6 @@ jobs:
fi
- name: Comment on Issue
if: steps.check_thread.outputs.thread_exists != 'true'
id: comment_on_issue
env:
MESSAGE: ${{ env.message }}

View File

@ -9,12 +9,12 @@ jobs:
close_discord_thread:
if: github.repository == 'community-scripts/ProxmoxVED'
runs-on: ubuntu-latest
env:
ISSUE_TITLE: ${{ github.event.issue.title }}
steps:
- name: Get thread ID and close thread
run: |
ISSUE_TITLE="${{ github.event.issue.title }}"
THREAD_ID=$(curl -s -X GET "https://discord.com/api/v10/guilds/${{ secrets.DISCORD_GUILD_ID }}/threads/active" \
-H "Authorization: Bot ${{ secrets.DISCORD_BOT_TOKEN }}" \
-H "Content-Type: application/json" | \

View File

@ -14,43 +14,9 @@ jobs:
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Generate a token for PR approval and merge
id: generate-token-merge
uses: actions/create-github-app-token@v2
with:
app-id: ${{ secrets.APP_ID_APPROVE_AND_MERGE }}
private-key: ${{ secrets.APP_KEY_APPROVE_AND_MERGE }}
owner: community-scripts
repositories: ProxmoxVED
- name: Extract Issue Info and Script Type
id: extract_info
run: |
TITLE=$(echo '${{ github.event.issue.title }}' | tr '[:upper:]' '[:lower:]' | tr -d ' ')
BODY='${{ github.event.issue.body }}'
echo "TITLE=$TITLE" >> $GITHUB_ENV
# Detect script type from issue body
if echo "$BODY" | grep -qi "CT (LXC Container)"; then
SCRIPT_TYPE="ct"
elif echo "$BODY" | grep -qi "VM (Virtual Machine)"; then
SCRIPT_TYPE="vm"
elif echo "$BODY" | grep -qi "Addon (tools/addon)"; then
SCRIPT_TYPE="addon"
elif echo "$BODY" | grep -qi "PVE Tool (tools/pve)"; then
SCRIPT_TYPE="pve"
else
# Fallback detection
if [[ "$TITLE" == *"-vm"* ]]; then
SCRIPT_TYPE="vm"
else
SCRIPT_TYPE="ct"
fi
fi
echo "SCRIPT_TYPE=$SCRIPT_TYPE" >> $GITHUB_ENV
echo "Detected: $TITLE (type: $SCRIPT_TYPE)"
- name: Extract Issue Title (Lowercase & Underscores)
id: extract_title
run: echo "TITLE=$(echo '${{ github.event.issue.title }}' | tr '[:upper:]' '[:lower:]' | sed 's/ /_/g')" >> $GITHUB_ENV
- name: Check if Files Exist in community-scripts/ProxmoxVE
id: check_files
@ -59,36 +25,12 @@ jobs:
run: |
REPO="community-scripts/ProxmoxVE"
API_URL="https://api.github.com/repos/$REPO/contents"
TITLE="${{ env.TITLE }}"
SCRIPT_TYPE="${{ env.SCRIPT_TYPE }}"
# Define files based on script type
case "$SCRIPT_TYPE" in
ct)
FILES=(
"ct/${TITLE}.sh"
"install/${TITLE}-install.sh"
"frontend/public/json/${TITLE}.json"
)
;;
vm)
FILES=(
"vm/${TITLE}.sh"
"frontend/public/json/${TITLE}.json"
)
;;
addon)
FILES=(
"tools/addon/${TITLE}.sh"
"frontend/public/json/${TITLE}.json"
)
;;
pve)
FILES=(
"tools/pve/${TITLE}.sh"
)
;;
esac
FILES=(
"ct/${TITLE}.sh"
"install/${TITLE}-install.sh"
"frontend/public/json/${TITLE}.json"
)
EXISTS=false
for FILE in "${FILES[@]}"; do
@ -110,79 +52,33 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
TITLE="${{ env.TITLE }}"
SCRIPT_TYPE="${{ env.SCRIPT_TYPE }}"
branch="delete_files_${TITLE}"
branch=$(echo "delete-files_${{ github.event.issue.number }}_${TITLE}" | tr '[:upper:]' '[:lower:]' | sed 's/ /_/g')
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
git checkout -b $branch
# Delete files based on script type
case "$SCRIPT_TYPE" in
ct)
rm -f "ct/${TITLE}.sh"
rm -f "ct/headers/${TITLE}"
rm -f "install/${TITLE}-install.sh"
rm -f "frontend/public/json/${TITLE}.json"
# Also try alpine variant
if [[ "$TITLE" == alpine-* ]]; then
stripped="${TITLE#alpine-}"
rm -f "frontend/public/json/${stripped}.json"
fi
;;
vm)
rm -f "vm/${TITLE}.sh"
rm -f "frontend/public/json/${TITLE}.json"
;;
addon)
rm -f "tools/addon/${TITLE}.sh"
rm -f "frontend/public/json/${TITLE}.json"
;;
pve)
rm -f "tools/pve/${TITLE}.sh"
;;
esac
rm -f ct/${TITLE}.sh
rm -f install/${TITLE}-install.sh
rm -f frontend/public/json/${TITLE}.json
git add .
if git diff --staged --quiet; then
echo "No files to delete. Exiting..."
exit 0
fi
git commit -m "Deleted files for issue: ${{ github.event.issue.title }}"
git push origin $branch
gh pr create --title "Delete Files for ${{ github.event.issue.title }} after Merge to Main" --body "Delete files after merge in main repo." --base main --head $branch
git commit -m "Delete ${TITLE} (${SCRIPT_TYPE}) after migration to ProxmoxVE"
git push origin $branch --force
gh pr create \
--title "Delete ${TITLE} after Merge to Main" \
--body "Automated cleanup: Delete ${TITLE} (${SCRIPT_TYPE}) files after successful migration to ProxmoxVE main repo." \
--base main \
--head $branch
pr_number=$(gh pr list --head $branch --json number --jq '.[].number')
echo "pr_number=$pr_number" >> $GITHUB_ENV
echo "branch=$branch" >> $GITHUB_ENV
- name: Approve pull request and merge
if: env.pr_number != ''
env:
GH_TOKEN: ${{ steps.generate-token-merge.outputs.token }}
run: |
git config --global user.name "github-actions-automerge[bot]"
git config --global user.email "github-actions-automerge[bot]@users.noreply.github.com"
PR_NUMBER="${{ env.pr_number }}"
if [ -n "$PR_NUMBER" ]; then
gh pr review $PR_NUMBER --approve
gh pr merge $PR_NUMBER --squash --admin
fi
pr_number=$(gh pr list | grep -m 1 $branch | awk '{print $1}')
#gh pr merge $pr_number --squash
echo pr_number=$pr_number >> $GITHUB_ENV
- name: Comment on Issue
if: env.pr_number != ''
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const issue_number = context.payload.issue.number;
const message = `Files deleted with PR #${process.env.pr_number}`;
github.rest.issues.createComment({
owner: context.repo.owner,

View File

@ -24,12 +24,10 @@ jobs:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@v2
uses: actions/create-github-app-token@v1
with:
app-id: ${{ vars.APP_ID }}
private-key: ${{ secrets.APP_PRIVATE_KEY }}
owner: community-scripts
repositories: ProxmoxVED
- name: Crawl from Github API
env:

View File

@ -24,21 +24,10 @@ jobs:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@v2
uses: actions/create-github-app-token@v1
with:
app-id: ${{ vars.APP_ID }}
private-key: ${{ secrets.APP_PRIVATE_KEY }}
owner: community-scripts
repositories: ProxmoxVED
- name: Generate a token for PR approval and merge
id: generate-token-merge
uses: actions/create-github-app-token@v2
with:
app-id: ${{ secrets.APP_ID_APPROVE_AND_MERGE }}
private-key: ${{ secrets.APP_KEY_APPROVE_AND_MERGE }}
owner: community-scripts
repositories: ProxmoxVED
- name: Crawl from newreleases.io
env:
@ -121,12 +110,10 @@ jobs:
- name: Approve pull request and merge
if: env.changed == 'true'
env:
GH_TOKEN: ${{ steps.generate-token-merge.outputs.token }}
GH_TOKEN: ${{ secrets.PAT_MICHEL }}
run: |
git config --global user.name "github-actions-automege[bot]"
git config --global user.email "github-actions-automege[bot]@users.noreply.github.com"
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[0].number')
PR_NUMBER=$(gh pr list --head "update_versions" --json number --jq '.[].number')
if [ -n "$PR_NUMBER" ]; then
gh pr review "$PR_NUMBER" --approve
gh pr merge "$PR_NUMBER" --squash --admin
gh pr review $PR_NUMBER --approve
gh pr merge $PR_NUMBER --squash --delete-branch --admin
fi

.github/workflows/live/changelog-pr.yml
View File

@ -0,0 +1,226 @@
name: Create Changelog Pull Request
on:
push:
branches: ["main"]
workflow_dispatch:
jobs:
update-changelog-pull-request:
runs-on: ubuntu-latest
env:
CONFIG_PATH: .github/changelog-pr-config.json
BRANCH_NAME: github-action-update-changelog
AUTOMATED_PR_LABEL: "automated pr"
permissions:
contents: write
pull-requests: write
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@v1
with:
app-id: ${{ vars.APP_ID }}
private-key: ${{ secrets.APP_PRIVATE_KEY }}
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get latest dates in changelog
run: |
DATES=$(grep -E '^## [0-9]{4}-[0-9]{2}-[0-9]{2}' CHANGELOG.md | head -n 2 | awk '{print $2}')
LATEST_DATE=$(echo "$DATES" | sed -n '1p')
SECOND_LATEST_DATE=$(echo "$DATES" | sed -n '2p')
TODAY=$(date -u +%Y-%m-%d)
echo "TODAY=$TODAY" >> $GITHUB_ENV
if [[ "$LATEST_DATE" == "$TODAY" ]]; then
echo "LATEST_DATE=$SECOND_LATEST_DATE" >> $GITHUB_ENV
else
echo "LATEST_DATE=$LATEST_DATE" >> $GITHUB_ENV
fi
- name: Get categorized pull requests
id: get-categorized-prs
uses: actions/github-script@v7
with:
script: |
const fs = require('fs').promises;
const path = require('path');
const configPath = path.resolve(process.env.CONFIG_PATH);
const fileContent = await fs.readFile(configPath, 'utf-8');
const changelogConfig = JSON.parse(fileContent);
const categorizedPRs = changelogConfig.map(obj => ({
...obj,
notes: [],
subCategories: obj.subCategories ?? (
obj.labels.includes("update script") ? [
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
{ title: "✨ New Features", labels: ["feature"], notes: [] },
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] }
] :
obj.labels.includes("maintenance") ? [
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
{ title: "✨ New Features", labels: ["feature"], notes: [] },
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
{ title: "📡 API", labels: ["api"], notes: [] },
{ title: "Github", labels: ["github"], notes: [] }
] :
obj.labels.includes("website") ? [
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
{ title: "✨ New Features", labels: ["feature"], notes: [] },
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
{ title: "Script Information", labels: ["json"], notes: [] }
] : []
)
}));
const latestDateInChangelog = new Date(process.env.LATEST_DATE);
latestDateInChangelog.setUTCHours(23, 59, 59, 999);
const { data: pulls } = await github.rest.pulls.list({
owner: context.repo.owner,
repo: context.repo.repo,
base: "main",
state: "closed",
sort: "updated",
direction: "desc",
per_page: 100,
});
pulls.filter(pr =>
pr.merged_at &&
new Date(pr.merged_at) > latestDateInChangelog &&
!pr.labels.some(label =>
["invalid", "wontdo", process.env.AUTOMATED_PR_LABEL].includes(label.name.toLowerCase())
)
).forEach(pr => {
const prLabels = pr.labels.map(label => label.name.toLowerCase());
const prNote = `- ${pr.title} [@${pr.user.login}](https://github.com/${pr.user.login}) ([#${pr.number}](${pr.html_url}))`;
const updateScriptsCategory = categorizedPRs.find(category =>
category.labels.some(label => prLabels.includes(label))
);
if (updateScriptsCategory) {
const subCategory = updateScriptsCategory.subCategories.find(sub =>
sub.labels.some(label => prLabels.includes(label))
);
if (subCategory) {
subCategory.notes.push(prNote);
} else {
updateScriptsCategory.notes.push(prNote);
}
}
});
console.log(JSON.stringify(categorizedPRs, null, 2));
return categorizedPRs;
- name: Update CHANGELOG.md
uses: actions/github-script@v7
with:
script: |
const fs = require('fs').promises;
const path = require('path');
const today = process.env.TODAY;
const latestDateInChangelog = process.env.LATEST_DATE;
const changelogPath = path.resolve('CHANGELOG.md');
const categorizedPRs = ${{ steps.get-categorized-prs.outputs.result }};
console.log(JSON.stringify(categorizedPRs, null, 2));
let newReleaseNotes = `## ${today}\n\n`;
for (const { title, notes, subCategories } of categorizedPRs) {
const hasSubcategories = subCategories && subCategories.length > 0;
const hasMainNotes = notes.length > 0;
const hasSubNotes = hasSubcategories && subCategories.some(sub => sub.notes && sub.notes.length > 0);
if (hasMainNotes || hasSubNotes) {
newReleaseNotes += `### ${title}\n\n`;
}
if (hasMainNotes) {
newReleaseNotes += ` ${notes.join("\n")}\n\n`;
}
if (hasSubcategories) {
for (const { title: subTitle, notes: subNotes } of subCategories) {
if (subNotes && subNotes.length > 0) {
newReleaseNotes += ` - #### ${subTitle}\n\n`;
newReleaseNotes += ` ${subNotes.join("\n ")}\n\n`;
}
}
}
}
const changelogContent = await fs.readFile(changelogPath, 'utf-8');
const changelogIncludesTodaysReleaseNotes = changelogContent.includes(`\n## ${today}`);
const regex = changelogIncludesTodaysReleaseNotes
? new RegExp(`## ${today}.*(?=## ${latestDateInChangelog})`, "gs")
: new RegExp(`(?=## ${latestDateInChangelog})`, "gs");
const newChangelogContent = changelogContent.replace(regex, newReleaseNotes);
await fs.writeFile(changelogPath, newChangelogContent);
- name: Check for changes
id: verify-diff
run: |
git diff --quiet . || echo "changed=true" >> $GITHUB_ENV
- name: Commit and push changes
if: env.changed == 'true'
run: |
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
git add CHANGELOG.md
git commit -m "Update CHANGELOG.md"
git checkout -b $BRANCH_NAME || git checkout $BRANCH_NAME
git push origin $BRANCH_NAME --force
- name: Create pull request if not exists
if: env.changed == 'true'
env:
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
PR_EXISTS=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
if [ -z "$PR_EXISTS" ]; then
gh pr create --title "[Github Action] Update CHANGELOG.md" \
--body "This PR is auto-generated by a Github Action to update the CHANGELOG.md file." \
--head $BRANCH_NAME \
--base main \
--label "$AUTOMATED_PR_LABEL"
fi
- name: Approve pull request
if: env.changed == 'true'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
if [ -n "$PR_NUMBER" ]; then
gh pr review $PR_NUMBER --approve
fi
- name: Re-approve pull request after update
if: env.changed == 'true'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
if [ -n "$PR_NUMBER" ]; then
gh pr review $PR_NUMBER --approve
fi

View File

@ -0,0 +1,122 @@
name: Close Discussion on PR Merge
on:
pull_request:
types: [closed]
jobs:
close-discussion:
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Set Up Node.js
uses: actions/setup-node@v4
with:
node-version: "20"
- name: Install Dependencies
run: npm install zx @octokit/graphql
- name: Close Discussion
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_BODY: ${{ github.event.pull_request.body }}
PR_NUMBER: ${{ github.event.pull_request.number }}
REPO_OWNER: ${{ github.repository_owner }}
REPO_NAME: ${{ github.event.repository.name }}
run: |
npx zx << 'EOF'
import { graphql } from "@octokit/graphql";
(async function() {
try {
const token = process.env.GITHUB_TOKEN;
const prBody = process.env.PR_BODY;
const prNumber = process.env.PR_NUMBER;
const owner = process.env.REPO_OWNER;
const repo = process.env.REPO_NAME;
if (!token || !prBody || !prNumber || !owner || !repo) {
console.log("Missing required environment variables.");
process.exit(1);
}
const match = prBody.match(/#(\d+)/);
if (!match) {
console.log("No discussion ID found in PR body.");
return;
}
const discussionNumber = match[1];
console.log(`Extracted Discussion Number: ${discussionNumber}`);
console.log(`PR Number: ${prNumber}`);
console.log(`Repository: ${owner}/${repo}`);
const graphqlWithAuth = graphql.defaults({
headers: { authorization: `Bearer ${token}` },
});
const discussionQuery = `
query($owner: String!, $repo: String!, $number: Int!) {
repository(owner: $owner, name: $repo) {
discussion(number: $number) {
id
}
}
}
`;
const discussionResponse = await graphqlWithAuth(discussionQuery, {
owner,
repo,
number: parseInt(discussionNumber, 10),
});
const discussionQLId = discussionResponse.repository.discussion.id;
if (!discussionQLId) {
console.log("Failed to fetch discussion GraphQL ID.");
return;
}
console.log(`GraphQL Discussion ID: ${discussionQLId}`);
const commentMutation = `
mutation($discussionId: ID!, $body: String!) {
addDiscussionComment(input: { discussionId: $discussionId, body: $body }) {
comment { id body }
}
}
`;
const commentResponse = await graphqlWithAuth(commentMutation, {
discussionId: discussionQLId,
body: `Merged with PR #${prNumber}`,
});
const commentId = commentResponse.addDiscussionComment.comment.id;
if (!commentId) {
console.log("Failed to post the comment.");
return;
}
console.log(`Comment Posted Successfully! Comment ID: ${commentId}`);
const markAnswerMutation = `
mutation($id: ID!) {
markDiscussionCommentAsAnswer(input: { id: $id }) {
discussion { id title }
}
}
`;
await graphqlWithAuth(markAnswerMutation, { id: commentId });
console.log("Comment marked as answer successfully!");
} catch (error) {
console.error("Error:", error);
return;
}
})();
EOF
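The discussion to close is located by matching a plain "#<number>" token in the PR body. A rough shell equivalent of that extraction, handy for checking a PR body locally (the body text below is only an example), could look like:

PR_BODY='Migrated from ProxmoxVED, closes discussion #1234'   # example PR body
DISCUSSION_NUMBER=$(grep -oP '#\K\d+' <<<"$PR_BODY" | head -n 1)
echo "$DISCUSSION_NUMBER"   # -> 1234, the first #-referenced number, mirroring the workflow's regex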

View File

@ -0,0 +1,37 @@
name: Build and Publish Docker Image
on:
push:
branches:
- main
paths:
- '.github/runner/docker/**'
schedule:
- cron: '0 0 * * *'
jobs:
build:
runs-on: ubuntu-latest # To ensure it always builds, we use the GitHub-hosted runner with all the right tooling
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Log in to GHCR
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
run: |
repo_name=${{ github.repository }} # Get repository name
repo_name_lower=$(echo $repo_name | tr '[:upper:]' '[:lower:]') # Convert to lowercase
docker build -t ghcr.io/$repo_name_lower/gh-runner-self:latest -f .github/runner/docker/gh-runner-self.dockerfile .
- name: Push Docker image to GHCR
run: |
repo_name=${{ github.repository }} # Get repository name
repo_name_lower=$(echo $repo_name | tr '[:upper:]' '[:lower:]') # Convert to lowercase
docker push ghcr.io/$repo_name_lower/gh-runner-self:latest
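GHCR rejects image names containing uppercase characters, which is the only reason for the tr step. With an example repository name, the conversion behaves like this:

repo_name="Community-Scripts/ProxmoxVE"                      # example value of ${{ github.repository }}
repo_name_lower=$(echo "$repo_name" | tr '[:upper:]' '[:lower:]')
echo "$repo_name_lower"                                      # -> community-scripts/proxmoxve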

View File

@ -0,0 +1,28 @@
name: Delete JSON date PR Branch
on:
pull_request:
types: [closed]
branches:
- main
jobs:
delete_branch:
runs-on: ubuntu-latest
steps:
- name: Checkout the code
uses: actions/checkout@v3
- name: Delete PR Update Branch
if: github.event.pull_request.merged == true && startsWith(github.event.pull_request.head.ref, 'pr-update-json-')
run: |
PR_BRANCH="${{ github.event.pull_request.head.ref }}"
echo "Deleting branch $PR_BRANCH..."
# Avoid deleting the default branch (e.g., main)
if [[ "$PR_BRANCH" != "main" ]]; then
git push origin --delete "$PR_BRANCH"
else
echo "Skipping deletion of the main branch"
fi

View File

@ -0,0 +1,57 @@
name: Create Daily Release
on:
schedule:
- cron: '1 0 * * *' # Runs daily at 00:01 UTC
workflow_dispatch:
jobs:
create-daily-release:
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Extract first 5000 characters from CHANGELOG.md
run: head -c 5000 CHANGELOG.md > changelog_cropped.md
- name: Debugging - Show extracted changelog
run: |
echo "=== CHANGELOG EXCERPT ==="
cat changelog_cropped.md
echo "========================="
- name: Parse CHANGELOG.md and create release
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
YESTERDAY=$(date -u --date="yesterday" +%Y-%m-%d)
echo "Checking for changes on: $YESTERDAY"
# Ensure yesterday's date exists in the changelog
if ! grep -q "## $YESTERDAY" changelog_cropped.md; then
echo "No entry found for $YESTERDAY, skipping release."
exit 0
fi
# Extract section for yesterday's date
awk -v date="## $YESTERDAY" '
$0 ~ date {found=1; next}
found && /^## [0-9]{4}-[0-9]{2}-[0-9]{2}/ {exit}
found
' changelog_cropped.md > changelog_tmp.md
echo "=== Extracted Changelog ==="
cat changelog_tmp.md
echo "==========================="
# Skip if no content was found
if [ ! -s changelog_tmp.md ]; then
echo "No changes found for $YESTERDAY, skipping release."
exit 0
fi
# Create GitHub release
gh release create "$YESTERDAY" -t "$YESTERDAY" -F changelog_tmp.md
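The awk program prints everything between yesterday's heading and the next dated heading. A local dry run of the same extraction, assuming a CHANGELOG.md in the working directory, would be:

YESTERDAY=$(date -u --date="yesterday" +%Y-%m-%d)
awk -v date="## $YESTERDAY" '
  $0 ~ date {found=1; next}
  found && /^## [0-9]{4}-[0-9]{2}-[0-9]{2}/ {exit}
  found
' CHANGELOG.md   # prints only yesterday's section, or nothing if that date is absent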

.github/workflows/live/script-test.yml
View File

@ -0,0 +1,177 @@
name: Run Scripts on PVE Node for testing
permissions:
pull-requests: write
on:
pull_request_target:
branches:
- main
paths:
- 'install/**.sh'
- 'ct/**.sh'
jobs:
run-install-script:
runs-on: pvenode
steps:
- name: Checkout PR branch
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
fetch-depth: 0
- name: Add Git safe directory
run: |
git config --global --add safe.directory /__w/ProxmoxVED/ProxmoxVE
- name: Set up GH_TOKEN
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo "GH_TOKEN=${GH_TOKEN}" >> $GITHUB_ENV
- name: Get Changed Files
run: |
CHANGED_FILES=$(gh pr diff ${{ github.event.pull_request.number }} --repo ${{ github.repository }} --name-only)
CHANGED_FILES=$(echo "$CHANGED_FILES" | tr '\n' ' ')
echo "Changed files: $CHANGED_FILES"
echo "SCRIPT=$CHANGED_FILES" >> $GITHUB_ENV
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Get scripts
id: check-install-script
run: |
ALL_FILES=()
ADDED_FILES=()
for FILE in ${{ env.SCRIPT }}; do
if [[ $FILE =~ ^install/.*-install\.sh$ ]] || [[ $FILE =~ ^ct/.*\.sh$ ]]; then
STRIPPED_NAME=$(basename "$FILE" | sed 's/-install//' | sed 's/\.sh$//')
if [[ ! " ${ADDED_FILES[@]} " =~ " $STRIPPED_NAME " ]]; then
ALL_FILES+=("$FILE")
ADDED_FILES+=("$STRIPPED_NAME") # Mark this base file as added (without the path)
fi
fi
done
ALL_FILES=$(echo "${ALL_FILES[@]}" | xargs)
echo "$ALL_FILES"
echo "ALL_FILES=$ALL_FILES" >> $GITHUB_ENV
- name: Run scripts
id: run-install
continue-on-error: true
run: |
set +e
# run for each file in ct/
for FILE in ${{ env.ALL_FILES }}; do
STRIPPED_NAME=$(basename "$FILE" | sed 's/-install//' | sed 's/\.sh$//')
echo "Running Test for: $STRIPPED_NAME"
if grep -E -q 'read\s+-r\s+-p\s+".*"\s+\w+' "$FILE"; then
echo "The script contains an interactive prompt. Skipping execution."
continue
fi
if [[ $FILE =~ ^install/.*-install\.sh$ ]]; then
CT_SCRIPT="ct/$STRIPPED_NAME.sh"
if [[ ! -f $CT_SCRIPT ]]; then
echo "No CT script found for $STRIPPED_NAME"
ERROR_MSG="No CT script found for $FILE"
echo "$ERROR_MSG" > result_$STRIPPED_NAME.log
continue
fi
if grep -E -q 'read\s+-r\s+-p\s+".*"\s+\w+' "install/$STRIPPED_NAME-install.sh"; then
echo "The script contains an interactive prompt. Skipping execution."
continue
fi
echo "Found CT script for $STRIPPED_NAME"
chmod +x "$CT_SCRIPT"
RUNNING_FILE=$CT_SCRIPT
elif [[ $FILE =~ ^ct/.*\.sh$ ]]; then
INSTALL_SCRIPT="install/$STRIPPED_NAME-install.sh"
if [[ ! -f $INSTALL_SCRIPT ]]; then
echo "No install script found for $STRIPPED_NAME"
ERROR_MSG="No install script found for $FILE"
echo "$ERROR_MSG" > result_$STRIPPED_NAME.log
continue
fi
echo "Found install script for $STRIPPED_NAME"
chmod +x "$INSTALL_SCRIPT"
RUNNING_FILE=$FILE
if grep -E -q 'read\s+-r\s+-p\s+".*"\s+\w+' "ct/$STRIPPED_NAME.sh"; then
echo "The script contains an interactive prompt. Skipping execution."
continue
fi
fi
git remote add community-scripts https://github.com/community-scripts/ProxmoxVE.git
git fetch community-scripts
rm -f .github/workflows/scripts/app-test/pr-build.func || true
rm -f .github/workflows/scripts/app-test/pr-install.func || true
rm -f .github/workflows/scripts/app-test/pr-alpine-install.func || true
rm -f .github/workflows/scripts/app-test/pr-create-lxc.sh || true
git checkout community-scripts/main -- .github/workflows/scripts/app-test/pr-build.func
git checkout community-scripts/main -- .github/workflows/scripts/app-test/pr-install.func
git checkout community-scripts/main -- .github/workflows/scripts/app-test/pr-alpine-install.func
git checkout community-scripts/main -- .github/workflows/scripts/app-test/pr-create-lxc.sh
chmod +x $RUNNING_FILE
chmod +x .github/workflows/scripts/app-test/pr-create-lxc.sh
chmod +x .github/workflows/scripts/app-test/pr-install.func
chmod +x .github/workflows/scripts/app-test/pr-alpine-install.func
chmod +x .github/workflows/scripts/app-test/pr-build.func
sed -i 's|source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)|source .github/workflows/scripts/app-test/pr-build.func|g' "$RUNNING_FILE"
echo "Executing $RUNNING_FILE"
ERROR_MSG=$(./$RUNNING_FILE 2>&1 > /dev/null)
echo "Finished running $FILE"
if [ -n "$ERROR_MSG" ]; then
echo "ERROR in $STRIPPED_NAME: $ERROR_MSG"
echo "$ERROR_MSG" > result_$STRIPPED_NAME.log
fi
done
set -e # Restore exit-on-error
- name: Cleanup PVE Node
run: |
containers=$(pct list | tail -n +2 | awk '{print $0 " " $4}' | awk '{print $1}')
for container_id in $containers; do
status=$(pct status $container_id | awk '{print $2}')
if [[ $status == "running" ]]; then
pct stop $container_id
pct destroy $container_id
fi
done
- name: Post error comments
run: |
ERROR="false"
SEARCH_LINE=".github/workflows/scripts/app-test/pr-build.func: line 255:"
# Get all existing comments on the PR
EXISTING_COMMENTS=$(gh pr view ${{ github.event.pull_request.number }} --repo ${{ github.repository }} --json comments --jq '.comments[].body')
for FILE in ${{ env.ALL_FILES }}; do
STRIPPED_NAME=$(basename "$FILE" | sed 's/-install//' | sed 's/\.sh$//')
if [[ ! -f result_$STRIPPED_NAME.log ]]; then
continue
fi
ERROR_MSG=$(cat result_$STRIPPED_NAME.log)
if [ -n "$ERROR_MSG" ]; then
CLEANED_ERROR_MSG=$(echo "$ERROR_MSG" | sed "s|$SEARCH_LINE.*||")
COMMENT_BODY=":warning: The script _**$FILE**_ failed with the following message: <br> <div><strong>${CLEANED_ERROR_MSG}</strong></div>"
# Check if the comment already exists
if echo "$EXISTING_COMMENTS" | grep -qF "$COMMENT_BODY"; then
echo "Skipping duplicate comment for $FILE"
else
echo "Posting error message for $FILE"
gh pr comment ${{ github.event.pull_request.number }} \
--repo ${{ github.repository }} \
--body "$COMMENT_BODY"
ERROR="true"
fi
fi
done
echo "ERROR=$ERROR" >> $GITHUB_ENV

.github/workflows/live/script_format.yml
View File

@ -0,0 +1,243 @@
name: Script Format Check
permissions:
pull-requests: write
on:
pull_request_target:
branches:
- main
paths:
- 'install/*.sh'
- 'ct/*.sh'
jobs:
run-install-script:
runs-on: pvenode
steps:
- name: Checkout PR branch (supports forks)
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
fetch-depth: 0
- name: Add Git safe directory
run: |
git config --global --add safe.directory /__w/ProxmoxVED/ProxmoxVE
- name: Set up GH_TOKEN
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo "GH_TOKEN=${GH_TOKEN}" >> $GITHUB_ENV
- name: Get Changed Files
run: |
CHANGED_FILES=$(gh pr diff ${{ github.event.pull_request.number }} --repo ${{ github.repository }} --name-only)
CHANGED_FILES=$(echo "$CHANGED_FILES" | tr '\n' ' ')
echo "Changed files: $CHANGED_FILES"
echo "SCRIPT=$CHANGED_FILES" >> $GITHUB_ENV
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Check scripts
id: run-install
continue-on-error: true
run: |
for FILE in ${{ env.SCRIPT }}; do
STRIPPED_NAME=$(basename "$FILE" | sed 's/-install//' | sed 's/\.sh$//')
echo "Running Test for: $STRIPPED_NAME"
FILE_STRIPPED="${FILE##*/}"
LOG_FILE="result_$FILE_STRIPPED.log"
if [[ $FILE =~ ^ct/.*\.sh$ ]]; then
FIRST_LINE=$(sed -n '1p' "$FILE")
[[ "$FIRST_LINE" != "#!/usr/bin/env bash" ]] && echo "Line 1 was $FIRST_LINE | Should be: #!/usr/bin/env bash" >> "$LOG_FILE"
SECOND_LINE=$(sed -n '2p' "$FILE")
[[ "$SECOND_LINE" != "source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)" ]] &&
echo "Line 2 was $SECOND_LINE | Should be: source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)" >> "$LOG_FILE"
THIRD_LINE=$(sed -n '3p' "$FILE")
if ! [[ "$THIRD_LINE" =~ ^#\ Copyright\ \(c\)\ [0-9]{4}-[0-9]{4}\ community-scripts\ ORG$ || "$THIRD_LINE" =~ ^Copyright\ \(c\)\ [0-9]{4}-[0-9]{4}\ tteck$ ]]; then
echo "Line 3 was $THIRD_LINE | Should be: # Copyright (c) 2021-2025 community-scripts ORG" >> "$LOG_FILE"
fi
EXPECTED_AUTHOR="# Author:"
EXPECTED_LICENSE="# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE"
EXPECTED_SOURCE="# Source:"
EXPECTED_EMPTY=""
for i in {4..7}; do
LINE=$(sed -n "${i}p" "$FILE")
case $i in
4)
[[ $LINE == $EXPECTED_AUTHOR* ]] || printf "Line %d was: '%s' | Should start with: '%s'\n" "$i" "$LINE" "$EXPECTED_AUTHOR" >> $LOG_FILE
;;
5)
[[ "$LINE" == "$EXPECTED_LICENSE" ]] || printf "Line %d was: '%s' | Should be: '%s'\n" "$i" "$LINE" "$EXPECTED_LICENSE" >> $LOG_FILE
;;
6)
[[ $LINE == $EXPECTED_SOURCE* ]] || printf "Line %d was: '%s' | Should start with: '%s'\n" "$i" "$LINE" "$EXPECTED_SOURCE" >> $LOG_FILE
;;
7)
[[ -z $LINE ]] || printf "Line %d was: '%s' | Should be empty\n" "$i" "$LINE" >> $LOG_FILE
;;
esac
done
EXPECTED_PREFIXES=(
"APP="
"var_tags="
"var_cpu=" # Must be a number
"var_ram=" # Must be a number
"var_disk=" # Must be a number
"var_os=" # Must be debian, alpine, or ubuntu
"var_version="
"var_unprivileged=" # Must be 0 or 1
)
for i in {8..15}; do
LINE=$(sed -n "${i}p" "$FILE")
INDEX=$((i - 8))
case $INDEX in
2|3|4) # var_cpu, var_ram, var_disk (must be numbers)
if [[ "$LINE" =~ ^${EXPECTED_PREFIXES[$INDEX]}([0-9]+)$ ]]; then
continue # Valid
else
echo "Line $i was '$LINE' | Should be: '${EXPECTED_PREFIXES[$INDEX]}<NUMBER>'" >> "$LOG_FILE"
fi
;;
5) # var_os (must be debian, alpine, or ubuntu)
if [[ "$LINE" =~ ^var_os=(debian|alpine|ubuntu)$ ]]; then
continue # Valid
else
echo "Line $i was '$LINE' | Should be: 'var_os=[debian|alpine|ubuntu]'" >> "$LOG_FILE"
fi
;;
7) # var_unprivileged (must be 0 or 1)
if [[ "$LINE" =~ ^var_unprivileged=[01]$ ]]; then
continue # Valid
else
echo "Line $i was '$LINE' | Should be: 'var_unprivileged=[0|1]'" >> "$LOG_FILE"
fi
;;
*) # Other lines (must start with expected prefix)
if [[ "$LINE" == ${EXPECTED_PREFIXES[$INDEX]}* ]]; then
continue # Valid
else
echo "Line $i was '$LINE' | Should start with '${EXPECTED_PREFIXES[$INDEX]}'" >> "$LOG_FILE"
fi
;;
esac
done
for i in {16..20}; do
LINE=$(sed -n "${i}p" "$FILE")
EXPECTED=(
"header_info \"$APP\""
"variables"
"color"
"catch_errors"
"function update_script() {"
)
[[ "$LINE" != "${EXPECTED[$((i-16))]}" ]] && echo "Line $i was $LINE | Should be: ${EXPECTED[$((i-16))]}" >> "$LOG_FILE"
done
cat "$LOG_FILE"
elif [[ $FILE =~ ^install/.*-install\.sh$ ]]; then
FIRST_LINE=$(sed -n '1p' "$FILE")
[[ "$FIRST_LINE" != "#!/usr/bin/env bash" ]] && echo "Line 1 was $FIRST_LINE | Should be: #!/usr/bin/env bash" >> "$LOG_FILE"
SECOND_LINE=$(sed -n '2p' "$FILE")
[[ -n "$SECOND_LINE" ]] && echo "Line 2 should be empty" >> "$LOG_FILE"
THIRD_LINE=$(sed -n '3p' "$FILE")
if ! [[ "$THIRD_LINE" =~ ^#\ Copyright\ \(c\)\ [0-9]{4}-[0-9]{4}\ community-scripts\ ORG$ || "$THIRD_LINE" =~ ^Copyright\ \(c\)\ [0-9]{4}-[0-9]{4}\ tteck$ ]]; then
echo "Line 3 was $THIRD_LINE | Should be: # Copyright (c) 2021-2025 community-scripts ORG" >> "$LOG_FILE"
fi
EXPECTED_AUTHOR="# Author:"
EXPECTED_LICENSE="# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE"
EXPECTED_SOURCE="# Source:"
EXPECTED_EMPTY=""
for i in {4..7}; do
LINE=$(sed -n "${i}p" "$FILE")
case $i in
4)
[[ $LINE == $EXPECTED_AUTHOR* ]] || printf "Line %d was: '%s' | Should start with: '%s'\n" "$i" "$LINE" "$EXPECTED_AUTHOR" >> $LOG_FILE
;;
5)
[[ "$LINE" == "$EXPECTED_LICENSE" ]] || printf "Line %d was: '%s' | Should be: '%s'\n" "$i" "$LINE" "$EXPECTED_LICENSE" >> $LOG_FILE
;;
6)
[[ $LINE == $EXPECTED_SOURCE* ]] || printf "Line %d was: '%s' | Should start with: '%s'\n" "$i" "$LINE" "$EXPECTED_SOURCE" >> $LOG_FILE
;;
7)
[[ -z $LINE ]] || printf "Line %d was: '%s' | Should be empty\n" "$i" "$LINE" >> $LOG_FILE
;;
esac
done
[[ "$(sed -n '8p' "$FILE")" != 'source /dev/stdin <<< "$FUNCTIONS_FILE_PATH"' ]] && echo 'Line 8 should be: source /dev/stdin <<< "$FUNCTIONS_FILE_PATH"' >> "$LOG_FILE"
for i in {9..14}; do
LINE=$(sed -n "${i}p" "$FILE")
EXPECTED=(
"color"
"verb_ip6"
"catch_errors"
"setting_up_container"
"network_check"
"update_os"
)
[[ "$LINE" != "${EXPECTED[$((i-9))]}" ]] && echo "Line $i was $LINE | Should be: ${EXPECTED[$((i-9))]}" >> "$LOG_FILE"
done
[[ -n "$(sed -n '15p' "$FILE")" ]] && echo "Line 15 should be empty" >> "$LOG_FILE"
[[ "$(sed -n '16p' "$FILE")" != 'msg_info "Installing Dependencies"' ]] && echo 'Line 16 should be: msg_info "Installing Dependencies"' >> "$LOG_FILE"
LAST_3_LINES=$(tail -n 3 "$FILE")
[[ "$LAST_3_LINES" != *"$STD apt-get -y autoremove"* ]] && echo 'Third to last line should be: $STD apt-get -y autoremove' >> "$LOG_FILE"
[[ "$LAST_3_LINES" != *"$STD apt-get -y autoclean"* ]] && echo 'Second to last line should be: $STD apt-get -y clean' >> "$LOG_FILE"
[[ "$LAST_3_LINES" != *'msg_ok "Cleaned"'* ]] && echo 'Last line should be: msg_ok "Cleaned"' >> "$LOG_FILE"
cat "$LOG_FILE"
fi
done
- name: Post error comments
run: |
ERROR="false"
for FILE in ${{ env.SCRIPT }}; do
FILE_STRIPPED="${FILE##*/}"
LOG_FILE="result_$FILE_STRIPPED.log"
echo $LOG_FILE
if [[ ! -f $LOG_FILE ]]; then
continue
fi
ERROR_MSG=$(cat $LOG_FILE)
if [ -n "$ERROR_MSG" ]; then
echo "Posting error message for $FILE"
echo ${ERROR_MSG}
gh pr comment ${{ github.event.pull_request.number }} \
--repo ${{ github.repository }} \
--body ":warning: The script _**$FILE**_ has the following formatting errors: <br> <div><strong>${ERROR_MSG}</strong></div>"
ERROR="true"
fi
done
echo "ERROR=$ERROR" >> $GITHUB_ENV
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Fail if error
if: ${{ env.ERROR == 'true' }}
run: exit 1
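Taken together, these line checks pin down a fixed layout for the first 20 lines of a ct script. A skeleton that would pass them (the app name, author, source URL and resource values are placeholders, and the real script continues below line 20):

#!/usr/bin/env bash
source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: YourName
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://www.example.com/

APP="MyApp"
var_tags="example"
var_cpu=1
var_ram=512
var_disk=4
var_os=debian
var_version=12
var_unprivileged=1
header_info "$APP"
variables
color
catch_errors
function update_script() {
# ... update logic and the rest of the script follow here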

View File

@ -0,0 +1,131 @@
name: Update JSON Date
on:
push:
branches:
- main
paths:
- 'json/**.json'
workflow_dispatch:
jobs:
update-app-files:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@v1
with:
app-id: ${{ vars.APP_ID }}
private-key: ${{ secrets.APP_PRIVATE_KEY }}
- name: Generate dynamic branch name
id: timestamp
run: echo "BRANCH_NAME=pr-update-json-$(date +'%Y%m%d%H%M%S')" >> $GITHUB_ENV
- name: Set up GH_TOKEN
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo "GH_TOKEN=${GH_TOKEN}" >> $GITHUB_ENV
- name: Checkout Repository
uses: actions/checkout@v4
with:
fetch-depth: 2 # Ensure we have the last two commits
- name: Get Previous Commit
id: prev_commit
run: |
PREV_COMMIT=$(git rev-parse HEAD^)
echo "Previous commit: $PREV_COMMIT"
echo "prev_commit=$PREV_COMMIT" >> $GITHUB_ENV
- name: Get Newly Added JSON Files
id: new_json_files
run: |
git diff --name-only --diff-filter=A ${{ env.prev_commit }} HEAD | grep '^json/.*\.json$' > new_files.txt || true
echo "New files detected:"
cat new_files.txt || echo "No new files."
- name: Disable file mode changes
run: git config core.fileMode false
- name: Set up Git
run: |
git config --global user.name "GitHub Actions"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
- name: Change JSON Date
id: change-json-date
run: |
current_date=$(date +"%Y-%m-%d")
while IFS= read -r file; do
# Skip empty lines
[[ -z "$file" ]] && continue
if [[ -f "$file" ]]; then
echo "Processing $file..."
current_json_date=$(jq -r '.date_created // empty' "$file")
if [[ -z "$current_json_date" || "$current_json_date" != "$current_date" ]]; then
echo "Updating $file with date $current_date"
jq --arg date "$current_date" '.date_created = $date' "$file" > temp.json && mv temp.json "$file"
else
echo "Date in $file is already up to date."
fi
else
echo "Warning: File $file not found!"
fi
done < new_files.txt
rm new_files.txt
- name: Check if there are any changes
run: |
echo "Checking for changes..."
git add -A # Include untracked files
git status
if git diff --cached --quiet; then
echo "No changes detected."
echo "changed=false" >> "$GITHUB_ENV"
else
echo "Changes detected:"
git diff --stat --cached
echo "changed=true" >> "$GITHUB_ENV"
fi
# Step 7: Commit and create PR if changes exist
- name: Commit and create PR if changes exist
if: env.changed == 'true'
run: |
git commit -m "Update date in json"
git checkout -b ${{ env.BRANCH_NAME }}
git push origin ${{ env.BRANCH_NAME }}
gh pr create --title "[core] update date in json" \
--body "This PR is auto-generated by a GitHub Action to update the date in json." \
--head ${{ env.BRANCH_NAME }} \
--base main \
--label "automated pr"
env:
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
- name: Approve pull request
if: env.changed == 'true'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PR_NUMBER=$(gh pr list --head "${{ env.BRANCH_NAME }}" --json number --jq '.[].number')
if [ -n "$PR_NUMBER" ]; then
gh pr review $PR_NUMBER --approve
fi
- name: No changes detected
if: env.changed == 'false'
run: echo "No changes to commit. Workflow completed successfully."

View File

@ -0,0 +1,161 @@
name: Validate filenames
on:
pull_request_target:
paths:
- "ct/*.sh"
- "install/*.sh"
- "json/*.json"
jobs:
check-files:
name: Check changed files
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- name: Get pull request information
if: github.event_name == 'pull_request_target'
uses: actions/github-script@v7
id: pr
with:
script: |
const { data: pullRequest } = await github.rest.pulls.get({
...context.repo,
pull_number: context.payload.pull_request.number,
});
return pullRequest;
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0 # Ensure the full history is fetched for accurate diffing
ref: ${{ github.event_name == 'pull_request_target' && fromJSON(steps.pr.outputs.result).merge_commit_sha || '' }}
- name: Get changed files
id: changed-files
run: |
if ${{ github.event_name == 'pull_request_target' }}; then
echo "files=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ steps.pr.outputs.result && fromJSON(steps.pr.outputs.result).merge_commit_sha }} | xargs)" >> $GITHUB_OUTPUT
else
echo "files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | xargs)" >> $GITHUB_OUTPUT
fi
- name: "Validate filenames in ct and install directory"
if: always() && steps.changed-files.outputs.files != ''
id: check-scripts
run: |
CHANGED_FILES=$(printf "%s\n" ${{ steps.changed-files.outputs.files }} | { grep -E '^(ct|install)/.*\.sh$' || true; })
NON_COMPLIANT_FILES=""
for FILE in $CHANGED_FILES; do
# Explicitly skip the file "ct/create_lxc.sh"
if [[ "$FILE" == "ct/create_lxc.sh" ]]; then
continue
fi
BASENAME=$(echo "$(basename "${FILE%.*}")")
if [[ ! "$BASENAME" =~ ^[a-z0-9-]+$ ]]; then
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
fi
done
if [ -n "$NON_COMPLIANT_FILES" ]; then
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
echo "Non-compliant filenames found, change to lowercase:"
for FILE in $NON_COMPLIANT_FILES; do
echo "$FILE"
done
exit 1
fi
- name: "Validate filenames in json directory."
if: always() && steps.changed-files.outputs.files != ''
id: check-json
run: |
CHANGED_FILES=$(printf "%s\n" ${{ steps.changed-files.outputs.files }} | { grep -E '^json/.*\.json$' || true; })
NON_COMPLIANT_FILES=""
for FILE in $CHANGED_FILES; do
BASENAME=$(echo "$(basename "${FILE%.*}")")
if [[ ! "$BASENAME" =~ ^[a-z0-9-]+$ ]]; then
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
fi
done
if [ -n "$NON_COMPLIANT_FILES" ]; then
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
echo "Non-compliant filenames found, change to lowercase:"
for FILE in $NON_COMPLIANT_FILES; do
echo "$FILE"
done
exit 1
fi
- name: Post results and comment
if: always() && steps.check-scripts.outputs.files != '' && steps.check-json.outputs.files != '' && github.event_name == 'pull_request_target'
uses: actions/github-script@v7
with:
script: |
const result = "${{ job.status }}" === "success" ? "success" : "failure";
const nonCompliantFiles = {
script: "${{ steps.check-scripts.outputs.files }}",
JSON: "${{ steps.check-json.outputs.files }}",
};
const issueNumber = context.payload.pull_request
? context.payload.pull_request.number
: null;
const commentIdentifier = "validate-filenames";
let newCommentBody = `<!-- ${commentIdentifier}-start -->\n### Filename validation\n\n`;
if (result === "failure") {
newCommentBody += ":x: We found issues in the following changed files:\n\n";
for (const [check, files] of Object.entries(nonCompliantFiles)) {
if (files) {
newCommentBody += `**${check.charAt(0).toUpperCase() + check.slice(1)} filename invalid:**\n${files
.trim()
.split(" ")
.map((file) => `- ${file}`)
.join("\n")}\n\n`;
}
}
newCommentBody +=
"Please change the filenames to lowercase and use only alphanumeric characters and dashes.\n";
} else {
newCommentBody += `:rocket: All files passed filename validation!\n`;
}
newCommentBody += `\n\n<!-- ${commentIdentifier}-end -->`;
if (issueNumber) {
const { data: comments } = await github.rest.issues.listComments({
...context.repo,
issue_number: issueNumber,
});
const existingComment = comments.find(
(comment) => comment.user.login === "github-actions[bot]",
);
if (existingComment) {
if (existingComment.body.includes(commentIdentifier)) {
const re = new RegExp(String.raw`<!-- ${commentIdentifier}-start -->[\s\S]*?<!-- ${commentIdentifier}-end -->`, "");
newCommentBody = existingComment.body.replace(re, newCommentBody);
} else {
newCommentBody = existingComment.body + '\n\n---\n\n' + newCommentBody;
}
await github.rest.issues.updateComment({
...context.repo,
comment_id: existingComment.id,
body: newCommentBody,
});
} else {
await github.rest.issues.createComment({
...context.repo,
issue_number: issueNumber,
body: newCommentBody,
});
}
}
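Both validation steps reduce to the same basename rule: lowercase letters, digits and dashes only. A quick way to test a candidate filename against it (the filename here is only an example):

FILE="ct/My-App.sh"                              # example candidate
BASENAME=$(basename "${FILE%.*}")
if [[ "$BASENAME" =~ ^[a-z0-9-]+$ ]]; then
  echo "compliant: $BASENAME"
else
  echo "non-compliant, rename to lowercase: $BASENAME"   # fires here, because of the capital letters
fi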

View File

@ -18,7 +18,7 @@ jobs:
steps:
- name: Generate a token
id: app-token
uses: actions/create-github-app-token@v2
uses: actions/create-github-app-token@v1
with:
app-id: ${{ vars.PUSH_MAIN_APP_ID }}
private-key: ${{ secrets.PUSH_MAIN_APP_SECRET }}
@ -34,128 +34,63 @@ jobs:
repository: community-scripts/ProxmoxVED
token: ${{ secrets.GH_MERGE_PAT }}
- name: List Issues and Extract Script Type
- name: List Issues in Repository
id: list_issues
env:
GH_TOKEN: ${{ github.token }}
run: |
echo "Filtering Issues with Label Migration To ProxmoxVE"
raw_output=$(gh issue list --json title,labels,number,body)
filtered_issue=$(echo "$raw_output" | jq -r '[.[] | select(.labels[]?.name == "Migration To ProxmoxVE")][0]')
raw_output=$(gh issue list --json title,labels,number)
filtered_issues=$(echo "$raw_output" | jq -r '.[] | select(.labels[]?.name == "Migration To ProxmoxVE") | .title' | head -n 1)
issue_nr=$(echo "$raw_output" | jq -r '.[] | select(.labels[]?.name == "Migration To ProxmoxVE") | .number' | head -n 1)
if [ "$filtered_issue" == "null" ] || [ -z "$filtered_issue" ]; then
if [ -z "$filtered_issues" ]; then
echo "No issues found with label 'Migration To ProxmoxVE'."
exit 1
fi
script_name=$(echo "$filtered_issue" | jq -r '.title' | tr '[:upper:]' '[:lower:]' | tr -d ' ')
issue_nr=$(echo "$filtered_issue" | jq -r '.number')
issue_body=$(echo "$filtered_issue" | jq -r '.body')
echo "Script Name: $script_name"
echo "Issue Number: $issue_nr"
# Detect script type from issue body
if echo "$issue_body" | grep -qi "CT (LXC Container)"; then
script_type="ct"
elif echo "$issue_body" | grep -qi "VM (Virtual Machine)"; then
script_type="vm"
elif echo "$issue_body" | grep -qi "Addon (tools/addon)"; then
script_type="addon"
elif echo "$issue_body" | grep -qi "PVE Tool (tools/pve)"; then
script_type="pve"
else
# Fallback detection by filename pattern
if [[ "$script_name" == *"-vm"* ]]; then
script_type="vm"
else
script_type="ct"
fi
script_name=$(echo "$filtered_issues" | head -n 1) # Nur das erste Issue nehmen
script_name_lowercase=$(echo "$script_name" | tr '[:upper:]' '[:lower:]' | tr -d ' ')
echo "Script Name: $script_name_lowercase"
echo "script_name=$script_name_lowercase" >> $GITHUB_OUTPUT
echo "issue_nr=$issue_nr" >> $GITHUB_OUTPUT
fi
echo "Script Type: $script_type"
echo "script_name=$script_name" >> $GITHUB_OUTPUT
echo "issue_nr=$issue_nr" >> $GITHUB_OUTPUT
echo "script_type=$script_type" >> $GITHUB_OUTPUT
- name: Check if script files exist
id: check_files
run: |
script_name="${{ steps.list_issues.outputs.script_name }}"
script_type="${{ steps.list_issues.outputs.script_type }}"
files_found="true"
missing_files=""
ct_file="ct/${script_name}.sh"
install_file="install/${script_name}-install.sh"
json_file="frontend/public/json/${script_name}.json"
# Check files based on script type
case "$script_type" in
ct)
if [[ ! -f "ct/${script_name}.sh" ]]; then
echo "ct file not found: ct/${script_name}.sh"
files_found="false"
missing_files+="ct/${script_name}.sh "
fi
if [[ ! -f "install/${script_name}-install.sh" ]]; then
echo "install file not found: install/${script_name}-install.sh"
files_found="false"
missing_files+="install/${script_name}-install.sh "
fi
# JSON check with alpine fallback
json_file="frontend/public/json/${script_name}.json"
if [[ ! -f "$json_file" ]]; then
if [[ "$script_name" == alpine-* ]]; then
stripped_name="${script_name#alpine-}"
alt_json="frontend/public/json/${stripped_name}.json"
if [[ -f "$alt_json" ]]; then
echo "Using alpine fallback JSON: $alt_json"
echo "json_fallback=$alt_json" >> $GITHUB_OUTPUT
else
echo "json file not found: $json_file"
files_found="false"
missing_files+="$json_file "
fi
else
echo "json file not found: $json_file"
files_found="false"
missing_files+="$json_file "
fi
fi
;;
vm)
if [[ ! -f "vm/${script_name}.sh" ]]; then
echo "vm file not found: vm/${script_name}.sh"
files_found="false"
missing_files+="vm/${script_name}.sh "
fi
# JSON is optional for VMs but check anyway
json_file="frontend/public/json/${script_name}.json"
if [[ ! -f "$json_file" ]]; then
echo "json file not found (optional): $json_file"
fi
;;
addon)
if [[ ! -f "tools/addon/${script_name}.sh" ]]; then
echo "addon file not found: tools/addon/${script_name}.sh"
files_found="false"
missing_files+="tools/addon/${script_name}.sh "
fi
# JSON is optional for addons
json_file="frontend/public/json/${script_name}.json"
if [[ ! -f "$json_file" ]]; then
echo "json file not found (optional): $json_file"
fi
;;
pve)
if [[ ! -f "tools/pve/${script_name}.sh" ]]; then
echo "pve tool file not found: tools/pve/${script_name}.sh"
files_found="false"
missing_files+="tools/pve/${script_name}.sh "
fi
;;
esac
echo "files_found=$files_found" >> $GITHUB_OUTPUT
echo "missing=$missing_files" >> $GITHUB_OUTPUT
if [[ ! -f "$ct_file" ]]; then
echo "ct file not found."
echo "files_found=false" >> $GITHUB_OUTPUT
echo "missing=$ct_file" >> $GITHUB_OUTPUT
fi
if [[ ! -f "$install_file" ]]; then
echo "install file not found."
echo "files_found=false" >> $GITHUB_OUTPUT
echo "missing=$install_file" >> $GITHUB_OUTPUT
fi
if [[ ! -f "$json_file" ]]; then
if [[ "$json_file" = *alpine* ]]; then
stripped_name="${json_file/frontend\/public\/json\/alpine-/frontend/public/json/}"
echo $stripped_name
if [[ -f "$stripped_name" ]]; then
echo "files_found=true" >> $GITHUB_OUTPUT
else
echo "json file striped not found."
echo "files_found=false" >> $GITHUB_OUTPUT
echo "missing=$json_file" >> $GITHUB_OUTPUT
fi
else
echo "json file not found."
echo "files_found=false" >> $GITHUB_OUTPUT
echo "missing=$json_file" >> $GITHUB_OUTPUT
fi
fi
- name: Comment if not all Files found
if: steps.check_files.outputs.files_found == 'false'
@ -163,8 +98,7 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
script_name="${{ steps.list_issues.outputs.script_name }}"
script_type="${{ steps.list_issues.outputs.script_type }}"
gh issue comment ${{ steps.list_issues.outputs.issue_nr }} --body "Not all required files were found for **$script_name** (type: $script_type). Please check the files and try again. Missing: ${{ steps.check_files.outputs.missing }}"
gh issue comment ${{ steps.list_issues.outputs.issue_nr }} --body "Not all required files were found for $script_name. Please check the files and try again. Missing: ${{ steps.check_files.outputs.missing }}."
exit 1
- name: Get GitHub App User ID
@ -185,86 +119,54 @@ jobs:
echo "Using branch: $branch_name"
echo "branch_name=$branch_name" >> $GITHUB_ENV
- name: Clone ProxmoxVE and Copy Files
- name: Clone ProxmoxVE (Target Repo)
run: |
script_name="${{ steps.list_issues.outputs.script_name }}"
script_type="${{ steps.list_issues.outputs.script_type }}"
json_fallback="${{ steps.check_files.outputs.json_fallback }}"
git clone https://x-access-token:${{ steps.app-token.outputs.token }}@github.com/community-scripts/ProxmoxVE.git ProxmoxVE
cd ProxmoxVE
# Check if files already exist in target repo
case "$script_type" in
ct)
[[ -f "ct/${script_name}.sh" ]] && echo "ct file already exists in ProxmoxVE" && exit 1
[[ -f "install/${script_name}-install.sh" ]] && echo "install file already exists in ProxmoxVE" && exit 1
;;
vm)
[[ -f "vm/${script_name}.sh" ]] && echo "vm file already exists in ProxmoxVE" && exit 1
;;
addon)
[[ -f "tools/addon/${script_name}.sh" ]] && echo "addon file already exists in ProxmoxVE" && exit 1
;;
pve)
[[ -f "tools/pve/${script_name}.sh" ]] && echo "pve tool file already exists in ProxmoxVE" && exit 1
;;
esac
if [[ -f "ct/${script_name}.sh" ]]; then
echo "ct file already exists in ProxmoxVE"
exit 1
fi
if [[ -f "install/${script_name}-install.sh" ]]; then
echo "install file already exists in ProxmoxVE"
exit 1
fi
if [[ -f "frontend/public/json/${script_name}.json" ]]; then
echo "json file already exists in ProxmoxVE"
exit 1
fi
git checkout -b "$branch_name"
# Copy files based on script type
case "$script_type" in
ct)
cp ../ct/${script_name}.sh ct/
cp ../ct/headers/${script_name} ct/headers/ 2>/dev/null || true
cp ../install/${script_name}-install.sh install/
cp ../ct/$script_name.sh ct/.
cp ../ct/headers/$script_name ct/headers/. || true
cp ../install/$script_name-install.sh install/.
# Handle JSON with alpine fallback
if [[ -n "$json_fallback" ]]; then
cp ../${json_fallback} frontend/public/json/ || true
else
cp ../frontend/public/json/${script_name}.json frontend/public/json/ 2>/dev/null || true
json_file="${script_name}.json"
if [[ ! -f "../frontend/public/json/$json_file" ]]; then
if [[ "$json_file" = *alpine* ]]; then
stripped_name="${json_file#alpine-}"
if [[ -f "../frontend/public/json/$stripped_name" ]]; then
cp ../frontend/public/json/$stripped_name frontend/public/json/. || true
fi
fi
else
cp ../frontend/public/json/$json_file frontend/public/json/. || true
fi
# Update URLs in ct script
sed -i "s|https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func|https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func|" ct/${script_name}.sh
sed -i "s|https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func|https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func|" ct/${script_name}.sh
sed -i "s|community-scripts/ProxmoxVED|community-scripts/ProxmoxVE|g" ct/${script_name}.sh
sed -i "s|community-scripts/ProxmoxVED|community-scripts/ProxmoxVE|g" install/${script_name}-install.sh
;;
vm)
cp ../vm/${script_name}.sh vm/
cp ../frontend/public/json/${script_name}.json frontend/public/json/ 2>/dev/null || true
# Update URLs in vm script
sed -i "s|https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func|https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func|" vm/${script_name}.sh
sed -i "s|community-scripts/ProxmoxVED|community-scripts/ProxmoxVE|g" vm/${script_name}.sh
;;
addon)
mkdir -p tools/addon
cp ../tools/addon/${script_name}.sh tools/addon/
cp ../frontend/public/json/${script_name}.json frontend/public/json/ 2>/dev/null || true
# Update URLs in addon script
sed -i "s|community-scripts/ProxmoxVED|community-scripts/ProxmoxVE|g" tools/addon/${script_name}.sh
;;
pve)
mkdir -p tools/pve
cp ../tools/pve/${script_name}.sh tools/pve/
# Update URLs in pve script
sed -i "s|community-scripts/ProxmoxVED|community-scripts/ProxmoxVE|g" tools/pve/${script_name}.sh
;;
esac
git add . > /dev/null 2>&1
sed -i 's|source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)|source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)|' ct/$script_name.sh
sed -i 's|# License: MIT \| https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE|# License: MIT \| https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE|' ct/$script_name.sh
sed -i 's|# License: MIT \| https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE|# License: MIT \| https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE|' install/$script_name-install.sh
git add .
if git diff --cached --exit-code; then
echo "No changes detected, skipping commit."
exit 0
fi
git commit -m "Add ${script_name} (${script_type})"
git commit -m "${commit_message:-'Add new script'}"
- name: Push to ProxmoxVE
run: |
@ -277,13 +179,12 @@ jobs:
GITHUB_TOKEN: ${{ steps.app-token.outputs.token }}
run: |
script_name="${{ steps.list_issues.outputs.script_name }}"
script_type="${{ steps.list_issues.outputs.script_type }}"
gh pr create \
--repo community-scripts/ProxmoxVE \
--head "$branch_name" \
--base main \
--title "${script_name}" \
--body "Automated migration of **${script_name}** (type: ${script_type}) from ProxmoxVED to ProxmoxVE."
--title "$script_name" \
--body "Automated migration of $script_name from ProxmoxVED to ProxmoxVE."
PR_NUMBER=$(gh pr list --repo community-scripts/ProxmoxVE --head "$branch_name" --json number --jq '.[].number')
echo "PR_NUMBER=$PR_NUMBER"
echo "pr_number=$PR_NUMBER" >> $GITHUB_OUTPUT

View File

@ -11,29 +11,17 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout source repo
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set Git identity for actions
run: |
git config --global user.name "Push From Github"
git config --global user.email "actions@github.com"
- name: Add Gitea remote
run: git remote add gitea https://$GITEA_USER:$GITEA_TOKEN@git.community-scripts.org/community-scripts/ProxmoxVED.git
env:
GITEA_USER: ${{ secrets.GITEA_USERNAME }}
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
- name: Pull Gitea changes
run: |
git fetch gitea
git merge --strategy=ours gitea/main
env:
GITEA_USER: ${{ secrets.GITEA_USERNAME }}
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
- name: Push to Gitea
run: git push gitea main --force
env:
GITEA_USER: ${{ secrets.GITEA_USERNAME }}
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
- name: Checkout source repo
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Push to Gitea
run: |
git config --global user.name "Push From Github"
git config --global user.email "actions@github.com"
git remote add gitea https://$GITEA_USER:$GITEA_TOKEN@git.community-scripts.org/community-scripts/ProxmoxVED.git
git push gitea --mirror
env:
GITEA_USER: ${{ secrets.GITEA_USERNAME }}
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}

View File

@ -1,106 +0,0 @@
name: Bump build.func Revision
on:
push:
branches:
- main
paths:
- "misc/**"
workflow_dispatch:
jobs:
bump-revision:
if: github.repository == 'community-scripts/ProxmoxVED'
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Generate token for PR
id: generate-token
uses: actions/create-github-app-token@v1
with:
app-id: ${{ vars.APP_ID }}
private-key: ${{ secrets.APP_PRIVATE_KEY }}
- name: Generate token for auto-merge
id: generate-token-merge
uses: actions/create-github-app-token@v1
with:
app-id: ${{ secrets.APP_ID_APPROVE_AND_MERGE }}
private-key: ${{ secrets.APP_KEY_APPROVE_AND_MERGE }}
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 2
- name: Get changed files
id: changes
run: |
git diff --name-only HEAD^ HEAD > changed_files.txt
echo "Changed files:"
cat changed_files.txt
- name: Skip if only build.func changed
id: skipcheck
run: |
if grep -q "^misc/build.func$" changed_files.txt && [ $(wc -l < changed_files.txt) -eq 1 ]; then
echo "skip=true" >> $GITHUB_ENV
else
echo "skip=false" >> $GITHUB_ENV
fi
- name: Disable file mode changes
run: git config core.fileMode false
- name: Bump build.func revision
if: env.skip == 'false'
run: |
REV_FILE=".build-revision"
if [ ! -f "$REV_FILE" ]; then echo 0 > "$REV_FILE"; fi
REV_NUM=$(($(cat $REV_FILE) + 1))
echo $REV_NUM > $REV_FILE
SHORT_SHA=$(git rev-parse --short HEAD)
REV_STR="Revision: r${REV_NUM} (git-${SHORT_SHA})"
echo "Updating build.func with $REV_STR"
sed -i "s/^# Revision:.*/# $REV_STR/" misc/build.func
echo "REV_STR=$REV_STR" >> $GITHUB_ENV
git config --global user.name "GitHub Actions"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
git add misc/build.func .build-revision
git commit -m "chore: bump build.func to $REV_STR"
- name: Create PR
if: env.skip == 'false'
run: |
BRANCH_NAME="pr-build-revision-$(date +'%Y%m%d%H%M%S')"
git checkout -b $BRANCH_NAME
git push origin $BRANCH_NAME
gh pr create --title "[core] bump build.func to $REV_STR" \
--body "This PR bumps build.func revision because files in misc/ changed." \
--head $BRANCH_NAME \
--base main \
--label "automated pr"
env:
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
- name: Approve PR and merge
if: env.skip == 'false'
env:
GH_TOKEN: ${{ steps.generate-token-merge.outputs.token }}
run: |
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
if [ -n "$PR_NUMBER" ]; then
gh pr review $PR_NUMBER --approve
gh pr merge $PR_NUMBER --squash --admin
fi
- name: Skip log
if: env.skip == 'true'
run: echo "Only build.func changed nothing to do."

View File

@ -1,50 +1,34 @@
#!/usr/bin/env bash
# Function for generating Figlet headers
generate_headers() {
local base_dir=$1
local target_subdir=$2
local search_pattern=$3
# Base directory for headers
headers_dir="./ct/headers"
local headers_dir="${base_dir}/headers"
mkdir -p "$headers_dir"
rm -f "$headers_dir"/*
# Ensure the headers directory exists and clear it
mkdir -p "$headers_dir"
rm -f "$headers_dir"/*
# Recursive or non-recursive search
if [[ "$search_pattern" == "**" ]]; then
shopt -s globstar nullglob
file_list=("${base_dir}"/**/*.sh)
shopt -u globstar
else
file_list=("${base_dir}"/*.sh)
fi
# Find all .sh files in ./ct directory, sorted alphabetically
find ./ct -type f -name "*.sh" | sort | while read -r script; do
# Extract the APP name from the APP line
app_name=$(grep -oP '^APP="\K[^"]+' "$script" 2>/dev/null)
for script in "${file_list[@]}"; do
[[ -f "$script" ]] || continue
if [[ -n "$app_name" ]]; then
# Define the output file name in the headers directory
output_file="${headers_dir}/$(basename "${script%.*}")"
app_name=$(grep -oP '^APP="\K[^"]+' "$script" 2>/dev/null)
if [[ -n "$app_name" ]]; then
output_file="${headers_dir}/$(basename "${script%.*}")"
figlet_output=$(figlet -w 500 -f slant "$app_name")
if [[ -n "$figlet_output" ]]; then
echo "$figlet_output" >"$output_file"
echo "Generated: $output_file"
else
echo "Figlet failed for $app_name in $script"
fi
# Generate figlet output
figlet_output=$(figlet -w 500 -f slant "$app_name")
# Check if figlet output is not empty
if [[ -n "$figlet_output" ]]; then
echo "$figlet_output" > "$output_file"
echo "Generated: $output_file"
else
echo "No APP name found in $script, skipping."
echo "Figlet failed for $app_name in $script"
fi
done
}
else
echo "No APP name found in $script, skipping."
fi
done
# ct
generate_headers "./ct" "headers" "*"
# tools (addon, pve, ...)
generate_headers "./tools" "headers" "**"
# vm
generate_headers "./vm" "headers" "*"
echo "Completed processing all sections."
echo "Completed processing .sh files."

.gitignore
View File

@ -1,15 +1 @@
.vscode/settings.json
CHANGELOG_MISC.md
misc/COMPLETION_REPORT.md
misc/DOCUMENTATION_INDEX.md
misc/DOCUMENTATION_SUMMARY.md
misc/live/alpine-install.func
misc/live/build.func
misc/live/install.func
vm/debian-13-vm.sh
vm/README-vm-manager.md
vm/vm-manager.sh
vm/debian-13-vm.sh
vm/vm-manager.sh
vm/vm-manager.sh
vm/debian-13-vm.sh

View File

@ -7,76 +7,6 @@
<h3 align="center">All notable changes to this project will be documented in this file.</h3>
## 2025-05-14
### 🆕 New Scripts
- odoo ([#4477](https://github.com/community-scripts/ProxmoxVE/pull/4477))
- alpine-transmission ([#4277](https://github.com/community-scripts/ProxmoxVE/pull/4277))
- alpine-tinyauth ([#4264](https://github.com/community-scripts/ProxmoxVE/pull/4264))
- alpine-rclone ([#4265](https://github.com/community-scripts/ProxmoxVE/pull/4265))
- streamlink-webui ([#4262](https://github.com/community-scripts/ProxmoxVE/pull/4262))
- Fumadocs ([#4263](https://github.com/community-scripts/ProxmoxVE/pull/4263))
- asterisk ([#4468](https://github.com/community-scripts/ProxmoxVE/pull/4468))
- gatus ([#4443](https://github.com/community-scripts/ProxmoxVE/pull/4443))
- alpine-gatus ([#4442](https://github.com/community-scripts/ProxmoxVE/pull/4442))
- Alpine-Traefik [@MickLesk](https://github.com/MickLesk) ([#4412](https://github.com/community-scripts/ProxmoxVE/pull/4412))
### 🚀 Updated Scripts
- fix: fetch_release_and_deploy function [@CrazyWolf13](https://github.com/CrazyWolf13) ([#4478](https://github.com/community-scripts/ProxmoxVE/pull/4478))
- Website: re-add documenso & some little bugfixes [@MickLesk](https://github.com/MickLesk) ([#4456](https://github.com/community-scripts/ProxmoxVE/pull/4456))
- update some improvements from dev (tools.func) [@MickLesk](https://github.com/MickLesk) ([#4430](https://github.com/community-scripts/ProxmoxVE/pull/4430))
- Alpine: Use onliner for updates [@tremor021](https://github.com/tremor021) ([#4414](https://github.com/community-scripts/ProxmoxVE/pull/4414))
- #### 🐞 Bug Fixes
- Bugfix: Mikrotik & Pimox HAOS VM (NEXTID) [@MickLesk](https://github.com/MickLesk) ([#4313](https://github.com/community-scripts/ProxmoxVE/pull/4313))
- Bookstack: fix copy of themes/uploads/storage [@MickLesk](https://github.com/MickLesk) ([#4457](https://github.com/community-scripts/ProxmoxVE/pull/4457))
- homarr: fetch versions dynamically from source repo [@CrazyWolf13](https://github.com/CrazyWolf13) ([#4409](https://github.com/community-scripts/ProxmoxVE/pull/4409))
- Authentik: change install to UV & increase resources to 10GB RAM [@MickLesk](https://github.com/MickLesk) ([#4364](https://github.com/community-scripts/ProxmoxVE/pull/4364))
- Jellyseerr: better handling of node and pnpm [@MickLesk](https://github.com/MickLesk) ([#4365](https://github.com/community-scripts/ProxmoxVE/pull/4365))
- Alpine-Rclone: Fix location of passwords file [@tremor021](https://github.com/tremor021) ([#4465](https://github.com/community-scripts/ProxmoxVE/pull/4465))
- Zammad: Enable ElasticSearch service [@tremor021](https://github.com/tremor021) ([#4391](https://github.com/community-scripts/ProxmoxVE/pull/4391))
- openhab: use zulu17-jdk [@moodyblue](https://github.com/moodyblue) ([#4438](https://github.com/community-scripts/ProxmoxVE/pull/4438))
- (fix) Documenso: fix build failures [@vhsdream](https://github.com/vhsdream) ([#4382](https://github.com/community-scripts/ProxmoxVE/pull/4382))
- #### ✨ New Features
- Feature: LXC-Delete (pve helper): add "all items" [@MickLesk](https://github.com/MickLesk) ([#4296](https://github.com/community-scripts/ProxmoxVE/pull/4296))
- Feature: get correct next VMID [@MickLesk](https://github.com/MickLesk) ([#4292](https://github.com/community-scripts/ProxmoxVE/pull/4292))
- HomeAssistant-Core: update script for 2025.5+ [@MickLesk](https://github.com/MickLesk) ([#4363](https://github.com/community-scripts/ProxmoxVE/pull/4363))
- Feature: autologin for Alpine [@MickLesk](https://github.com/MickLesk) ([#4344](https://github.com/community-scripts/ProxmoxVE/pull/4344))
- monitor-all: improvements - tag based filtering [@grizmin](https://github.com/grizmin) ([#4437](https://github.com/community-scripts/ProxmoxVE/pull/4437))
- Make apt-cacher-ng a client of its own server [@pgcudahy](https://github.com/pgcudahy) ([#4092](https://github.com/community-scripts/ProxmoxVE/pull/4092))
- #### 🔧 Refactor
- openhab. correct some typos [@moodyblue](https://github.com/moodyblue) ([#4448](https://github.com/community-scripts/ProxmoxVE/pull/4448))
### 🧰 Maintenance
- #### 💾 Core
- fix: improve bridge detection in all network interface configuration files [@filippolauria](https://github.com/filippolauria) ([#4413](https://github.com/community-scripts/ProxmoxVE/pull/4413))
- Config file Function in build.func [@michelroegl-brunner](https://github.com/michelroegl-brunner) ([#4411](https://github.com/community-scripts/ProxmoxVE/pull/4411))
- fix: detect all bridge types, not just vmbr prefix [@filippolauria](https://github.com/filippolauria) ([#4351](https://github.com/community-scripts/ProxmoxVE/pull/4351))
- #### 📂 Github
- Add Github app for auto PR merge [@michelroegl-brunner](https://github.com/michelroegl-brunner) ([#4461](https://github.com/community-scripts/ProxmoxVE/pull/4461))
### 🌐 Website
- FAQ: Explanation "updatable" [@tremor021](https://github.com/tremor021) ([#4300](https://github.com/community-scripts/ProxmoxVE/pull/4300))
- #### 📝 Script Information
- Jellyfin Media Server: Update configuration path [@tremor021](https://github.com/tremor021) ([#4434](https://github.com/community-scripts/ProxmoxVE/pull/4434))
- Pingvin Share: Added explanation on how to add/edit environment variables [@tremor021](https://github.com/tremor021) ([#4432](https://github.com/community-scripts/ProxmoxVE/pull/4432))
- pingvin.json: fix typo [@warmbo](https://github.com/warmbo) ([#4426](https://github.com/community-scripts/ProxmoxVE/pull/4426))
- Navidrome - Fix config path (use /etc/ instead of /var/lib) [@quake1508](https://github.com/quake1508) ([#4406](https://github.com/community-scripts/ProxmoxVE/pull/4406))
## 2025-03-24
### 🆕 New Scripts
@ -85,7 +15,7 @@
- wazuh [@omiinaya](https://github.com/omiinaya) ([#3381](https://github.com/community-scripts/ProxmoxVE/pull/3381))
- yt-dlp-webui [@CrazyWolf13](https://github.com/CrazyWolf13) ([#3364](https://github.com/community-scripts/ProxmoxVE/pull/3364))
- Extension/New Script: Redis Alpine Installation [@MickLesk](https://github.com/MickLesk) ([#3367](https://github.com/community-scripts/ProxmoxVE/pull/3367))
- Fluid Calendar [@vhsdream](https://github.com/vhsdream) ([#2869](https://github.com/community-scripts/ProxmoxVE/pull/2869))
### 🚀 Updated Scripts
@ -105,7 +35,7 @@
- GoMFT: Fix build dependencies [@tremor021](https://github.com/tremor021) ([#3313](https://github.com/community-scripts/ProxmoxVE/pull/3313))
- GoMFT: Don't rely on binaries from github [@tremor021](https://github.com/tremor021) ([#3303](https://github.com/community-scripts/ProxmoxVE/pull/3303))
- Wikijs: Remove Dev Message & Performance-Boost [@bvdberg01](https://github.com/bvdberg01) ([#3232](https://github.com/community-scripts/ProxmoxVE/pull/3232))
- Update omada download url [@bvdberg01](https://github.com/bvdberg01) ([#3245](https://github.com/community-scripts/ProxmoxVE/pull/3245))
- TriliumNotes: Fix release handling [@tremor021](https://github.com/tremor021) ([#3160](https://github.com/community-scripts/ProxmoxVE/pull/3160))
- #### ✨ New Features
@ -126,6 +56,7 @@
- #### ✨ New Features
- [core] add gitignore to prevent big pulls [@MickLesk](https://github.com/MickLesk) ([#3278](https://github.com/community-scripts/ProxmoxVE/pull/3278))
- [core] install core deps (debian / ubuntu) [@MickLesk](https://github.com/MickLesk) ([#3366](https://github.com/community-scripts/ProxmoxVE/pull/3366))
- #### 💾 Core
@ -141,7 +72,7 @@
### 🌐 Website
- Update siteConfig.tsx to use new analytics code [@BramSuurdje](https://github.com/BramSuurdje) ([#3389](https://github.com/community-scripts/ProxmoxVE/pull/3389))
- Bump next from 15.1.3 to 15.2.3 in /frontend [@dependabot[bot]](https://github.com/dependabot[bot]) ([#3316](https://github.com/community-scripts/ProxmoxVE/pull/3316))
- #### 🐞 Bug Fixes

View File

@ -1,6 +1,6 @@
module proxmox-api
go 1.24.0
go 1.23.2
require (
github.com/gorilla/mux v1.8.1
@ -17,7 +17,7 @@ require (
github.com/xdg-go/scram v1.1.2 // indirect
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
golang.org/x/crypto v0.45.0 // indirect
golang.org/x/sync v0.18.0 // indirect
golang.org/x/text v0.31.0 // indirect
golang.org/x/crypto v0.35.0 // indirect
golang.org/x/sync v0.11.0 // indirect
golang.org/x/text v0.22.0 // indirect
)

View File

@ -27,16 +27,16 @@ go.mongodb.org/mongo-driver v1.17.2 h1:gvZyk8352qSfzyZ2UMWcpDpMSGEr1eqE4T793Sqyh
go.mongodb.org/mongo-driver v1.17.2/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs=
golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w=
golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@ -48,8 +48,8 @@ golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,355 +0,0 @@
#!/usr/bin/env bash
# ==============================================================================
# TEST SUITE FOR tools.func
# ==============================================================================
# This script tests all setup_* functions from tools.func
# Can be run standalone in any Debian-based system
#
# Usage:
# bash <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/test-tools-func.sh)
# ==============================================================================
set -uo pipefail
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color
# Counters
TESTS_PASSED=0
TESTS_FAILED=0
TESTS_SKIPPED=0
# Log file
TEST_LOG="/tmp/tools-func-test-$(date +%Y%m%d-%H%M%S).log"
echo -e "${CYAN}═══════════════════════════════════════════════════════════${NC}"
echo -e "${CYAN} TOOLS.FUNC TEST SUITE${NC}"
echo -e "${CYAN}═══════════════════════════════════════════════════════════${NC}"
echo -e "Log file: ${TEST_LOG}\n"
# Source tools.func from repository
echo -e "${BLUE}► Sourcing tools.func from repository...${NC}"
if ! source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/tools.func); then
echo -e "${RED}✖ Failed to source tools.func${NC}"
exit 1
fi
echo -e "${GREEN}✔ tools.func loaded${NC}\n"
# Source core functions if available
if curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/core.func &>/dev/null; then
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/core.func) || true
fi
# Override STD to show all output for debugging
export STD=''
# Force non-interactive mode for all apt operations
export DEBIAN_FRONTEND=noninteractive
# Update PATH to include common installation directories
export PATH="/usr/local/bin:/usr/local/go/bin:/root/.cargo/bin:/root/.rbenv/bin:/root/.rbenv/shims:/opt/java/bin:$PATH"
# Helper functions (override if needed from core.func)
msg_info() { echo -e "${BLUE} ${1}${CL:-${NC}}"; }
msg_ok() { echo -e "${GREEN}${1}${CL:-${NC}}"; }
msg_error() { echo -e "${RED}${1}${CL:-${NC}}"; }
msg_warn() { echo -e "${YELLOW}${1}${CL:-${NC}}"; }
# Color definitions if not already set
GN="${GN:-${GREEN}}"
BL="${BL:-${BLUE}}"
RD="${RD:-${RED}}"
YW="${YW:-${YELLOW}}"
CL="${CL:-${NC}}"
# Reload environment helper
reload_path() {
export PATH="/usr/local/bin:/usr/local/go/bin:/root/.cargo/bin:/root/.rbenv/bin:/root/.rbenv/shims:/opt/java/bin:$PATH"
# Source profile files if they exist
[ -f "/root/.bashrc" ] && source /root/.bashrc 2>/dev/null || true
[ -f "/root/.profile" ] && source /root/.profile 2>/dev/null || true
[ -f "/root/.cargo/env" ] && source /root/.cargo/env 2>/dev/null || true
}
# Clean up before test to avoid interactive prompts and locks
cleanup_before_test() {
# Kill any hanging apt processes
killall apt-get apt 2>/dev/null || true
# Remove apt locks
rm -f /var/lib/dpkg/lock-frontend /var/lib/dpkg/lock /var/cache/apt/archives/lock 2>/dev/null || true
# Clean up broken repository files from previous tests
# Remove all custom sources files
rm -f /etc/apt/sources.list.d/*.sources 2>/dev/null || true
rm -f /etc/apt/sources.list.d/*.list 2>/dev/null || true
# Remove all keyrings
rm -f /etc/apt/keyrings/*.gpg 2>/dev/null || true
rm -f /etc/apt/keyrings/*.asc 2>/dev/null || true
# Update package lists to ensure clean state
apt-get update -qq 2>/dev/null || true
# Wait a moment for processes to clean up
sleep 1
}
[ -f "/root/.profile" ] && source /root/.profile 2>/dev/null || true
[ -f "/root/.cargo/env" ] && source /root/.cargo/env 2>/dev/null || true
# Test validation function
test_function() {
local test_name="$1"
local test_command="$2"
local validation_cmd="${3:-}"
# Clean up before starting test
cleanup_before_test
echo -e "\n${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e "${CYAN}Testing: ${test_name}${NC}"
echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
{
echo "=== Test: ${test_name} ==="
echo "Command: ${test_command}"
echo "Started: $(date)"
} | tee -a "$TEST_LOG"
# Execute installation with output visible AND logged
if eval "$test_command" 2>&1 | tee -a "$TEST_LOG"; then
# Reload PATH after installation
reload_path
if [[ -n "$validation_cmd" ]]; then
local output
if output=$(bash -c "$validation_cmd" 2>&1); then
msg_ok "${test_name} - $(echo "$output" | head -n1)"
((TESTS_PASSED++))
else
msg_error "${test_name} - Installation succeeded but validation failed"
{
echo "Validation command: $validation_cmd"
echo "Validation output: $output"
echo "PATH: $PATH"
} | tee -a "$TEST_LOG"
((TESTS_FAILED++))
fi
else
msg_ok "${test_name}"
((TESTS_PASSED++))
fi
else
msg_error "${test_name} - Installation failed"
echo "Installation failed" | tee -a "$TEST_LOG"
((TESTS_FAILED++))
fi
echo "Completed: $(date)" | tee -a "$TEST_LOG"
echo "" | tee -a "$TEST_LOG"
}
# Skip test with reason
skip_test() {
local test_name="$1"
local reason="$2"
echo -e "\n${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e "${CYAN}Testing: ${test_name}${NC}"
echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
msg_warn "Skipped: ${reason}"
((TESTS_SKIPPED++))
}
# Update system
msg_info "Updating system packages"
apt-get update &>/dev/null && msg_ok "System updated"
# Install base dependencies
msg_info "Installing base dependencies"
apt-get install -y curl wget gpg jq git build-essential ca-certificates &>/dev/null && msg_ok "Base dependencies installed"
# ==============================================================================
# TEST 1: YQ - YAML Processor
# ==============================================================================
# test_function "YQ" \
# "setup_yq" \
# "yq --version"
# ==============================================================================
# TEST 2: ADMINER - Database Management
# ==============================================================================
# test_function "Adminer" \
# "setup_adminer" \
# "dpkg -l adminer 2>/dev/null | grep -q '^ii' && a2query -c adminer 2>/dev/null && echo 'Adminer installed'"
# ==============================================================================
# TEST 3: CLICKHOUSE
# ==============================================================================
# test_function "ClickHouse" \
# "setup_clickhouse" \
# "clickhouse-server --version"
# ==============================================================================
# TEST 4: POSTGRESQL
# ==============================================================================
test_function "PostgreSQL 16" \
"PG_VERSION=16 setup_postgresql" \
"psql --version"
# ==============================================================================
# TEST 6: MARIADB
# ==============================================================================
test_function "MariaDB 11.4" \
"MARIADB_VERSION=11.4 setup_mariadb" \
"mariadb --version"
# ==============================================================================
# TEST 7: MYSQL (Remove MariaDB first)
# ==============================================================================
msg_info "Removing MariaDB before MySQL installation"
systemctl stop mariadb &>/dev/null || true
apt-get purge -y mariadb-server mariadb-client mariadb-common &>/dev/null || true
apt-get autoremove -y &>/dev/null
rm -rf /etc/mysql /var/lib/mysql
msg_ok "MariaDB removed"
test_function "MySQL 8.0" \
"MYSQL_VERSION=8.0 setup_mysql" \
"mysql --version"
# ==============================================================================
# TEST 8: MONGODB (Check AVX support)
# ==============================================================================
# if grep -q avx /proc/cpuinfo; then
# test_function "MongoDB 8.0" \
# "MONGO_VERSION=8.0 setup_mongodb" \
# "mongod --version"
# else
# skip_test "MongoDB 8.0" "CPU does not support AVX"
# fi
# ==============================================================================
# TEST 9: NODE.JS
# ==============================================================================
# test_function "Node.js 22 with modules" \
# "NODE_VERSION=22 NODE_MODULE='yarn,pnpm@10.1.0,pm2' setup_nodejs" \
# "node --version && npm --version && yarn --version && pnpm --version && pm2 --version"
# ==============================================================================
# TEST 10: PYTHON (UV)
# ==============================================================================
# test_function "Python 3.12 via uv" \
# "PYTHON_VERSION=3.12 setup_uv" \
# "uv --version"
# ==============================================================================
# TEST 11: PHP
# ==============================================================================
# test_function "PHP 8.3 with FPM" \
# "PHP_VERSION=8.3 PHP_FPM=YES PHP_MODULE='redis,imagick,apcu,zip,mbstring' setup_php" \
# "php --version"
# ==============================================================================
# TEST 12: COMPOSER
# ==============================================================================
# test_function "Composer" \
# "setup_composer" \
# "composer --version"
# ==============================================================================
# TEST 13: JAVA
# ==============================================================================
# test_function "Java Temurin 21" \
# "JAVA_VERSION=21 setup_java" \
# "java --version"
# ==============================================================================
# TEST 14: GO
# ==============================================================================
# test_function "Go (latest)" \
# "GO_VERSION=latest setup_go" \
# "go version"
# ==============================================================================
# TEST 15: RUBY
# ==============================================================================
test_function "Ruby 3.4.1 with Rails" \
"RUBY_VERSION=3.4.1 RUBY_INSTALL_RAILS=true setup_ruby" \
"ruby --version"
# ==============================================================================
# TEST 16: RUST
# ==============================================================================
# test_function "Rust (stable)" \
# "RUST_TOOLCHAIN=stable RUST_CRATES='cargo-edit' setup_rust" \
# "source \$HOME/.cargo/env && rustc --version"
# ==============================================================================
# TEST 17: GHOSTSCRIPT
# ==============================================================================
# test_function "Ghostscript" \
# "setup_gs" \
# "gs --version"
# ==============================================================================
# TEST 18: IMAGEMAGICK
# ==============================================================================
# test_function "ImageMagick" \
# "setup_imagemagick" \
# "magick --version"
# ==============================================================================
# TEST 19: FFMPEG
# ==============================================================================
# test_function "FFmpeg n7.1.1 (full)" \
# "FFMPEG_VERSION=n7.1.1 FFMPEG_TYPE=full setup_ffmpeg" \
# "ffmpeg -version"
# ==============================================================================
# FINAL SUMMARY
# ==============================================================================
echo -e "\n${CYAN}═══════════════════════════════════════════════════════════${NC}"
echo -e "${CYAN} TEST SUMMARY${NC}"
echo -e "${CYAN}═══════════════════════════════════════════════════════════${NC}"
echo -e "${GREEN}✔ Passed: ${TESTS_PASSED}${NC}"
echo -e "${RED}✖ Failed: ${TESTS_FAILED}${NC}"
echo -e "${YELLOW}⚠ Skipped: ${TESTS_SKIPPED}${NC}"
echo -e "\nDetailed log: ${TEST_LOG}"
# Generate summary report
{
echo ""
echo "=== FINAL SUMMARY ==="
echo "Tests Passed: ${TESTS_PASSED}"
echo "Tests Failed: ${TESTS_FAILED}"
echo "Tests Skipped: ${TESTS_SKIPPED}"
echo ""
echo "=== Installed Versions ==="
command -v yq &>/dev/null && echo "yq: $(yq --version 2>&1)"
command -v clickhouse-server &>/dev/null && echo "ClickHouse: $(clickhouse-server --version 2>&1 | head -n1)"
command -v psql &>/dev/null && echo "PostgreSQL: $(psql --version)"
command -v mysql &>/dev/null && echo "MySQL: $(mysql --version)"
command -v mongod &>/dev/null && echo "MongoDB: $(mongod --version 2>&1 | head -n1)"
command -v node &>/dev/null && echo "Node.js: $(node --version)"
command -v php &>/dev/null && echo "PHP: $(php --version | head -n1)"
command -v java &>/dev/null && echo "Java: $(java --version 2>&1 | head -n1)"
command -v go &>/dev/null && echo "Go: $(go version)"
command -v ruby &>/dev/null && echo "Ruby: $(ruby --version)"
command -v rustc &>/dev/null && echo "Rust: $(rustc --version)"
command -v ffmpeg &>/dev/null && echo "FFmpeg: $(ffmpeg -version 2>&1 | head -n1)"
} >>"$TEST_LOG"
if [ $TESTS_FAILED -eq 0 ]; then
echo -e "\n${GREEN}All tests completed successfully!${NC}"
exit 0
else
echo -e "\n${RED}Some tests failed. Check the log for details.${NC}"
exit 1
fi

File diff suppressed because it is too large

Binary file not shown (Before: 40 KiB)

Binary file not shown (Before: 69 KiB)

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,648 +0,0 @@
config_file() {
CONFIG_FILE="/opt/community-scripts/.settings"
if [[ -f "/opt/community-scripts/${NSAPP}.conf" ]]; then
CONFIG_FILE="/opt/community-scripts/${NSAPP}.conf"
fi
if CONFIG_FILE=$(whiptail --backtitle "[dev] Proxmox VE Helper Scripts" --inputbox "Set absolute path to config file" 8 58 "$CONFIG_FILE" --title "CONFIG FILE" 3>&1 1>&2 2>&3); then
if [[ ! -f "$CONFIG_FILE" ]]; then
echo -e "${CROSS}${RD}Config file not found, exiting script!.${CL}"
exit
else
echo -e "${INFO}${BOLD}${DGN}Using config File: ${BGN}$CONFIG_FILE${CL}"
source "$CONFIG_FILE"
fi
fi
if [[ -n "${CT_ID-}" ]]; then
if [[ "$CT_ID" =~ ^([0-9]{3,4})-([0-9]{3,4})$ ]]; then
MIN_ID=${BASH_REMATCH[1]}
MAX_ID=${BASH_REMATCH[2]}
if ((MIN_ID >= MAX_ID)); then
msg_error "Invalid Container ID range. The first number must be smaller than the second number, was ${CT_ID}"
exit
fi
LIST_OF_IDS=$(pvesh get /cluster/resources --type vm --output-format json 2>/dev/null | grep -oP '"vmid":\s*\K\d+') || true
if [[ -n "$LIST_OF_IDS" ]]; then
for ((ID = MIN_ID; ID <= MAX_ID; ID++)); do
if ! grep -q "^$ID$" <<<"$LIST_OF_IDS"; then
CT_ID=$ID
break
fi
done
fi
echo -e "${CONTAINERID}${BOLD}${DGN}Container ID: ${BGN}$CT_ID${CL}"
elif [[ "$CT_ID" =~ ^[0-9]+$ ]]; then
LIST_OF_IDS=$(pvesh get /cluster/resources --type vm --output-format json 2>/dev/null | grep -oP '"vmid":\s*\K\d+') || true
if [[ -n "$LIST_OF_IDS" ]]; then
if ! grep -q "^$CT_ID$" <<<"$LIST_OF_IDS"; then
echo -e "${CONTAINERID}${BOLD}${DGN}Container ID: ${BGN}$CT_ID${CL}"
else
msg_error "Container ID $CT_ID already exists"
exit
fi
else
echo -e "${CONTAINERID}${BOLD}${DGN}Container ID: ${BGN}$CT_ID${CL}"
fi
else
msg_error "Invalid Container ID format. Needs to be 0000-9999 or 0-9999, was ${CT_ID}"
exit
fi
else
if CT_ID=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set Container ID" 8 58 "$NEXTID" --title "CONTAINER ID" 3>&1 1>&2 2>&3); then
if [ -z "$CT_ID" ]; then
CT_ID="$NEXTID"
echo -e "${CONTAINERID}${BOLD}${DGN}Container ID: ${BGN}$CT_ID${CL}"
else
echo -e "${CONTAINERID}${BOLD}${DGN}Container ID: ${BGN}$CT_ID${CL}"
fi
else
exit_script
fi
fi
if [[ -n "${CT_TYPE-}" ]]; then
if [[ "$CT_TYPE" -eq 0 ]]; then
CT_TYPE_DESC="Privileged"
elif [[ "$CT_TYPE" -eq 1 ]]; then
CT_TYPE_DESC="Unprivileged"
else
msg_error "Unknown setting for CT_TYPE, should be 1 or 0, was ${CT_TYPE}"
exit
fi
echo -e "${CONTAINERTYPE}${BOLD}${DGN}Container Type: ${BGN}$CT_TYPE_DESC${CL}"
else
if CT_TYPE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "CONTAINER TYPE" --radiolist "Choose Type" 10 58 2 \
"1" "Unprivileged" ON \
"0" "Privileged" OFF \
3>&1 1>&2 2>&3); then
if [ -n "$CT_TYPE" ]; then
CT_TYPE_DESC="Unprivileged"
if [ "$CT_TYPE" -eq 0 ]; then
CT_TYPE_DESC="Privileged"
fi
echo -e "${CONTAINERTYPE}${BOLD}${DGN}Container Type: ${BGN}$CT_TYPE_DESC${CL}"
fi
else
exit_script
fi
fi
if [[ -n "${PW-}" ]]; then
if [[ "$PW" == "none" ]]; then
PW=""
else
if [[ "$PW" == *" "* ]]; then
msg_error "Password cannot be empty"
exit
elif [[ ${#PW} -lt 5 ]]; then
msg_error "Password must be at least 5 characters long"
exit
else
echo -e "${VERIFYPW}${BOLD}${DGN}Root Password: ${BGN}********${CL}"
fi
PW="-password $PW"
fi
else
while true; do
if PW1=$(whiptail --backtitle "Proxmox VE Helper Scripts" --passwordbox "\nSet Root Password (needed for root ssh access)" 9 58 --title "PASSWORD (leave blank for automatic login)" 3>&1 1>&2 2>&3); then
if [[ -n "$PW1" ]]; then
if [[ "$PW1" == *" "* ]]; then
whiptail --msgbox "Password cannot contain spaces. Please try again." 8 58
elif [ ${#PW1} -lt 5 ]; then
whiptail --msgbox "Password must be at least 5 characters long. Please try again." 8 58
else
if PW2=$(whiptail --backtitle "Proxmox VE Helper Scripts" --passwordbox "\nVerify Root Password" 9 58 --title "PASSWORD VERIFICATION" 3>&1 1>&2 2>&3); then
if [[ "$PW1" == "$PW2" ]]; then
PW="-password $PW1"
echo -e "${VERIFYPW}${BOLD}${DGN}Root Password: ${BGN}********${CL}"
break
else
whiptail --msgbox "Passwords do not match. Please try again." 8 58
fi
else
exit_script
fi
fi
else
PW1="Automatic Login"
PW=""
echo -e "${VERIFYPW}${BOLD}${DGN}Root Password: ${BGN}$PW1${CL}"
break
fi
else
exit_script
fi
done
fi
if [[ -n "${HN-}" ]]; then
echo -e "${HOSTNAME}${BOLD}${DGN}Hostname: ${BGN}$HN${CL}"
else
if CT_NAME=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set Hostname" 8 58 "$NSAPP" --title "HOSTNAME" 3>&1 1>&2 2>&3); then
if [ -z "$CT_NAME" ]; then
HN="$NSAPP"
else
HN=$(echo "${CT_NAME,,}" | tr -d ' ')
fi
echo -e "${HOSTNAME}${BOLD}${DGN}Hostname: ${BGN}$HN${CL}"
else
exit_script
fi
fi
if [[ -n "${DISK_SIZE-}" ]]; then
if [[ "$DISK_SIZE" =~ ^-?[0-9]+$ ]]; then
echo -e "${DISKSIZE}${BOLD}${DGN}Disk Size: ${BGN}${DISK_SIZE} GB${CL}"
else
msg_error "DISK_SIZE must be an integer, was ${DISK_SIZE}"
exit
fi
else
if DISK_SIZE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set Disk Size in GB" 8 58 "$var_disk" --title "DISK SIZE" 3>&1 1>&2 2>&3); then
if [ -z "$DISK_SIZE" ]; then
DISK_SIZE="$var_disk"
echo -e "${DISKSIZE}${BOLD}${DGN}Disk Size: ${BGN}${DISK_SIZE} GB${CL}"
else
if ! [[ $DISK_SIZE =~ $INTEGER ]]; then
echo -e "{INFO}${HOLD}${RD} DISK SIZE MUST BE AN INTEGER NUMBER!${CL}"
advanced_settings
fi
echo -e "${DISKSIZE}${BOLD}${DGN}Disk Size: ${BGN}${DISK_SIZE} GB${CL}"
fi
else
exit_script
fi
fi
if [[ -n "${CORE_COUNT-}" ]]; then
if [[ "$CORE_COUNT" =~ ^-?[0-9]+$ ]]; then
echo -e "${CPUCORE}${BOLD}${DGN}CPU Cores: ${BGN}${CORE_COUNT}${CL}"
else
msg_error "CORE_COUNT must be an integer, was ${CORE_COUNT}"
exit
fi
else
if CORE_COUNT=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Allocate CPU Cores" 8 58 "$var_cpu" --title "CORE COUNT" 3>&1 1>&2 2>&3); then
if [ -z "$CORE_COUNT" ]; then
CORE_COUNT="$var_cpu"
echo -e "${CPUCORE}${BOLD}${DGN}CPU Cores: ${BGN}$CORE_COUNT${CL}"
else
echo -e "${CPUCORE}${BOLD}${DGN}CPU Cores: ${BGN}$CORE_COUNT${CL}"
fi
else
exit_script
fi
fi
if [[ -n "${RAM_SIZE-}" ]]; then
if [[ "$RAM_SIZE" =~ ^-?[0-9]+$ ]]; then
echo -e "${RAMSIZE}${BOLD}${DGN}RAM Size: ${BGN}${RAM_SIZE} MiB${CL}"
else
msg_error "RAM_SIZE must be an integer, was ${RAM_SIZE}"
exit
fi
else
if RAM_SIZE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Allocate RAM in MiB" 8 58 "$var_ram" --title "RAM" 3>&1 1>&2 2>&3); then
if [ -z "$RAM_SIZE" ]; then
RAM_SIZE="$var_ram"
echo -e "${RAMSIZE}${BOLD}${DGN}RAM Size: ${BGN}${RAM_SIZE} MiB${CL}"
else
echo -e "${RAMSIZE}${BOLD}${DGN}RAM Size: ${BGN}${RAM_SIZE} MiB${CL}"
fi
else
exit_script
fi
fi
IFACE_FILEPATH_LIST="/etc/network/interfaces"$'\n'$(find "/etc/network/interfaces.d/" -type f)
BRIDGES=""
OLD_IFS=$IFS
IFS=$'\n'
for iface_filepath in ${IFACE_FILEPATH_LIST}; do
iface_indexes_tmpfile=$(mktemp -q -u '.iface-XXXX')
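# Build one "start:end" line-number range per iface stanza: list the line numbers of
# all lines beginning with "iface" plus the file's total line count, then pair
# consecutive numbers so each stanza can be scanned for bridge options below.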
( grep -Pn '^\s*iface' "${iface_filepath}" | cut -d':' -f1 && wc -l "${iface_filepath}" | cut -d' ' -f1 ) | awk 'FNR==1 {line=$0; next} {print line":"$0-1; line=$0}' > "${iface_indexes_tmpfile}" || true
if [ -f "${iface_indexes_tmpfile}" ]; then
while read -r pair; do
start=$(echo "${pair}" | cut -d':' -f1)
end=$(echo "${pair}" | cut -d':' -f2)
if awk "NR >= ${start} && NR <= ${end}" "${iface_filepath}" | grep -qP '^\s*(bridge[-_](ports|stp|fd|vlan-aware|vids)|ovs_type\s+OVSBridge)\b'; then
iface_name=$(sed "${start}q;d" "${iface_filepath}" | awk '{print $2}')
BRIDGES="${iface_name}"$'\n'"${BRIDGES}"
fi
done < "${iface_indexes_tmpfile}"
rm -f "${iface_indexes_tmpfile}"
fi
done
IFS=$OLD_IFS
BRIDGES=$(echo "$BRIDGES" | grep -v '^\s*$' | sort | uniq)
if [[ -n "${BRG-}" ]]; then
if echo "$BRIDGES" | grep -q "${BRG}"; then
echo -e "${BRIDGE}${BOLD}${DGN}Bridge: ${BGN}$BRG${CL}"
else
msg_error "Bridge '${BRG}' does not exist in /etc/network/interfaces or /etc/network/interfaces.d/sdn"
exit
fi
else
BRG=$(whiptail --backtitle "Proxmox VE Helper Scripts" --menu "Select network bridge:" 15 40 6 $(echo "$BRIDGES" | awk '{print $0, "Bridge"}') 3>&1 1>&2 2>&3)
if [ -z "$BRG" ]; then
exit_script
else
echo -e "${BRIDGE}${BOLD}${DGN}Bridge: ${BGN}$BRG${CL}"
fi
fi
local ip_cidr_regex='^([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})/([0-9]{1,2})$'
local ip_regex='^([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$'
if [[ -n ${NET-} ]]; then
if [ "$NET" == "dhcp" ]; then
echo -e "${NETWORK}${BOLD}${DGN}IP Address: ${BGN}DHCP${CL}"
echo -e "${GATEWAY}${BOLD}${DGN}Gateway IP Address: ${BGN}Default${CL}"
elif [[ "$NET" =~ $ip_cidr_regex ]]; then
echo -e "${NETWORK}${BOLD}${DGN}IP Address: ${BGN}$NET${CL}"
if [ ! -z "$GATE" ]; then
if [[ "$GATE" =~ $ip_regex ]]; then
echo -e "${GATEWAY}${BOLD}${DGN}Gateway IP Address: ${BGN}$GATE${CL}"
GATE=",gw=$GATE"
else
msg_error "Invalid IP Address format for Gateway. Needs to be 0.0.0.0, was ${GATE}"
exit
fi
else
while true; do
GATE1=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Enter gateway IP address" 8 58 --title "Gateway IP" 3>&1 1>&2 2>&3)
if [ -z "$GATE1" ]; then
whiptail --backtitle "Proxmox VE Helper Scripts" --msgbox "Gateway IP address cannot be empty" 8 58
elif [[ ! "$GATE1" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then
whiptail --backtitle "Proxmox VE Helper Scripts" --msgbox "Invalid IP address format" 8 58
else
GATE=",gw=$GATE1"
echo -e "${GATEWAY}${BOLD}${DGN}Gateway IP Address: ${BGN}$GATE1${CL}"
break
fi
done
fi
elif [[ "$NET" == *-* ]]; then
IFS="-" read -r ip_start ip_end <<< "$NET"
if [[ ! "$ip_start" =~ $ip_cidr_regex ]] || [[ ! "$ip_end" =~ $ip_cidr_regex ]]; then
msg_error "Invalid IP range format, was $NET should be 0.0.0.0/0-0.0.0.0/0"
exit 1
fi
ip1="${ip_start%%/*}"
ip2="${ip_end%%/*}"
cidr="${ip_start##*/}"
ip_to_int() {
local IFS=.
read -r i1 i2 i3 i4 <<< "$1"
echo $(( (i1 << 24) + (i2 << 16) + (i3 << 8) + i4 ))
}
int_to_ip() {
local ip=$1
echo "$(( (ip >> 24) & 0xFF )).$(( (ip >> 16) & 0xFF )).$(( (ip >> 8) & 0xFF )).$(( ip & 0xFF ))"
}
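# Example: ip_to_int 10.0.0.1 yields 167772161 and int_to_ip 167772161 gives back
# 10.0.0.1, so the candidate range below can be walked as consecutive integers.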
start_int=$(ip_to_int "$ip1")
end_int=$(ip_to_int "$ip2")
for ((ip_int=start_int; ip_int<=end_int; ip_int++)); do
ip=$(int_to_ip $ip_int)
msg_info "Checking IP: $ip"
if ! ping -c 2 -W 1 "$ip" >/dev/null 2>&1; then
NET="$ip/$cidr"
msg_ok "Using free IP Address: ${BGN}$NET${CL}"
sleep 3
break
fi
done
if [[ "$NET" == *-* ]]; then
msg_error "No free IP found in range"
exit 1
fi
if [ -n "$GATE" ]; then
if [[ "$GATE" =~ $ip_regex ]]; then
echo -e "${GATEWAY}${BOLD}${DGN}Gateway IP Address: ${BGN}$GATE${CL}"
GATE=",gw=$GATE"
else
msg_error "Invalid IP Address format for Gateway. Needs to be 0.0.0.0, was ${GATE}"
exit
fi
else
while true; do
GATE1=$(whiptail --backtitle "[dev] Proxmox VE Helper Scripts" --inputbox "Enter gateway IP address" 8 58 --title "Gateway IP" 3>&1 1>&2 2>&3)
if [ -z "$GATE1" ]; then
whiptail --backtitle "Proxmox VE Helper Scripts" --msgbox "Gateway IP address cannot be empty" 8 58
elif [[ ! "$GATE1" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then
whiptail --backtitle "Proxmox VE Helper Scripts" --msgbox "Invalid IP address format" 8 58
else
GATE=",gw=$GATE1"
echo -e "${GATEWAY}${BOLD}${DGN}Gateway IP Address: ${BGN}$GATE1${CL}"
break
fi
done
fi
else
msg_error "Invalid IP Address format. Needs to be 0.0.0.0/0 or a range like 10.0.0.1/24-10.0.0.10/24, was ${NET}"
exit
fi
else
while true; do
NET=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set a Static IPv4 CIDR Address (/24)" 8 58 dhcp --title "IP ADDRESS" 3>&1 1>&2 2>&3)
exit_status=$?
if [ $exit_status -eq 0 ]; then
if [ "$NET" = "dhcp" ]; then
echo -e "${NETWORK}${BOLD}${DGN}IP Address: ${BGN}$NET${CL}"
break
else
if [[ "$NET" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}/([0-9]|[1-2][0-9]|3[0-2])$ ]]; then
echo -e "${NETWORK}${BOLD}${DGN}IP Address: ${BGN}$NET${CL}"
break
else
whiptail --backtitle "Proxmox VE Helper Scripts" --msgbox "$NET is an invalid IPv4 CIDR address. Please enter a valid IPv4 CIDR address or 'dhcp'" 8 58
fi
fi
else
exit_script
fi
done
if [ "$NET" != "dhcp" ]; then
while true; do
GATE1=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Enter gateway IP address" 8 58 --title "Gateway IP" 3>&1 1>&2 2>&3)
if [ -z "$GATE1" ]; then
whiptail --backtitle "Proxmox VE Helper Scripts" --msgbox "Gateway IP address cannot be empty" 8 58
elif [[ ! "$GATE1" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then
whiptail --backtitle "Proxmox VE Helper Scripts" --msgbox "Invalid IP address format" 8 58
else
GATE=",gw=$GATE1"
echo -e "${GATEWAY}${BOLD}${DGN}Gateway IP Address: ${BGN}$GATE1${CL}"
break
fi
done
else
GATE=""
echo -e "${GATEWAY}${BOLD}${DGN}Gateway IP Address: ${BGN}Default${CL}"
fi
fi
if [ "$var_os" == "alpine" ]; then
APT_CACHER=""
APT_CACHER_IP=""
else
if [[ -n "${APT_CACHER_IP-}" ]]; then
if [[ ! $APT_CACHER_IP == "none" ]]; then
APT_CACHER="yes"
echo -e "${NETWORK}${BOLD}${DGN}APT-CACHER IP Address: ${BGN}$APT_CACHER_IP${CL}"
else
APT_CACHER=""
echo -e "${NETWORK}${BOLD}${DGN}APT-Cacher IP Address: ${BGN}No${CL}"
fi
else
if APT_CACHER_IP=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set APT-Cacher IP (leave blank for none)" 8 58 --title "APT-Cacher IP" 3>&1 1>&2 2>&3); then
APT_CACHER="${APT_CACHER_IP:+yes}"
echo -e "${NETWORK}${BOLD}${DGN}APT-Cacher IP Address: ${BGN}${APT_CACHER_IP:-Default}${CL}"
if [[ -z $APT_CACHER_IP ]]; then
APT_CACHER_IP="none"
fi
else
exit_script
fi
fi
fi
if [[ "${DISABLEIP6-}" == "yes" ]]; then
echo -e "${DISABLEIPV6}${BOLD}${DGN}Disable IPv6: ${BGN}Yes${CL}"
elif [[ "${DISABLEIP6-}" == "no" ]]; then
echo -e "${DISABLEIPV6}${BOLD}${DGN}Disable IPv6: ${BGN}No${CL}"
else
if (whiptail --backtitle "Proxmox VE Helper Scripts" --defaultno --title "IPv6" --yesno "Disable IPv6?" 10 58); then
DISABLEIP6="yes"
else
DISABLEIP6="no"
fi
echo -e "${DISABLEIPV6}${BOLD}${DGN}Disable IPv6: ${BGN}$DISABLEIP6${CL}"
fi
if [[ -n "${MTU-}" ]]; then
if [[ "$MTU" =~ ^-?[0-9]+$ ]]; then
echo -e "${DEFAULT}${BOLD}${DGN}Interface MTU Size: ${BGN}$MTU${CL}"
MTU=",mtu=$MTU"
else
msg_error "MTU must be an integer, was ${MTU}"
exit
fi
else
if MTU1=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set Interface MTU Size (leave blank for default [The MTU of your selected vmbr, default is 1500])" 8 58 --title "MTU SIZE" 3>&1 1>&2 2>&3); then
if [ -z "$MTU1" ]; then
MTU1="Default"
MTU=""
else
MTU=",mtu=$MTU1"
fi
echo -e "${DEFAULT}${BOLD}${DGN}Interface MTU Size: ${BGN}$MTU1${CL}"
else
exit_script
fi
fi
if [[ -n "${SD-}" ]]; then
if [[ "$SD" == "none" ]]; then
SD=""
echo -e "${SEARCH}${BOLD}${DGN}DNS Search Domain: ${BGN}Host${CL}"
else
echo -e "${SEARCH}${BOLD}${DGN}DNS Search Domain: ${BGN}$SD${CL}"
SD="-searchdomain=$SD"
fi
else
if SD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set a DNS Search Domain (leave blank for HOST)" 8 58 --title "DNS Search Domain" 3>&1 1>&2 2>&3); then
if [ -z "$SD" ]; then
SX=Host
SD=""
else
SX=$SD
SD="-searchdomain=$SD"
fi
echo -e "${SEARCH}${BOLD}${DGN}DNS Search Domain: ${BGN}$SX${CL}"
else
exit_script
fi
fi
if [[ -n "${NS-}" ]]; then
if [[ $NS == "none" ]]; then
NS=""
echo -e "${NETWORK}${BOLD}${DGN}DNS Server IP Address: ${BGN}Host${CL}"
else
if [[ "$NS" =~ $ip_regex ]]; then
echo -e "${NETWORK}${BOLD}${DGN}DNS Server IP Address: ${BGN}$NS${CL}"
NS="-nameserver=$NS"
else
msg_error "Invalid IP Address format for DNS Server. Needs to be 0.0.0.0, was ${NS}"
exit
fi
fi
else
if NX=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set a DNS Server IP (leave blank for HOST)" 8 58 --title "DNS SERVER IP" 3>&1 1>&2 2>&3); then
if [ -z "$NX" ]; then
NX=Host
NS=""
else
NS="-nameserver=$NX"
fi
echo -e "${NETWORK}${BOLD}${DGN}DNS Server IP Address: ${BGN}$NX${CL}"
else
exit_script
fi
fi
if [[ -n "${MAC-}" ]]; then
if [[ "$MAC" == "none" ]]; then
MAC=""
echo -e "${MACADDRESS}${BOLD}${DGN}MAC Address: ${BGN}Host${CL}"
else
if [[ "$MAC" =~ ^([A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2}$ ]]; then
echo -e "${MACADDRESS}${BOLD}${DGN}MAC Address: ${BGN}$MAC${CL}"
MAC=",hwaddr=$MAC"
else
msg_error "MAC Address must be in the format xx:xx:xx:xx:xx:xx, was ${MAC}"
exit
fi
fi
else
if MAC1=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set a MAC Address (leave blank for generated MAC)" 8 58 --title "MAC ADDRESS" 3>&1 1>&2 2>&3); then
if [ -z "$MAC1" ]; then
MAC1="Default"
MAC=""
else
MAC=",hwaddr=$MAC1"
echo -e "${MACADDRESS}${BOLD}${DGN}MAC Address: ${BGN}$MAC1${CL}"
fi
else
exit_script
fi
fi
if [[ -n "${VLAN-}" ]]; then
if [[ "$VLAN" == "none" ]]; then
VLAN=""
echo -e "${VLANTAG}${BOLD}${DGN}Vlan: ${BGN}Host${CL}"
else
if [[ "$VLAN" =~ ^-?[0-9]+$ ]]; then
echo -e "${VLANTAG}${BOLD}${DGN}Vlan: ${BGN}$VLAN${CL}"
VLAN=",tag=$VLAN"
else
msg_error "VLAN must be an integer, was ${VLAN}"
exit
fi
fi
else
if VLAN1=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set a Vlan (leave blank for no VLAN)" 8 58 --title "VLAN" 3>&1 1>&2 2>&3); then
if [ -z "$VLAN1" ]; then
VLAN1="Default"
VLAN=""
else
VLAN=",tag=$VLAN1"
fi
echo -e "${VLANTAG}${BOLD}${DGN}Vlan: ${BGN}$VLAN1${CL}"
else
exit_script
fi
fi
if [[ -n "${TAGS-}" ]]; then
if [[ "$TAGS" == *"DEFAULT"* ]]; then
TAGS="${TAGS//DEFAULT/}"
TAGS="${TAGS//;/}"
TAGS="$TAGS;${var_tags:-}"
echo -e "${NETWORK}${BOLD}${DGN}Tags: ${BGN}$TAGS${CL}"
fi
else
TAGS="community-scripts;"
if ADV_TAGS=$(whiptail --backtitle "Proxmox VE Helper Scripts" --inputbox "Set Custom Tags? [If you remove all, there will be no tags!]" 8 58 "${TAGS}" --title "Advanced Tags" 3>&1 1>&2 2>&3); then
if [ -n "${ADV_TAGS}" ]; then
ADV_TAGS=$(echo "$ADV_TAGS" | tr -d '[:space:]')
TAGS="${ADV_TAGS}"
else
TAGS=";"
fi
echo -e "${NETWORK}${BOLD}${DGN}Tags: ${BGN}$TAGS${CL}"
else
exit_script
fi
fi
if [[ -n "${SSH-}" ]]; then
if [[ "$SSH" == "yes" ]]; then
echo -e "${ROOTSSH}${BOLD}${DGN}Root SSH Access: ${BGN}$SSH${CL}"
if [[ ! -z "$SSH_AUTHORIZED_KEY" ]]; then
echo -e "${ROOTSSH}${BOLD}${DGN}SSH Authorized Key: ${BGN}********************${CL}"
else
echo -e "${ROOTSSH}${BOLD}${DGN}SSH Authorized Key: ${BGN}None${CL}"
fi
elif [[ "$SSH" == "no" ]]; then
echo -e "${ROOTSSH}${BOLD}${DGN}Root SSH Access: ${BGN}$SSH${CL}"
else
msg_error "SSH needs to be 'yes' or 'no', was ${SSH}"
exit
fi
else
SSH_AUTHORIZED_KEY="$(whiptail --backtitle "[dev] Proxmox VE Helper Scripts" --inputbox "SSH Authorized key for root (leave empty for none)" 8 58 --title "SSH Key" 3>&1 1>&2 2>&3)"
if [[ -z "${SSH_AUTHORIZED_KEY}" ]]; then
SSH_AUTHORIZED_KEY=""
fi
if [[ "$PW" == -password* || -n "$SSH_AUTHORIZED_KEY" ]]; then
if (whiptail --backtitle "[dev] Proxmox VE Helper Scripts" --defaultno --title "SSH ACCESS" --yesno "Enable Root SSH Access?" 10 58); then
SSH="yes"
else
SSH="no"
fi
echo -e "${ROOTSSH}${BOLD}${DGN}Root SSH Access: ${BGN}$SSH${CL}"
else
SSH="no"
echo -e "${ROOTSSH}${BOLD}${DGN}Root SSH Access: ${BGN}$SSH${CL}"
fi
fi
if [[ -n "${VERBOSE-}" ]]; then
if [[ "$VERBOSE" == "yes" ]]; then
echo -e "${SEARCH}${BOLD}${DGN}Verbose Mode: ${BGN}$VERBOSE${CL}"
elif [[ "$VERBOSE" == "no" ]]; then
echo -e "${SEARCH}${BOLD}${DGN}Verbose Mode: ${BGN}No${CL}"
else
msg_error "Verbose Mode needs to be 'yes' or 'no', was ${VERBOSE}"
exit
fi
else
if (whiptail --backtitle "Proxmox VE Helper Scripts" --defaultno --title "VERBOSE MODE" --yesno "Enable Verbose Mode?" 10 58); then
VERBOSE="yes"
else
VERBOSE="no"
fi
echo -e "${SEARCH}${BOLD}${DGN}Verbose Mode: ${BGN}$VERBOSE${CL}"
fi
if (whiptail --backtitle "[dev] Proxmox VE Helper Scripts" --title "ADVANCED SETTINGS WITH CONFIG FILE COMPLETE" --yesno "Ready to create ${APP} LXC?" 10 58); then
echo -e "${CREATING}${BOLD}${RD}Creating a ${APP} LXC using the above settings${CL}"
else
clear
header_info
echo -e "${INFO}${HOLD} ${GN}Using Config File on node $PVEHOST_NAME${CL}"
config_file
fi
}

View File

@ -1,633 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2025 tteck
# Author: tteck (tteckster)
# Co-Author: MickLesk
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# ------------------------------------------------------------------------------
# Optional verbose mode (debug tracing)
# ------------------------------------------------------------------------------
if [[ "${CREATE_LXC_VERBOSE:-no}" == "yes" ]]; then set -x; fi
# ------------------------------------------------------------------------------
# Load core functions (msg_info/msg_ok/msg_error/…)
# ------------------------------------------------------------------------------
if command -v curl >/dev/null 2>&1; then
# Note: this exact URL structure is intentional
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/core.func)
load_functions
elif command -v wget >/dev/null 2>&1; then
source <(wget -qO- https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/core.func)
load_functions
fi
# ------------------------------------------------------------------------------
# Strict error handling
# ------------------------------------------------------------------------------
# set -Eeuo pipefail
# trap 'error_handler $? $LINENO "$BASH_COMMAND"' ERR
# trap on_exit EXIT
# trap on_interrupt INT
# trap on_terminate TERM
# error_handler() {
# local exit_code="$1"
# local line_number="$2"
# local command="${3:-}"
# if [[ "$exit_code" -eq 0 ]]; then
# return 0
# fi
# printf "\e[?25h"
# echo -e "\n${RD}[ERROR]${CL} in line ${RD}${line_number}${CL}: exit code ${RD}${exit_code}${CL}: while executing command ${YW}${command}${CL}\n"
# exit "$exit_code"
# }
# on_exit() {
# local exit_code="$?"
# [[ -n "${lockfile:-}" && -e "$lockfile" ]] && rm -f "$lockfile"
# exit "$exit_code"
# }
# on_interrupt() {
# echo -e "\n${RD}Interrupted by user (SIGINT)${CL}"
# exit 130
# }
# on_terminate() {
# echo -e "\n${RD}Terminated by signal (SIGTERM)${CL}"
# exit 143
# }
exit_script() {
clear
printf "\e[?25h"
echo -e "\n${CROSS}${RD}User exited script${CL}\n"
kill 0
exit 1
}
# ------------------------------------------------------------------------------
# Helpers (dynamic versioning / template parsing)
# ------------------------------------------------------------------------------
pkg_ver() { dpkg-query -W -f='${Version}\n' "$1" 2>/dev/null || echo ""; }
pkg_cand() { apt-cache policy "$1" 2>/dev/null | awk '/Candidate:/ {print $2}'; }
ver_ge() { dpkg --compare-versions "$1" ge "$2"; }
ver_gt() { dpkg --compare-versions "$1" gt "$2"; }
ver_lt() { dpkg --compare-versions "$1" lt "$2"; }
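# e.g. ver_ge "13.1-1" "13.0" exits 0, since dpkg orders 13.1-1 after 13.0.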
# Extract Debian OS minor from template name: debian-13-standard_13.1-1_amd64.tar.zst => "13.1"
parse_template_osver() { sed -n 's/.*_\([0-9][0-9]*\(\.[0-9]\+\)\?\)-.*/\1/p' <<<"$1"; }
# Offer upgrade for pve-container/lxc-pve if candidate > installed; optional auto-retry pct create
# Returns:
# 0 = no upgrade needed
# 1 = upgraded (and if do_retry=yes and retry succeeded, creation done)
# 2 = user declined
# 3 = upgrade attempted but failed OR retry failed
offer_lxc_stack_upgrade_and_maybe_retry() {
local do_retry="${1:-no}" # yes|no
local _pvec_i _pvec_c _lxcp_i _lxcp_c need=0
_pvec_i="$(pkg_ver pve-container)"
_lxcp_i="$(pkg_ver lxc-pve)"
_pvec_c="$(pkg_cand pve-container)"
_lxcp_c="$(pkg_cand lxc-pve)"
if [[ -n "$_pvec_c" && "$_pvec_c" != "none" ]]; then
ver_gt "$_pvec_c" "${_pvec_i:-0}" && need=1
fi
if [[ -n "$_lxcp_c" && "$_lxcp_c" != "none" ]]; then
ver_gt "$_lxcp_c" "${_lxcp_i:-0}" && need=1
fi
if [[ $need -eq 0 ]]; then
msg_debug "No newer candidate for pve-container/lxc-pve (installed=$_pvec_i/$_lxcp_i, cand=$_pvec_c/$_lxcp_c)"
return 0
fi
echo
echo "An update for the Proxmox LXC stack is available:"
echo " pve-container: installed=${_pvec_i:-n/a} candidate=${_pvec_c:-n/a}"
echo " lxc-pve : installed=${_lxcp_i:-n/a} candidate=${_lxcp_c:-n/a}"
echo
read -rp "Do you want to upgrade now? [y/N] " _ans
case "${_ans,,}" in
y | yes)
msg_info "Upgrading Proxmox LXC stack (pve-container, lxc-pve)"
if apt-get update -qq >/dev/null && apt-get install -y --only-upgrade pve-container lxc-pve >/dev/null; then
msg_ok "LXC stack upgraded."
if [[ "$do_retry" == "yes" ]]; then
msg_info "Retrying container creation after upgrade"
if pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" >>"$LOGFILE" 2>&1; then
msg_ok "Container created successfully after upgrade."
return 1
else
msg_error "pct create still failed after upgrade. See $LOGFILE"
return 3
fi
fi
return 1
else
msg_error "Upgrade failed. Please check APT output."
return 3
fi
;;
*) return 2 ;;
esac
}
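# Hypothetical caller sketch (comments only, not executed) showing how the return
# codes documented above could be handled:
#   offer_lxc_stack_upgrade_and_maybe_retry "yes"
#   case $? in
#     0) : ;;                               # stack already current
#     1) : ;;                               # upgraded (and creation retried, if requested)
#     2) msg_warn "LXC stack upgrade declined" ;;
#     3) msg_error "Upgrade or retry failed; see $LOGFILE" ;;
#   esac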
# ------------------------------------------------------------------------------
# Storage discovery / selection helpers
# ------------------------------------------------------------------------------
resolve_storage_preselect() {
local class="$1" preselect="$2" required_content=""
case "$class" in
template) required_content="vztmpl" ;;
container) required_content="rootdir" ;;
*) return 1 ;;
esac
[[ -z "$preselect" ]] && return 1
if ! pvesm status -content "$required_content" | awk 'NR>1{print $1}' | grep -qx -- "$preselect"; then
msg_warn "Preselected storage '${preselect}' does not support content '${required_content}' (or not found)"
return 1
fi
local line total used free
line="$(pvesm status | awk -v s="$preselect" 'NR>1 && $1==s {print $0}')"
if [[ -z "$line" ]]; then
STORAGE_INFO="n/a"
else
total="$(awk '{print $4}' <<<"$line")"
used="$(awk '{print $5}' <<<"$line")"
free="$(awk '{print $6}' <<<"$line")"
local total_h used_h free_h
if command -v numfmt >/dev/null 2>&1; then
total_h="$(numfmt --to=iec --suffix=B --format %.1f "$total" 2>/dev/null || echo "$total")"
used_h="$(numfmt --to=iec --suffix=B --format %.1f "$used" 2>/dev/null || echo "$used")"
free_h="$(numfmt --to=iec --suffix=B --format %.1f "$free" 2>/dev/null || echo "$free")"
STORAGE_INFO="Free: ${free_h} Used: ${used_h}"
else
STORAGE_INFO="Free: ${free} Used: ${used}"
fi
fi
STORAGE_RESULT="$preselect"
return 0
}
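# Illustrative call (assumes a storage named "local" exists): resolve_storage_preselect
# template "local" accepts the preset only if "local" advertises the vztmpl content type,
# then sets the STORAGE_RESULT and STORAGE_INFO globals for the caller.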
check_storage_support() {
local CONTENT="$1" VALID=0
while IFS= read -r line; do
local STORAGE_NAME
STORAGE_NAME=$(awk '{print $1}' <<<"$line")
[[ -n "$STORAGE_NAME" ]] && VALID=1
done < <(pvesm status -content "$CONTENT" 2>/dev/null | awk 'NR>1')
[[ $VALID -eq 1 ]]
}
select_storage() {
local CLASS=$1 CONTENT CONTENT_LABEL
case $CLASS in
container)
CONTENT='rootdir'
CONTENT_LABEL='Container'
;;
template)
CONTENT='vztmpl'
CONTENT_LABEL='Container template'
;;
iso)
CONTENT='iso'
CONTENT_LABEL='ISO image'
;;
images)
CONTENT='images'
CONTENT_LABEL='VM Disk image'
;;
backup)
CONTENT='backup'
CONTENT_LABEL='Backup'
;;
snippets)
CONTENT='snippets'
CONTENT_LABEL='Snippets'
;;
*)
msg_error "Invalid storage class '$CLASS'"
return 1
;;
esac
if [[ "$CONTENT" == "rootdir" && -n "${STORAGE:-}" ]]; then
if pvesm status -content "$CONTENT" | awk 'NR>1 {print $1}' | grep -qx "$STORAGE"; then
STORAGE_RESULT="$STORAGE"
msg_info "Using preset storage: $STORAGE_RESULT for $CONTENT_LABEL"
return 0
else
msg_error "Preset storage '$STORAGE' is not valid for content type '$CONTENT'."
return 2
fi
fi
declare -A STORAGE_MAP
local -a MENU=()
local COL_WIDTH=0
while read -r TAG TYPE _ TOTAL USED FREE _; do
[[ -n "$TAG" && -n "$TYPE" ]] || continue
local DISPLAY="${TAG} (${TYPE})"
local USED_FMT=$(numfmt --to=iec --from-unit=K --format %.1f <<<"$USED")
local FREE_FMT=$(numfmt --to=iec --from-unit=K --format %.1f <<<"$FREE")
local INFO="Free: ${FREE_FMT}B Used: ${USED_FMT}B"
STORAGE_MAP["$DISPLAY"]="$TAG"
MENU+=("$DISPLAY" "$INFO" "OFF")
((${#DISPLAY} > COL_WIDTH)) && COL_WIDTH=${#DISPLAY}
done < <(pvesm status -content "$CONTENT" | awk 'NR>1')
if [[ ${#MENU[@]} -eq 0 ]]; then
msg_error "No storage found for content type '$CONTENT'."
return 2
fi
if [[ $((${#MENU[@]} / 3)) -eq 1 ]]; then
STORAGE_RESULT="${STORAGE_MAP[${MENU[0]}]}"
STORAGE_INFO="${MENU[1]}"
return 0
fi
local WIDTH=$((COL_WIDTH + 42))
while true; do
local DISPLAY_SELECTED
DISPLAY_SELECTED=$(whiptail --backtitle "Proxmox VE Helper Scripts" \
--title "Storage Pools" \
--radiolist "Which storage pool for ${CONTENT_LABEL,,}?\n(Spacebar to select)" \
16 "$WIDTH" 6 "${MENU[@]}" 3>&1 1>&2 2>&3) || exit_script
DISPLAY_SELECTED=$(sed 's/[[:space:]]*$//' <<<"$DISPLAY_SELECTED")
if [[ -z "$DISPLAY_SELECTED" || -z "${STORAGE_MAP[$DISPLAY_SELECTED]+_}" ]]; then
whiptail --msgbox "No valid storage selected. Please try again." 8 58
continue
fi
STORAGE_RESULT="${STORAGE_MAP[$DISPLAY_SELECTED]}"
for ((i = 0; i < ${#MENU[@]}; i += 3)); do
if [[ "${MENU[$i]}" == "$DISPLAY_SELECTED" ]]; then
STORAGE_INFO="${MENU[$i + 1]}"
break
fi
done
return 0
done
}
# ------------------------------------------------------------------------------
# Required input variables
# ------------------------------------------------------------------------------
[[ "${CTID:-}" ]] || {
msg_error "You need to set 'CTID' variable."
exit 203
}
[[ "${PCT_OSTYPE:-}" ]] || {
msg_error "You need to set 'PCT_OSTYPE' variable."
exit 204
}
msg_debug "CTID=$CTID"
msg_debug "PCT_OSTYPE=$PCT_OSTYPE"
msg_debug "PCT_OSVERSION=${PCT_OSVERSION:-default}"
# ID checks
[[ "$CTID" -ge 100 ]] || {
msg_error "ID cannot be less than 100."
exit 205
}
if qm status "$CTID" &>/dev/null || pct status "$CTID" &>/dev/null; then
echo -e "ID '$CTID' is already in use."
unset CTID
msg_error "Cannot use ID that is already in use."
exit 206
fi
# Storage capability check
check_storage_support "rootdir" || {
msg_error "No valid storage found for 'rootdir' [Container]"
exit 1
}
check_storage_support "vztmpl" || {
msg_error "No valid storage found for 'vztmpl' [Template]"
exit 1
}
# Template storage selection
if resolve_storage_preselect template "${TEMPLATE_STORAGE:-}"; then
TEMPLATE_STORAGE="$STORAGE_RESULT"
TEMPLATE_STORAGE_INFO="$STORAGE_INFO"
msg_ok "Storage ${BL}${TEMPLATE_STORAGE}${CL} (${TEMPLATE_STORAGE_INFO}) [Template]"
else
while true; do
if select_storage template; then
TEMPLATE_STORAGE="$STORAGE_RESULT"
TEMPLATE_STORAGE_INFO="$STORAGE_INFO"
msg_ok "Storage ${BL}${TEMPLATE_STORAGE}${CL} (${TEMPLATE_STORAGE_INFO}) [Template]"
break
fi
done
fi
# Container storage selection
if resolve_storage_preselect container "${CONTAINER_STORAGE:-}"; then
CONTAINER_STORAGE="$STORAGE_RESULT"
CONTAINER_STORAGE_INFO="$STORAGE_INFO"
msg_ok "Storage ${BL}${CONTAINER_STORAGE}${CL} (${CONTAINER_STORAGE_INFO}) [Container]"
else
while true; do
if select_storage container; then
CONTAINER_STORAGE="$STORAGE_RESULT"
CONTAINER_STORAGE_INFO="$STORAGE_INFO"
msg_ok "Storage ${BL}${CONTAINER_STORAGE}${CL} (${CONTAINER_STORAGE_INFO}) [Container]"
break
fi
done
fi
# Validate content types
msg_info "Validating content types of storage '$CONTAINER_STORAGE'"
STORAGE_CONTENT=$(grep -A4 -E "^(zfspool|dir|lvmthin|lvm): $CONTAINER_STORAGE" /etc/pve/storage.cfg | grep content | awk '{$1=""; print $0}' | xargs)
msg_debug "Storage '$CONTAINER_STORAGE' has content types: $STORAGE_CONTENT"
grep -qw "rootdir" <<<"$STORAGE_CONTENT" || {
msg_error "Storage '$CONTAINER_STORAGE' does not support 'rootdir'. Cannot create LXC."
exit 217
}
$STD msg_ok "Storage '$CONTAINER_STORAGE' supports 'rootdir'"
msg_info "Validating content types of template storage '$TEMPLATE_STORAGE'"
TEMPLATE_CONTENT=$(grep -A4 -E "^[^:]+: $TEMPLATE_STORAGE" /etc/pve/storage.cfg | grep content | awk '{$1=""; print $0}' | xargs)
msg_debug "Template storage '$TEMPLATE_STORAGE' has content types: $TEMPLATE_CONTENT"
if ! grep -qw "vztmpl" <<<"$TEMPLATE_CONTENT"; then
msg_warn "Template storage '$TEMPLATE_STORAGE' does not declare 'vztmpl'. This may cause pct create to fail."
else
$STD msg_ok "Template storage '$TEMPLATE_STORAGE' supports 'vztmpl'"
fi
# Free space check
STORAGE_FREE=$(pvesm status | awk -v s="$CONTAINER_STORAGE" '$1 == s { print $6 }')
REQUIRED_KB=$((${PCT_DISK_SIZE:-8} * 1024 * 1024))
[[ "$STORAGE_FREE" -ge "$REQUIRED_KB" ]] || {
msg_error "Not enough space on '$CONTAINER_STORAGE'. Needed: ${PCT_DISK_SIZE:-8}G."
exit 214
}
# Cluster quorum (if cluster)
if [[ -f /etc/pve/corosync.conf ]]; then
msg_info "Checking cluster quorum"
if ! pvecm status | awk -F':' '/^Quorate/ { exit ($2 ~ /Yes/) ? 0 : 1 }'; then
msg_error "Cluster is not quorate. Start all nodes or configure quorum device (QDevice)."
exit 210
fi
msg_ok "Cluster is quorate"
fi
# ------------------------------------------------------------------------------
# Template discovery & validation
# ------------------------------------------------------------------------------
TEMPLATE_SEARCH="${PCT_OSTYPE}-${PCT_OSVERSION:-}"
case "$PCT_OSTYPE" in
debian | ubuntu) TEMPLATE_PATTERN="-standard_" ;;
alpine | fedora | rocky | centos) TEMPLATE_PATTERN="-default_" ;;
*) TEMPLATE_PATTERN="" ;;
esac
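# e.g. PCT_OSTYPE=debian with PCT_OSVERSION=13 searches for "debian-13" and keeps only
# "-standard_" templates such as debian-13-standard_13.1-1_amd64.tar.zst.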
msg_info "Searching for template '$TEMPLATE_SEARCH'"
mapfile -t LOCAL_TEMPLATES < <(
pveam list "$TEMPLATE_STORAGE" 2>/dev/null |
awk -v s="$TEMPLATE_SEARCH" -v p="$TEMPLATE_PATTERN" '$1 ~ s && $1 ~ p {print $1}' |
sed 's|.*/||' | sort -t - -k 2 -V
)
pveam update >/dev/null 2>&1 || msg_warn "Could not update template catalog (pveam update failed)."
mapfile -t ONLINE_TEMPLATES < <(
pveam available -section system 2>/dev/null |
sed -n "s/.*\($TEMPLATE_SEARCH.*$TEMPLATE_PATTERN.*\)/\1/p" |
sort -t - -k 2 -V
)
ONLINE_TEMPLATE=""
[[ ${#ONLINE_TEMPLATES[@]} -gt 0 ]] && ONLINE_TEMPLATE="${ONLINE_TEMPLATES[-1]}"
if [[ ${#LOCAL_TEMPLATES[@]} -gt 0 ]]; then
TEMPLATE="${LOCAL_TEMPLATES[-1]}"
TEMPLATE_SOURCE="local"
else
TEMPLATE="$ONLINE_TEMPLATE"
TEMPLATE_SOURCE="online"
fi
TEMPLATE_PATH="$(pvesm path $TEMPLATE_STORAGE:vztmpl/$TEMPLATE 2>/dev/null || true)"
if [[ -z "$TEMPLATE_PATH" ]]; then
TEMPLATE_BASE=$(awk -v s="$TEMPLATE_STORAGE" '$1==s {f=1} f && /path/ {print $2; exit}' /etc/pve/storage.cfg)
[[ -n "$TEMPLATE_BASE" ]] && TEMPLATE_PATH="$TEMPLATE_BASE/template/cache/$TEMPLATE"
fi
[[ -n "$TEMPLATE_PATH" ]] || {
msg_error "Unable to resolve template path for $TEMPLATE_STORAGE. Check storage type and permissions."
exit 220
}
msg_ok "Template ${BL}$TEMPLATE${CL} [$TEMPLATE_SOURCE]"
msg_debug "Resolved TEMPLATE_PATH=$TEMPLATE_PATH"
NEED_DOWNLOAD=0
if [[ ! -f "$TEMPLATE_PATH" ]]; then
msg_info "Template not present locally will download."
NEED_DOWNLOAD=1
elif [[ ! -r "$TEMPLATE_PATH" ]]; then
msg_error "Template file exists but is not readable check permissions."
exit 221
elif [[ "$(stat -c%s "$TEMPLATE_PATH")" -lt 1000000 ]]; then
if [[ -n "$ONLINE_TEMPLATE" ]]; then
msg_warn "Template file too small (<1MB) re-downloading."
NEED_DOWNLOAD=1
else
msg_warn "Template looks too small, but no online version exists. Keeping local file."
fi
elif ! tar -tf "$TEMPLATE_PATH" &>/dev/null; then
if [[ -n "$ONLINE_TEMPLATE" ]]; then
msg_warn "Template appears corrupted re-downloading."
NEED_DOWNLOAD=1
else
msg_warn "Template appears corrupted, but no online version exists. Keeping local file."
fi
else
$STD msg_ok "Template $TEMPLATE is present and valid."
fi
if [[ "$TEMPLATE_SOURCE" == "local" && -n "$ONLINE_TEMPLATE" && "$TEMPLATE" != "$ONLINE_TEMPLATE" ]]; then
msg_warn "Local template is outdated: $TEMPLATE (latest available: $ONLINE_TEMPLATE)"
if whiptail --yesno "A newer template is available:\n$ONLINE_TEMPLATE\n\nDo you want to download and use it instead?" 12 70; then
TEMPLATE="$ONLINE_TEMPLATE"
NEED_DOWNLOAD=1
else
msg_info "Continuing with local template $TEMPLATE"
fi
fi
if [[ "$NEED_DOWNLOAD" -eq 1 ]]; then
[[ -f "$TEMPLATE_PATH" ]] && rm -f "$TEMPLATE_PATH"
for attempt in {1..3}; do
msg_info "Attempt $attempt: Downloading template $TEMPLATE to $TEMPLATE_STORAGE"
if pveam download "$TEMPLATE_STORAGE" "$TEMPLATE" >/dev/null 2>&1; then
msg_ok "Template download successful."
break
fi
if [[ $attempt -eq 3 ]]; then
msg_error "Failed after 3 attempts. Please check network access, permissions, or manually run:\n pveam download $TEMPLATE_STORAGE $TEMPLATE"
exit 222
fi
sleep $((attempt * 5))
done
fi
if ! pveam list "$TEMPLATE_STORAGE" 2>/dev/null | grep -q "$TEMPLATE"; then
msg_error "Template $TEMPLATE not available in storage $TEMPLATE_STORAGE after download."
exit 223
fi
# ------------------------------------------------------------------------------
# Dynamic preflight for Debian 13.x: offer upgrade if available (no hard mins)
# ------------------------------------------------------------------------------
if [[ "$PCT_OSTYPE" == "debian" ]]; then
OSVER="$(parse_template_osver "$TEMPLATE")"
if [[ -n "$OSVER" ]]; then
# Proactive, but non-blocking: only offer the upgrade, never abort
offer_lxc_stack_upgrade_and_maybe_retry "no" || true
fi
fi
# ------------------------------------------------------------------------------
# Create LXC Container
# ------------------------------------------------------------------------------
msg_info "Creating LXC container"
# Ensure subuid/subgid entries exist
grep -q "root:100000:65536" /etc/subuid || echo "root:100000:65536" >>/etc/subuid
grep -q "root:100000:65536" /etc/subgid || echo "root:100000:65536" >>/etc/subgid
# Assemble pct options
PCT_OPTIONS=(${PCT_OPTIONS[@]:-${DEFAULT_PCT_OPTIONS[@]}})
[[ " ${PCT_OPTIONS[*]} " =~ " -rootfs " ]] || PCT_OPTIONS+=(-rootfs "$CONTAINER_STORAGE:${PCT_DISK_SIZE:-8}")
# Lock by template file (avoid concurrent downloads/creates)
lockfile="/tmp/template.${TEMPLATE}.lock"
exec 9>"$lockfile" || {
msg_error "Failed to create lock file '$lockfile'."
exit 200
}
flock -w 60 9 || {
msg_error "Timeout while waiting for template lock."
exit 211
}
LOGFILE="/tmp/pct_create_${CTID}.log"
msg_debug "pct create command: pct create $CTID ${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE} ${PCT_OPTIONS[*]}"
msg_debug "Logfile: $LOGFILE"
# First attempt
if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" >"$LOGFILE" 2>&1; then
msg_error "Container creation failed on ${TEMPLATE_STORAGE}. Checking template..."
# Validate template file
if [[ ! -s "$TEMPLATE_PATH" || "$(stat -c%s "$TEMPLATE_PATH")" -lt 1000000 ]]; then
msg_warn "Template file too small or missing re-downloading."
rm -f "$TEMPLATE_PATH"
pveam download "$TEMPLATE_STORAGE" "$TEMPLATE"
elif ! tar -tf "$TEMPLATE_PATH" &>/dev/null; then
if [[ -n "$ONLINE_TEMPLATE" ]]; then
msg_warn "Template appears corrupted re-downloading."
rm -f "$TEMPLATE_PATH"
pveam download "$TEMPLATE_STORAGE" "$TEMPLATE"
else
msg_warn "Template appears corrupted, but no online version exists. Skipping re-download."
fi
fi
# Retry after repair
if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" >>"$LOGFILE" 2>&1; then
# Fallback to local storage
if [[ "$TEMPLATE_STORAGE" != "local" ]]; then
msg_warn "Retrying container creation with fallback to local storage..."
LOCAL_TEMPLATE_PATH="/var/lib/vz/template/cache/$TEMPLATE"
if [[ ! -f "$LOCAL_TEMPLATE_PATH" ]]; then
msg_info "Downloading template to local..."
pveam download local "$TEMPLATE" >/dev/null 2>&1
fi
if pct create "$CTID" "local:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" >>"$LOGFILE" 2>&1; then
msg_ok "Container successfully created using local fallback."
else
# --- Dynamic stack upgrade + auto-retry on the well-known error pattern ---
if grep -qiE 'unsupported .* version' "$LOGFILE"; then
echo
echo "pct reported 'unsupported ... version' your LXC stack might be too old for this template."
echo "We can try to upgrade 'pve-container' and 'lxc-pve' now and retry automatically."
if offer_lxc_stack_upgrade_and_maybe_retry "yes"; then
: # success after retry
else
rc=$?
case $rc in
2) echo "Upgrade was declined. Please update and re-run:
apt update && apt install --only-upgrade pve-container lxc-pve" ;;
3) echo "Upgrade and/or retry failed. Please inspect: $LOGFILE" ;;
esac
exit 231
fi
else
msg_error "Container creation failed even with local fallback. See $LOGFILE"
if whiptail --yesno "pct create failed.\nDo you want to enable verbose debug mode and view detailed logs?" 12 70; then
set -x
bash -x -c "pct create $CTID local:vztmpl/${TEMPLATE} ${PCT_OPTIONS[*]}" 2>&1 | tee -a "$LOGFILE"
set +x
fi
exit 209
fi
fi
else
msg_error "Container creation failed on local storage. See $LOGFILE"
# --- Dynamic stack upgrade + auto-retry on the well-known error pattern ---
if grep -qiE 'unsupported .* version' "$LOGFILE"; then
echo
echo "pct reported 'unsupported ... version' your LXC stack might be too old for this template."
echo "We can try to upgrade 'pve-container' and 'lxc-pve' now and retry automatically."
if offer_lxc_stack_upgrade_and_maybe_retry "yes"; then
: # success after retry
else
rc=$?
case $rc in
2) echo "Upgrade was declined. Please update and re-run:
apt update && apt install --only-upgrade pve-container lxc-pve" ;;
3) echo "Upgrade and/or retry failed. Please inspect: $LOGFILE" ;;
esac
exit 231
fi
else
if whiptail --yesno "pct create failed.\nDo you want to enable verbose debug mode and view detailed logs?" 12 70; then
set -x
bash -x -c "pct create $CTID local:vztmpl/${TEMPLATE} ${PCT_OPTIONS[*]}" 2>&1 | tee -a "$LOGFILE"
set +x
fi
exit 209
fi
fi
fi
fi
# Verify container exists
pct list | awk '{print $1}' | grep -qx "$CTID" || {
msg_error "Container ID $CTID not listed in 'pct list'. See $LOGFILE"
exit 215
}
# Verify config rootfs
grep -q '^rootfs:' "/etc/pve/lxc/$CTID.conf" || {
msg_error "RootFS entry missing in container config. See $LOGFILE"
exit 216
}
msg_ok "LXC Container ${BL}$CTID${CL} ${GN}was successfully created."

View File

@ -1,41 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://almalinux.org/
APP="AlmaLinux"
var_tags="${var_tags:-os}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-512}"
var_disk="${var_disk:-4}"
var_os="${var_os:-almalinux}"
var_version="${var_version:-10}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /var ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_info "Updating $APP LXC"
$STD dnf -y upgrade
msg_ok "Updated $APP LXC"
exit
}
start
build_container
description
msg_ok "Completed Successfully!"
msg_custom "🚀" "${GN}" "${APP} setup has been successfully initialized!"

88
ct/alpine-bitmagnet.sh Normal file
View File

@ -0,0 +1,88 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: Slaviša Arežina (tremor021)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/bitmagnet-io/bitmagnet
APP="Alpine-bitmagnet"
var_tags="${var_tags:-alpine;torrent}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-256}"
var_disk="${var_disk:-3}"
var_os="${var_os:-alpine}"
var_version="${var_version:-3.21}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
if [[ ! -d /opt/bitmagnet ]]; then
msg_error "No ${APP} Installation Found!"
exit 1
fi
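# The awk substr trims the leading '"v' and trailing '",' from the tag_name value,
# leaving the bare version number (e.g. '"v1.2.3",' becomes 1.2.3).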
RELEASE=$(curl -s https://api.github.com/repos/bitmagnet-io/bitmagnet/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [ "${RELEASE}" != "$(cat /opt/bitmagnet_version.txt)" ] || [ ! -f /opt/bitmagnet_version.txt ]; then
msg_info "Backing up database"
rm -f /tmp/backup.sql
$STD sudo -u postgres pg_dump \
--column-inserts \
--data-only \
--on-conflict-do-nothing \
--rows-per-insert=1000 \
--table=metadata_sources \
--table=content \
--table=content_attributes \
--table=content_collections \
--table=content_collections_content \
--table=torrent_sources \
--table=torrents \
--table=torrent_files \
--table=torrent_hints \
--table=torrent_contents \
--table=torrent_tags \
--table=torrents_torrent_sources \
--table=key_values \
bitmagnet \
>/tmp/backup.sql
mv /tmp/backup.sql /opt/
msg_ok "Database backed up"
msg_info "Updating ${APP} from $(cat /opt/bitmagnet_version.txt) to ${RELEASE}"
$STD apk -U upgrade
$STD service bitmagnet stop
[ -f /opt/bitmagnet/.env ] && cp /opt/bitmagnet/.env /opt/
[ -f /opt/bitmagnet/config.yml ] && cp /opt/bitmagnet/config.yml /opt/
rm -rf /opt/bitmagnet/*
temp_file=$(mktemp)
curl -fsSL "https://github.com/bitmagnet-io/bitmagnet/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
tar zxf "$temp_file" --strip-components=1 -C /opt/bitmagnet
cd /opt/bitmagnet
$STD go build
chmod +x bitmagnet
[ -f "/opt/.env" ] && cp "/opt/.env" /opt/bitmagnet/
[ -f "/opt/config.yml" ] && cp "/opt/config.yml" /opt/bitmagnet/
rm -f "$temp_file"
echo "${RELEASE}" >/opt/bitmagnet_version.txt
$STD service bitmagnet start
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
fi
exit 0
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following IP:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3333${CL}"

View File

@ -1,48 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: cobalt (cobaltgit)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://ntfy.sh/
APP="Alpine-ntfy"
var_tags="${var_tags:-notification}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-256}"
var_disk="${var_disk:-2}"
var_os="${var_os:-alpine}"
var_version="${var_version:-3.22}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /etc/ntfy ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_info "Updating $APP LXC"
$STD apk -U upgrade
setcap 'cap_net_bind_service=+ep' /usr/bin/ntfy
msg_ok "Updated $APP LXC"
msg_info "Restarting ntfy"
rc-service ntfy restart
msg_ok "Restarted ntfy"
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL}"

46
ct/alpine-syncthing.sh Normal file
View File

@ -0,0 +1,46 @@
#!/usr/bin/env bash
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://syncthing.net/
APP="Alpine-Syncthing"
var_tags="${var_tags:-alpine;networking}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-256}"
var_disk="${var_disk:-1}"
var_os="${var_os:-alpine}"
var_version="${var_version:-3.21}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
msg_info "Updating Alpine Packages"
$STD apk update
$STD apk upgrade
msg_ok "Updated Alpine Packages"
msg_info "Updating Syncthing"
$STD apk upgrade syncthing
msg_ok "Updated Syncthing"
msg_info "Restarting Syncthing"
$STD rc-service syncthing restart
msg_ok "Restarted Syncthing"
exit 0
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8384${CL}"

View File

@ -1,5 +1,5 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 tteck
# Author: tteck (tteckster)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
@ -7,11 +7,11 @@ source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxV
APP="Alpine"
var_tags="${var_tags:-os;alpine}"
var_cpu="${var_cpu:-4}"
var_ram="${var_ram:-4096}"
var_disk="${var_disk:-5}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-512}"
var_disk="${var_disk:-0.1}"
var_os="${var_os:-alpine}"
var_version="${var_version:-3.22}"
var_version="${var_version:-3.21}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
@ -20,18 +20,18 @@ color
catch_errors
function update_script() {
header_info
#check_container_storage
#check_container_resources
UPD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --radiolist --cancel-button Exit-Script "Spacebar = Select" 11 58 1 \
"1" "Check for Alpine Updates" ON \
3>&1 1>&2 2>&3)
header_info
#check_container_storage
#check_container_resources
UPD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --radiolist --cancel-button Exit-Script "Spacebar = Select" 11 58 1 \
"1" "Check for Alpine Updates" ON \
3>&1 1>&2 2>&3)
header_info
if [ "$UPD" == "1" ]; then
apk update && apk upgrade
exit
fi
header_info
if [ "$UPD" == "1" ]; then
apk update && apk upgrade
exit
fi
}
start

View File

@ -1,5 +1,5 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (Canbiz)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE

39
ct/asterisk.sh Normal file
View File

@ -0,0 +1,39 @@
#!/usr/bin/env bash
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: michelroegl-brunner
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://asterisk.org/
APP="Asterisk"
var_tags="${var_tags:-os}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-2048}"
var_disk="${var_disk:-4}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /var ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_info "No Update function provided for ${APP} LXC"
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"

91
ct/authentik.sh Normal file
View File

@ -0,0 +1,91 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: remz1337
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://goauthentik.io/
APP="Authentik"
var_tags="${var_tags:-identity-provider}"
var_disk="${var_disk:-12}"
var_cpu="${var_cpu:-6}"
var_ram="${var_ram:-10240}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -f /etc/systemd/system/authentik-server.service ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
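# The GitHub "tarball_url" field doubles as the release identifier here; awk strips the
# surrounding quotes and trailing comma so RELEASE holds the bare download URL.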
RELEASE=$(curl -fsSL https://api.github.com/repos/goauthentik/authentik/releases/latest | grep "tarball_url" | awk '{print substr($2, 2, length($2)-3)}')
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
NODE_VERSION="22"
PG_VERSION="16"
setup_uv
install_postgresql
install_node_and_modules
install_go
msg_info "Stopping ${APP}"
systemctl stop authentik-server
systemctl stop authentik-worker
msg_ok "Stopped ${APP}"
msg_info "Building ${APP} website"
mkdir -p /opt/authentik
curl -fsSL "${RELEASE}" -o "authentik.tar.gz"
tar -xzf authentik.tar.gz -C /opt/authentik --strip-components 1 --overwrite
rm -rf authentik.tar.gz
cd /opt/authentik/website
$STD npm install
$STD npm run build-bundled
cd /opt/authentik/web
$STD npm install
$STD npm run build
msg_ok "Built ${APP} website"
msg_info "Building ${APP} server"
cd /opt/authentik
go mod download
go build -o /go/authentik ./cmd/server
go build -o /opt/authentik/authentik-server /opt/authentik/cmd/server/
msg_ok "Built ${APP} server"
msg_info "Building Authentik"
cd /opt/authentik
$STD uv sync --frozen --no-install-project --no-dev
uv run python -m lifecycle.migrate
ln -s /opt/authentik/.venv/bin/gunicorn /usr/local/bin/gunicorn
ln -s /opt/authentik/.venv/bin/celery /usr/local/bin/celery
msg_ok "Authentik built"
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated ${APP} to v${RELEASE}"
msg_info "Starting ${APP}"
systemctl start authentik-server
systemctl start authentik-worker
msg_ok "Started ${APP}"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
fi
exit
}
start
build_container
description
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:9000/if/flow/initial-setup/${CL}"

View File

@ -1,17 +1,18 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (Canbiz)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source:
APP="Pixelfed"
var_tags="${var_tags:-pictures}"
var_disk="${var_disk:-7}"
APP="BabyBuddy"
var_tags="${var_tags:-baby}"
var_disk="${var_disk:-5}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-2048}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
@ -22,18 +23,16 @@ function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/pixelfed ]]; then
if [[ ! -d /opt/maxun ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/xxxx/xxxx/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
RELEASE=$(curl -s https://api.github.com/repos/xxxxx/xxxxx/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Updating ${APP} to ${RELEASE}"
cd /opt
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
msg_info "Stopping Services"
systemctl stop APP
msg_ok "Services Stopped"
fi
exit
}
start
@ -41,5 +40,6 @@ build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${APP} Setup should be reachable by going to the following URL.
${BL}http://${IP}:8000${CL} \n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"

136
ct/bar-assistant.sh Normal file
View File

@ -0,0 +1,136 @@
#!/usr/bin/env bash
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: bvdberg01
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/karlomikus/bar-assistant
# Source: https://github.com/karlomikus/vue-salt-rim
# Source: https://www.meilisearch.com/
APP="Bar-Assistant"
var_tags="${var_tags:-inventory;drinks}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-2048}"
var_disk="${var_disk:-4}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/bar-assistant ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE_MEILISEARCH=$(curl -s https://api.github.com/repos/meilisearch/meilisearch/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
RELEASE_BARASSISTANT=$(curl -s https://api.github.com/repos/karlomikus/bar-assistant/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
RELEASE_SALTRIM=$(curl -s https://api.github.com/repos/karlomikus/vue-salt-rim/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
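# Bar Assistant, the Salt Rim frontend and Meilisearch are released independently, so each
# component is version-checked and updated separately below.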
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE_BARASSISTANT}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Stopping Service"
systemctl stop nginx
msg_ok "Stopped Service"
msg_info "Updating ${APP} to v${RELEASE_BARASSISTANT}"
cd /opt
mv /opt/bar-assistant /opt/bar-assistant-backup
curl -fsSL "https://github.com/karlomikus/bar-assistant/archive/refs/tags/v${RELEASE_BARASSISTANT}.zip" -o barassistant.zip
unzip -q barassistant.zip
mv /opt/bar-assistant-${RELEASE_BARASSISTANT}/ /opt/bar-assistant
cp /opt/bar-assistant-backup/.env /opt/bar-assistant/.env
cp -r /opt/bar-assistant-backup/storage/bar-assistant /opt/bar-assistant/storage/bar-assistant
cd /opt/bar-assistant
composer install
php artisan migrate --force
php artisan storage:link
php artisan bar:setup-meilisearch
php artisan scout:sync-index-settings
php artisan config:cache
php artisan route:cache
php artisan event:cache
echo "${RELEASE_BARASSISTANT}" >/opt/${APP}_version.txt
msg_ok "Updated $APP to v${RELEASE_BARASSISTANT}"
msg_info "Starting Service"
systemctl start nginx
msg_ok "Started Service"
msg_info "Cleaning up"
rm -rf /opt/barassistant.zip
rm -rf /opt/bar-assistant-backup
msg_ok "Cleaned"
else
msg_ok "No update required. ${APP} is already at v${RELEASE_BARASSISTANT}"
fi
if [[ ! -f /opt/vue-salt-rim_version.txt ]] || [[ "${RELEASE_SALTRIM}" != "$(cat /opt/vue-salt-rim_version.txt)" ]]; then
msg_info "Stopping Service"
systemctl stop nginx
msg_ok "Stopped Service"
msg_info "Updating Salt Rim to v${RELEASE_SALTRIM}"
cd /opt
mv /opt/vue-salt-rim /opt/vue-salt-rim-backup
curl -fsSL "https://github.com/karlomikus/vue-salt-rim/archive/refs/tags/v${RELEASE_SALTRIM}.zip" -o saltrim.zip
unzip -q saltrim.zip
mv /opt/vue-salt-rim-${RELEASE_SALTRIM}/ /opt/vue-salt-rim
cp /opt/vue-salt-rim-backup/public/config.js /opt/vue-salt-rim/public/config.js
cd /opt/vue-salt-rim
npm run build
echo "${RELEASE_SALTRIM}" >/opt/vue-salt-rim_version.txt
msg_ok "Updated $APP to v${RELEASE_SALTRIM}"
msg_info "Starting Service"
systemctl start nginx
msg_ok "Started Service"
msg_info "Cleaning up"
rm -rf /opt/saltrim.zip
rm -rf /opt/vue-salt-rim-backup
msg_ok "Cleaned"
else
msg_ok "No update required. Salt Rim is already at v${RELEASE_SALTRIM}"
fi
if [[ ! -f /opt/meilisearch_version.txt ]] || [[ "${RELEASE_MEILISEARCH}" != "$(cat /opt/meilisearch_version.txt)" ]]; then
msg_info "Stopping Service"
systemctl stop meilisearch
msg_ok "Stopped Service"
msg_info "Updating Meilisearch to ${RELEASE_MEILISEARCH}"
cd /opt
RELEASE=$(curl -s https://api.github.com/repos/meilisearch/meilisearch/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
curl -fsSL https://github.com/meilisearch/meilisearch/releases/latest/download/meilisearch.deb -o meilisearch.deb
$STD dpkg -i meilisearch.deb
echo "${RELEASE_MEILISEARCH}" >/opt/meilisearch_version.txt
msg_ok "Updated Meilisearch to v${RELEASE_MEILISEARCH}"
msg_info "Starting Service"
systemctl start meilisearch
msg_ok "Started Service"
msg_info "Cleaning up"
rm -rf "/opt/meilisearch.deb"
msg_ok "Cleaned"
msg_ok "Updated Meilisearch"
else
msg_ok "No update required. Meilisearch is already at ${RELEASE_MEILISEARCH}"
fi
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL}"

75
ct/bluecherry.sh Normal file
View File

@ -0,0 +1,75 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: Slaviša Arežina (tremor021)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/TwiN/gatus
APP="bluecherry"
var_tags="${var_tags:-video;dvr}"
var_cpu="${var_cpu:-4}"
var_ram="${var_ram:-4096}"
var_disk="${var_disk:-15}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/gatus ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -s https://api.github.com/repos/TwiN/gatus/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
msg_info "Updating $APP"
msg_info "Stopping $APP"
systemctl stop gatus
msg_ok "Stopped $APP"
msg_info "Updating $APP to v${RELEASE}"
mv /opt/gatus/config/config.yaml /opt
rm -rf /opt/gatus/*
temp_file=$(mktemp)
curl -fsSL "https://github.com/TwiN/gatus/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
tar zxf "$temp_file" --strip-components=1 -C /opt/gatus
cd /opt/gatus
$STD go mod tidy
CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o gatus .
setcap CAP_NET_RAW+ep gatus
mv /opt/config.yaml config
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated $APP to v${RELEASE}"
msg_info "Starting $APP"
systemctl start gatus
msg_ok "Started $APP"
msg_info "Cleaning Up"
rm -f "$temp_file"
msg_ok "Cleanup Completed"
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
fi
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"

76
ct/bookstack.sh Normal file
View File

@ -0,0 +1,76 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (Canbiz)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/BookStackApp/BookStack
APP="Bookstack"
var_tags="${var_tags:-organizer}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-1024}"
var_disk="${var_disk:-4}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/bookstack ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/BookStackApp/BookStack/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Stopping Apache2"
systemctl stop apache2
msg_ok "Services Stopped"
msg_info "Updating ${APP} to v${RELEASE}"
mv /opt/bookstack /opt/bookstack-backup
fetch_and_deploy_gh_release BookstackApp/BookStack
cp /opt/bookstack-backup/.env /opt/bookstack/.env
cp -r /opt/bookstack-backup/public/uploads/* /opt/bookstack/public/uploads/ || true
cp -r /opt/bookstack-backup/storage/uploads/* /opt/bookstack/storage/uploads/ || true
cp -r /opt/bookstack-backup/themes/* /opt/bookstack/themes/ || true
cd /opt/bookstack
export COMPOSER_ALLOW_SUPERUSER=1
$STD composer install --no-dev
$STD php artisan migrate --force
chown www-data:www-data -R /opt/bookstack /opt/bookstack/bootstrap/cache /opt/bookstack/public/uploads /opt/bookstack/storage
chmod -R 755 /opt/bookstack /opt/bookstack/bootstrap/cache /opt/bookstack/public/uploads /opt/bookstack/storage
chmod -R 775 /opt/bookstack/storage /opt/bookstack/bootstrap/cache /opt/bookstack/public/uploads
chmod -R 640 /opt/bookstack/.env
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated ${APP} to v${RELEASE}"
msg_info "Starting Apache2"
systemctl start apache2
msg_ok "Started Apache2"
msg_info "Cleaning Up"
rm -rf /opt/bookstack-backup
rm -rf "/opt/BookStack-${RELEASE}.zip"
msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
fi
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL}"

View File

@ -1,53 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: luismco
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/ThePhaseless/Byparr
APP="Byparr"
var_tags="${var_tags:-proxy}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-2048}"
var_disk="${var_disk:-4}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/Byparr ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
if check_for_gh_release "Byparr" "ThePhaseless/Byparr"; then
msg_info "Stopping Service"
systemctl stop byparr
msg_ok "Stopped Service"
fetch_and_deploy_gh_release "Byparr" "ThePhaseless/Byparr"
msg_info "Starting Service"
systemctl start byparr
msg_ok "Started Service"
msg_ok "Updated Successfully!"
fi
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8191${CL}"

View File

@ -1,41 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://www.centos.org/centos-stream/
APP="CentOS Stream"
var_tags="${var_tags:-os}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-512}"
var_disk="${var_disk:-4}"
var_os="${var_os:-centos}"
var_version="${var_version:-9}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /var ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_info "Updating $APP LXC"
$STD dnf -y upgrade
msg_ok "Updated $APP LXC"
exit
}
start
build_container
description
msg_ok "Completed Successfully!"
msg_custom "🚀" "${GN}" "${APP} setup has been successfully initialized!"

View File

@ -1,15 +1,15 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://www.debian.org/
# Author: edoardop13
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/favonia/cloudflare-ddns
APP="Ente"
var_tags="${var_tags:-photos}"
var_cpu="${var_cpu:-4}"
var_ram="${var_ram:-4096}"
var_disk="${var_disk:-10}"
APP="Cloudflare-DDNS"
var_tags="${var_tags:-network}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-512}"
var_disk="${var_disk:-3}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
@ -23,20 +23,15 @@ function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /var ]]; then
if [[ ! -f /etc/systemd/system/cloudflare-ddns.service ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_info "Updating $APP LXC"
$STD apt-get update
$STD apt-get -y upgrade
msg_ok "Updated $APP LXC"
msg_error "There is no update function for ${APP}."
exit
}
start
build_container
description
msg_ok "Completed Successfully!"
msg_custom "🚀" "${GN}" "${APP} setup has been successfully initialized!"
msg_ok "Completed Successfully!\n"

248
ct/create_lxc.sh Normal file
View File

@ -0,0 +1,248 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2025 tteck
# Author: tteck (tteckster)
# Co-Author: MickLesk
# License: MIT
# https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# This sets verbose mode if the global variable is set to "yes"
# if [ "$VERBOSE" == "yes" ]; then set -x; fi
if command -v curl >/dev/null 2>&1; then
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/core.func)
load_functions
#echo "(create-lxc.sh) Loaded core.func via curl"
elif command -v wget >/dev/null 2>&1; then
source <(wget -qO- https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/core.func)
load_functions
#echo "(create-lxc.sh) Loaded core.func via wget"
fi
# This sets error handling options and defines the error_handler function to handle errors
set -Eeuo pipefail
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
# This function handles errors
function error_handler() {
printf "\e[?25h"
local exit_code="$?"
local line_number="$1"
local command="$2"
local error_message="${RD}[ERROR]${CL} in line ${RD}$line_number${CL}: exit code ${RD}$exit_code${CL}: while executing command ${YW}$command${CL}"
echo -e "\n$error_message\n"
exit 200
}
# This checks for the presence of valid Container Storage and Template Storage locations
msg_info "Validating Storage"
VALIDCT=$(pvesm status -content rootdir | awk 'NR>1')
if [ -z "$VALIDCT" ]; then
msg_error "Unable to detect a valid Container Storage location."
exit 1
fi
VALIDTMP=$(pvesm status -content vztmpl | awk 'NR>1')
if [ -z "$VALIDTMP" ]; then
msg_error "Unable to detect a valid Template Storage location."
exit 1
fi
# This function is used to select the storage class and determine the corresponding storage content type and label.
function select_storage() {
local CLASS=$1
local CONTENT
local CONTENT_LABEL
case $CLASS in
container)
CONTENT='rootdir'
CONTENT_LABEL='Container'
;;
template)
CONTENT='vztmpl'
CONTENT_LABEL='Container template'
;;
*) false || {
msg_error "Invalid storage class."
exit 201
} ;;
esac
# This Queries all storage locations
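# Each storage entry becomes a (tag, description, state) triplet for the whiptail radiolist;
# MSG_MAX_LENGTH tracks the widest row so the dialog can be sized to fit.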
local -a MENU
while read -r line; do
local TAG=$(echo $line | awk '{print $1}')
local TYPE=$(echo $line | awk '{printf "%-10s", $2}')
local FREE=$(echo $line | numfmt --field 4-6 --from-unit=K --to=iec --format %.2f | awk '{printf( "%9sB", $6)}')
local ITEM="Type: $TYPE Free: $FREE "
local OFFSET=2
if [[ $((${#ITEM} + $OFFSET)) -gt ${MSG_MAX_LENGTH:-} ]]; then
local MSG_MAX_LENGTH=$((${#ITEM} + $OFFSET))
fi
MENU+=("$TAG" "$ITEM" "OFF")
done < <(pvesm status -content $CONTENT | awk 'NR>1')
# Select storage location
if [ $((${#MENU[@]} / 3)) -eq 1 ]; then
printf "%s" "${MENU[0]}"
else
local STORAGE
while [ -z "${STORAGE:+x}" ]; do
STORAGE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Storage Pools" --radiolist \
"Which storage pool you would like to use for the ${CONTENT_LABEL,,}?\nTo make a selection, use the Spacebar.\n" \
16 $(($MSG_MAX_LENGTH + 23)) 6 \
"${MENU[@]}" 3>&1 1>&2 2>&3) || {
msg_error "Menu aborted."
exit 202
}
if [ $? -ne 0 ]; then
echo -e "${CROSS}${RD} Menu aborted by user.${CL}"
exit 0
fi
done
printf "%s" "$STORAGE"
fi
}
# Test if required variables are set
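# These are normally exported by the calling script (build.func); a purely illustrative
# environment could look like:
#   CTID=105  PCT_OSTYPE=debian  PCT_OSVERSION=12
#   PCT_OPTIONS="-hostname test -net0 name=eth0,bridge=vmbr0,ip=dhcp"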
[[ "${CTID:-}" ]] || {
msg_error "You need to set 'CTID' variable."
exit 203
}
[[ "${PCT_OSTYPE:-}" ]] || {
msg_error "You need to set 'PCT_OSTYPE' variable."
exit 204
}
# Test if ID is valid
[ "$CTID" -ge "100" ] || {
msg_error "ID cannot be less than 100."
exit 205
}
# Test if ID is in use
if qm status "$CTID" &>/dev/null || pct status "$CTID" &>/dev/null; then
echo -e "ID '$CTID' is already in use."
unset CTID
msg_error "Cannot use ID that is already in use."
exit 206
fi
# Get template storage
TEMPLATE_STORAGE=$(select_storage template) || exit
msg_ok "Using ${BL}$TEMPLATE_STORAGE${CL} ${GN}for Template Storage."
# Get container storage
CONTAINER_STORAGE=$(select_storage container) || exit
msg_ok "Using ${BL}$CONTAINER_STORAGE${CL} ${GN}for Container Storage."
# Update LXC template list
msg_info "Updating LXC Template List"
#check_network
pveam update >/dev/null
msg_ok "Updated LXC Template List"
# Get LXC template string
TEMPLATE_SEARCH=${PCT_OSTYPE}-${PCT_OSVERSION:-}
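# List the available system templates matching "<ostype>-<version>" and sort them by version
# (sort -V); the newest match is taken below via TEMPLATES[-1].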
mapfile -t TEMPLATES < <(pveam available -section system | sed -n "s/.*\($TEMPLATE_SEARCH.*\)/\1/p" | sort -t - -k 2 -V)
[ ${#TEMPLATES[@]} -gt 0 ] || {
msg_error "Unable to find a template when searching for '$TEMPLATE_SEARCH'."
exit 207
}
TEMPLATE="${TEMPLATES[-1]}"
TEMPLATE_PATH="$(pvesm path $TEMPLATE_STORAGE:vztmpl/$TEMPLATE)"
# Without NAS/Mount: TEMPLATE_PATH="/var/lib/vz/template/cache/$TEMPLATE"
# Check if template exists, if corrupt remove and redownload
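# "zstdcat | tar -tf -" only lists the archive contents; if that fails, the cached template
# is treated as corrupt and re-downloaded (up to 3 attempts with an increasing delay).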
if ! pveam list "$TEMPLATE_STORAGE" | grep -q "$TEMPLATE" || ! zstdcat "$TEMPLATE_PATH" | tar -tf - >/dev/null 2>&1; then
msg_warn "Template $TEMPLATE not found in storage or seems to be corrupted. Redownloading."
[[ -f "$TEMPLATE_PATH" ]] && rm -f "$TEMPLATE_PATH"
# Download with 3 attempts
for attempt in {1..3}; do
msg_info "Attempt $attempt: Downloading LXC template..."
if timeout 120 pveam download "$TEMPLATE_STORAGE" "$TEMPLATE" >/dev/null; then
msg_ok "Template download successful."
break
fi
if [ $attempt -eq 3 ]; then
msg_error "Three failed attempts. Aborting."
exit 208
fi
sleep $((attempt * 5))
done
fi
msg_ok "LXC Template is ready to use."
# Check and fix subuid/subgid
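# Unprivileged containers rely on the root UID/GID mapping; append it to /etc/subuid and
# /etc/subgid if it is not present yet.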
grep -q "root:100000:65536" /etc/subuid || echo "root:100000:65536" >>/etc/subuid
grep -q "root:100000:65536" /etc/subgid || echo "root:100000:65536" >>/etc/subgid
# Combine all options
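# Fall back to DEFAULT_PCT_OPTIONS when the caller did not pass PCT_OPTIONS, and append a
# rootfs entry on the selected container storage (default size 8 GB) if none was supplied.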
PCT_OPTIONS=(${PCT_OPTIONS[@]:-${DEFAULT_PCT_OPTIONS[@]}})
[[ " ${PCT_OPTIONS[@]} " =~ " -rootfs " ]] || PCT_OPTIONS+=(-rootfs "$CONTAINER_STORAGE:${PCT_DISK_SIZE:-8}")
msg_info "Creating LXC Container"
if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" &>/dev/null; then
msg_error "Container creation failed. Checking if template is corrupted."
if ! zstdcat "$TEMPLATE_PATH" | tar -tf - >/dev/null 2>&1; then
msg_error "Template appears to be corrupted. Removing and re-downloading."
rm -f "$TEMPLATE_PATH"
if ! timeout 120 pveam download "$TEMPLATE_STORAGE" "$TEMPLATE" >/dev/null; then
msg_error "Failed to re-download template."
exit 208
fi
msg_ok "Re-downloaded LXC Template"
if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" &>/dev/null; then
msg_error "Container creation failed after re-downloading template."
exit 200
fi
else
msg_error "Container creation failed, but template is not corrupted."
exit 209
fi
fi
: "${UDHCPC_FIX:=}"
if [ "$UDHCPC_FIX" == "yes" ]; then
# Ensure container is mounted
if ! mount | grep -q "/var/lib/lxc/${CTID}/rootfs"; then
pct mount "$CTID" >/dev/null 2>&1
MOUNTED_HERE=true
else
MOUNTED_HERE=false
fi
CONFIG_FILE="/var/lib/lxc/${CTID}/rootfs/etc/udhcpc/udhcpc.conf"
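# The rootfs may not be fully populated immediately after creation, so poll for up to ~5
# seconds until udhcpc.conf appears before patching it.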
for i in {1..10}; do
[ -f "$CONFIG_FILE" ] && break
sleep 0.5
done
if [ -f "$CONFIG_FILE" ]; then
msg_info "Patching udhcpc.conf for Alpine DNS override"
sed -i '/^#*RESOLV_CONF="/d' "$CONFIG_FILE"
awk '
/^# Do not overwrite \/etc\/resolv\.conf/ {
print
print "RESOLV_CONF=\"no\""
next
}
{ print }
' "$CONFIG_FILE" >"${CONFIG_FILE}.tmp" && mv "${CONFIG_FILE}.tmp" "$CONFIG_FILE"
msg_ok "Patched udhcpc.conf (RESOLV_CONF=\"no\")"
else
msg_error "udhcpc.conf not found in $CONFIG_FILE after waiting"
fi
# Clean up: only unmount if we mounted it here
if [ "${MOUNTED_HERE}" = true ]; then
pct unmount "$CTID" >/dev/null 2>&1
fi
fi
msg_ok "LXC Container ${BL}$CTID${CL} ${GN}was successfully created."

View File

@ -1,21 +1,18 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (CanbiZ)
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 tteck
# Author: tteck (tteckster)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source:
# Source: https://www.debian.org/
APP="Debian"
var_tags="${var_tags:-}"
var_cpu="${var_cpu:-4}"
var_ram="${var_ram:-8192}"
var_disk="${var_disk:-20}"
var_tags="${var_tags:-os}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-512}"
var_disk="${var_disk:-2}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
var_gpu="${var_gpu:-yes}"
#var_fuse="${var_fuse:-no}"
#var_tun="${var_tun:-no}"
header_info "$APP"
variables
@ -23,24 +20,23 @@ color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /var ]]; then
msg_error "No ${APP} Installation Found!"
header_info
check_container_storage
check_container_resources
if [[ ! -d /var ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_info "Updating $APP LXC"
$STD apt-get update
$STD apt-get -y upgrade
msg_ok "Updated $APP LXC"
exit
fi
msg_info "Updating $APP LXC"
$STD apt update
$STD apt upgrade -y
msg_ok "Updated $APP LXC"
cleanup_lxc
exit
}
start
build_container
description
msg_ok "Completed Successfully!"
msg_custom "🚀" "${GN}" "${APP} setup has been successfully initialized!"
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"

View File

@ -1,114 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
# Copyright (c) 2021-2025 tteck
# Author: tteck (tteckster) | Co-Author: MickLesk (Canbiz) | Co-Author: CrazyWolf13
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://homarr.dev/
APP="alpine-homarr"
var_tags="${var_tags:-arr;dashboard}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-4096}"
var_disk="${var_disk:-8}"
var_os="${var_os:-alpine}"
var_version="${var_version:-3.21}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
RELEASE=$(curl -fsSL https://api.github.com/repos/homarr-labs/homarr/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Stopping Services (Patience)"
systemctl stop homarr
msg_ok "Services Stopped"
msg_info "Backup Data"
mkdir -p /opt/homarr-data-backup
cp /opt/homarr/.env /opt/homarr-data-backup/.env
msg_ok "Backup Data"
msg_info "Updating and rebuilding ${APP} to v${RELEASE} (Patience)"
rm /opt/run_homarr.sh
cat <<'EOF' >/opt/run_homarr.sh
#!/bin/bash
set -a
source /opt/homarr/.env
set +a
export DB_DIALECT='sqlite'
export AUTH_SECRET=$(openssl rand -base64 32)
node /opt/homarr_db/migrations/$DB_DIALECT/migrate.cjs /opt/homarr_db/migrations/$DB_DIALECT
for dir in $(find /opt/homarr_db/migrations/migrations -mindepth 1 -maxdepth 1 -type d); do
dirname=$(basename "$dir")
mkdir -p "/opt/homarr_db/migrations/$dirname"
cp -r "$dir"/* "/opt/homarr_db/migrations/$dirname/" 2>/dev/null || true
done
export HOSTNAME=$(ip route get 1.1.1.1 | grep -oP 'src \K[^ ]+')
envsubst '${HOSTNAME}' < /etc/nginx/templates/nginx.conf > /etc/nginx/nginx.conf
nginx -g 'daemon off;' &
redis-server /opt/homarr/packages/redis/redis.conf &
node apps/tasks/tasks.cjs &
node apps/websocket/wssServer.cjs &
node apps/nextjs/server.js & PID=$!
wait $PID
EOF
chmod +x /opt/run_homarr.sh
NODE_VERSION=$(curl -fsSL https://raw.githubusercontent.com/homarr-labs/homarr/dev/package.json | jq -r '.engines.node | split(">=")[1] | split(".")[0]')
NODE_MODULE="pnpm@$(curl -fsSL https://raw.githubusercontent.com/homarr-labs/homarr/dev/package.json | jq -r '.packageManager | split("@")[1]')"
install_node_and_modules
rm -rf /opt/homarr
fetch_and_deploy_gh_release "homarr-labs/homarr"
mv /opt/homarr-data-backup/.env /opt/homarr/.env
cd /opt/homarr
echo "test2"
export NODE_ENV=""
$STD pnpm install --recursive --frozen-lockfile --shamefully-hoist
$STD pnpm build
cp /opt/homarr/apps/nextjs/next.config.ts .
cp /opt/homarr/apps/nextjs/package.json .
cp -r /opt/homarr/packages/db/migrations /opt/homarr_db/migrations
cp -r /opt/homarr/apps/nextjs/.next/standalone/* /opt/homarr
mkdir -p /appdata/redis
cp /opt/homarr/packages/redis/redis.conf /opt/homarr/redis.conf
rm /etc/nginx/nginx.conf
mkdir -p /etc/nginx/templates
cp /opt/homarr/nginx.conf /etc/nginx/templates/nginx.conf
mkdir -p /opt/homarr/apps/cli
cp /opt/homarr/packages/cli/cli.cjs /opt/homarr/apps/cli/cli.cjs
echo $'#!/bin/bash\ncd /opt/homarr/apps/cli && node ./cli.cjs "$@"' >/usr/bin/homarr
chmod +x /usr/bin/homarr
mkdir /opt/homarr/build
cp ./node_modules/better-sqlite3/build/Release/better_sqlite3.node ./build/better_sqlite3.node
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated ${APP}"
msg_info "Starting Services"
systemctl start homarr
msg_ok "Started Services"
msg_ok "Updated Successfully"
read -p "It's recommended to reboot the LXC after an update, would you like to reboot the LXC now ? (y/n): " choice
if [[ "$choice" =~ ^[Yy]$ ]]; then
reboot
fi
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
fi
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:7575${CL}"

View File

@ -1,35 +0,0 @@
{
"name": "Docspell",
"slug": "docspell",
"categories": [
12
],
"date_created": "2025-07-02",
"type": "ct",
"updateable": true,
"privileged": false,
"config_path": "/opt/docspell/.env",
"interface_port": 3000,
"documentation": "https://docspell.io/",
"website": "https://docspell.io/",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/svg/docspell.svg",
"description": "Docspell is an open-source self-hosted application.",
"install_methods": [
{
"type": "default",
"script": "ct/docspell.sh",
"resources": {
"cpu": 1,
"ram": 1024,
"hdd": 2,
"os": "Debian",
"version": "12"
}
}
],
"default_credentials": {
"username": null,
"password": null
},
"notes": []
}

View File

@ -1,65 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: Nícolas Pastorello (opastorello)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/jumpserver/jumpserver
APP="JumpServer"
var_tags="bastion-host;pam"
var_cpu="2"
var_ram="8192"
var_disk="60"
var_os="debian"
var_version="12"
var_unprivileged="1"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/jumpserver ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/jumpserver/installer/releases/latest | grep '"tag_name"' | sed -E 's/.*"tag_name": "([^"]+)".*/\1/')
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Updating ${APP} to ${RELEASE}"
if [[ -d /opt/jumpserver/config ]]; then
cp -r /opt/jumpserver/config /opt/jumpserver_config_backup
fi
echo "${RELEASE}" >/opt/${APP}_version.txt
rm -rf /opt/jumpserver
cd /opt
curl -fsSL "https://github.com/jumpserver/installer/releases/download/${RELEASE}/jumpserver-installer-${RELEASE}.tar.gz" -o jumpserver-installer-${RELEASE}.tar.gz
mkdir -p /opt/jumpserver
$STD tar -xzvf jumpserver-installer-${RELEASE}.tar.gz -C /opt/jumpserver --strip-components=1
if [[ -d /opt/jumpserver_config_backup ]]; then
cp -r /opt/jumpserver_config_backup /opt/jumpserver/config
rm -rf /opt/jumpserver_config_backup
fi
cd /opt/jumpserver
yes y | head -n 3 | $STD ./jmsctl.sh upgrade
$STD ./jmsctl.sh start
rm -rf /opt/jumpserver-installer-${RELEASE}.tar.gz
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}."
fi
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL}"

View File

@ -1,35 +0,0 @@
{
"name": "Maxun",
"slug": "maxun",
"categories": [
0
],
"date_created": "2025-07-02",
"type": "ct",
"updateable": true,
"privileged": false,
"config_path": "/opt/maxun/.env",
"interface_port": 3000,
"documentation": "https://maxun.io/",
"website": "https://maxun.io/",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/svg/maxun.svg",
"description": "Maxun is an open-source self-hosted application.",
"install_methods": [
{
"type": "default",
"script": "ct/maxun.sh",
"resources": {
"cpu": 1,
"ram": 1024,
"hdd": 2,
"os": "Debian",
"version": "12"
}
}
],
"default_credentials": {
"username": null,
"password": null
},
"notes": []
}

View File

@ -1,53 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://github.com/getmaxun/maxun
APP="Maxun"
var_tags="${var_tags:-automation;scraper}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-4096}"
var_disk="${var_disk:-10}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/maxun ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
# NOTE: Updates temporarily disabled due to upstream TypeScript build errors in v0.0.27+
# The mcp-worker.ts file has type instantiation issues that prevent compilation
# Pinned to v0.0.26 until upstream fixes the issue
# See: https://github.com/getmaxun/maxun/releases
msg_warn "Updates are temporarily disabled due to upstream build issues"
msg_info "Current pinned version: v0.0.26"
msg_info "Check https://github.com/getmaxun/maxun/releases for fixes"
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL}"
echo -e "${INFO}${YW} MinIO Console:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:9001${CL}"
echo -e "${INFO}${YW} Credentials saved in:${CL}"
echo -e "${TAB}/root/maxun.creds"

View File

@ -1,129 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2023 tteck
# Author: tteck (tteckster)
# License: MIT
# https://github.com/tteck/Proxmox/raw/main/LICENSE
function header_info {
clear
cat <<"EOF"
__ __ __
____ ___ / /_/ /_ ____ ____ / /_ _ ____ ______
/ __ \/ _ \/ __/ __ \/ __ \/ __ \/ __/ | |/_/ / / /_ /
/ / / / __/ /_/ /_/ / /_/ / /_/ / /__ _> </ /_/ / / /_
/_/ /_/\___/\__/_.___/\____/\____/\__(_)_/|_|\__, / /___/
/____/
EOF
}
header_info
echo -e "Loading..."
APP="netboot.xyz"
var_disk="${var_disk:-2}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-512}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
variables
color
catch_errors
function default_settings() {
CT_TYPE="1"
PW=""
CT_ID=$NEXTID
HN=$NSAPP
DISK_SIZE="$var_disk"
CORE_COUNT="$var_cpu"
RAM_SIZE="$var_ram"
BRG="vmbr0"
NET="dhcp"
GATE=""
DISABLEIP6="no"
MTU=""
SD=""
NS=""
MAC=""
VLAN=""
SSH="no"
VERB="no"
echo_default
}
function update_script() {
header_info
if [[ ! -d /opt/netboot.xyz ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_info "Stopping ${APP}"
systemctl disable netbootxyz.service &>/dev/null
systemctl stop netbootxyz
sleep 1
msg_ok "Stopped ${APP}"
msg_info "Backing up Data"
cp -R /opt/netboot.xyz/config config-backup
cp -R /opt/netboot.xyz/assets assets-backup
sleep 1
msg_ok "Backed up Data"
RELEASE=$(curl -fsSLX GET "https://api.github.com/repos/netbootxyz/netboot.xyz/releases/latest" | awk '/tag_name/{print $4;exit}' FS='[""]')
msg_info "Updating netboot.xyz to ${RELEASE}"
curl --silent -o ${RELEASE}.tar.gz -L "https://github.com/netbootxyz/netboot.xyz/archive/${RELEASE}.tar.gz" &>/dev/null
tar xvzf ${RELEASE}.tar.gz &>/dev/null
VER=$(curl -fsSL https://api.github.com/repos/netbootxyz/netboot.xyz/releases/latest |
grep "tag_name" |
awk '{print substr($2, 2, length($2)-3) }')
if [ ! -d "/opt/netboot.xyz" ]; then
mv netboot.xyz-${VER} /opt/netboot.xyz
else
cp -R netboot.xyz-${VER}/* /opt/netboot.xyz
fi
service_path="/etc/systemd/system/netbootxyz.service"
echo "[Unit]
Description=netboot.xyz
After=network.target
[Service]
Restart=always
RestartSec=5
Type=simple
User=root
WorkingDirectory=/opt/netboot.xyz
ExecStart="ansible-playbook" -i inventory site.yml
TimeoutStopSec=30
[Install]
WantedBy=multi-user.target" >$service_path
msg_ok "Updated netboot.xyz to ${RELEASE}"
msg_info "Restoring Data"
cp -R config-backup/* /opt/netboot.xyz/config
cp -R assets-backup/* /opt/netboot.xyz/assets
sleep 1
msg_ok "Restored Data"
msg_info "Cleanup"
rm -rf ${RELEASE}.tar.gz
rm -rf netboot.xyz-${VER}
rm -rf config-backup
rm -rf assets-backup
sleep 1
msg_ok "Cleaned"
msg_info "Starting ${APP}"
systemctl enable --now netbootxyz.service &>/dev/null
sleep 2
msg_ok "Started ${APP}"
msg_ok "Updated Successfully"
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${APP} should be reachable by going to the following URL.
${BL}http://${IP}:3000${CL} \n"

View File

@ -1,160 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 tteck
# Author: tteck (tteckster)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://nginxproxymanager.com/
APP="Nginx Proxy Manager"
var_tags="${var_tags:-proxy}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-1024}"
var_disk="${var_disk:-4}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -f /lib/systemd/system/npm.service ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
if ! command -v pnpm &>/dev/null; then
msg_info "Installing pnpm"
#export NODE_OPTIONS=--openssl-legacy-provider
$STD npm install -g pnpm@8.15
msg_ok "Installed pnpm"
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/NginxProxyManager/nginx-proxy-manager/releases/latest |
grep "tag_name" |
awk '{print substr($2, 3, length($2)-4) }')
msg_info "Stopping Services"
systemctl stop openresty
systemctl stop npm
msg_ok "Stopped Services"
msg_info "Cleaning Old Files"
rm -rf /app \
/var/www/html \
/etc/nginx \
/var/log/nginx \
/var/lib/nginx \
/var/cache/nginx
msg_ok "Cleaned Old Files"
msg_info "Downloading NPM v${RELEASE}"
curl -fsSL https://codeload.github.com/NginxProxyManager/nginx-proxy-manager/tar.gz/v${RELEASE} -o - | tar -xz
cd nginx-proxy-manager-${RELEASE}
msg_ok "Downloaded NPM v${RELEASE}"
msg_info "Setting up Enviroment"
ln -sf /usr/bin/python3 /usr/bin/python
ln -sf /usr/bin/certbot /opt/certbot/bin/certbot
ln -sf /usr/local/openresty/nginx/sbin/nginx /usr/sbin/nginx
ln -sf /usr/local/openresty/nginx/ /etc/nginx
sed -i "s|\"version\": \"0.0.0\"|\"version\": \"$RELEASE\"|" backend/package.json
sed -i "s|\"version\": \"0.0.0\"|\"version\": \"$RELEASE\"|" frontend/package.json
sed -i 's|"fork-me": ".*"|"fork-me": "Proxmox VE Helper-Scripts"|' frontend/js/i18n/messages.json
sed -i "s|https://github.com.*source=nginx-proxy-manager|https://helper-scripts.com|g" frontend/js/app/ui/footer/main.ejs
sed -i 's+^daemon+#daemon+g' docker/rootfs/etc/nginx/nginx.conf
NGINX_CONFS=$(find "$(pwd)" -type f -name "*.conf")
for NGINX_CONF in $NGINX_CONFS; do
sed -i 's+include conf.d+include /etc/nginx/conf.d+g' "$NGINX_CONF"
done
mkdir -p /var/www/html /etc/nginx/logs
cp -r docker/rootfs/var/www/html/* /var/www/html/
cp -r docker/rootfs/etc/nginx/* /etc/nginx/
cp docker/rootfs/etc/letsencrypt.ini /etc/letsencrypt.ini
cp docker/rootfs/etc/logrotate.d/nginx-proxy-manager /etc/logrotate.d/nginx-proxy-manager
ln -sf /etc/nginx/nginx.conf /etc/nginx/conf/nginx.conf
rm -f /etc/nginx/conf.d/dev.conf
mkdir -p /tmp/nginx/body \
/run/nginx \
/data/nginx \
/data/custom_ssl \
/data/logs \
/data/access \
/data/nginx/default_host \
/data/nginx/default_www \
/data/nginx/proxy_host \
/data/nginx/redirection_host \
/data/nginx/stream \
/data/nginx/dead_host \
/data/nginx/temp \
/var/lib/nginx/cache/public \
/var/lib/nginx/cache/private \
/var/cache/nginx/proxy_temp
chmod -R 777 /var/cache/nginx
chown root /tmp/nginx
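# Build the nginx resolver list from the host's /etc/resolv.conf; the awk one-liner prints
# every nameserver on a single line and wraps IPv6 addresses in brackets as nginx requires.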
echo resolver "$(awk 'BEGIN{ORS=" "} $1=="nameserver" {print ($2 ~ ":")? "["$2"]": $2}' /etc/resolv.conf);" >/etc/nginx/conf.d/include/resolvers.conf
if [ ! -f /data/nginx/dummycert.pem ] || [ ! -f /data/nginx/dummykey.pem ]; then
$STD openssl req -new -newkey rsa:2048 -days 3650 -nodes -x509 -subj "/O=Nginx Proxy Manager/OU=Dummy Certificate/CN=localhost" -keyout /data/nginx/dummykey.pem -out /data/nginx/dummycert.pem
fi
mkdir -p /app/global /app/frontend/images
cp -r backend/* /app
cp -r global/* /app/global
$STD python3 -m pip install --no-cache-dir certbot-dns-cloudflare
msg_ok "Setup Enviroment"
msg_info "Building Frontend"
cd ./frontend
$STD pnpm install
$STD pnpm upgrade
$STD pnpm run build
cp -r dist/* /app/frontend
cp -r app-images/* /app/frontend/images
msg_ok "Built Frontend"
msg_info "Initializing Backend"
$STD rm -rf /app/config/default.json
if [ ! -f /app/config/production.json ]; then
cat <<'EOF' >/app/config/production.json
{
"database": {
"engine": "knex-native",
"knex": {
"client": "sqlite3",
"connection": {
"filename": "/data/database.sqlite"
}
}
}
}
EOF
fi
cd /app
$STD pnpm install
msg_ok "Initialized Backend"
msg_info "Starting Services"
sed -i 's/user npm/user root/g; s/^pid/#pid/g' /usr/local/openresty/nginx/conf/nginx.conf
sed -i 's/su npm npm/su root root/g' /etc/logrotate.d/nginx-proxy-manager
sed -i 's/include-system-site-packages = false/include-system-site-packages = true/g' /opt/certbot/pyvenv.cfg
systemctl enable -q --now openresty
systemctl enable -q --now npm
msg_ok "Started Services"
msg_info "Cleaning up"
rm -rf ~/nginx-proxy-manager-*
msg_ok "Cleaned"
msg_ok "Updated Successfully"
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:81${CL}"

View File

@ -1,64 +0,0 @@
{
"name": "OpenCloud",
"slug": "opencloud",
"categories": [
2
],
"date_created": "2025-06-11",
"type": "ct",
"updateable": true,
"privileged": false,
"interface_port": 443,
"documentation": "https://docs.opencloud.eu",
"config_path": "/etc/opencloud/opencloud.env, /etc/opencloud/opencloud.yaml, /etc/opencloud/csp.yaml",
"website": "https://opencloud.eu",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/opencloud.webp",
"description": "OpenCloud is the file sharing and collaboration solution of the Heinlein Group. Through intelligent file management and a strong open source community, files become valuable resources, effectively structured and usable in the long term. With flexible data rooms and intelligent access rights, teams can access and work together on data anytime, anywhere without barriers, but with a lot of productivity.",
"install_methods": [
{
"type": "default",
"script": "ct/opencloud.sh",
"resources": {
"cpu": 2,
"ram": 2048,
"hdd": 6,
"os": "Debian",
"version": "12"
}
}
],
"default_credentials": {
"username": "admin",
"password": "randomly generated during installation process"
},
"notes": [
{
"text": "Valid TLS certificates and fully-qualified domain names behind a reverse proxy (Caddy) for 3 services - OpenCloud, Collabora, and WOPI are **REQUIRED**",
"type": "warning"
},
{
"text": "Forgot your admin password? Check `admin_password` in the 'idm' section in `/etc/opencloud/opencloud.yaml`",
"type": "info"
},
{
"text": "**Optional External Apps**: extract zip archives from App Store to `/etc/opencloud/assets/apps`",
"type": "info"
},
{
"text": "**Optional CalDAV and CardDAV**: requires separate Radicale install. Edit and rename `/opt/opencloud/proxy.yaml.bak` and change your Radicale config to use `http_x_remote_user` as the auth method",
"type": "info"
},
{
"text": "**Optional OpenID**: Authelia and PocketID supported. Uncomment relevant lines in `/opt/opencloud/opencloud.env` and consult OpenCloud GitHub discussions for configuration tips",
"type": "info"
},
{
"text": "**Optional Full-text Search with Apache Tika**: requires your own Tika LXC. See `https://community-scripts.github.io/ProxmoxVE/scripts?id=apache-tika`",
"type": "info"
},
{
"text": "**Relevant services**: `opencloud.service`, `opencloud-wopi.service`, `coolwsd.service`",
"type": "info"
}
]
}

View File

@ -1,64 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source:
APP="Roundcubemail"
var_tags="${var_tags:-mail}"
var_disk="${var_disk:-5}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-1024}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
if [[ ! -d /opt/roundcubemail ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
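# Prompt before continuing if /boot is more than 80% full (df usage column with the % sign stripped).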
if (($(df /boot | awk 'NR==2{gsub("%","",$5); print $5}') > 80)); then
read -r -p "Warning: Storage is dangerously low, continue anyway? <y/N> " prompt
[[ ${prompt,,} =~ ^(y|yes)$ ]] || exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/roundcube/roundcubemail/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Updating ${APP} to ${RELEASE}"
cd /opt
curl -fsSL "https://github.com/roundcube/roundcubemail/releases/download/${RELEASE}/roundcubemail-${RELEASE}-complete.tar.gz"
tar -xf roundcubemail-${RELEASE}-complete.tar.gz
mv roundcubemail-${RELEASE} /opt/roundcubemail
cd /opt/roundcubemail
COMPOSER_ALLOW_SUPERUSER=1 composer install --no-dev
chown -R www-data:www-data temp/ logs/
msg_ok "Updated ${APP}"
msg_info "Reload Apache2"
systemctl reload apache2
msg_ok "Apache2 Reloaded"
msg_info "Cleaning Up"
rm -rf /opt/roundcubemail-${RELEASE}-complete.tar.gz
msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
fi
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${APP} should be reachable by going to the following URL.
${BL}http://${IP}/installer ${CL} \n"

View File

@ -1,9 +1,9 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (Canbiz)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/community-scripts/ProxmoxVE
# Source:
APP="Docspell"
var_tags="${var_tags:-document}"

View File

@ -1,41 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://fedoraproject.org/
APP="Fedora"
var_tags="${var_tags:-os}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-512}"
var_disk="${var_disk:-4}"
var_os="${var_os:-fedora}"
var_version="${var_version:-42}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /var ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_info "Updating $APP LXC"
$STD dnf -y upgrade
msg_ok "Updated $APP LXC"
exit
}
start
build_container
description
msg_ok "Completed Successfully!"
msg_custom "🚀" "${GN}" "${APP} setup has been successfully initialized!"

View File

@ -1,16 +1,15 @@
#!/usr/bin/env bash
source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/refs/heads/freepbx/misc/build.func)
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: Arian Nasr (arian-nasr)
# Updated by: Javier Pastor (vsc55)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://www.freepbx.org/
APP="FreePBX"
var_tags="pbx;voip;telephony"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-2048}"
var_disk="${var_disk:-10}"
var_tags=""
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-1024}"
var_disk="${var_disk:-20}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
@ -25,39 +24,16 @@ function update_script() {
check_container_storage
check_container_resources
# Check if installation is present | -f for file, -d for folder
if [[ ! -f /lib/systemd/system/freepbx.service ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_info "Updating $APP LXC"
$STD apt-get update
$STD apt-get -y upgrade
msg_ok "Updated $APP LXC"
msg_info "Updating $APP Modules"
$STD fwconsole ma updateall
$STD fwconsole reload
msg_ok "Updated $APP Modules"
msg_error "Currently we don't provide an update function for this ${APP}."
exit
}
start
if whiptail --title "Commercial Modules" --yesno "Remove Commercial modules?" --defaultno 10 50; then
export ONLY_OPENSOURCE="yes"
if whiptail --title "Firewall Module" --yesno "Do you want to KEEP the Firewall module (and sysadmin)?" 10 50; then
export REMOVE_FIREWALL="no"
else
export REMOVE_FIREWALL="yes"
fi
else
export ONLY_OPENSOURCE="no"
export REMOVE_FIREWALL="no"
fi
build_container
description

View File

@ -1,34 +1,38 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Authors: MickLesk (CanbiZ) | Co-Author: remz1337
# Authors: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://frigate.video/
# App Default Values
APP="Frigate"
var_tags="${var_tags:-nvr}"
var_cpu="${var_cpu:-4}"
var_ram="${var_ram:-4096}"
var_disk="${var_disk:-20}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_version="${var_version:-11}"
var_unprivileged="${var_unprivileged:-0}"
# App Output
header_info "$APP"
# Core
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -f /etc/systemd/system/frigate.service ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_error "To update Frigate, create a new container and transfer your configuration."
exit
}
start

View File

@ -1,101 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/raw/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: aliaksei135
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/arpanghosh8453/garmin-grafana
APP="garmin-grafana"
var_tags="${var_tags:-sports;visualization}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-2048}"
var_disk="${var_disk:-8}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
# this only updates garmin-grafana, not influxdb or grafana, which are upgraded with apt
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/garmin-grafana/ ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/arpanghosh8453/garmin-grafana/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
if [[ ! -d /opt/garmin-grafana/ ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
msg_info "Stopping $APP"
systemctl stop garmin-grafana
systemctl stop grafana-server
systemctl stop influxdb
msg_ok "Stopped $APP"
if [[ ! -f /opt/garmin-grafana/.env ]]; then
msg_error "No .env file found in /opt/garmin-grafana/.env"
exit
fi
source /opt/garmin-grafana/.env
if [[ -z "${INFLUXDB_USER}" || -z "${INFLUXDB_PASSWORD}" || -z "${INFLUXDB_NAME}" ]]; then
msg_error "INFLUXDB_USER, INFLUXDB_PASSWORD, or INFLUXDB_NAME not set in .env file"
exit
fi
msg_info "Creating Backup"
tar -czf "/opt/${APP}_backup_$(date +%F).tar.gz" /opt/garmin-grafana/.garminconnect /opt/garmin-grafana/.env
mv /opt/garmin-grafana/ /opt/garmin-grafana-backup/
msg_ok "Backup Created"
msg_info "Updating $APP to v${RELEASE}"
curl -fsSL -o "${RELEASE}.zip" "https://github.com/arpanghosh8453/garmin-grafana/archive/refs/tags/${RELEASE}.zip"
unzip -q "${RELEASE}.zip"
mv "garmin-grafana-${RELEASE}/" "/opt/garmin-grafana"
rm -f "${RELEASE}.zip"
$STD uv sync --locked --project /opt/garmin-grafana/
# shellcheck disable=SC2016
sed -i 's/\${DS_GARMIN_STATS}/garmin_influxdb/g' /opt/garmin-grafana/Grafana_Dashboard/Garmin-Grafana-Dashboard.json
sed -i 's/influxdb:8086/localhost:8086/' /opt/garmin-grafana/Grafana_Datasource/influxdb.yaml
sed -i "s/influxdb_user/${INFLUXDB_USER}/" /opt/garmin-grafana/Grafana_Datasource/influxdb.yaml
sed -i "s/influxdb_secret_password/${INFLUXDB_PASSWORD}/" /opt/garmin-grafana/Grafana_Datasource/influxdb.yaml
sed -i "s/GarminStats/${INFLUXDB_NAME}/" /opt/garmin-grafana/Grafana_Datasource/influxdb.yaml
# Copy across grafana data
cp -r /opt/garmin-grafana/Grafana_Datasource/* /etc/grafana/provisioning/datasources
cp -r /opt/garmin-grafana/Grafana_Dashboard/* /etc/grafana/provisioning/dashboards
# Copy back the env and token files
cp /opt/garmin-grafana-backup/.env /opt/garmin-grafana/.env
cp -r /opt/garmin-grafana-backup/.garminconnect /opt/garmin-grafana/.garminconnect
msg_ok "Updated $APP to v${RELEASE}"
msg_info "Starting $APP"
systemctl start garmin-grafana
systemctl start grafana-server
systemctl start influxdb
msg_ok "Started $APP"
msg_info "Cleaning Up"
rm -rf /opt/garmin-grafana-backup
msg_ok "Cleanup Completed"
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
fi
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"

75
ct/gatus.sh Normal file
View File

@ -0,0 +1,75 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: Slaviša Arežina (tremor021)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/TwiN/gatus
APP="gatus"
var_tags="${var_tags:-monitoring}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-1024}"
var_disk="${var_disk:-4}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/gatus ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -s https://api.github.com/repos/TwiN/gatus/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
msg_info "Updating $APP"
msg_info "Stopping $APP"
systemctl stop gatus
msg_ok "Stopped $APP"
msg_info "Updating $APP to v${RELEASE}"
mv /opt/gatus/config/config.yaml /opt
rm -rf /opt/gatus/*
temp_file=$(mktemp)
curl -fsSL "https://github.com/TwiN/gatus/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
tar zxf "$temp_file" --strip-components=1 -C /opt/gatus
cd /opt/gatus
$STD go mod tidy
CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o gatus .
setcap CAP_NET_RAW+ep gatus
mv /opt/config.yaml config
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated $APP to v${RELEASE}"
msg_info "Starting $APP"
systemctl start gatus
msg_ok "Started $APP"
msg_info "Cleaning Up"
rm -f "$temp_file"
msg_ok "Cleanup Completed"
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
fi
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"

View File

@ -1,42 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://www.gentoo.org/
APP="Gentoo"
var_tags="${var_tags:-os}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-1024}"
var_disk="${var_disk:-8}"
var_os="${var_os:-gentoo}"
var_version="${var_version:-current}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /var ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_info "Updating $APP LXC"
$STD emerge --sync
$STD emerge --quiet --update --deep @world
msg_ok "Updated $APP LXC"
exit
}
start
build_container
description
msg_ok "Completed Successfully!"
msg_custom "🚀" "${GN}" "${APP} setup has been successfully initialized!"

42
ct/ghostfolio.sh Normal file
View File

@ -0,0 +1,42 @@
#!/usr/bin/env bash
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (Canbiz)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source:
APP="Ghostfolio"
var_tags="${var_tags:-portfolio}"
var_disk="${var_disk:-6}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-2048}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
function update_script() {
header_info
if [[ ! -d /opt/ghostfolio ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_info "Updating OS"
apt-get update &>/dev/null
apt-get -y upgrade &>/dev/null
msg_ok "Updated Successfully"
exit
}
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3333${CL}"

View File

@ -0,0 +1,6 @@
___ __ _ __ _ __ __
/ | / /___ (_)___ ___ / /_ (_) /_____ ___ ____ _____ _____ ___ / /_
/ /| | / / __ \/ / __ \/ _ \______/ __ \/ / __/ __ `__ \/ __ `/ __ `/ __ \/ _ \/ __/
/ ___ |/ / /_/ / / / / / __/_____/ /_/ / / /_/ / / / / / /_/ / /_/ / / / / __/ /_
/_/ |_/_/ .___/_/_/ /_/\___/ /_.___/_/\__/_/ /_/ /_/\__,_/\__, /_/ /_/\___/\__/
/_/ /____/

View File

@ -1,6 +0,0 @@
___ __ _ __ ____
/ | / /___ (_)___ ___ ____ / /_/ __/_ __
/ /| | / / __ \/ / __ \/ _ \______/ __ \/ __/ /_/ / / /
/ ___ |/ / /_/ / / / / / __/_____/ / / / /_/ __/ /_/ /
/_/ |_/_/ .___/_/_/ /_/\___/ /_/ /_/\__/_/ \__, /
/_/ /____/

View File

@ -0,0 +1,6 @@
___ __ _ _____ __ __ _
/ | / /___ (_)___ ___ / ___/__ ______ _____/ /_/ /_ (_)___ ____ _
/ /| | / / __ \/ / __ \/ _ \______\__ \/ / / / __ \/ ___/ __/ __ \/ / __ \/ __ `/
/ ___ |/ / /_/ / / / / / __/_____/__/ / /_/ / / / / /__/ /_/ / / / / / / / /_/ /
/_/ |_/_/ .___/_/_/ /_/\___/ /____/\__, /_/ /_/\___/\__/_/ /_/_/_/ /_/\__, /
/_/ /____/ /____/

6
ct/headers/ampache Normal file
View File

@ -0,0 +1,6 @@
___ __
/ | ____ ___ ____ ____ ______/ /_ ___
/ /| | / __ `__ \/ __ \/ __ `/ ___/ __ \/ _ \
/ ___ |/ / / / / / /_/ / /_/ / /__/ / / / __/
/_/ |_/_/ /_/ /_/ .___/\__,_/\___/_/ /_/\___/
/_/

6
ct/headers/asterisk Normal file
View File

@ -0,0 +1,6 @@
___ __ _ __
/ | _____/ /____ _____(_)____/ /__
/ /| | / ___/ __/ _ \/ ___/ / ___/ //_/
/ ___ |(__ ) /_/ __/ / / (__ ) ,<
/_/ |_/____/\__/\___/_/ /_/____/_/|_|

6
ct/headers/authentik Normal file
View File

@ -0,0 +1,6 @@
___ __ __ __ _ __
/ | __ __/ /_/ /_ ___ ____ / /_(_) /__
/ /| |/ / / / __/ __ \/ _ \/ __ \/ __/ / //_/
/ ___ / /_/ / /_/ / / / __/ / / / /_/ / ,<
/_/ |_\__,_/\__/_/ /_/\___/_/ /_/\__/_/_/|_|

6
ct/headers/babybuddy Normal file
View File

@ -0,0 +1,6 @@
____ __ ____ __ __
/ __ )____ _/ /_ __ __/ __ )__ ______/ /___/ /_ __
/ __ / __ `/ __ \/ / / / __ / / / / __ / __ / / / /
/ /_/ / /_/ / /_/ / /_/ / /_/ / /_/ / /_/ / /_/ / /_/ /
/_____/\__,_/_.___/\__, /_____/\__,_/\__,_/\__,_/\__, /
/____/ /____/

6
ct/headers/bar-assistant Normal file
View File

@ -0,0 +1,6 @@
____ ___ _ __ __
/ __ )____ ______ / | __________(_)____/ /_____ _____ / /_
/ __ / __ `/ ___/_____/ /| | / ___/ ___/ / ___/ __/ __ `/ __ \/ __/
/ /_/ / /_/ / / /_____/ ___ |(__ |__ ) (__ ) /_/ /_/ / / / / /_
/_____/\__,_/_/ /_/ |_/____/____/_/____/\__/\__,_/_/ /_/\__/

6
ct/headers/bluecherry Normal file
View File

@ -0,0 +1,6 @@
__ __ __
/ /_ / /_ _____ _____/ /_ ___ ____________ __
/ __ \/ / / / / _ \/ ___/ __ \/ _ \/ ___/ ___/ / / /
/ /_/ / / /_/ / __/ /__/ / / / __/ / / / / /_/ /
/_.___/_/\__,_/\___/\___/_/ /_/\___/_/ /_/ \__, /
/____/

6
ct/headers/bookstack Normal file
View File

@ -0,0 +1,6 @@
____ __ __ __
/ __ )____ ____ / /_______/ /_____ ______/ /__
/ __ / __ \/ __ \/ //_/ ___/ __/ __ `/ ___/ //_/
/ /_/ / /_/ / /_/ / ,< (__ ) /_/ /_/ / /__/ ,<
/_____/\____/\____/_/|_/____/\__/\__,_/\___/_/|_|

View File

@ -0,0 +1,6 @@
________ ________ ____ ____ _ _______
/ ____/ /___ __ ______/ / __/ /___ _________ / __ \/ __ \/ | / / ___/
/ / / / __ \/ / / / __ / /_/ / __ `/ ___/ _ \______/ / / / / / / |/ /\__ \
/ /___/ / /_/ / /_/ / /_/ / __/ / /_/ / / / __/_____/ /_/ / /_/ / /| /___/ /
\____/_/\____/\__,_/\__,_/_/ /_/\__,_/_/ \___/ /_____/_____/_/ |_//____/

View File

@ -1,6 +0,0 @@
____ __
/ __ \____ _____/ /_____ _____
/ / / / __ \/ ___/ //_/ _ \/ ___/
/ /_/ / /_/ / /__/ ,< / __/ /
/_____/\____/\___/_/|_|\___/_/

6
ct/headers/docspell Normal file
View File

@ -0,0 +1,6 @@
____ ____
/ __ \____ ______________ ___ / / /
/ / / / __ \/ ___/ ___/ __ \/ _ \/ / /
/ /_/ / /_/ / /__(__ ) /_/ / __/ / /
/_____/\____/\___/____/ .___/\___/_/_/
/_/

View File

@ -1,6 +0,0 @@
______ __
/ ____/___ / /____
/ __/ / __ \/ __/ _ \
/ /___/ / / / /_/ __/
/_____/_/ /_/\__/\___/

View File

@ -1,6 +0,0 @@
_ ____
____ _____ __________ ___ (_)___ ____ __________ _/ __/___ _____ ____ _
/ __ `/ __ `/ ___/ __ `__ \/ / __ \______/ __ `/ ___/ __ `/ /_/ __ `/ __ \/ __ `/
/ /_/ / /_/ / / / / / / / / / / / /_____/ /_/ / / / /_/ / __/ /_/ / / / / /_/ /
\__, /\__,_/_/ /_/ /_/ /_/_/_/ /_/ \__, /_/ \__,_/_/ \__,_/_/ /_/\__,_/
/____/ /____/

6
ct/headers/gatus Normal file
View File

@ -0,0 +1,6 @@
__
____ _____ _/ /___ _______
/ __ `/ __ `/ __/ / / / ___/
/ /_/ / /_/ / /_/ /_/ (__ )
\__, /\__,_/\__/\__,_/____/
/____/

6
ct/headers/ghostfolio Normal file
View File

@ -0,0 +1,6 @@
________ __ ____ ___
/ ____/ /_ ____ _____/ /_/ __/___ / (_)___
/ / __/ __ \/ __ \/ ___/ __/ /_/ __ \/ / / __ \
/ /_/ / / / / /_/ (__ ) /_/ __/ /_/ / / / /_/ /
\____/_/ /_/\____/____/\__/_/ \____/_/_/\____/

Some files were not shown because too many files have changed in this diff.