Compare commits: main...change_to_ (1 commit, 530c6a463b)

.github/CONTRIBUTOR_AND_GUIDES/CODE-AUDIT.md (4 changes)
@@ -5,10 +5,10 @@
 1) [adguard.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/ct/adguard.sh): This script collects system parameters. (Also holds the function to update the application.)
 2) [build.func](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/build.func): Adds user settings and integrates collected information.
-3) [create_lxc.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/create_lxc.sh): Constructs the LXC container.
+3) [create_lxc.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/ct/create_lxc.sh): Constructs the LXC container.
 4) [adguard-install.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/install/adguard-install.sh): Executes functions from [install.func](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/install.func), and installs the application.
 5) [adguard.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/ct/adguard.sh) (again): To display the completion message.

-The installation process uses reusable scripts: [build.func](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/build.func), [create_lxc.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/create_lxc.sh), and [install.func](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/install.func), which are not specific to any particular application.
+The installation process uses reusable scripts: [build.func](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/build.func), [create_lxc.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/ct/create_lxc.sh), and [install.func](https://github.com/community-scripts/ProxmoxVE/blob/main/misc/install.func), which are not specific to any particular application.

 To gain a better understanding, focus on reviewing [adguard-install.sh](https://github.com/community-scripts/ProxmoxVE/blob/main/install/adguard-install.sh). This script contains the commands and configurations for installing and configuring AdGuard Home within the LXC container.
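The numbered flow above boils down to one pattern: a thin per-app ct script that sources the shared build.func helper and defines its own update function, while everything reusable lives in build.func, create_lxc.sh and install.func. A minimal sketch of that shape; the AdGuard values and the entry-point names at the bottom are illustrative placeholders, not copied from the real adguard.sh:

```bash
#!/usr/bin/env bash
# ct/<app>.sh — per-application entry point (illustrative sketch).
# Line 2 sources the shared build.func, which collects user settings,
# calls create_lxc.sh to build the container, and then runs
# install/<app>-install.sh (which in turn sources install.func).
source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)

APP="AdGuard"   # illustrative metadata

# The ct script "also holds the function to update the application":
function update_script() {
  echo "Updating ${APP} inside the existing container..."
  # application-specific update commands would go here
}

# Hand control to the reusable machinery provided by build.func
# (exact entry-point names vary; treat these as placeholders).
start
build_container
description
```

Only the metadata and update_script() are app-specific; container creation and the completion message come from the sourced helpers, which is why the audit points you at adguard-install.sh for the application-level details.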
.github/ISSUE_TEMPLATE/new-script.yaml (19 changes)
@@ -1,24 +1,25 @@
 ---
-name: 🛠️ New Script
-description: New Script proposal.
-labels: [task]
+name: "🛠️ New Script"
+description: "New Script proposal."
+labels: ["task"]
 body:
   - type: markdown
     attributes:
       value: |
         # 🛠️ **New Script**
-        Create an Issue when you want to merge a new Script. The name of the Issue must be the same as your APP.sh file. (Example: SnipeIT, snipeit.sh; Alpine-Docker, alpine-docker.sh)
+        Create a Issue when you want to merge a new Script. The name of the Issue must be the same as your APP.sh file. (Example: SnipeIT, snipeit.sh; Alpine-Docker, alpine-docker.sh)

   - type: input
     id: task_summary
     attributes:
-      label: Name of the Script
-      placeholder: e.g., SnipeIT
+      label: "Name of the Script"
+      placeholder: "e.g., SnipeIT"
     validations:
       required: true

   - type: textarea
     id: task_details
     attributes:
-      label: 📋 Script Details
-      placeholder: Explain what is needed or special about this script
+      label: "📋 Scritpt Details"
+      placeholder: "Explain what is needed or special about this script"
     validations:
       required: true
.github/autolabeler-config.json (211 changes)
@ -1,125 +1,90 @@
|
||||
{
|
||||
"new script": [
|
||||
{
|
||||
"fileStatus": "added",
|
||||
"includeGlobs": [
|
||||
"ct/**",
|
||||
"install/**",
|
||||
"misc/**",
|
||||
"turnkey/**",
|
||||
"vm/**"
|
||||
],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"update script": [
|
||||
{
|
||||
"fileStatus": "modified",
|
||||
"includeGlobs": [
|
||||
"ct/**",
|
||||
"install/**",
|
||||
"misc/**",
|
||||
"turnkey/**",
|
||||
"vm/**"
|
||||
],
|
||||
"excludeGlobs": [
|
||||
"misc/build.func",
|
||||
"misc/install.func",
|
||||
"misc/api.func"
|
||||
]
|
||||
}
|
||||
],
|
||||
"delete script": [
|
||||
{
|
||||
"fileStatus": "removed",
|
||||
"includeGlobs": [
|
||||
"ct/**",
|
||||
"install/**",
|
||||
"misc/**",
|
||||
"turnkey/**",
|
||||
"vm/**"
|
||||
],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"maintenance": [
|
||||
{
|
||||
"fileStatus": null,
|
||||
"includeGlobs": [
|
||||
"*.md",
|
||||
".github/**",
|
||||
"misc/*.func",
|
||||
"misc/create_lxc.sh",
|
||||
"api/**"
|
||||
],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"core": [
|
||||
{
|
||||
"fileStatus": null,
|
||||
"includeGlobs": [
|
||||
"misc/*.func",
|
||||
"misc/create_lxc.sh"
|
||||
],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"website": [
|
||||
{
|
||||
"fileStatus": null,
|
||||
"includeGlobs": [
|
||||
"frontend/**"
|
||||
],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"api": [
|
||||
{
|
||||
"fileStatus": null,
|
||||
"includeGlobs": [
|
||||
"api/**",
|
||||
"misc/api.func"
|
||||
],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"github": [
|
||||
{
|
||||
"fileStatus": null,
|
||||
"includeGlobs": [
|
||||
".github/**"
|
||||
],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"json": [
|
||||
{
|
||||
"fileStatus": "modified",
|
||||
"includeGlobs": [
|
||||
"frontend/public/json/**"
|
||||
],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"high risk": [
|
||||
{
|
||||
"fileStatus": null,
|
||||
"includeGlobs": [
|
||||
"misc/build.func",
|
||||
"misc/install.func",
|
||||
"misc/create_lxc.sh"
|
||||
],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"documentation": [
|
||||
{
|
||||
"fileStatus": null,
|
||||
"includeGlobs": [
|
||||
"*.md"
|
||||
],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
]
|
||||
"new script": [
|
||||
{
|
||||
"fileStatus": "added",
|
||||
"includeGlobs": ["ct/**", "install/**", "misc/**", "turnkey/**", "vm/**"],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"update script": [
|
||||
{
|
||||
"fileStatus": "modified",
|
||||
"includeGlobs": ["ct/**", "install/**", "misc/**", "turnkey/**", "vm/**"],
|
||||
"excludeGlobs": ["misc/build.func", "misc/install.func", "misc/api.func"]
|
||||
}
|
||||
],
|
||||
"delete script": [
|
||||
{
|
||||
"fileStatus": "removed",
|
||||
"includeGlobs": ["ct/**", "install/**", "misc/**", "turnkey/**", "vm/**"],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"maintenance": [
|
||||
{
|
||||
"fileStatus": null,
|
||||
"includeGlobs": [
|
||||
"*.md",
|
||||
".github/**",
|
||||
"misc/*.func",
|
||||
"ct/create_lxc.sh",
|
||||
"api/**"
|
||||
],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"core": [
|
||||
{
|
||||
"fileStatus": null,
|
||||
"includeGlobs": ["misc/*.func", "ct/create_lxc.sh"],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"website": [
|
||||
{
|
||||
"fileStatus": null,
|
||||
"includeGlobs": ["frontend/**"],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"api": [
|
||||
{
|
||||
"fileStatus": null,
|
||||
"includeGlobs": ["api/**", "misc/api.func"],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"github": [
|
||||
{
|
||||
"fileStatus": null,
|
||||
"includeGlobs": [".github/**"],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"json": [
|
||||
{
|
||||
"fileStatus": "modified",
|
||||
"includeGlobs": ["frontend/publuc/json/**"],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
|
||||
"high risk": [
|
||||
{
|
||||
"fileStatus": null,
|
||||
"includeGlobs": [
|
||||
"misc/build.func",
|
||||
"misc/install.func",
|
||||
"ct/create_lxc.sh"
|
||||
],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
],
|
||||
"documentation": [
|
||||
{
|
||||
"fileStatus": null,
|
||||
"includeGlobs": ["*.md"],
|
||||
"excludeGlobs": []
|
||||
}
|
||||
]
|
||||
}
|
||||
|
.github/workflows/auto-update-app-headers.yml (25 changes)
@ -20,22 +20,10 @@ jobs:
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@v2
|
||||
uses: actions/create-github-app-token@v1
|
||||
with:
|
||||
app-id: ${{ vars.APP_ID }}
|
||||
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||
owner: community-scripts
|
||||
repositories: ProxmoxVED
|
||||
|
||||
|
||||
- name: Generate a token for PR approval and merge
|
||||
id: generate-token-merge
|
||||
uses: actions/create-github-app-token@v2
|
||||
with:
|
||||
app-id: ${{ vars.APP_ID }}
|
||||
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||
owner: community-scripts
|
||||
repositories: ProxmoxVED
|
||||
|
||||
# Step 1: Checkout repository
|
||||
- name: Checkout repository
|
||||
@ -103,17 +91,14 @@ jobs:
|
||||
gh pr review $PR_NUMBER --approve
|
||||
fi
|
||||
|
||||
- name: Approve pull request and merge
|
||||
- name: Re-approve pull request after update
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.generate-token-merge.outputs.token }}
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
git config --global user.name "github-actions-automege[bot]"
|
||||
git config --global user.email "github-actions-automege[bot]@users.noreply.github.com"
|
||||
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[0].number')
|
||||
PR_NUMBER=$(gh pr list --head "pr-update-app-files" --json number --jq '.[].number')
|
||||
if [ -n "$PR_NUMBER" ]; then
|
||||
gh pr review "$PR_NUMBER" --approve
|
||||
gh pr merge "$PR_NUMBER" --squash --admin
|
||||
gh pr review $PR_NUMBER --approve
|
||||
fi
|
||||
|
||||
# Step 8: Output success message when no changes
|
||||
|
.github/workflows/backup/check_and_update_json_date.yml (new file, 60 lines)
@ -0,0 +1,60 @@
|
||||
name: Update date_created in JSON files
|
||||
|
||||
on:
|
||||
# This trigger runs when PRs are opened and when open PRs are updated
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
schedule:
|
||||
# This trigger fires four times a day to make sure the date gets updated
|
||||
- cron: "0 0,6,12,18 * * *" # Runs every 6 hours
|
||||
workflow_dispatch: # Allows manual runs of the workflow
|
||||
|
||||
jobs:
|
||||
update-date:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install yq
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y yq
|
||||
|
||||
- name: Set the current date
|
||||
id: set_date
|
||||
run: echo "TODAY=$(date -u +%Y-%m-%d)" >> $GITHUB_ENV
|
||||
|
||||
- name: Check for changes in PR
|
||||
run: |
|
||||
# Fetch the PR branch
|
||||
PR_BRANCH="refs/pull/${{ github.event.pull_request.number }}/merge"
|
||||
git fetch origin $PR_BRANCH
|
||||
|
||||
# List all JSON files changed in this PR
|
||||
CHANGED_JSON_FILES=$(git diff --name-only origin/main...$PR_BRANCH | grep '.json')
|
||||
|
||||
if [ -z "$CHANGED_JSON_FILES" ]; then
|
||||
echo "No JSON files changed in this PR."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Loop over all changed JSON files and update the date
|
||||
for file in $CHANGED_JSON_FILES; do
|
||||
echo "Updating date_created in $file"
|
||||
# Set the current date
|
||||
yq eval ".date_created = \"${{ env.TODAY }}\"" -i "$file"
|
||||
git add "$file"
|
||||
done
|
||||
|
||||
- name: Commit and push changes
|
||||
run: |
|
||||
# Check whether there are changes and commit them
|
||||
git config user.name "json-updater-bot"
|
||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
git commit -m "Update date_created to ${{ env.TODAY }}" || echo "No changes to commit"
|
||||
|
||||
# Push back to the PR branch
|
||||
git push origin $PR_BRANCH
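The workflow above rewrites the date_created field with yq; the same edit can be reproduced locally. A minimal sketch with jq instead (the field name comes from the workflow, the file path is a placeholder):

```bash
#!/usr/bin/env bash
set -euo pipefail

TODAY=$(date -u +%Y-%m-%d)
FILE="frontend/public/json/example.json"   # placeholder path

# Equivalent of the workflow's yq call: overwrite date_created in place.
tmp=$(mktemp)
jq --arg d "$TODAY" '.date_created = $d' "$FILE" > "$tmp" && mv "$tmp" "$FILE"
```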
.github/workflows/backup/shellcheck.yml (new file, 60 lines)
@ -0,0 +1,60 @@
|
||||
name: Shellcheck
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "5 1 * * *"
|
||||
|
||||
jobs:
|
||||
shellcheck:
|
||||
name: Shellcheck
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@v45
|
||||
with:
|
||||
files: |
|
||||
**.sh
|
||||
|
||||
- name: Download ShellCheck
|
||||
shell: bash
|
||||
env:
|
||||
INPUT_VERSION: "v0.10.0"
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [[ "${{ runner.os }}" == "macOS" ]]; then
|
||||
osvariant="darwin"
|
||||
else
|
||||
osvariant="linux"
|
||||
fi
|
||||
|
||||
baseurl="https://github.com/koalaman/shellcheck/releases/download"
|
||||
curl -Lso "${{ github.workspace }}/sc.tar.xz" \
|
||||
"${baseurl}/${INPUT_VERSION}/shellcheck-${INPUT_VERSION}.${osvariant}.x86_64.tar.xz"
|
||||
|
||||
tar -xf "${{ github.workspace }}/sc.tar.xz" -C "${{ github.workspace }}"
|
||||
mv "${{ github.workspace }}/shellcheck-${INPUT_VERSION}/shellcheck" \
|
||||
"${{ github.workspace }}/shellcheck"
|
||||
|
||||
- name: Verify ShellCheck binary
|
||||
run: |
|
||||
ls -l "${{ github.workspace }}/shellcheck"
|
||||
|
||||
- name: Display ShellCheck version
|
||||
run: |
|
||||
"${{ github.workspace }}/shellcheck" --version
|
||||
|
||||
- name: Run ShellCheck
|
||||
if: steps.changed-files.outputs.any_changed == 'true'
|
||||
env:
|
||||
ALL_CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }}
|
||||
run: |
|
||||
echo "${ALL_CHANGED_FILES}" | xargs "${{ github.workspace }}/shellcheck"
.github/workflows/backup/update_json_date.yml.bak (new file, 88 lines)
@ -0,0 +1,88 @@
|
||||
name: Auto Update JSON-Date
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
update-json-dates:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@v1
|
||||
with:
|
||||
app-id: ${{ vars.APP_ID }}
|
||||
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # Full history for proper detection
|
||||
|
||||
- name: Set up Git
|
||||
run: |
|
||||
git config --global user.name "GitHub Actions"
|
||||
git config --global user.email "github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
- name: Find JSON files with incorrect date_created
|
||||
id: find_wrong_json
|
||||
run: |
|
||||
TODAY=$(date -u +"%Y-%m-%d")
|
||||
> incorrect_json_files.txt
|
||||
|
||||
for FILE in json/*.json; do
|
||||
if [[ -f "$FILE" ]]; then
|
||||
DATE_IN_JSON=$(jq -r '.date_created' "$FILE" 2>/dev/null || echo "")
|
||||
|
||||
if [[ "$DATE_IN_JSON" != "$TODAY" ]]; then
|
||||
echo "$FILE" >> incorrect_json_files.txt
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ -s incorrect_json_files.txt ]]; then
|
||||
echo "CHANGED=true" >> $GITHUB_ENV
|
||||
else
|
||||
echo "CHANGED=false" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Run update script
|
||||
if: env.CHANGED == 'true'
|
||||
run: |
|
||||
chmod +x .github/workflows/scripts/update-json.sh
|
||||
while read -r FILE; do
|
||||
.github/workflows/scripts/update-json.sh "$FILE"
|
||||
done < incorrect_json_files.txt
|
||||
|
||||
- name: Commit and create PR if changes exist
|
||||
if: env.CHANGED == 'true'
|
||||
run: |
|
||||
git add json/*.json
|
||||
git commit -m "Auto-update date_created in incorrect JSON files"
|
||||
git checkout -b pr-fix-json-dates
|
||||
git push origin pr-fix-json-dates --force
|
||||
gh pr create --title "[core] Fix incorrect JSON date_created fields" \
|
||||
--body "This PR is auto-generated to fix incorrect `date_created` fields in JSON files." \
|
||||
--head pr-fix-json-dates \
|
||||
--base main \
|
||||
--label "automated pr"
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
|
||||
|
||||
- name: Approve pull request
|
||||
if: env.CHANGED == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
PR_NUMBER=$(gh pr list --head "pr-fix-json-dates" --json number --jq '.[].number')
|
||||
if [ -n "$PR_NUMBER" ]; then
|
||||
gh pr review $PR_NUMBER --approve
|
||||
fi
|
133
.github/workflows/backup/validate-formatting.yaml.bak
vendored
Normal file
133
.github/workflows/backup/validate-formatting.yaml.bak
vendored
Normal file
@ -0,0 +1,133 @@
|
||||
name: Validate script formatting
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request_target:
|
||||
paths:
|
||||
- "**/*.sh"
|
||||
- "**/*.func"
|
||||
|
||||
jobs:
|
||||
shfmt:
|
||||
name: Check changed files
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Get pull request information
|
||||
if: github.event_name == 'pull_request_target'
|
||||
uses: actions/github-script@v7
|
||||
id: pr
|
||||
with:
|
||||
script: |
|
||||
const { data: pullRequest } = await github.rest.pulls.get({
|
||||
...context.repo,
|
||||
pull_number: context.payload.pull_request.number,
|
||||
});
|
||||
return pullRequest;
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # Ensure the full history is fetched for accurate diffing
|
||||
ref: ${{ github.event_name == 'pull_request_target' && fromJSON(steps.pr.outputs.result).merge_commit_sha || '' }}
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
run: |
|
||||
if ${{ github.event_name == 'pull_request_target' }}; then
|
||||
echo "files=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ steps.pr.outputs.result && fromJSON(steps.pr.outputs.result).merge_commit_sha }} | grep -E '\.(sh|func)$' | xargs)" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | grep -E '\.(sh|func)$' | xargs)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Set up Go
|
||||
if: steps.changed-files.outputs.files != ''
|
||||
uses: actions/setup-go@v5
|
||||
|
||||
- name: Install shfmt
|
||||
if: steps.changed-files.outputs.files != ''
|
||||
run: |
|
||||
go install mvdan.cc/sh/v3/cmd/shfmt@latest
|
||||
echo "$GOPATH/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Run shfmt
|
||||
if: steps.changed-files.outputs.files != ''
|
||||
id: shfmt
|
||||
run: |
|
||||
set +e
|
||||
|
||||
|
||||
shfmt_output=$(shfmt -d ${{ steps.changed-files.outputs.files }})
|
||||
if [[ $? -eq 0 ]]; then
|
||||
exit 0
|
||||
else
|
||||
echo "diff=\"$(echo -n "$shfmt_output" | base64 -w 0)\"" >> $GITHUB_OUTPUT
|
||||
printf "%s" "$shfmt_output"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Post comment with results
|
||||
if: always() && steps.changed-files.outputs.files != '' && github.event_name == 'pull_request_target'
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const result = "${{ job.status }}" === "success" ? "success" : "failure";
|
||||
const diff = Buffer.from(
|
||||
${{ steps.shfmt.outputs.diff }},
|
||||
"base64",
|
||||
).toString();
|
||||
const issueNumber = context.payload.pull_request
|
||||
? context.payload.pull_request.number
|
||||
: null;
|
||||
const commentIdentifier = "validate-formatting";
|
||||
let newCommentBody = `<!-- ${commentIdentifier}-start -->\n### Script formatting\n\n`;
|
||||
|
||||
if (result === "failure") {
|
||||
newCommentBody +=
|
||||
`:x: We found issues in the formatting of the following changed files:\n\n\`\`\`diff\n${diff}\n\`\`\`\n`;
|
||||
} else {
|
||||
newCommentBody += `:rocket: All changed shell scripts are formatted correctly!\n`;
|
||||
}
|
||||
|
||||
newCommentBody += `\n\n<!-- ${commentIdentifier}-end -->`;
|
||||
|
||||
if (issueNumber) {
|
||||
const { data: comments } = await github.rest.issues.listComments({
|
||||
...context.repo,
|
||||
issue_number: issueNumber,
|
||||
});
|
||||
|
||||
const existingComment = comments.find(
|
||||
(comment) => comment.user.login === "github-actions[bot]",
|
||||
|
||||
);
|
||||
|
||||
if (existingComment) {
|
||||
if (existingComment.body.includes(commentIdentifier)) {
|
||||
const re = new RegExp(
|
||||
String.raw`<!-- ${commentIdentifier}-start -->[\s\S]*?<!-- ${commentIdentifier}-end -->`,
|
||||
"",
|
||||
);
|
||||
newCommentBody = existingComment.body.replace(re, newCommentBody);
|
||||
} else {
|
||||
newCommentBody = existingComment.body + "\n\n---\n\n" + newCommentBody;
|
||||
}
|
||||
|
||||
await github.rest.issues.updateComment({
|
||||
...context.repo,
|
||||
comment_id: existingComment.id,
|
||||
body: newCommentBody,
|
||||
});
|
||||
} else {
|
||||
await github.rest.issues.createComment({
|
||||
...context.repo,
|
||||
issue_number: issueNumber,
|
||||
body: newCommentBody,
|
||||
});
|
||||
}
|
||||
}
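Locally, the check above reduces to a single shfmt invocation. A minimal sketch, assuming Go is available; running it over all tracked .sh and .func files is a simplification of the workflow, which only formats the files changed in the push or PR:

```bash
#!/usr/bin/env bash
set -euo pipefail

# Install shfmt the same way the workflow does.
go install mvdan.cc/sh/v3/cmd/shfmt@latest

# -d prints a diff for badly formatted files and exits non-zero if any are found.
git ls-files '*.sh' '*.func' | xargs "$(go env GOPATH)/bin/shfmt" -d
```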
.github/workflows/backup/validate-scripts.yml.bak (new file, 234 lines)
@ -0,0 +1,234 @@
|
||||
name: Validate scripts
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request_target:
|
||||
paths:
|
||||
- "ct/*.sh"
|
||||
- "install/*.sh"
|
||||
|
||||
jobs:
|
||||
check-scripts:
|
||||
name: Check changed files
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Debug event payload
|
||||
run: |
|
||||
echo "Event name: ${{ github.event_name }}"
|
||||
echo "Payload: $(cat $GITHUB_EVENT_PATH)"
|
||||
|
||||
- name: Get pull request information
|
||||
if: github.event_name == 'pull_request_target'
|
||||
uses: actions/github-script@v7
|
||||
id: pr
|
||||
with:
|
||||
script: |
|
||||
const { data: pullRequest } = await github.rest.pulls.get({
|
||||
...context.repo,
|
||||
pull_number: context.payload.pull_request.number,
|
||||
});
|
||||
return pullRequest;
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.event_name == 'pull_request_target' && fromJSON(steps.pr.outputs.result).merge_commit_sha || '' }}
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" == "pull_request_target" ]; then
|
||||
echo "files=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ steps.pr.outputs.result && fromJSON(steps.pr.outputs.result).merge_commit_sha }} | grep -E '\.(sh|func)$' | xargs)" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | grep -E '\.(sh|func)$' | xargs)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Check build.func line
|
||||
if: always() && steps.changed-files.outputs.files != ''
|
||||
id: build-func
|
||||
run: |
|
||||
NON_COMPLIANT_FILES=""
|
||||
for FILE in ${{ steps.changed-files.outputs.files }}; do
|
||||
if [[ "$FILE" == ct/* ]] && [[ $(sed -n '2p' "$FILE") != "source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)" ]]; then
|
||||
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$NON_COMPLIANT_FILES" ]; then
|
||||
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
|
||||
echo "Build.func line missing or incorrect in files:"
|
||||
for FILE in $NON_COMPLIANT_FILES; do
|
||||
echo "$FILE"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Check executable permissions
|
||||
if: always() && steps.changed-files.outputs.files != ''
|
||||
id: check-executable
|
||||
run: |
|
||||
NON_COMPLIANT_FILES=""
|
||||
for FILE in ${{ steps.changed-files.outputs.files }}; do
|
||||
if [[ ! -x "$FILE" ]]; then
|
||||
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$NON_COMPLIANT_FILES" ]; then
|
||||
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
|
||||
echo "Files not executable:"
|
||||
for FILE in $NON_COMPLIANT_FILES; do
|
||||
echo "$FILE"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Check copyright
|
||||
if: always() && steps.changed-files.outputs.files != ''
|
||||
id: check-copyright
|
||||
run: |
|
||||
NON_COMPLIANT_FILES=""
|
||||
for FILE in ${{ steps.changed-files.outputs.files }}; do
|
||||
if ! sed -n '3p' "$FILE" | grep -qE "^# Copyright \(c\) [0-9]{4}(-[0-9]{4})? (tteck \| community-scripts ORG|community-scripts ORG|tteck)$"; then
|
||||
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$NON_COMPLIANT_FILES" ]; then
|
||||
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
|
||||
echo "Copyright header missing or not on line 3 in files:"
|
||||
for FILE in $NON_COMPLIANT_FILES; do
|
||||
echo "$FILE"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Check author
|
||||
if: always() && steps.changed-files.outputs.files != ''
|
||||
id: check-author
|
||||
run: |
|
||||
NON_COMPLIANT_FILES=""
|
||||
for FILE in ${{ steps.changed-files.outputs.files }}; do
|
||||
if ! sed -n '4p' "$FILE" | grep -qE "^# Author: .+"; then
|
||||
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$NON_COMPLIANT_FILES" ]; then
|
||||
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
|
||||
echo "Author header missing or invalid on line 4 in files:"
|
||||
for FILE in $NON_COMPLIANT_FILES; do
|
||||
echo "$FILE"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Check license
|
||||
if: always() && steps.changed-files.outputs.files != ''
|
||||
id: check-license
|
||||
run: |
|
||||
NON_COMPLIANT_FILES=""
|
||||
for FILE in ${{ steps.changed-files.outputs.files }}; do
|
||||
if [[ "$(sed -n '5p' "$FILE")" != "# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE" ]]; then
|
||||
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$NON_COMPLIANT_FILES" ]; then
|
||||
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
|
||||
echo "License header missing or not on line 5 in files:"
|
||||
for FILE in $NON_COMPLIANT_FILES; do
|
||||
echo "$FILE"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Check source
|
||||
if: always() && steps.changed-files.outputs.files != ''
|
||||
id: check-source
|
||||
run: |
|
||||
NON_COMPLIANT_FILES=""
|
||||
for FILE in ${{ steps.changed-files.outputs.files }}; do
|
||||
if ! sed -n '6p' "$FILE" | grep -qE "^# Source: .+"; then
|
||||
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$NON_COMPLIANT_FILES" ]; then
|
||||
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
|
||||
echo "Source header missing or not on line 6 in files:"
|
||||
for FILE in $NON_COMPLIANT_FILES; do
|
||||
echo "$FILE"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Post results and comment
|
||||
if: always() && steps.changed-files.outputs.files != '' && github.event_name == 'pull_request_target'
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const result = '${{ job.status }}' === 'success' ? 'success' : 'failure';
|
||||
const nonCompliantFiles = {
|
||||
'Invalid build.func source': "${{ steps.build-func.outputs.files || '' }}",
|
||||
'Not executable': "${{ steps.check-executable.outputs.files || '' }}",
|
||||
'Copyright header line missing or invalid': "${{ steps.check-copyright.outputs.files || '' }}",
|
||||
'Author header line missing or invalid': "${{ steps.check-author.outputs.files || '' }}",
|
||||
'License header line missing or invalid': "${{ steps.check-license.outputs.files || '' }}",
|
||||
'Source header line missing or invalid': "${{ steps.check-source.outputs.files || '' }}"
|
||||
};
|
||||
|
||||
const issueNumber = context.payload.pull_request ? context.payload.pull_request.number : null;
|
||||
const commentIdentifier = 'validate-scripts';
|
||||
let newCommentBody = `<!-- ${commentIdentifier}-start -->\n### Script validation\n\n`;
|
||||
|
||||
if (result === 'failure') {
|
||||
newCommentBody += ':x: We found issues in the following changed files:\n\n';
|
||||
for (const [check, files] of Object.entries(nonCompliantFiles)) {
|
||||
if (files) {
|
||||
newCommentBody += `**${check}:**\n`;
|
||||
files.trim().split(' ').forEach(file => {
|
||||
newCommentBody += `- ${file}: ${check}\n`;
|
||||
});
|
||||
newCommentBody += `\n`;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
newCommentBody += `:rocket: All changed shell scripts passed validation!\n`;
|
||||
}
|
||||
|
||||
newCommentBody += `\n\n<!-- ${commentIdentifier}-end -->`;
|
||||
|
||||
if (issueNumber) {
|
||||
const { data: comments } = await github.rest.issues.listComments({
|
||||
...context.repo,
|
||||
issue_number: issueNumber
|
||||
});
|
||||
|
||||
const existingComment = comments.find(comment =>
|
||||
comment.body.includes(`<!-- ${commentIdentifier}-start -->`) &&
|
||||
comment.user.login === 'github-actions[bot]'
|
||||
);
|
||||
|
||||
if (existingComment) {
|
||||
const re = new RegExp(String.raw`<!-- ${commentIdentifier}-start -->[\\s\\S]*?<!-- ${commentIdentifier}-end -->`, "m");
|
||||
newCommentBody = existingComment.body.replace(re, newCommentBody);
|
||||
|
||||
await github.rest.issues.updateComment({
|
||||
...context.repo,
|
||||
comment_id: existingComment.id,
|
||||
body: newCommentBody
|
||||
});
|
||||
} else {
|
||||
await github.rest.issues.createComment({
|
||||
...context.repo,
|
||||
issue_number: issueNumber,
|
||||
body: newCommentBody
|
||||
});
|
||||
}
|
||||
}
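Read together, the checks above pin down lines 2 through 6 of every ct script: the build.func source line, then Copyright, Author, License and Source comments, with the file itself marked executable. A minimal header sketch that would satisfy those patterns; the year, author and source URL are placeholders:

```bash
#!/usr/bin/env bash
source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
# Copyright (c) 2025 community-scripts ORG
# Author: your-github-handle
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://example.com/upstream-project
```

Remember to `chmod +x` the script; the executable-permission check runs against the checked-out file mode.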
.github/workflows/changelog-pr.yaml (new file, 272 lines)
@ -0,0 +1,272 @@
|
||||
name: Create Changelog Pull Request
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["main"]
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
update-changelog-pull-request:
|
||||
if: github.repository == 'community-scripts/ProxmoxVED'
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CONFIG_PATH: .github/changelog-pr-config.json
|
||||
BRANCH_NAME: github-action-update-changelog
|
||||
AUTOMATED_PR_LABEL: "automated pr"
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@v1
|
||||
with:
|
||||
app-id: ${{ vars.APP_ID }}
|
||||
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get latest dates in changelog
|
||||
run: |
|
||||
DATES=$(grep -E '^## [0-9]{4}-[0-9]{2}-[0-9]{2}' CHANGELOG.md | head -n 2 | awk '{print $2}')
|
||||
|
||||
LATEST_DATE=$(echo "$DATES" | sed -n '1p')
|
||||
SECOND_LATEST_DATE=$(echo "$DATES" | sed -n '2p')
|
||||
TODAY=$(date -u +%Y-%m-%d)
|
||||
|
||||
echo "TODAY=$TODAY" >> $GITHUB_ENV
|
||||
if [[ "$LATEST_DATE" == "$TODAY" ]]; then
|
||||
echo "LATEST_DATE=$SECOND_LATEST_DATE" >> $GITHUB_ENV
|
||||
else
|
||||
echo "LATEST_DATE=$LATEST_DATE" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Get categorized pull requests
|
||||
id: get-categorized-prs
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
async function main() {
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
|
||||
const configPath = path.resolve(process.env.CONFIG_PATH);
|
||||
const fileContent = await fs.readFile(configPath, 'utf-8');
|
||||
const changelogConfig = JSON.parse(fileContent);
|
||||
|
||||
const categorizedPRs = changelogConfig.map(obj => ({
|
||||
...obj,
|
||||
notes: [],
|
||||
subCategories: obj.subCategories ?? (
|
||||
obj.labels.includes("update script") ? [
|
||||
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
|
||||
{ title: "✨ New Features", labels: ["feature"], notes: [] },
|
||||
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
|
||||
{ title: "🔧 Refactor", labels: ["refactor"], notes: [] },
|
||||
] :
|
||||
obj.labels.includes("maintenance") ? [
|
||||
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
|
||||
{ title: "✨ New Features", labels: ["feature"], notes: [] },
|
||||
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
|
||||
{ title: "📡 API", labels: ["api"], notes: [] },
|
||||
{ title: "Github", labels: ["github"], notes: [] },
|
||||
{ title: "📝 Documentation", labels: ["documentation"], notes: [] },
|
||||
{ title: "🔧 Refactor", labels: ["refactor"], notes: [] }
|
||||
] :
|
||||
obj.labels.includes("website") ? [
|
||||
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
|
||||
{ title: "✨ New Features", labels: ["feature"], notes: [] },
|
||||
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
|
||||
{ title: "Script Information", labels: ["json"], notes: [] }
|
||||
] : []
|
||||
)
|
||||
}));
|
||||
|
||||
const latestDateInChangelog = new Date(process.env.LATEST_DATE);
|
||||
latestDateInChangelog.setUTCHours(23, 59, 59, 999);
|
||||
|
||||
const { data: pulls } = await github.rest.pulls.list({
|
||||
owner: context.repo.owner,
|
||||
repo: "ProxmoxVE",
|
||||
base: "main",
|
||||
state: "closed",
|
||||
sort: "updated",
|
||||
direction: "desc",
|
||||
per_page: 100,
|
||||
});
|
||||
|
||||
const filteredPRs = pulls.filter(pr =>
|
||||
pr.merged_at &&
|
||||
new Date(pr.merged_at) > latestDateInChangelog &&
|
||||
!pr.labels.some(label =>
|
||||
["invalid", "wontdo", process.env.AUTOMATED_PR_LABEL].includes(label.name.toLowerCase())
|
||||
)
|
||||
);
|
||||
|
||||
for (const pr of filteredPRs) {
|
||||
const prLabels = pr.labels.map(label => label.name.toLowerCase());
|
||||
if (pr.user.login.includes("push-app-to-main[bot]")) {
|
||||
|
||||
const scriptName = pr.title;
|
||||
try {
|
||||
const { data: relatedIssues } = await github.rest.issues.listForRepo({
|
||||
owner: context.repo.owner,
|
||||
repo: "ProxmoxVED",
|
||||
state: "all",
|
||||
labels: ["Started Migration To ProxmoxVE"]
|
||||
});
|
||||
|
||||
const matchingIssue = relatedIssues.find(issue =>
|
||||
issue.title.toLowerCase().includes(scriptName.toLowerCase())
|
||||
);
|
||||
|
||||
if (matchingIssue) {
|
||||
const issueAuthor = matchingIssue.user.login;
|
||||
const issueAuthorUrl = `https://github.com/${issueAuthor}`;
|
||||
prNote = `- ${pr.title} [@${issueAuthor}](${issueAuthorUrl}) ([#${pr.number}](${pr.html_url}))`;
|
||||
}
|
||||
else {
|
||||
prNote = `- ${pr.title} ([#${pr.number}](${pr.html_url}))`;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Error fetching related issues: ${error}`);
|
||||
prNote = `- ${pr.title} ([#${pr.number}](${pr.html_url}))`;
|
||||
}
|
||||
}else{
|
||||
prNote = `- ${pr.title} [@${pr.user.login}](https://github.com/${pr.user.login}) ([#${pr.number}](${pr.html_url}))`;
|
||||
}
|
||||
|
||||
|
||||
if (prLabels.includes("new script")) {
|
||||
const newScriptCategory = categorizedPRs.find(category =>
|
||||
category.title === "New Scripts" || category.labels.includes("new script"));
|
||||
if (newScriptCategory) {
|
||||
newScriptCategory.notes.push(prNote);
|
||||
}
|
||||
} else {
|
||||
|
||||
let categorized = false;
|
||||
const priorityCategories = categorizedPRs.slice();
|
||||
for (const category of priorityCategories) {
|
||||
if (categorized) break;
|
||||
if (category.labels.some(label => prLabels.includes(label))) {
|
||||
if (category.subCategories && category.subCategories.length > 0) {
|
||||
const subCategory = category.subCategories.find(sub =>
|
||||
sub.labels.some(label => prLabels.includes(label))
|
||||
);
|
||||
|
||||
if (subCategory) {
|
||||
subCategory.notes.push(prNote);
|
||||
} else {
|
||||
category.notes.push(prNote);
|
||||
}
|
||||
} else {
|
||||
category.notes.push(prNote);
|
||||
}
|
||||
categorized = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return categorizedPRs;
|
||||
}
|
||||
|
||||
return await main();
|
||||
|
||||
- name: Update CHANGELOG.md
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
|
||||
const today = process.env.TODAY;
|
||||
const latestDateInChangelog = process.env.LATEST_DATE;
|
||||
const changelogPath = path.resolve('CHANGELOG.md');
|
||||
const categorizedPRs = ${{ steps.get-categorized-prs.outputs.result }};
|
||||
|
||||
let newReleaseNotes = `## ${today}\n\n`;
|
||||
for (const { title, notes, subCategories } of categorizedPRs) {
|
||||
const hasSubcategories = subCategories && subCategories.length > 0;
|
||||
const hasMainNotes = notes.length > 0;
|
||||
const hasSubNotes = hasSubcategories && subCategories.some(sub => sub.notes && sub.notes.length > 0);
|
||||
|
||||
if (hasMainNotes || hasSubNotes) {
|
||||
newReleaseNotes += `### ${title}\n\n`;
|
||||
}
|
||||
|
||||
if (hasMainNotes) {
|
||||
newReleaseNotes += ` ${notes.join("\n")}\n\n`;
|
||||
}
|
||||
if (hasSubcategories) {
|
||||
for (const { title: subTitle, notes: subNotes } of subCategories) {
|
||||
if (subNotes && subNotes.length > 0) {
|
||||
newReleaseNotes += ` - #### ${subTitle}\n\n`;
|
||||
newReleaseNotes += ` ${subNotes.join("\n ")}\n\n`;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
const changelogContent = await fs.readFile(changelogPath, 'utf-8');
|
||||
const changelogIncludesTodaysReleaseNotes = changelogContent.includes(`\n## ${today}`);
|
||||
|
||||
const regex = changelogIncludesTodaysReleaseNotes
|
||||
? new RegExp(`## ${today}.*(?=## ${latestDateInChangelog})`, "gs")
|
||||
: new RegExp(`(?=## ${latestDateInChangelog})`, "gs");
|
||||
|
||||
const newChangelogContent = changelogContent.replace(regex, newReleaseNotes);
|
||||
await fs.writeFile(changelogPath, newChangelogContent);
|
||||
|
||||
- name: Check for changes
|
||||
id: verify-diff
|
||||
run: |
|
||||
git diff --quiet . || echo "changed=true" >> $GITHUB_ENV
|
||||
|
||||
- name: Commit and push changes
|
||||
if: env.changed == 'true'
|
||||
run: |
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git add CHANGELOG.md
|
||||
git commit -m "Update CHANGELOG.md"
|
||||
git checkout -b $BRANCH_NAME || git checkout $BRANCH_NAME
|
||||
git push origin $BRANCH_NAME --force
|
||||
|
||||
- name: Create pull request if not exists
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
|
||||
run: |
|
||||
PR_EXISTS=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
|
||||
if [ -z "$PR_EXISTS" ]; then
|
||||
gh pr create --title "[Github Action] Update CHANGELOG.md" \
|
||||
--body "This PR is auto-generated by a Github Action to update the CHANGELOG.md file." \
|
||||
--head $BRANCH_NAME \
|
||||
--base main \
|
||||
--label "$AUTOMATED_PR_LABEL"
|
||||
fi
|
||||
|
||||
- name: Approve pull request
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
|
||||
if [ -n "$PR_NUMBER" ]; then
|
||||
gh pr review $PR_NUMBER --approve
|
||||
fi
|
||||
|
||||
- name: Re-approve pull request after update
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
|
||||
if [ -n "$PR_NUMBER" ]; then
|
||||
gh pr review $PR_NUMBER --approve
|
||||
fi
|
.github/workflows/close-discussion.yaml (new file, 164 lines)
@ -0,0 +1,164 @@
|
||||
name: Close Discussion on PR Merge
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
discussions: write
|
||||
|
||||
jobs:
|
||||
close-discussion:
|
||||
if: github.repository == 'community-scripts/ProxmoxVED'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set Up Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
|
||||
- name: Install Dependencies
|
||||
run: npm install zx @octokit/graphql
|
||||
|
||||
- name: Close Discussion
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_SHA: ${{ github.sha }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
run: |
|
||||
npx zx << 'EOF'
|
||||
import { graphql } from "@octokit/graphql";
|
||||
|
||||
(async function () {
|
||||
try {
|
||||
const token = process.env.GITHUB_TOKEN;
|
||||
const commitSha = process.env.GITHUB_SHA;
|
||||
const [owner, repo] = process.env.GITHUB_REPOSITORY.split("/");
|
||||
|
||||
if (!token || !commitSha || !owner || !repo) {
|
||||
console.log("Missing required environment variables.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const graphqlWithAuth = graphql.defaults({
|
||||
headers: { authorization: `Bearer ${token}` },
|
||||
});
|
||||
|
||||
// Find PR from commit SHA
|
||||
const searchQuery = `
|
||||
query($owner: String!, $repo: String!, $sha: GitObjectID!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
object(oid: $sha) {
|
||||
... on Commit {
|
||||
associatedPullRequests(first: 1) {
|
||||
nodes {
|
||||
number
|
||||
body
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const prResult = await graphqlWithAuth(searchQuery, {
|
||||
owner,
|
||||
repo,
|
||||
sha: commitSha,
|
||||
});
|
||||
|
||||
const pr = prResult.repository.object.associatedPullRequests.nodes[0];
|
||||
if (!pr) {
|
||||
console.log("No PR found for this commit.");
|
||||
return;
|
||||
}
|
||||
|
||||
const prNumber = pr.number;
|
||||
const prBody = pr.body;
|
||||
|
||||
const match = prBody.match(/#(\d+)/);
|
||||
if (!match) {
|
||||
console.log("No discussion ID found in PR body.");
|
||||
return;
|
||||
}
|
||||
|
||||
const discussionNumber = match[1];
|
||||
console.log(`Extracted Discussion Number: ${discussionNumber}`);
|
||||
|
||||
// Fetch GraphQL discussion ID
|
||||
const discussionQuery = `
|
||||
query($owner: String!, $repo: String!, $number: Int!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
discussion(number: $number) {
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
//
|
||||
try {
|
||||
const discussionResponse = await graphqlWithAuth(discussionQuery, {
|
||||
owner,
|
||||
repo,
|
||||
number: parseInt(discussionNumber, 10),
|
||||
});
|
||||
|
||||
const discussionQLId = discussionResponse.repository.discussion.id;
|
||||
if (!discussionQLId) {
|
||||
console.log("Failed to fetch discussion GraphQL ID.");
|
||||
return;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Discussion not found or error occurred while fetching discussion:", error);
|
||||
return;
|
||||
}
|
||||
|
||||
// Post comment
|
||||
const commentMutation = `
|
||||
mutation($discussionId: ID!, $body: String!) {
|
||||
addDiscussionComment(input: { discussionId: $discussionId, body: $body }) {
|
||||
comment { id body }
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const commentResponse = await graphqlWithAuth(commentMutation, {
|
||||
discussionId: discussionQLId,
|
||||
body: `Merged with PR #${prNumber}`,
|
||||
});
|
||||
|
||||
const commentId = commentResponse.addDiscussionComment.comment.id;
|
||||
if (!commentId) {
|
||||
console.log("Failed to post the comment.");
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Comment Posted Successfully! Comment ID: ${commentId}`);
|
||||
|
||||
// Mark comment as answer
|
||||
const markAnswerMutation = `
|
||||
mutation($id: ID!) {
|
||||
markDiscussionCommentAsAnswer(input: { id: $id }) {
|
||||
discussion { id title }
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
await graphqlWithAuth(markAnswerMutation, { id: commentId });
|
||||
|
||||
console.log("Comment marked as answer successfully!");
|
||||
|
||||
} catch (error) {
|
||||
console.error("Error:", error);
|
||||
process.exit(1);
|
||||
}
|
||||
})();
|
||||
EOF
|
.github/workflows/close-ttek-issue.yaml (new file, 53 lines)
@ -0,0 +1,53 @@
|
||||
name: Auto-Close tteck Issues
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [opened]
|
||||
|
||||
jobs:
|
||||
close_tteck_issues:
|
||||
if: github.repository == 'community-scripts/ProxmoxVED'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Auto-close if tteck script detected
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const issue = context.payload.issue;
|
||||
const content = `${issue.title}\n${issue.body}`;
|
||||
const issueNumber = issue.number;
|
||||
|
||||
// Check for tteck script mention
|
||||
if (content.includes("tteck") || content.includes("tteck/Proxmox")) {
|
||||
const message = `Hello, it looks like you are referencing the **old tteck repo**.
|
||||
|
||||
This repository is no longer used for active scripts.
|
||||
**Please update your bookmarks** and use: [https://helper-scripts.com](https://helper-scripts.com)
|
||||
|
||||
Also make sure your Bash command starts with:
|
||||
\`\`\`bash
|
||||
bash <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/ct/...)
|
||||
\`\`\`
|
||||
|
||||
This issue is being closed automatically.`;
|
||||
|
||||
await github.rest.issues.createComment({
|
||||
...context.repo,
|
||||
issue_number: issueNumber,
|
||||
body: message
|
||||
});
|
||||
|
||||
// Optionally apply a label like "not planned"
|
||||
await github.rest.issues.addLabels({
|
||||
...context.repo,
|
||||
issue_number: issueNumber,
|
||||
labels: ["not planned"]
|
||||
});
|
||||
|
||||
// Close the issue
|
||||
await github.rest.issues.update({
|
||||
...context.repo,
|
||||
issue_number: issueNumber,
|
||||
state: "closed"
|
||||
});
|
||||
}
|
@ -48,11 +48,7 @@ jobs:
|
||||
id: create_message
|
||||
run: |
|
||||
VAR="The ${{ env.TITLE }} script is ready for testing:\n"
|
||||
if [[ "${{ env.TITLE }}" != *"vm"* ]]; then
|
||||
VAR+="\`\`\`bash -c \"\$(curl -fsSL https://github.com/community-scripts/ProxmoxVED/raw/main/ct/${{ env.TITLE }}.sh)\"\`\`\`\n"
|
||||
else
|
||||
VAR+="\`\`\`bash -c \"\$(curl -fsSL https://github.com/community-scripts/ProxmoxVED/raw/main/vm/${{ env.TITLE }}.sh)\"\`\`\`\n"
|
||||
fi
|
||||
if [[ " ${EXISTING_FILES[@]} " =~ " frontend/public/json/${TITLE}.json " ]]; then
|
||||
JSON=$(curl -fsSL https://github.com/community-scripts/ProxmoxVED/raw/main/frontend/public/json/${{ env.TITLE }}.json)
|
||||
username=$(echo "$JSON" | jq -r '.default_credentials.username')
|
||||
@ -84,25 +80,7 @@ jobs:
|
||||
VAR+="${{ github.event.issue.html_url }}"
|
||||
echo "message=$VAR" >> $GITHUB_ENV
|
||||
|
||||
- name: Check if Discord thread exists
|
||||
id: check_thread
|
||||
run: |
|
||||
ISSUE_TITLE="${{ github.event.issue.title }}"
|
||||
|
||||
THREAD_ID=$(curl -s -X GET "https://discord.com/api/v10/guilds/${{ secrets.DISCORD_GUILD_ID }}/threads/active" \
|
||||
-H "Authorization: Bot ${{ secrets.DISCORD_BOT_TOKEN }}" \
|
||||
-H "Content-Type: application/json" | \
|
||||
jq -r --arg TITLE "$ISSUE_TITLE" --arg PARENT_ID "${{ secrets.DISCORD_CHANNEL_ID }}" \
|
||||
'.threads[] | select(.parent_id == $PARENT_ID and .name == ("Wanted Tester for " + $TITLE)) | .id')
|
||||
|
||||
if [ -n "$THREAD_ID" ]; then
|
||||
echo "thread_exists=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "thread_exists=false" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Create a forumpost in Discord
|
||||
if: steps.check_thread.outputs.thread_exists != 'true'
|
||||
id: post_to_discord
|
||||
env:
|
||||
DISCORD_CHANNEL_ID: ${{ secrets.DISCORD_CHANNEL_ID }}
|
||||
@ -118,16 +96,8 @@ jobs:
|
||||
-H "Content-Type: application/json" \
|
||||
-d "$JSON_PAYLOAD")
|
||||
|
||||
HTTP_BODY=$(echo "$RESPONSE" | head -n -1)
|
||||
HTTP_CODE=$(echo "$RESPONSE" | tail -n1)
|
||||
THREAD_ID=$(echo "$HTTP_BODY" | jq -r '.id')
|
||||
|
||||
STATUS_CODE=$(echo "$RESPONSE" | tail -n 1)
|
||||
if [[ "$HTTP_CODE" == "201" && -n "$THREAD_ID" ]]; then
|
||||
LOCK_RESPONSE=$(curl -s -X PATCH "https://discord.com/api/v10/channels/$THREAD_ID" \
|
||||
-H "Authorization: Bot $DISCORD_BOT_TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"locked": true}')
|
||||
if [ "$STATUS_CODE" -eq 201 ]; then
|
||||
echo "Discord post created successfully!"
|
||||
else
|
||||
echo "Response: $RESPONSE"
|
||||
@ -136,7 +106,6 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Comment on Issue
|
||||
if: steps.check_thread.outputs.thread_exists != 'true'
|
||||
id: comment_on_issue
|
||||
env:
|
||||
MESSAGE: ${{ env.message }}
|
||||
|
.github/workflows/delete-discord-thread.yml (4 changes)
@ -9,12 +9,12 @@ jobs:
|
||||
close_discord_thread:
|
||||
if: github.repository == 'community-scripts/ProxmoxVED'
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
|
||||
steps:
|
||||
- name: Get thread-ID op and close thread
|
||||
run: |
|
||||
ISSUE_TITLE="${{ github.event.issue.title }}"
|
||||
|
||||
THREAD_ID=$(curl -s -X GET "https://discord.com/api/v10/guilds/${{ secrets.DISCORD_GUILD_ID }}/threads/active" \
|
||||
-H "Authorization: Bot ${{ secrets.DISCORD_BOT_TOKEN }}" \
|
||||
-H "Content-Type: application/json" | \
|
||||
|
.github/workflows/delete_new_script.yaml (26 changes)
@ -14,15 +14,6 @@ jobs:
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate a token for PR approval and merge
|
||||
id: generate-token-merge
|
||||
uses: actions/create-github-app-token@v2
|
||||
with:
|
||||
app-id: ${{ secrets.APP_ID_APPROVE_AND_MERGE }}
|
||||
private-key: ${{ secrets.APP_KEY_APPROVE_AND_MERGE }}
|
||||
owner: community-scripts
|
||||
repositories: ProxmoxVED
|
||||
|
||||
- name: Extract Issue Title (Lowercase & Underscores)
|
||||
id: extract_title
|
||||
run: echo "TITLE=$(echo '${{ github.event.issue.title }}' | tr '[:upper:]' '[:lower:]' | sed 's/ /_/g')" >> $GITHUB_ENV
|
||||
@ -61,7 +52,7 @@ jobs:
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
branch="delete_files"
|
||||
branch=$(echo "delete-files_${{ github.event.issue.number }}_${TITLE}" | tr '[:upper:]' '[:lower:]' | sed 's/ /_/g')
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git checkout -b $branch
|
||||
@ -74,26 +65,13 @@ jobs:
|
||||
exit 0
|
||||
fi
|
||||
git commit -m "Deleted files for issue: ${{ github.event.issue.title }}"
|
||||
git push origin $branch --force
|
||||
git push origin $branch
|
||||
gh pr create --title "Delete Files for ${{ github.event.issue.title }} after Merge to Main" --body "Delete files after merge in main repo." --base main --head $branch
|
||||
|
||||
pr_number=$(gh pr list | grep -m 1 $branch | awk '{print $1}')
|
||||
#gh pr merge $pr_number --squash
|
||||
echo pr_number=$pr_number >> $GITHUB_ENV
|
||||
|
||||
- name: Approve pull request and merge
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.generate-token-merge.outputs.token }}
|
||||
run: |
|
||||
git config --global user.name "github-actions-automege[bot]"
|
||||
git config --global user.email "github-actions-automege[bot]@users.noreply.github.com"
|
||||
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
|
||||
if [ -n "$PR_NUMBER" ]; then
|
||||
gh pr review $PR_NUMBER --approve
|
||||
gh pr merge $PR_NUMBER --squash --admin
|
||||
fi
|
||||
|
||||
- name: Comment on Issue
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
|
.github/workflows/get-versions-from-gh.yaml (4 changes)
@ -24,12 +24,10 @@ jobs:
|
||||
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@v2
|
||||
uses: actions/create-github-app-token@v1
|
||||
with:
|
||||
app-id: ${{ vars.APP_ID }}
|
||||
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||
owner: community-scripts
|
||||
repositories: ProxmoxVED
|
||||
|
||||
- name: Crawl from Github API
|
||||
env:
|
||||
|
@ -24,21 +24,10 @@ jobs:
|
||||
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@v2
|
||||
uses: actions/create-github-app-token@v1
|
||||
with:
|
||||
app-id: ${{ vars.APP_ID }}
|
||||
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||
owner: community-scripts
|
||||
repositories: ProxmoxVED
|
||||
|
||||
- name: Generate a token for PR approval and merge
|
||||
id: generate-token-merge
|
||||
uses: actions/create-github-app-token@v2
|
||||
with:
|
||||
app-id: ${{ secrets.APP_ID_APPROVE_AND_MERGE }}
|
||||
private-key: ${{ secrets.APP_KEY_APPROVE_AND_MERGE }}
|
||||
owner: community-scripts
|
||||
repositories: ProxmoxVED
|
||||
|
||||
- name: Crawl from newreleases.io
|
||||
env:
|
||||
@ -121,12 +110,10 @@ jobs:
|
||||
- name: Approve pull request and merge
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.generate-token-merge.outputs.token }}
|
||||
GH_TOKEN: ${{ secrets.PAT_MICHEL }}
|
||||
run: |
|
||||
git config --global user.name "github-actions-automege[bot]"
|
||||
git config --global user.email "github-actions-automege[bot]@users.noreply.github.com"
|
||||
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[0].number')
|
||||
PR_NUMBER=$(gh pr list --head "update_versions" --json number --jq '.[].number')
|
||||
if [ -n "$PR_NUMBER" ]; then
|
||||
gh pr review "$PR_NUMBER" --approve
|
||||
gh pr merge "$PR_NUMBER" --squash --admin
|
||||
gh pr review $PR_NUMBER --approve
|
||||
gh pr merge $PR_NUMBER --squash --delete-branch --admin
|
||||
fi
|
||||
|
.github/workflows/live/changelog-pr.yml (new file, 226 lines)
@ -0,0 +1,226 @@
|
||||
name: Create Changelog Pull Request
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["main"]
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
update-changelog-pull-request:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CONFIG_PATH: .github/changelog-pr-config.json
|
||||
BRANCH_NAME: github-action-update-changelog
|
||||
AUTOMATED_PR_LABEL: "automated pr"
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@v1
|
||||
with:
|
||||
app-id: ${{ vars.APP_ID }}
|
||||
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get latest dates in changelog
|
||||
run: |
|
||||
DATES=$(grep -E '^## [0-9]{4}-[0-9]{2}-[0-9]{2}' CHANGELOG.md | head -n 2 | awk '{print $2}')
|
||||
|
||||
LATEST_DATE=$(echo "$DATES" | sed -n '1p')
|
||||
SECOND_LATEST_DATE=$(echo "$DATES" | sed -n '2p')
|
||||
TODAY=$(date -u +%Y-%m-%d)
|
||||
|
||||
echo "TODAY=$TODAY" >> $GITHUB_ENV
|
||||
if [[ "$LATEST_DATE" == "$TODAY" ]]; then
|
||||
echo "LATEST_DATE=$SECOND_LATEST_DATE" >> $GITHUB_ENV
|
||||
else
|
||||
echo "LATEST_DATE=$LATEST_DATE" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Get categorized pull requests
|
||||
id: get-categorized-prs
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
|
||||
const configPath = path.resolve(process.env.CONFIG_PATH);
|
||||
const fileContent = await fs.readFile(configPath, 'utf-8');
|
||||
const changelogConfig = JSON.parse(fileContent);
|
||||
|
||||
const categorizedPRs = changelogConfig.map(obj => ({
|
||||
...obj,
|
||||
notes: [],
|
||||
subCategories: obj.subCategories ?? (
|
||||
obj.labels.includes("update script") ? [
|
||||
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
|
||||
{ title: "✨ New Features", labels: ["feature"], notes: [] },
|
||||
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] }
|
||||
] :
|
||||
obj.labels.includes("maintenance") ? [
|
||||
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
|
||||
{ title: "✨ New Features", labels: ["feature"], notes: [] },
|
||||
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
|
||||
{ title: "📡 API", labels: ["api"], notes: [] },
|
||||
{ title: "Github", labels: ["github"], notes: [] }
|
||||
] :
|
||||
obj.labels.includes("website") ? [
|
||||
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
|
||||
{ title: "✨ New Features", labels: ["feature"], notes: [] },
|
||||
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
|
||||
{ title: "Script Information", labels: ["json"], notes: [] }
|
||||
] : []
|
||||
)
|
||||
}));
|
||||
|
||||
const latestDateInChangelog = new Date(process.env.LATEST_DATE);
|
||||
latestDateInChangelog.setUTCHours(23, 59, 59, 999);
|
||||
|
||||
const { data: pulls } = await github.rest.pulls.list({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
base: "main",
|
||||
state: "closed",
|
||||
sort: "updated",
|
||||
direction: "desc",
|
||||
per_page: 100,
|
||||
});
|
||||
|
||||
pulls.filter(pr =>
|
||||
pr.merged_at &&
|
||||
new Date(pr.merged_at) > latestDateInChangelog &&
|
||||
!pr.labels.some(label =>
|
||||
["invalid", "wontdo", process.env.AUTOMATED_PR_LABEL].includes(label.name.toLowerCase())
|
||||
)
|
||||
).forEach(pr => {
|
||||
|
||||
const prLabels = pr.labels.map(label => label.name.toLowerCase());
|
||||
const prNote = `- ${pr.title} [@${pr.user.login}](https://github.com/${pr.user.login}) ([#${pr.number}](${pr.html_url}))`;
|
||||
|
||||
const updateScriptsCategory = categorizedPRs.find(category =>
|
||||
category.labels.some(label => prLabels.includes(label))
|
||||
);
|
||||
|
||||
if (updateScriptsCategory) {
|
||||
|
||||
const subCategory = updateScriptsCategory.subCategories.find(sub =>
|
||||
sub.labels.some(label => prLabels.includes(label))
|
||||
);
|
||||
|
||||
if (subCategory) {
|
||||
subCategory.notes.push(prNote);
|
||||
} else {
|
||||
updateScriptsCategory.notes.push(prNote);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
console.log(JSON.stringify(categorizedPRs, null, 2));
|
||||
|
||||
return categorizedPRs;
|
||||
|
||||
|
||||
- name: Update CHANGELOG.md
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
|
||||
const today = process.env.TODAY;
|
||||
const latestDateInChangelog = process.env.LATEST_DATE;
|
||||
const changelogPath = path.resolve('CHANGELOG.md');
|
||||
const categorizedPRs = ${{ steps.get-categorized-prs.outputs.result }};
|
||||
|
||||
console.log(JSON.stringify(categorizedPRs, null, 2));
|
||||
|
||||
|
||||
let newReleaseNotes = `## ${today}\n\n`;
|
||||
for (const { title, notes, subCategories } of categorizedPRs) {
|
||||
const hasSubcategories = subCategories && subCategories.length > 0;
|
||||
const hasMainNotes = notes.length > 0;
|
||||
const hasSubNotes = hasSubcategories && subCategories.some(sub => sub.notes && sub.notes.length > 0);
|
||||
|
||||
|
||||
if (hasMainNotes || hasSubNotes) {
|
||||
newReleaseNotes += `### ${title}\n\n`;
|
||||
}
|
||||
|
||||
if (hasMainNotes) {
|
||||
newReleaseNotes += ` ${notes.join("\n")}\n\n`;
|
||||
}
|
||||
if (hasSubcategories) {
|
||||
for (const { title: subTitle, notes: subNotes } of subCategories) {
|
||||
if (subNotes && subNotes.length > 0) {
|
||||
newReleaseNotes += ` - #### ${subTitle}\n\n`;
|
||||
newReleaseNotes += ` ${subNotes.join("\n ")}\n\n`;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const changelogContent = await fs.readFile(changelogPath, 'utf-8');
|
||||
const changelogIncludesTodaysReleaseNotes = changelogContent.includes(`\n## ${today}`);
|
||||
|
||||
const regex = changelogIncludesTodaysReleaseNotes
|
||||
? new RegExp(`## ${today}.*(?=## ${latestDateInChangelog})`, "gs")
|
||||
: new RegExp(`(?=## ${latestDateInChangelog})`, "gs");
|
||||
|
||||
const newChangelogContent = changelogContent.replace(regex, newReleaseNotes);
|
||||
await fs.writeFile(changelogPath, newChangelogContent);
|
||||
|
||||
- name: Check for changes
|
||||
id: verify-diff
|
||||
run: |
|
||||
git diff --quiet . || echo "changed=true" >> $GITHUB_ENV
|
||||
|
||||
- name: Commit and push changes
|
||||
if: env.changed == 'true'
|
||||
run: |
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git add CHANGELOG.md
|
||||
git commit -m "Update CHANGELOG.md"
|
||||
git checkout -b $BRANCH_NAME || git checkout $BRANCH_NAME
|
||||
git push origin $BRANCH_NAME --force
|
||||
|
||||
- name: Create pull request if not exists
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
|
||||
run: |
|
||||
PR_EXISTS=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
|
||||
if [ -z "$PR_EXISTS" ]; then
|
||||
gh pr create --title "[Github Action] Update CHANGELOG.md" \
|
||||
--body "This PR is auto-generated by a Github Action to update the CHANGELOG.md file." \
|
||||
--head $BRANCH_NAME \
|
||||
--base main \
|
||||
--label "$AUTOMATED_PR_LABEL"
|
||||
fi
|
||||
|
||||
- name: Approve pull request
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
|
||||
if [ -n "$PR_NUMBER" ]; then
|
||||
gh pr review $PR_NUMBER --approve
|
||||
fi
|
||||
|
||||
- name: Re-approve pull request after update
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
|
||||
if [ -n "$PR_NUMBER" ]; then
|
||||
gh pr review $PR_NUMBER --approve
|
||||
fi
|
122
.github/workflows/live/close-discussion.yml
vendored
Normal file
@ -0,0 +1,122 @@
|
||||
name: Close Discussion on PR Merge
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [closed]
|
||||
|
||||
jobs:
|
||||
close-discussion:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set Up Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Install Dependencies
|
||||
run: npm install zx @octokit/graphql
|
||||
|
||||
- name: Close Discussion
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
PR_BODY: ${{ github.event.pull_request.body }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
REPO_OWNER: ${{ github.repository_owner }}
|
||||
REPO_NAME: ${{ github.event.repository.name }}
|
||||
run: |
|
||||
npx zx << 'EOF'
|
||||
import { graphql } from "@octokit/graphql";
|
||||
(async function() {
|
||||
try {
|
||||
const token = process.env.GITHUB_TOKEN;
|
||||
const prBody = process.env.PR_BODY;
|
||||
const prNumber = process.env.PR_NUMBER;
|
||||
const owner = process.env.REPO_OWNER;
|
||||
const repo = process.env.REPO_NAME;
|
||||
|
||||
if (!token || !prBody || !prNumber || !owner || !repo) {
|
||||
console.log("Missing required environment variables.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const match = prBody.match(/#(\d+)/);
|
||||
if (!match) {
|
||||
console.log("No discussion ID found in PR body.");
|
||||
return;
|
||||
}
|
||||
const discussionNumber = match[1];
|
||||
|
||||
console.log(`Extracted Discussion Number: ${discussionNumber}`);
|
||||
console.log(`PR Number: ${prNumber}`);
|
||||
console.log(`Repository: ${owner}/${repo}`);
|
||||
|
||||
const graphqlWithAuth = graphql.defaults({
|
||||
headers: { authorization: `Bearer ${token}` },
|
||||
});
|
||||
|
||||
const discussionQuery = `
|
||||
query($owner: String!, $repo: String!, $number: Int!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
discussion(number: $number) {
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const discussionResponse = await graphqlWithAuth(discussionQuery, {
|
||||
owner,
|
||||
repo,
|
||||
number: parseInt(discussionNumber, 10),
|
||||
});
|
||||
|
||||
const discussionQLId = discussionResponse.repository.discussion.id;
|
||||
if (!discussionQLId) {
|
||||
console.log("Failed to fetch discussion GraphQL ID.");
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`GraphQL Discussion ID: ${discussionQLId}`);
|
||||
|
||||
const commentMutation = `
|
||||
mutation($discussionId: ID!, $body: String!) {
|
||||
addDiscussionComment(input: { discussionId: $discussionId, body: $body }) {
|
||||
comment { id body }
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const commentResponse = await graphqlWithAuth(commentMutation, {
|
||||
discussionId: discussionQLId,
|
||||
body: `Merged with PR #${prNumber}`,
|
||||
});
|
||||
|
||||
const commentId = commentResponse.addDiscussionComment.comment.id;
|
||||
if (!commentId) {
|
||||
console.log("Failed to post the comment.");
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Comment Posted Successfully! Comment ID: ${commentId}`);
|
||||
|
||||
const markAnswerMutation = `
|
||||
mutation($id: ID!) {
|
||||
markDiscussionCommentAsAnswer(input: { id: $id }) {
|
||||
discussion { id title }
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
await graphqlWithAuth(markAnswerMutation, { id: commentId });
|
||||
|
||||
console.log("Comment marked as answer successfully!");
|
||||
|
||||
} catch (error) {
|
||||
console.error("Error:", error);
|
||||
return;
|
||||
}
|
||||
})();
|
||||
EOF
|
37
.github/workflows/live/create-docker-for-runner.yml
vendored
Normal file
@ -0,0 +1,37 @@
|
||||
name: Build and Publish Docker Image
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- '.github/runner/docker/**'
|
||||
schedule:
|
||||
- cron: '0 0 * * *'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest #To ensure it always builds we use the github runner with all the right tooling
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Log in to GHCR
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build Docker image
|
||||
run: |
|
||||
repo_name=${{ github.repository }} # Get repository name
|
||||
repo_name_lower=$(echo $repo_name | tr '[:upper:]' '[:lower:]') # Convert to lowercase
|
||||
docker build -t ghcr.io/$repo_name_lower/gh-runner-self:latest -f .github/runner/docker/gh-runner-self.dockerfile .
|
||||
|
||||
- name: Push Docker image to GHCR
|
||||
run: |
|
||||
repo_name=${{ github.repository }} # Get repository name
|
||||
repo_name_lower=$(echo $repo_name | tr '[:upper:]' '[:lower:]') # Convert to lowercase
|
||||
docker push ghcr.io/$repo_name_lower/gh-runner-self:latest
|
28
.github/workflows/live/delete-json-branch.yml
vendored
Normal file
@ -0,0 +1,28 @@
|
||||
|
||||
name: Delete JSON date PR Branch
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [closed]
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
delete_branch:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout the code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Delete PR Update Branch
|
||||
if: github.event.pull_request.merged == true && startsWith(github.event.pull_request.head.ref, 'pr-update-json-')
|
||||
run: |
|
||||
PR_BRANCH="${{ github.event.pull_request.head.ref }}"
|
||||
echo "Deleting branch $PR_BRANCH..."
|
||||
|
||||
# Avoid deleting the default branch (e.g., main)
|
||||
if [[ "$PR_BRANCH" != "main" ]]; then
|
||||
git push origin --delete "$PR_BRANCH"
|
||||
else
|
||||
echo "Skipping deletion of the main branch"
|
||||
fi
|
57
.github/workflows/live/github-release.yml
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
name: Create Daily Release
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '1 0 * * *' # Runs daily at 00:01 UTC
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
create-daily-release:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Extract first 5000 characters from CHANGELOG.md
|
||||
run: head -c 5000 CHANGELOG.md > changelog_cropped.md
|
||||
|
||||
- name: Debugging - Show extracted changelog
|
||||
run: |
|
||||
echo "=== CHANGELOG EXCERPT ==="
|
||||
cat changelog_cropped.md
|
||||
echo "========================="
|
||||
|
||||
- name: Parse CHANGELOG.md and create release
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
YESTERDAY=$(date -u --date="yesterday" +%Y-%m-%d)
|
||||
echo "Checking for changes on: $YESTERDAY"
|
||||
|
||||
# Ensure yesterday's date exists in the changelog
|
||||
if ! grep -q "## $YESTERDAY" changelog_cropped.md; then
|
||||
echo "No entry found for $YESTERDAY, skipping release."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Extract section for yesterday's date
|
||||
awk -v date="## $YESTERDAY" '
|
||||
$0 ~ date {found=1; next}
|
||||
found && /^## [0-9]{4}-[0-9]{2}-[0-9]{2}/ {exit}
|
||||
found
|
||||
' changelog_cropped.md > changelog_tmp.md
|
||||
|
||||
echo "=== Extracted Changelog ==="
|
||||
cat changelog_tmp.md
|
||||
echo "==========================="
|
||||
|
||||
# Skip if no content was found
|
||||
if [ ! -s changelog_tmp.md ]; then
|
||||
echo "No changes found for $YESTERDAY, skipping release."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Create GitHub release
|
||||
gh release create "$YESTERDAY" -t "$YESTERDAY" -F changelog_tmp.md
|
177
.github/workflows/live/script-test.yml
vendored
Normal file
@ -0,0 +1,177 @@
|
||||
name: Run Scripts on PVE Node for testing
|
||||
permissions:
|
||||
pull-requests: write
|
||||
on:
|
||||
pull_request_target:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- 'install/**.sh'
|
||||
- 'ct/**.sh'
|
||||
|
||||
jobs:
|
||||
run-install-script:
|
||||
runs-on: pvenode
|
||||
steps:
|
||||
- name: Checkout PR branch
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Add Git safe directory
|
||||
run: |
|
||||
git config --global --add safe.directory /__w/ProxmoxVED/ProxmoxVE
|
||||
|
||||
- name: Set up GH_TOKEN
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
echo "GH_TOKEN=${GH_TOKEN}" >> $GITHUB_ENV
|
||||
|
||||
- name: Get Changed Files
|
||||
run: |
|
||||
CHANGED_FILES=$(gh pr diff ${{ github.event.pull_request.number }} --repo ${{ github.repository }} --name-only)
|
||||
CHANGED_FILES=$(echo "$CHANGED_FILES" | tr '\n' ' ')
|
||||
echo "Changed files: $CHANGED_FILES"
|
||||
echo "SCRIPT=$CHANGED_FILES" >> $GITHUB_ENV
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
|
||||
- name: Get scripts
|
||||
id: check-install-script
|
||||
run: |
|
||||
ALL_FILES=()
|
||||
ADDED_FILES=()
|
||||
for FILE in ${{ env.SCRIPT }}; do
|
||||
if [[ $FILE =~ ^install/.*-install\.sh$ ]] || [[ $FILE =~ ^ct/.*\.sh$ ]]; then
|
||||
STRIPPED_NAME=$(basename "$FILE" | sed 's/-install//' | sed 's/\.sh$//')
|
||||
if [[ ! " ${ADDED_FILES[@]} " =~ " $STRIPPED_NAME " ]]; then
|
||||
ALL_FILES+=("$FILE")
|
||||
ADDED_FILES+=("$STRIPPED_NAME") # Mark this base file as added (without the path)
|
||||
fi
|
||||
fi
|
||||
done
|
||||
ALL_FILES=$(echo "${ALL_FILES[@]}" | xargs)
|
||||
echo "$ALL_FILES"
|
||||
echo "ALL_FILES=$ALL_FILES" >> $GITHUB_ENV
|
||||
|
||||
- name: Run scripts
|
||||
id: run-install
|
||||
continue-on-error: true
|
||||
run: |
|
||||
set +e
|
||||
#run for each files in /ct
|
||||
for FILE in ${{ env.ALL_FILES }}; do
|
||||
STRIPPED_NAME=$(basename "$FILE" | sed 's/-install//' | sed 's/\.sh$//')
|
||||
echo "Running Test for: $STRIPPED_NAME"
|
||||
if grep -E -q 'read\s+-r\s+-p\s+".*"\s+\w+' "$FILE"; then
|
||||
echo "The script contains an interactive prompt. Skipping execution."
|
||||
continue
|
||||
fi
|
||||
if [[ $FILE =~ ^install/.*-install\.sh$ ]]; then
|
||||
CT_SCRIPT="ct/$STRIPPED_NAME.sh"
|
||||
if [[ ! -f $CT_SCRIPT ]]; then
|
||||
echo "No CT script found for $STRIPPED_NAME"
|
||||
ERROR_MSG="No CT script found for $FILE"
|
||||
echo "$ERROR_MSG" > result_$STRIPPED_NAME.log
|
||||
continue
|
||||
fi
|
||||
if grep -E -q 'read\s+-r\s+-p\s+".*"\s+\w+' "install/$STRIPPED_NAME-install.sh"; then
|
||||
echo "The script contains an interactive prompt. Skipping execution."
|
||||
continue
|
||||
fi
|
||||
echo "Found CT script for $STRIPPED_NAME"
|
||||
chmod +x "$CT_SCRIPT"
|
||||
RUNNING_FILE=$CT_SCRIPT
|
||||
elif [[ $FILE =~ ^ct/.*\.sh$ ]]; then
|
||||
INSTALL_SCRIPT="install/$STRIPPED_NAME-install.sh"
|
||||
if [[ ! -f $INSTALL_SCRIPT ]]; then
|
||||
echo "No install script found for $STRIPPED_NAME"
|
||||
ERROR_MSG="No install script found for $FILE"
|
||||
echo "$ERROR_MSG" > result_$STRIPPED_NAME.log
|
||||
continue
|
||||
fi
|
||||
echo "Found install script for $STRIPPED_NAME"
|
||||
chmod +x "$INSTALL_SCRIPT"
|
||||
RUNNING_FILE=$FILE
|
||||
if grep -E -q 'read\s+-r\s+-p\s+".*"\s+\w+' "ct/$STRIPPED_NAME.sh"; then
|
||||
echo "The script contains an interactive prompt. Skipping execution."
|
||||
continue
|
||||
fi
|
||||
fi
|
||||
git remote add community-scripts https://github.com/community-scripts/ProxmoxVE.git
|
||||
git fetch community-scripts
|
||||
rm -f .github/workflows/scripts/app-test/pr-build.func || true
|
||||
rm -f .github/workflows/scripts/app-test/pr-install.func || true
|
||||
rm -f .github/workflows/scripts/app-test/pr-alpine-install.func || true
|
||||
rm -f .github/workflows/scripts/app-test/pr-create-lxc.sh || true
|
||||
git checkout community-scripts/main -- .github/workflows/scripts/app-test/pr-build.func
|
||||
git checkout community-scripts/main -- .github/workflows/scripts/app-test/pr-install.func
|
||||
git checkout community-scripts/main -- .github/workflows/scripts/app-test/pr-alpine-install.func
|
||||
git checkout community-scripts/main -- .github/workflows/scripts/app-test/pr-create-lxc.sh
|
||||
chmod +x $RUNNING_FILE
|
||||
chmod +x .github/workflows/scripts/app-test/pr-create-lxc.sh
|
||||
chmod +x .github/workflows/scripts/app-test/pr-install.func
|
||||
chmod +x .github/workflows/scripts/app-test/pr-alpine-install.func
|
||||
chmod +x .github/workflows/scripts/app-test/pr-build.func
|
||||
sed -i 's|source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)|source .github/workflows/scripts/app-test/pr-build.func|g' "$RUNNING_FILE"
|
||||
echo "Executing $RUNNING_FILE"
|
||||
ERROR_MSG=$(./$RUNNING_FILE 2>&1 > /dev/null)
|
||||
echo "Finished running $FILE"
|
||||
if [ -n "$ERROR_MSG" ]; then
|
||||
echo "ERROR in $STRIPPED_NAME: $ERROR_MSG"
|
||||
echo "$ERROR_MSG" > result_$STRIPPED_NAME.log
|
||||
fi
|
||||
done
|
||||
set -e # Restore exit-on-error
|
||||
|
||||
- name: Cleanup PVE Node
|
||||
run: |
|
||||
containers=$(pct list | tail -n +2 | awk '{print $0 " " $4}' | awk '{print $1}')
|
||||
|
||||
for container_id in $containers; do
|
||||
status=$(pct status $container_id | awk '{print $2}')
|
||||
if [[ $status == "running" ]]; then
|
||||
pct stop $container_id
|
||||
pct destroy $container_id
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Post error comments
|
||||
run: |
|
||||
ERROR="false"
|
||||
SEARCH_LINE=".github/workflows/scripts/app-test/pr-build.func: line 255:"
|
||||
|
||||
# Get all existing comments on the PR
|
||||
EXISTING_COMMENTS=$(gh pr view ${{ github.event.pull_request.number }} --repo ${{ github.repository }} --json comments --jq '.comments[].body')
|
||||
|
||||
for FILE in ${{ env.ALL_FILES }}; do
|
||||
STRIPPED_NAME=$(basename "$FILE" | sed 's/-install//' | sed 's/\.sh$//')
|
||||
if [[ ! -f result_$STRIPPED_NAME.log ]]; then
|
||||
continue
|
||||
fi
|
||||
ERROR_MSG=$(cat result_$STRIPPED_NAME.log)
|
||||
|
||||
if [ -n "$ERROR_MSG" ]; then
|
||||
CLEANED_ERROR_MSG=$(echo "$ERROR_MSG" | sed "s|$SEARCH_LINE.*||")
|
||||
COMMENT_BODY=":warning: The script _**$FILE**_ failed with the following message: <br> <div><strong>${CLEANED_ERROR_MSG}</strong></div>"
|
||||
|
||||
# Check if the comment already exists
|
||||
if echo "$EXISTING_COMMENTS" | grep -qF "$COMMENT_BODY"; then
|
||||
echo "Skipping duplicate comment for $FILE"
|
||||
else
|
||||
echo "Posting error message for $FILE"
|
||||
gh pr comment ${{ github.event.pull_request.number }} \
|
||||
--repo ${{ github.repository }} \
|
||||
--body "$COMMENT_BODY"
|
||||
ERROR="true"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
echo "ERROR=$ERROR" >> $GITHUB_ENV
|
||||
|
||||
|
243
.github/workflows/live/script_format.yml
vendored
Normal file
@ -0,0 +1,243 @@
|
||||
name: Script Format Check
|
||||
permissions:
|
||||
pull-requests: write
|
||||
on:
|
||||
pull_request_target:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- 'install/*.sh'
|
||||
- 'ct/*.sh'
|
||||
|
||||
jobs:
|
||||
run-install-script:
|
||||
runs-on: pvenode
|
||||
steps:
|
||||
- name: Checkout PR branch (supports forks)
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Add Git safe directory
|
||||
run: |
|
||||
git config --global --add safe.directory /__w/ProxmoxVED/ProxmoxVE
|
||||
|
||||
- name: Set up GH_TOKEN
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
echo "GH_TOKEN=${GH_TOKEN}" >> $GITHUB_ENV
|
||||
|
||||
- name: Get Changed Files
|
||||
run: |
|
||||
CHANGED_FILES=$(gh pr diff ${{ github.event.pull_request.number }} --repo ${{ github.repository }} --name-only)
|
||||
CHANGED_FILES=$(echo "$CHANGED_FILES" | tr '\n' ' ')
|
||||
echo "Changed files: $CHANGED_FILES"
|
||||
echo "SCRIPT=$CHANGED_FILES" >> $GITHUB_ENV
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Check scripts
|
||||
id: run-install
|
||||
continue-on-error: true
|
||||
run: |
|
||||
for FILE in ${{ env.SCRIPT }}; do
|
||||
STRIPPED_NAME=$(basename "$FILE" | sed 's/-install//' | sed 's/\.sh$//')
|
||||
echo "Running Test for: $STRIPPED_NAME"
|
||||
FILE_STRIPPED="${FILE##*/}"
|
||||
LOG_FILE="result_$FILE_STRIPPED.log"
|
||||
|
||||
if [[ $FILE =~ ^ct/.*\.sh$ ]]; then
|
||||
|
||||
FIRST_LINE=$(sed -n '1p' "$FILE")
|
||||
[[ "$FIRST_LINE" != "#!/usr/bin/env bash" ]] && echo "Line 1 was $FIRST_LINE | Should be: #!/usr/bin/env bash" >> "$LOG_FILE"
|
||||
SECOND_LINE=$(sed -n '2p' "$FILE")
|
||||
[[ "$SECOND_LINE" != "source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)" ]] &&
|
||||
echo "Line 2 was $SECOND_LINE | Should be: source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)" >> "$LOG_FILE"
|
||||
THIRD_LINE=$(sed -n '3p' "$FILE")
|
||||
if ! [[ "$THIRD_LINE" =~ ^#\ Copyright\ \(c\)\ [0-9]{4}-[0-9]{4}\ community-scripts\ ORG$ || "$THIRD_LINE" =~ ^Copyright\ \(c\)\ [0-9]{4}-[0-9]{4}\ tteck$ ]]; then
|
||||
echo "Line 3 was $THIRD_LINE | Should be: # Copyright (c) 2021-2025 community-scripts ORG" >> "$LOG_FILE"
|
||||
fi
|
||||
|
||||
EXPECTED_AUTHOR="# Author:"
|
||||
EXPECTED_LICENSE="# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE"
|
||||
EXPECTED_SOURCE="# Source:"
|
||||
EXPECTED_EMPTY=""
|
||||
|
||||
for i in {4..7}; do
|
||||
LINE=$(sed -n "${i}p" "$FILE")
|
||||
|
||||
case $i in
|
||||
4)
|
||||
[[ $LINE == $EXPECTED_AUTHOR* ]] || printf "Line %d was: '%s' | Should start with: '%s'\n" "$i" "$LINE" "$EXPECTED_AUTHOR" >> $LOG_FILE
|
||||
;;
|
||||
5)
|
||||
[[ "$LINE" == "$EXPECTED_LICENSE" ]] || printf "Line %d was: '%s' | Should be: '%s'\n" "$i" "$LINE" "$EXPECTED_LICENSE" >> $LOG_FILE
|
||||
;;
|
||||
6)
|
||||
[[ $LINE == $EXPECTED_SOURCE* ]] || printf "Line %d was: '%s' | Should start with: '%s'\n" "$i" "$LINE" "$EXPECTED_SOURCE" >> $LOG_FILE
|
||||
;;
|
||||
7)
|
||||
[[ -z $LINE ]] || printf "Line %d was: '%s' | Should be empty\n" "$i" "$LINE" >> $LOG_FILE
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
|
||||
EXPECTED_PREFIXES=(
|
||||
"APP="
|
||||
"var_tags="
|
||||
"var_cpu=" # Must be a number
|
||||
"var_ram=" # Must be a number
|
||||
"var_disk=" # Must be a number
|
||||
"var_os=" # Must be debian, alpine, or ubuntu
|
||||
"var_version="
|
||||
"var_unprivileged=" # Must be 0 or 1
|
||||
)
|
||||
|
||||
|
||||
for i in {8..15}; do
|
||||
LINE=$(sed -n "${i}p" "$FILE")
|
||||
INDEX=$((i - 8))
|
||||
|
||||
case $INDEX in
|
||||
2|3|4) # var_cpu, var_ram, var_disk (must be numbers)
|
||||
if [[ "$LINE" =~ ^${EXPECTED_PREFIXES[$INDEX]}([0-9]+)$ ]]; then
|
||||
continue # Valid
|
||||
else
|
||||
echo "Line $i was '$LINE' | Should be: '${EXPECTED_PREFIXES[$INDEX]}<NUMBER>'" >> "$LOG_FILE"
|
||||
fi
|
||||
;;
|
||||
5) # var_os (must be debian, alpine, or ubuntu)
|
||||
if [[ "$LINE" =~ ^var_os=(debian|alpine|ubuntu)$ ]]; then
|
||||
continue # Valid
|
||||
else
|
||||
echo "Line $i was '$LINE' | Should be: 'var_os=[debian|alpine|ubuntu]'" >> "$LOG_FILE"
|
||||
fi
|
||||
;;
|
||||
7) # var_unprivileged (must be 0 or 1)
|
||||
if [[ "$LINE" =~ ^var_unprivileged=[01]$ ]]; then
|
||||
continue # Valid
|
||||
else
|
||||
echo "Line $i was '$LINE' | Should be: 'var_unprivileged=[0|1]'" >> "$LOG_FILE"
|
||||
fi
|
||||
;;
|
||||
*) # Other lines (must start with expected prefix)
|
||||
if [[ "$LINE" == ${EXPECTED_PREFIXES[$INDEX]}* ]]; then
|
||||
continue # Valid
|
||||
else
|
||||
echo "Line $i was '$LINE' | Should start with '${EXPECTED_PREFIXES[$INDEX]}'" >> "$LOG_FILE"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
for i in {16..20}; do
|
||||
LINE=$(sed -n "${i}p" "$FILE")
|
||||
EXPECTED=(
|
||||
"header_info \"$APP\""
|
||||
"variables"
|
||||
"color"
|
||||
"catch_errors"
|
||||
"function update_script() {"
|
||||
)
|
||||
[[ "$LINE" != "${EXPECTED[$((i-16))]}" ]] && echo "Line $i was $LINE | Should be: ${EXPECTED[$((i-16))]}" >> "$LOG_FILE"
|
||||
done
|
||||
cat "$LOG_FILE"
|
||||
elif [[ $FILE =~ ^install/.*-install\.sh$ ]]; then
|
||||
|
||||
FIRST_LINE=$(sed -n '1p' "$FILE")
|
||||
[[ "$FIRST_LINE" != "#!/usr/bin/env bash" ]] && echo "Line 1 was $FIRST_LINE | Should be: #!/usr/bin/env bash" >> "$LOG_FILE"
|
||||
|
||||
SECOND_LINE=$(sed -n '2p' "$FILE")
|
||||
[[ -n "$SECOND_LINE" ]] && echo "Line 2 should be empty" >> "$LOG_FILE"
|
||||
|
||||
THIRD_LINE=$(sed -n '3p' "$FILE")
|
||||
if ! [[ "$THIRD_LINE" =~ ^#\ Copyright\ \(c\)\ [0-9]{4}-[0-9]{4}\ community-scripts\ ORG$ || "$THIRD_LINE" =~ ^Copyright\ \(c\)\ [0-9]{4}-[0-9]{4}\ tteck$ ]]; then
|
||||
echo "Line 3 was $THIRD_LINE | Should be: # Copyright (c) 2021-2025 community-scripts ORG" >> "$LOG_FILE"
|
||||
fi
|
||||
|
||||
EXPECTED_AUTHOR="# Author:"
|
||||
EXPECTED_LICENSE="# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE"
|
||||
EXPECTED_SOURCE="# Source:"
|
||||
EXPECTED_EMPTY=""
|
||||
|
||||
for i in {4..7}; do
|
||||
LINE=$(sed -n "${i}p" "$FILE")
|
||||
|
||||
case $i in
|
||||
4)
|
||||
[[ $LINE == $EXPECTED_AUTHOR* ]] || printf "Line %d was: '%s' | Should start with: '%s'\n" "$i" "$LINE" "$EXPECTED_AUTHOR" >> $LOG_FILE
|
||||
;;
|
||||
5)
|
||||
[[ "$LINE" == "$EXPECTED_LICENSE" ]] || printf "Line %d was: '%s' | Should be: '%s'\n" "$i" "$LINE" "$EXPECTED_LICENSE" >> $LOG_FILE
|
||||
;;
|
||||
6)
|
||||
[[ $LINE == $EXPECTED_SOURCE* ]] || printf "Line %d was: '%s' | Should start with: '%s'\n" "$i" "$LINE" "$EXPECTED_SOURCE" >> $LOG_FILE
|
||||
;;
|
||||
7)
|
||||
[[ -z $LINE ]] || printf "Line %d was: '%s' | Should be empty\n" "$i" "$LINE" >> $LOG_FILE
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
[[ "$(sed -n '8p' "$FILE")" != 'source /dev/stdin <<< "$FUNCTIONS_FILE_PATH"' ]] && echo 'Line 8 should be: source /dev/stdin <<< "$FUNCTIONS_FILE_PATH"' >> "$LOG_FILE"
|
||||
|
||||
for i in {9..14}; do
|
||||
LINE=$(sed -n "${i}p" "$FILE")
|
||||
EXPECTED=(
|
||||
"color"
|
||||
"verb_ip6"
|
||||
"catch_errors"
|
||||
"setting_up_container"
|
||||
"network_check"
|
||||
"update_os"
|
||||
)
|
||||
[[ "$LINE" != "${EXPECTED[$((i-9))]}" ]] && echo "Line $i was $LINE | Should be: ${EXPECTED[$((i-9))]}" >> "$LOG_FILE"
|
||||
done
|
||||
|
||||
[[ -n "$(sed -n '15p' "$FILE")" ]] && echo "Line 15 should be empty" >> "$LOG_FILE"
|
||||
[[ "$(sed -n '16p' "$FILE")" != 'msg_info "Installing Dependencies"' ]] && echo 'Line 16 should be: msg_info "Installing Dependencies"' >> "$LOG_FILE"
|
||||
|
||||
LAST_3_LINES=$(tail -n 3 "$FILE")
|
||||
[[ "$LAST_3_LINES" != *"$STD apt-get -y autoremove"* ]] && echo 'Third to last line should be: $STD apt-get -y autoremove' >> "$LOG_FILE"
|
||||
[[ "$LAST_3_LINES" != *"$STD apt-get -y autoclean"* ]] && echo 'Second to last line should be: $STD apt-get -y clean' >> "$LOG_FILE"
|
||||
[[ "$LAST_3_LINES" != *'msg_ok "Cleaned"'* ]] && echo 'Last line should be: msg_ok "Cleaned"' >> "$LOG_FILE"
|
||||
cat "$LOG_FILE"
|
||||
fi
|
||||
|
||||
done
|
||||
|
||||
|
||||
- name: Post error comments
|
||||
run: |
|
||||
ERROR="false"
|
||||
for FILE in ${{ env.SCRIPT }}; do
|
||||
FILE_STRIPPED="${FILE##*/}"
|
||||
LOG_FILE="result_$FILE_STRIPPED.log"
|
||||
echo $LOG_FILE
|
||||
if [[ ! -f $LOG_FILE ]]; then
|
||||
continue
|
||||
fi
|
||||
ERROR_MSG=$(cat $LOG_FILE)
|
||||
|
||||
if [ -n "$ERROR_MSG" ]; then
|
||||
echo "Posting error message for $FILE"
|
||||
echo ${ERROR_MSG}
|
||||
gh pr comment ${{ github.event.pull_request.number }} \
|
||||
--repo ${{ github.repository }} \
|
||||
--body ":warning: The script _**$FILE**_ has the following formatting errors: <br> <div><strong>${ERROR_MSG}</strong></div>"
|
||||
|
||||
|
||||
ERROR="true"
|
||||
fi
|
||||
done
|
||||
echo "ERROR=$ERROR" >> $GITHUB_ENV
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Fail if error
|
||||
if: ${{ env.ERROR == 'true' }}
|
||||
run: exit 1
|
131
.github/workflows/live/update-json-date.yml
vendored
Normal file
@ -0,0 +1,131 @@
|
||||
name: Update JSON Date
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- 'json/**.json'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
update-app-files:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@v1
|
||||
with:
|
||||
app-id: ${{ vars.APP_ID }}
|
||||
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||
|
||||
- name: Generate dynamic branch name
|
||||
id: timestamp
|
||||
run: echo "BRANCH_NAME=pr-update-json-$(date +'%Y%m%d%H%M%S')" >> $GITHUB_ENV
|
||||
|
||||
- name: Set up GH_TOKEN
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
echo "GH_TOKEN=${GH_TOKEN}" >> $GITHUB_ENV
|
||||
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 2 # Ensure we have the last two commits
|
||||
|
||||
- name: Get Previous Commit
|
||||
id: prev_commit
|
||||
run: |
|
||||
PREV_COMMIT=$(git rev-parse HEAD^)
|
||||
echo "Previous commit: $PREV_COMMIT"
|
||||
echo "prev_commit=$PREV_COMMIT" >> $GITHUB_ENV
|
||||
|
||||
- name: Get Newly Added JSON Files
|
||||
id: new_json_files
|
||||
run: |
|
||||
git diff --name-only --diff-filter=A ${{ env.prev_commit }} HEAD | grep '^json/.*\.json$' > new_files.txt || true
|
||||
echo "New files detected:"
|
||||
cat new_files.txt || echo "No new files."
|
||||
|
||||
- name: Disable file mode changes
|
||||
run: git config core.fileMode false
|
||||
|
||||
- name: Set up Git
|
||||
run: |
|
||||
git config --global user.name "GitHub Actions"
|
||||
git config --global user.email "github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
- name: Change JSON Date
|
||||
id: change-json-date
|
||||
run: |
|
||||
current_date=$(date +"%Y-%m-%d")
|
||||
while IFS= read -r file; do
|
||||
# Skip empty lines
|
||||
[[ -z "$file" ]] && continue
|
||||
|
||||
if [[ -f "$file" ]]; then
|
||||
echo "Processing $file..."
|
||||
current_json_date=$(jq -r '.date_created // empty' "$file")
|
||||
if [[ -z "$current_json_date" || "$current_json_date" != "$current_date" ]]; then
|
||||
echo "Updating $file with date $current_date"
|
||||
jq --arg date "$current_date" '.date_created = $date' "$file" > temp.json && mv temp.json "$file"
|
||||
else
|
||||
echo "Date in $file is already up to date."
|
||||
fi
|
||||
else
|
||||
echo "Warning: File $file not found!"
|
||||
fi
|
||||
done < new_files.txt
|
||||
rm new_files.txt
|
||||
|
||||
- name: Check if there are any changes
|
||||
run: |
|
||||
echo "Checking for changes..."
|
||||
git add -A # Include untracked files
|
||||
git status
|
||||
if git diff --cached --quiet; then
|
||||
echo "No changes detected."
|
||||
echo "changed=false" >> "$GITHUB_ENV"
|
||||
else
|
||||
echo "Changes detected:"
|
||||
git diff --stat --cached
|
||||
echo "changed=true" >> "$GITHUB_ENV"
|
||||
fi
|
||||
|
||||
# Step 7: Commit and create PR if changes exist
|
||||
- name: Commit and create PR if changes exist
|
||||
if: env.changed == 'true'
|
||||
run: |
|
||||
|
||||
|
||||
git commit -m "Update date in json"
|
||||
git checkout -b ${{ env.BRANCH_NAME }}
|
||||
git push origin ${{ env.BRANCH_NAME }}
|
||||
|
||||
gh pr create --title "[core] update date in json" \
|
||||
--body "This PR is auto-generated by a GitHub Action to update the date in json." \
|
||||
--head ${{ env.BRANCH_NAME }} \
|
||||
--base main \
|
||||
--label "automated pr"
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
|
||||
|
||||
- name: Approve pull request
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
PR_NUMBER=$(gh pr list --head "${{ env.BRANCH_NAME }}" --json number --jq '.[].number')
|
||||
if [ -n "$PR_NUMBER" ]; then
|
||||
gh pr review $PR_NUMBER --approve
|
||||
fi
|
||||
|
||||
- name: No changes detected
|
||||
if: env.changed == 'false'
|
||||
run: echo "No changes to commit. Workflow completed successfully."
|
161
.github/workflows/live/validate-filenames.yml
vendored
Normal file
@ -0,0 +1,161 @@
|
||||
name: Validate filenames
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- "ct/*.sh"
|
||||
- "install/*.sh"
|
||||
- "json/*.json"
|
||||
|
||||
jobs:
|
||||
check-files:
|
||||
name: Check changed files
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Get pull request information
|
||||
if: github.event_name == 'pull_request_target'
|
||||
uses: actions/github-script@v7
|
||||
id: pr
|
||||
with:
|
||||
script: |
|
||||
const { data: pullRequest } = await github.rest.pulls.get({
|
||||
...context.repo,
|
||||
pull_number: context.payload.pull_request.number,
|
||||
});
|
||||
return pullRequest;
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # Ensure the full history is fetched for accurate diffing
|
||||
ref: ${{ github.event_name == 'pull_request_target' && fromJSON(steps.pr.outputs.result).merge_commit_sha || '' }}
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
run: |
|
||||
if ${{ github.event_name == 'pull_request_target' }}; then
|
||||
echo "files=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ steps.pr.outputs.result && fromJSON(steps.pr.outputs.result).merge_commit_sha }} | xargs)" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | xargs)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: "Validate filenames in ct and install directory"
|
||||
if: always() && steps.changed-files.outputs.files != ''
|
||||
id: check-scripts
|
||||
run: |
|
||||
CHANGED_FILES=$(printf "%s\n" ${{ steps.changed-files.outputs.files }} | { grep -E '^(ct|install)/.*\.sh$' || true; })
|
||||
|
||||
NON_COMPLIANT_FILES=""
|
||||
for FILE in $CHANGED_FILES; do
|
||||
# Explicitly skip the file "ct/create_lxc.sh"
|
||||
if [[ "$FILE" == "ct/create_lxc.sh" ]]; then
|
||||
continue
|
||||
fi
|
||||
BASENAME=$(echo "$(basename "${FILE%.*}")")
|
||||
if [[ ! "$BASENAME" =~ ^[a-z0-9-]+$ ]]; then
|
||||
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$NON_COMPLIANT_FILES" ]; then
|
||||
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
|
||||
echo "Non-compliant filenames found, change to lowercase:"
|
||||
for FILE in $NON_COMPLIANT_FILES; do
|
||||
echo "$FILE"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: "Validate filenames in json directory."
|
||||
if: always() && steps.changed-files.outputs.files != ''
|
||||
id: check-json
|
||||
run: |
|
||||
CHANGED_FILES=$(printf "%s\n" ${{ steps.changed-files.outputs.files }} | { grep -E '^json/.*\.json$' || true; })
|
||||
|
||||
NON_COMPLIANT_FILES=""
|
||||
for FILE in $CHANGED_FILES; do
|
||||
BASENAME=$(echo "$(basename "${FILE%.*}")")
|
||||
if [[ ! "$BASENAME" =~ ^[a-z0-9-]+$ ]]; then
|
||||
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$NON_COMPLIANT_FILES" ]; then
|
||||
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
|
||||
echo "Non-compliant filenames found, change to lowercase:"
|
||||
for FILE in $NON_COMPLIANT_FILES; do
|
||||
echo "$FILE"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Post results and comment
|
||||
if: always() && steps.check-scripts.outputs.files != '' && steps.check-json.outputs.files != '' && github.event_name == 'pull_request_target'
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const result = "${{ job.status }}" === "success" ? "success" : "failure";
|
||||
const nonCompliantFiles = {
|
||||
script: "${{ steps.check-scripts.outputs.files }}",
|
||||
JSON: "${{ steps.check-json.outputs.files }}",
|
||||
};
|
||||
|
||||
const issueNumber = context.payload.pull_request
|
||||
? context.payload.pull_request.number
|
||||
: null;
|
||||
const commentIdentifier = "validate-filenames";
|
||||
let newCommentBody = `<!-- ${commentIdentifier}-start -->\n### Filename validation\n\n`;
|
||||
|
||||
if (result === "failure") {
|
||||
newCommentBody += ":x: We found issues in the following changed files:\n\n";
|
||||
for (const [check, files] of Object.entries(nonCompliantFiles)) {
|
||||
if (files) {
|
||||
newCommentBody += `**${check.charAt(0).toUpperCase() + check.slice(1)} filename invalid:**\n${files
|
||||
.trim()
|
||||
.split(" ")
|
||||
.map((file) => `- ${file}`)
|
||||
.join("\n")}\n\n`;
|
||||
}
|
||||
}
|
||||
newCommentBody +=
|
||||
"Please change the filenames to lowercase and use only alphanumeric characters and dashes.\n";
|
||||
} else {
|
||||
newCommentBody += `:rocket: All files passed filename validation!\n`;
|
||||
}
|
||||
|
||||
newCommentBody += `\n\n<!-- ${commentIdentifier}-end -->`;
|
||||
|
||||
if (issueNumber) {
|
||||
const { data: comments } = await github.rest.issues.listComments({
|
||||
...context.repo,
|
||||
issue_number: issueNumber,
|
||||
});
|
||||
|
||||
const existingComment = comments.find(
|
||||
(comment) => comment.user.login === "github-actions[bot]",
|
||||
);
|
||||
|
||||
if (existingComment) {
|
||||
if (existingComment.body.includes(commentIdentifier)) {
|
||||
const re = new RegExp(String.raw`<!-- ${commentIdentifier}-start -->[\s\S]*?<!-- ${commentIdentifier}-end -->`, "");
|
||||
newCommentBody = existingComment.body.replace(re, newCommentBody);
|
||||
} else {
|
||||
newCommentBody = existingComment.body + '\n\n---\n\n' + newCommentBody;
|
||||
}
|
||||
|
||||
await github.rest.issues.updateComment({
|
||||
...context.repo,
|
||||
comment_id: existingComment.id,
|
||||
body: newCommentBody,
|
||||
});
|
||||
} else {
|
||||
await github.rest.issues.createComment({
|
||||
...context.repo,
|
||||
issue_number: issueNumber,
|
||||
body: newCommentBody,
|
||||
});
|
||||
}
|
||||
}
|
17
.github/workflows/move-to-main-repo.yaml
vendored
@ -18,7 +18,7 @@ jobs:
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: app-token
|
||||
uses: actions/create-github-app-token@v2
|
||||
uses: actions/create-github-app-token@v1
|
||||
with:
|
||||
app-id: ${{ vars.PUSH_MAIN_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_MAIN_APP_SECRET }}
|
||||
@ -133,6 +133,10 @@ jobs:
|
||||
echo "install file already exists in ProxmoxVE"
|
||||
exit 1
|
||||
fi
|
||||
if [[ -f "frontend/public/json/${script_name}.json" ]]; then
|
||||
echo "json file already exists in ProxmoxVE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
git checkout -b "$branch_name"
|
||||
|
||||
@ -153,13 +157,10 @@ jobs:
|
||||
cp ../frontend/public/json/$json_file frontend/public/json/. || true
|
||||
fi
|
||||
|
||||
echo $script_name
|
||||
sed -i "s|https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func|https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func|" ct/$script_name.sh
|
||||
sed -i "s|https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func|https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func|" ct/$script_name.sh
|
||||
sed -i "s|# License: MIT \| https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE|# License: MIT \| https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE|" ct/$script_name.sh
|
||||
sed -i "s|# License: MIT \| https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE|# License: MIT \| https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE|" install/$script_name-install.sh
|
||||
|
||||
git add . > /dev/null 2>&1
|
||||
sed -i 's|source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)|source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)|' ct/$script_name.sh
|
||||
sed -i 's|# License: MIT \| https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE|# License: MIT \| https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE|' ct/$script_name.sh
|
||||
sed -i 's|# License: MIT \| https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE|# License: MIT \| https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE|' install/$script_name-install.sh
|
||||
git add .
|
||||
if git diff --cached --exit-code; then
|
||||
echo "No changes detected, skipping commit."
|
||||
exit 0
|
||||
|
40
.github/workflows/push-to-gitea.yml
vendored
@ -11,29 +11,17 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout source repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Set Git identity for actions
|
||||
run: |
|
||||
git config --global user.name "Push From Github"
|
||||
git config --global user.email "actions@github.com"
|
||||
- name: Add Gitea remote
|
||||
run: git remote add gitea https://$GITEA_USER:$GITEA_TOKEN@git.community-scripts.org/community-scripts/ProxmoxVED.git
|
||||
env:
|
||||
GITEA_USER: ${{ secrets.GITEA_USERNAME }}
|
||||
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
|
||||
- name: Pull Gitea changes
|
||||
run: |
|
||||
git fetch gitea
|
||||
git merge --strategy=ours gitea/main
|
||||
env:
|
||||
GITEA_USER: ${{ secrets.GITEA_USERNAME }}
|
||||
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
|
||||
|
||||
- name: Push to Gitea
|
||||
run: git push gitea main --force
|
||||
env:
|
||||
GITEA_USER: ${{ secrets.GITEA_USERNAME }}
|
||||
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
|
||||
- name: Checkout source repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Push to Gitea
|
||||
run: |
|
||||
git config --global user.name "Push From Github"
|
||||
git config --global user.email "actions@github.com"
|
||||
git remote add gitea https://$GITEA_USER:$GITEA_TOKEN@git.community-scripts.org/community-scripts/ProxmoxVED.git
|
||||
git push gitea --mirror
|
||||
env:
|
||||
GITEA_USER: ${{ secrets.GITEA_USERNAME }}
|
||||
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
|
||||
|
@ -1,50 +1,34 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Function for generating Figlet headers
|
||||
generate_headers() {
|
||||
local base_dir=$1
|
||||
local target_subdir=$2
|
||||
local search_pattern=$3
|
||||
# Base directory for headers
|
||||
headers_dir="./ct/headers"
|
||||
|
||||
local headers_dir="${base_dir}/headers"
|
||||
mkdir -p "$headers_dir"
|
||||
rm -f "$headers_dir"/*
|
||||
# Ensure the headers directory exists and clear it
|
||||
mkdir -p "$headers_dir"
|
||||
rm -f "$headers_dir"/*
|
||||
|
||||
# Recursive or non-recursive search
|
||||
if [[ "$search_pattern" == "**" ]]; then
|
||||
shopt -s globstar nullglob
|
||||
file_list=("${base_dir}"/**/*.sh)
|
||||
shopt -u globstar
|
||||
else
|
||||
file_list=("${base_dir}"/*.sh)
|
||||
fi
|
||||
# Find all .sh files in ./ct directory, sorted alphabetically
|
||||
find ./ct -type f -name "*.sh" | sort | while read -r script; do
|
||||
# Extract the APP name from the APP line
|
||||
app_name=$(grep -oP '^APP="\K[^"]+' "$script" 2>/dev/null)
|
||||
|
||||
for script in "${file_list[@]}"; do
|
||||
[[ -f "$script" ]] || continue
|
||||
if [[ -n "$app_name" ]]; then
|
||||
# Define the output file name in the headers directory
|
||||
output_file="${headers_dir}/$(basename "${script%.*}")"
|
||||
|
||||
app_name=$(grep -oP '^APP="\K[^"]+' "$script" 2>/dev/null)
|
||||
if [[ -n "$app_name" ]]; then
|
||||
output_file="${headers_dir}/$(basename "${script%.*}")"
|
||||
figlet_output=$(figlet -w 500 -f slant "$app_name")
|
||||
if [[ -n "$figlet_output" ]]; then
|
||||
echo "$figlet_output" >"$output_file"
|
||||
echo "Generated: $output_file"
|
||||
else
|
||||
echo "Figlet failed for $app_name in $script"
|
||||
fi
|
||||
# Generate figlet output
|
||||
figlet_output=$(figlet -w 500 -f slant "$app_name")
|
||||
|
||||
# Check if figlet output is not empty
|
||||
if [[ -n "$figlet_output" ]]; then
|
||||
echo "$figlet_output" > "$output_file"
|
||||
echo "Generated: $output_file"
|
||||
else
|
||||
echo "No APP name found in $script, skipping."
|
||||
echo "Figlet failed for $app_name in $script"
|
||||
fi
|
||||
done
|
||||
}
|
||||
else
|
||||
echo "No APP name found in $script, skipping."
|
||||
fi
|
||||
done
|
||||
|
||||
# ct
|
||||
generate_headers "./ct" "headers" "*"
|
||||
|
||||
# tools (addon, pve, ...)
|
||||
generate_headers "./tools" "headers" "**"
|
||||
|
||||
# vm
|
||||
generate_headers "./vm" "headers" "*"
|
||||
|
||||
echo "Completed processing all sections."
|
||||
echo "Completed processing .sh files."
|
||||
|
77
CHANGELOG.md
@ -7,76 +7,6 @@
|
||||
|
||||
<h3 align="center">All notable changes to this project will be documented in this file.</h3>
|
||||
|
||||
## 2025-05-14
|
||||
|
||||
### 🆕 New Scripts
|
||||
|
||||
- odoo ([#4477](https://github.com/community-scripts/ProxmoxVE/pull/4477))
|
||||
- alpine-transmission ([#4277](https://github.com/community-scripts/ProxmoxVE/pull/4277))
|
||||
- alpine-tinyauth ([#4264](https://github.com/community-scripts/ProxmoxVE/pull/4264))
|
||||
- alpine-rclone ([#4265](https://github.com/community-scripts/ProxmoxVE/pull/4265))
|
||||
- streamlink-webui ([#4262](https://github.com/community-scripts/ProxmoxVE/pull/4262))
|
||||
- Fumadocs ([#4263](https://github.com/community-scripts/ProxmoxVE/pull/4263))
|
||||
- asterisk ([#4468](https://github.com/community-scripts/ProxmoxVE/pull/4468))
|
||||
- gatus ([#4443](https://github.com/community-scripts/ProxmoxVE/pull/4443))
|
||||
- alpine-gatus ([#4442](https://github.com/community-scripts/ProxmoxVE/pull/4442))
|
||||
- Alpine-Traefik [@MickLesk](https://github.com/MickLesk) ([#4412](https://github.com/community-scripts/ProxmoxVE/pull/4412))
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
- fix: fetch_release_and_deploy function [@CrazyWolf13](https://github.com/CrazyWolf13) ([#4478](https://github.com/community-scripts/ProxmoxVE/pull/4478))
|
||||
- Website: re-add documenso & some little bugfixes [@MickLesk](https://github.com/MickLesk) ([#4456](https://github.com/community-scripts/ProxmoxVE/pull/4456))
|
||||
- update some improvements from dev (tools.func) [@MickLesk](https://github.com/MickLesk) ([#4430](https://github.com/community-scripts/ProxmoxVE/pull/4430))
|
||||
- Alpine: Use onliner for updates [@tremor021](https://github.com/tremor021) ([#4414](https://github.com/community-scripts/ProxmoxVE/pull/4414))
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
- Bugfix: Mikrotik & Pimox HAOS VM (NEXTID) [@MickLesk](https://github.com/MickLesk) ([#4313](https://github.com/community-scripts/ProxmoxVE/pull/4313))
|
||||
- Bookstack: fix copy of themes/uploads/storage [@MickLesk](https://github.com/MickLesk) ([#4457](https://github.com/community-scripts/ProxmoxVE/pull/4457))
|
||||
- homarr: fetch versions dynamically from source repo [@CrazyWolf13](https://github.com/CrazyWolf13) ([#4409](https://github.com/community-scripts/ProxmoxVE/pull/4409))
|
||||
- Authentik: change install to UV & increase resources to 10GB RAM [@MickLesk](https://github.com/MickLesk) ([#4364](https://github.com/community-scripts/ProxmoxVE/pull/4364))
|
||||
- Jellyseerr: better handling of node and pnpm [@MickLesk](https://github.com/MickLesk) ([#4365](https://github.com/community-scripts/ProxmoxVE/pull/4365))
|
||||
- Alpine-Rclone: Fix location of passwords file [@tremor021](https://github.com/tremor021) ([#4465](https://github.com/community-scripts/ProxmoxVE/pull/4465))
|
||||
- Zammad: Enable ElasticSearch service [@tremor021](https://github.com/tremor021) ([#4391](https://github.com/community-scripts/ProxmoxVE/pull/4391))
|
||||
- openhab: use zulu17-jdk [@moodyblue](https://github.com/moodyblue) ([#4438](https://github.com/community-scripts/ProxmoxVE/pull/4438))
|
||||
- (fix) Documenso: fix build failures [@vhsdream](https://github.com/vhsdream) ([#4382](https://github.com/community-scripts/ProxmoxVE/pull/4382))
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- Feature: LXC-Delete (pve helper): add "all items" [@MickLesk](https://github.com/MickLesk) ([#4296](https://github.com/community-scripts/ProxmoxVE/pull/4296))
|
||||
- Feature: get correct next VMID [@MickLesk](https://github.com/MickLesk) ([#4292](https://github.com/community-scripts/ProxmoxVE/pull/4292))
|
||||
- HomeAssistant-Core: update script for 2025.5+ [@MickLesk](https://github.com/MickLesk) ([#4363](https://github.com/community-scripts/ProxmoxVE/pull/4363))
|
||||
- Feature: autologin for Alpine [@MickLesk](https://github.com/MickLesk) ([#4344](https://github.com/community-scripts/ProxmoxVE/pull/4344))
|
||||
- monitor-all: improvements - tag based filtering [@grizmin](https://github.com/grizmin) ([#4437](https://github.com/community-scripts/ProxmoxVE/pull/4437))
|
||||
- Make apt-cacher-ng a client of its own server [@pgcudahy](https://github.com/pgcudahy) ([#4092](https://github.com/community-scripts/ProxmoxVE/pull/4092))
|
||||
|
||||
- #### 🔧 Refactor
|
||||
|
||||
- openhab. correct some typos [@moodyblue](https://github.com/moodyblue) ([#4448](https://github.com/community-scripts/ProxmoxVE/pull/4448))
|
||||
|
||||
### 🧰 Maintenance
|
||||
|
||||
- #### 💾 Core
|
||||
|
||||
- fix: improve bridge detection in all network interface configuration files [@filippolauria](https://github.com/filippolauria) ([#4413](https://github.com/community-scripts/ProxmoxVE/pull/4413))
|
||||
- Config file Function in build.func [@michelroegl-brunner](https://github.com/michelroegl-brunner) ([#4411](https://github.com/community-scripts/ProxmoxVE/pull/4411))
|
||||
- fix: detect all bridge types, not just vmbr prefix [@filippolauria](https://github.com/filippolauria) ([#4351](https://github.com/community-scripts/ProxmoxVE/pull/4351))
|
||||
|
||||
- #### 📂 Github
|
||||
|
||||
- Add Github app for auto PR merge [@michelroegl-brunner](https://github.com/michelroegl-brunner) ([#4461](https://github.com/community-scripts/ProxmoxVE/pull/4461))
|
||||
|
||||
### 🌐 Website
|
||||
|
||||
- FAQ: Explanation "updatable" [@tremor021](https://github.com/tremor021) ([#4300](https://github.com/community-scripts/ProxmoxVE/pull/4300))
|
||||
|
||||
- #### 📝 Script Information
|
||||
|
||||
- Jellyfin Media Server: Update configuration path [@tremor021](https://github.com/tremor021) ([#4434](https://github.com/community-scripts/ProxmoxVE/pull/4434))
|
||||
- Pingvin Share: Added explanation on how to add/edit environment variables [@tremor021](https://github.com/tremor021) ([#4432](https://github.com/community-scripts/ProxmoxVE/pull/4432))
|
||||
- pingvin.json: fix typo [@warmbo](https://github.com/warmbo) ([#4426](https://github.com/community-scripts/ProxmoxVE/pull/4426))
|
||||
- Navidrome - Fix config path (use /etc/ instead of /var/lib) [@quake1508](https://github.com/quake1508) ([#4406](https://github.com/community-scripts/ProxmoxVE/pull/4406))
|
||||
|
||||
## 2025-03-24
|
||||
|
||||
### 🆕 New Scripts
|
||||
@ -85,7 +15,7 @@
|
||||
- wazuh [@omiinaya](https://github.com/omiinaya) ([#3381](https://github.com/community-scripts/ProxmoxVE/pull/3381))
|
||||
- yt-dlp-webui [@CrazyWolf13](https://github.com/CrazyWolf13) ([#3364](https://github.com/community-scripts/ProxmoxVE/pull/3364))
|
||||
- Extension/New Script: Redis Alpine Installation [@MickLesk](https://github.com/MickLesk) ([#3367](https://github.com/community-scripts/ProxmoxVE/pull/3367))
|
||||
- Fluid Calendar [@vhsdream](https://github.com/vhsdream) ([#2869](https://github.com/community-scripts/ProxmoxVE/pull/2869))
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
@ -105,7 +35,7 @@
|
||||
- GoMFT: Fix build dependencies [@tremor021](https://github.com/tremor021) ([#3313](https://github.com/community-scripts/ProxmoxVE/pull/3313))
|
||||
- GoMFT: Don't rely on binaries from github [@tremor021](https://github.com/tremor021) ([#3303](https://github.com/community-scripts/ProxmoxVE/pull/3303))
|
||||
- Wikijs: Remove Dev Message & Performance-Boost [@bvdberg01](https://github.com/bvdberg01) ([#3232](https://github.com/community-scripts/ProxmoxVE/pull/3232))
|
||||
- Update omada download url [@bvdberg01](https://github.com/bvdberg01) ([#3245](https://github.com/community-scripts/ProxmoxVE/pull/3245))
|
||||
- TriliumNotes: Fix release handling [@tremor021](https://github.com/tremor021) ([#3160](https://github.com/community-scripts/ProxmoxVE/pull/3160))
|
||||
|
||||
- #### ✨ New Features
|
||||
@ -126,6 +56,7 @@
|
||||
|
||||
- #### ✨ New Features
|
||||
|
||||
- [core] add gitignore to prevent big pulls [@MickLesk](https://github.com/MickLesk) ([#3278](https://github.com/community-scripts/ProxmoxVE/pull/3278))
|
||||
- [core] install core deps (debian / ubuntu) [@MickLesk](https://github.com/MickLesk) ([#3366](https://github.com/community-scripts/ProxmoxVE/pull/3366))
|
||||
|
||||
- #### 💾 Core
|
||||
@ -141,7 +72,7 @@
|
||||
### 🌐 Website
|
||||
|
||||
- Update siteConfig.tsx to use new analytics code [@BramSuurdje](https://github.com/BramSuurdje) ([#3389](https://github.com/community-scripts/ProxmoxVE/pull/3389))
|
||||
- Bump next from 15.1.3 to 15.2.3 in /frontend [@dependabot[bot]](https://github.com/dependabot[bot]) ([#3316](https://github.com/community-scripts/ProxmoxVE/pull/3316))
|
||||
|
||||
- #### 🐞 Bug Fixes
|
||||
|
||||
|
88
ct/alpine-bitmagnet.sh
Normal file
@ -0,0 +1,88 @@
#!/usr/bin/env bash
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: Slaviša Arežina (tremor021)
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/bitmagnet-io/bitmagnet

APP="Alpine-bitmagnet"
var_tags="${var_tags:-alpine;torrent}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-256}"
var_disk="${var_disk:-3}"
var_os="${var_os:-alpine}"
var_version="${var_version:-3.21}"
var_unprivileged="${var_unprivileged:-1}"

header_info "$APP"
variables
color
catch_errors

function update_script() {
  header_info

  if [[ ! -d /opt/bitmagnet ]]; then
    msg_error "No ${APP} Installation Found!"
    exit 1
  fi
  RELEASE=$(curl -s https://api.github.com/repos/bitmagnet-io/bitmagnet/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
  if [ "${RELEASE}" != "$(cat /opt/bitmagnet_version.txt)" ] || [ ! -f /opt/bitmagnet_version.txt ]; then
    msg_info "Backing up database"
    rm -f /tmp/backup.sql
    $STD sudo -u postgres pg_dump \
      --column-inserts \
      --data-only \
      --on-conflict-do-nothing \
      --rows-per-insert=1000 \
      --table=metadata_sources \
      --table=content \
      --table=content_attributes \
      --table=content_collections \
      --table=content_collections_content \
      --table=torrent_sources \
      --table=torrents \
      --table=torrent_files \
      --table=torrent_hints \
      --table=torrent_contents \
      --table=torrent_tags \
      --table=torrents_torrent_sources \
      --table=key_values \
      bitmagnet \
      >/tmp/backup.sql
    mv /tmp/backup.sql /opt/
    msg_ok "Database backed up"

    msg_info "Updating ${APP} from $(cat /opt/bitmagnet_version.txt) to ${RELEASE}"
    $STD apk -U upgrade
    $STD service bitmagnet stop
    [ -f /opt/bitmagnet/.env ] && cp /opt/bitmagnet/.env /opt/
    [ -f /opt/bitmagnet/config.yml ] && cp /opt/bitmagnet/config.yml /opt/
    rm -rf /opt/bitmagnet/*
    temp_file=$(mktemp)
    curl -fsSL "https://github.com/bitmagnet-io/bitmagnet/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
    tar zxf "$temp_file" --strip-components=1 -C /opt/bitmagnet
    cd /opt/bitmagnet
    $STD go build
    chmod +x bitmagnet
    [ -f "/opt/.env" ] && cp "/opt/.env" /opt/bitmagnet/
    [ -f "/opt/config.yml" ] && cp "/opt/config.yml" /opt/bitmagnet/
    rm -f "$temp_file"
    echo "${RELEASE}" >/opt/bitmagnet_version.txt
    $STD service bitmagnet start
    msg_ok "Updated Successfully"
  else
    msg_ok "No update required. ${APP} is already at ${RELEASE}"
  fi

  exit 0
}

start
build_container
description

msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following IP:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3333${CL}"
46
ct/alpine-syncthing.sh
Normal file
@ -0,0 +1,46 @@
#!/usr/bin/env bash
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://syncthing.net/

APP="Alpine-Syncthing"
var_tags="${var_tags:-alpine;networking}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-256}"
var_disk="${var_disk:-1}"
var_os="${var_os:-alpine}"
var_version="${var_version:-3.21}"
var_unprivileged="${var_unprivileged:-1}"

header_info "$APP"
variables
color
catch_errors

function update_script() {
  msg_info "Updating Alpine Packages"
  $STD apk update
  $STD apk upgrade
  msg_ok "Updated Alpine Packages"

  msg_info "Updating Syncthing"
  $STD apk upgrade syncthing
  msg_ok "Updated Syncthing"

  msg_info "Restarting Syncthing"
  $STD rc-service syncthing restart
  msg_ok "Restarted Syncthing"

  exit 0
}

start
build_container
description

msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8384${CL}"
28
ct/alpine.sh
@ -1,5 +1,5 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 tteck
|
||||
# Author: tteck (tteckster)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
|
||||
@ -9,9 +9,9 @@ APP="Alpine"
|
||||
var_tags="${var_tags:-os;alpine}"
|
||||
var_cpu="${var_cpu:-1}"
|
||||
var_ram="${var_ram:-512}"
|
||||
var_disk="${var_disk:-1}"
|
||||
var_disk="${var_disk:-0.1}"
|
||||
var_os="${var_os:-alpine}"
|
||||
var_version="${var_version:-3.22}"
|
||||
var_version="${var_version:-3.21}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
@ -20,18 +20,18 @@ color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
#check_container_storage
|
||||
#check_container_resources
|
||||
UPD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --radiolist --cancel-button Exit-Script "Spacebar = Select" 11 58 1 \
|
||||
"1" "Check for Alpine Updates" ON \
|
||||
3>&1 1>&2 2>&3)
|
||||
header_info
|
||||
#check_container_storage
|
||||
#check_container_resources
|
||||
UPD=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "SUPPORT" --radiolist --cancel-button Exit-Script "Spacebar = Select" 11 58 1 \
|
||||
"1" "Check for Alpine Updates" ON \
|
||||
3>&1 1>&2 2>&3)
|
||||
|
||||
header_info
|
||||
if [ "$UPD" == "1" ]; then
|
||||
apk update && apk upgrade
|
||||
exit
|
||||
fi
|
||||
header_info
|
||||
if [ "$UPD" == "1" ]; then
|
||||
apk update && apk upgrade
|
||||
exit
|
||||
fi
|
||||
}
|
||||
|
||||
start
|
||||
|
@ -1,5 +1,5 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: MickLesk (Canbiz)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
39
ct/asterisk.sh
Normal file
@ -0,0 +1,39 @@
#!/usr/bin/env bash
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
# Copyright (c) 2021-2025 community-scripts ORG
# Author: michelroegl-brunner
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://asterisk.org/

APP="Asterisk"
var_tags="${var_tags:-os}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-2048}"
var_disk="${var_disk:-4}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"

header_info "$APP"
variables
color
catch_errors

function update_script() {
  header_info
  check_container_storage
  check_container_resources
  if [[ ! -d /var ]]; then
    msg_error "No ${APP} Installation Found!"
    exit
  fi
  msg_info "No Update function provided for ${APP} LXC"
  exit
}

start
build_container
description

msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
91
ct/authentik.sh
Normal file
@ -0,0 +1,91 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: remz1337
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://goauthentik.io/
|
||||
|
||||
APP="Authentik"
|
||||
var_tags="${var_tags:-identity-provider}"
|
||||
var_disk="${var_disk:-12}"
|
||||
var_cpu="${var_cpu:-6}"
|
||||
var_ram="${var_ram:-10240}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -f /etc/systemd/system/authentik-server.service ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
RELEASE=$(curl -fsSL https://api.github.com/repos/goauthentik/authentik/releases/latest | grep "tarball_url" | awk '{print substr($2, 2, length($2)-3)}')
|
||||
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
|
||||
NODE_VERSION="22"
|
||||
PG_VERSION="16"
|
||||
setup_uv
|
||||
install_postgresql
|
||||
install_node_and_modules
|
||||
install_go
|
||||
|
||||
msg_info "Stopping ${APP}"
|
||||
systemctl stop authentik-server
|
||||
systemctl stop authentik-worker
|
||||
msg_ok "Stopped ${APP}"
|
||||
|
||||
msg_info "Building ${APP} website"
|
||||
mkdir -p /opt/authentik
|
||||
curl -fsSL "${RELEASE}" -o "authentik.tar.gz"
|
||||
tar -xzf authentik.tar.gz -C /opt/authentik --strip-components 1 --overwrite
|
||||
rm -rf authentik.tar.gz
|
||||
cd /opt/authentik/website
|
||||
$STD npm install
|
||||
$STD npm run build-bundled
|
||||
cd /opt/authentik/web
|
||||
$STD npm install
|
||||
$STD npm run build
|
||||
msg_ok "Built ${APP} website"
|
||||
|
||||
msg_info "Building ${APP} server"
|
||||
cd /opt/authentik
|
||||
go mod download
|
||||
go build -o /go/authentik ./cmd/server
|
||||
go build -o /opt/authentik/authentik-server /opt/authentik/cmd/server/
|
||||
msg_ok "Built ${APP} server"
|
||||
|
||||
msg_info "Building Authentik"
|
||||
cd /opt/authentik
|
||||
$STD uv sync --frozen --no-install-project --no-dev
|
||||
uv run python -m lifecycle.migrate
|
||||
ln -s /opt/authentik/.venv/bin/gunicorn /usr/local/bin/gunicorn
|
||||
ln -s /opt/authentik/.venv/bin/celery /usr/local/bin/celery
|
||||
msg_ok "Authentik built"
|
||||
|
||||
echo "${RELEASE}" >/opt/${APP}_version.txt
|
||||
msg_ok "Updated ${APP} to v${RELEASE}"
|
||||
|
||||
msg_info "Starting ${APP}"
|
||||
systemctl start authentik-server
|
||||
systemctl start authentik-worker
|
||||
msg_ok "Started ${APP}"
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at v${RELEASE}"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:9000/if/flow/initial-setup/${CL}"
|
@ -1,84 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: vhsdream
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/gelbphoenix/autocaliweb
|
||||
|
||||
APP="Autocaliweb"
|
||||
var_tags="${var_tags:-ebooks}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-2048}"
|
||||
var_disk="${var_disk:-6}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -d /opt/autocaliweb ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
setup_uv
|
||||
|
||||
RELEASE=$(curl -fsSL https://api.github.com/repos/gelbphoenix/autocaliweb/releases/latest | jq '.tag_name' | sed 's/^"v//;s/"$//')
|
||||
if [[ "${RELEASE}" != "$(cat ~/.autocaliweb 2>/dev/null)" ]] || [[ ! -f ~/.autocaliweb ]]; then
|
||||
msg_info "Stopping Services"
|
||||
systemctl stop autocaliweb metadata-change-detector acw-ingest-service acw-auto-zipper
|
||||
msg_ok "Stopped Services"
|
||||
|
||||
INSTALL_DIR="/opt/autocaliweb"
|
||||
export VIRTUAL_ENV="${INSTALL_DIR}/venv"
|
||||
$STD tar -cf ~/autocaliweb_bkp.tar "$INSTALL_DIR"/{metadata_change_logs,dirs.json,.env,scripts/ingest_watcher.sh,scripts/auto_zipper_wrapper.sh,scripts/metadata_change_detector_wrapper.sh}
|
||||
fetch_and_deploy_gh_release "autocaliweb" "gelbphoenix/autocaliweb" "tarball" "latest" "/opt/autocaliweb"
|
||||
msg_info "Updating ${APP}"
|
||||
cd "$INSTALL_DIR"
|
||||
if [[ ! -d "$VIRTUAL_ENV" ]]; then
|
||||
$STD uv venv "$VIRTUAL_ENV"
|
||||
fi
|
||||
$STD uv sync --all-extras --active
|
||||
cd "$INSTALL_DIR"/koreader/plugins
|
||||
PLUGIN_DIGEST="$(find acwsync.koplugin -type f -name "*.lua" -o -name "*.json" | sort | xargs sha256sum | sha256sum | cut -d' ' -f1)"
|
||||
echo "Plugin files digest: $PLUGIN_DIGEST" >acwsync.koplugin/${PLUGIN_DIGEST}.digest
|
||||
echo "Build date: $(date)" >>acwsync.koplugin/${PLUGIN_DIGEST}.digest
|
||||
echo "Files included:" >>acwsync.koplugin/${PLUGIN_DIGEST}.digest
|
||||
$STD zip -r koplugin.zip acwsync.koplugin/
|
||||
cp -r koplugin.zip "$INSTALL_DIR"/cps/static
|
||||
mkdir -p "$INSTALL_DIR"/metadata_temp
|
||||
$STD tar -xf ~/autocaliweb_bkp.tar --directory /
|
||||
KEPUB_VERSION="$(/usr/bin/kepubify --version)"
|
||||
CALIBRE_RELEASE="$(curl -s https://api.github.com/repos/kovidgoyal/calibre/releases/latest | grep -o '"tag_name": "[^"]*' | cut -d'"' -f4)"
|
||||
echo "${KEPUB_VERSION#v}" >"$INSTALL_DIR"/KEPUBIFY_RELEASE
|
||||
echo "${CALIBRE_RELEASE#v}" >/"$INSTALL_DIR"/CALIBRE_RELEASE
|
||||
sed 's/^/v/' ~/.autocaliweb >"$INSTALL_DIR"/ACW_RELEASE
|
||||
chown -R acw:acw "$INSTALL_DIR"
|
||||
msg_ok "Updated $APP"
|
||||
|
||||
msg_info "Starting Services"
|
||||
systemctl start autocaliweb metadata-change-detector acw-ingest-service acw-auto-zipper
|
||||
msg_ok "Started Services"
|
||||
|
||||
msg_ok "Updated Successfully"
|
||||
else
|
||||
msg_ok "Already up to date"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8083${CL}"
|
@ -1,17 +1,18 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: MickLesk (Canbiz)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source:
|
||||
|
||||
APP="Pixelfed"
|
||||
var_tags="${var_tags:-pictures}"
|
||||
var_disk="${var_disk:-7}"
|
||||
APP="BabyBuddy"
|
||||
var_tags="${var_tags:-baby}"
|
||||
var_disk="${var_disk:-5}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-2048}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
@ -22,18 +23,16 @@ function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -d /opt/pixelfed ]]; then
|
||||
if [[ ! -d /opt/maxun ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
RELEASE=$(curl -fsSL https://api.github.com/repos/xxxx/xxxx/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
|
||||
RELEASE=$(curl -s https://api.github.com/repos/xxxxx/xxxxx/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
|
||||
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
|
||||
msg_info "Updating ${APP} to ${RELEASE}"
|
||||
cd /opt
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at ${RELEASE}"
|
||||
msg_info "Stopping Services"
|
||||
systemctl stop $APP
|
||||
msg_ok "Services Stopped"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
@ -41,5 +40,6 @@ build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${APP} Setup should be reachable by going to the following URL.
|
||||
${BL}http://${IP}:8000${CL} \n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"
|
136
ct/bar-assistant.sh
Normal file
@ -0,0 +1,136 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: bvdberg01
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/karlomikus/bar-assistant
|
||||
# Source: https://github.com/karlomikus/vue-salt-rim
|
||||
# Source: https://www.meilisearch.com/
|
||||
|
||||
APP="Bar-Assistant"
|
||||
var_tags="${var_tags:-inventory;drinks}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-2048}"
|
||||
var_disk="${var_disk:-4}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -d /opt/bar-assistant ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
RELEASE_MEILISEARCH=$(curl -s https://api.github.com/repos/meilisearch/meilisearch/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
|
||||
RELEASE_BARASSISTANT=$(curl -s https://api.github.com/repos/karlomikus/bar-assistant/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
|
||||
RELEASE_SALTRIM=$(curl -s https://api.github.com/repos/karlomikus/vue-salt-rim/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
|
||||
|
||||
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE_BARASSISTANT}" != "$(cat /opt/${APP}_version.txt)" ]]; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop nginx
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
msg_info "Updating ${APP} to v${RELEASE_BARASSISTANT}"
|
||||
cd /opt
|
||||
mv /opt/bar-assistant /opt/bar-assistant-backup
|
||||
curl -fsSL "https://github.com/karlomikus/bar-assistant/archive/refs/tags/v${RELEASE_BARASSISTANT}.zip" -o barassistant.zip
|
||||
unzip -q barassistant.zip
|
||||
mv /opt/bar-assistant-${RELEASE_BARASSISTANT}/ /opt/bar-assistant
|
||||
cp /opt/bar-assistant-backup/.env /opt/bar-assistant/.env
|
||||
cp /opt/bar-assistant-backup/storage/bar-assistant /opt/bar-assistant/storage/bar-assistant
|
||||
cd /opt/bar-assistant
|
||||
composer install
|
||||
php artisan migrate --force
|
||||
php artisan storage:link
|
||||
php artisan bar:setup-meilisearch
|
||||
php artisan scout:sync-index-settings
|
||||
php artisan config:cache
|
||||
php artisan route:cache
|
||||
php artisan event:cache
|
||||
echo "${RELEASE_BARASSISTANT}" >/opt/${APP}_version.txt
|
||||
msg_ok "Updated $APP to v${RELEASE_BARASSISTANT}"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start nginx
|
||||
msg_ok "Started Service"
|
||||
|
||||
msg_info "Cleaning up"
|
||||
rm -rf /opt/barassistant.zip
|
||||
rm -rf /opt/bar-assistant-backup
|
||||
msg_ok "Cleaned"
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at v${RELEASE_BARASSISTANT}"
|
||||
fi
|
||||
|
||||
if [[ ! -f /opt/vue-salt-rim_version.txt ]] || [[ "${RELEASE_SALTRIM}" != "$(cat /opt/vue-salt-rim_version.txt)" ]]; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop nginx
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
msg_info "Updating Salt Rim to v${RELEASE_SALTRIM}"
|
||||
cd /opt
|
||||
mv /opt/vue-salt-rim /opt/vue-salt-rim-backup
|
||||
curl -fsSL "https://github.com/karlomikus/vue-salt-rim/archive/refs/tags/v${RELEASE_SALTRIM}.zip" -o saltrim.zip
|
||||
unzip -q saltrim.zip
|
||||
mv /opt/vue-salt-rim-${RELEASE_SALTRIM}/ /opt/vue-salt-rim
|
||||
cp /opt/vue-salt-rim-backup/public/config.js /opt/vue-salt-rim/public/config.js
|
||||
cd /opt/vue-salt-rim
|
||||
npm run build
|
||||
echo "${RELEASE_SALTRIM}" >/opt/vue-salt-rim_version.txt
|
||||
msg_ok "Updated $APP to v${RELEASE_SALTRIM}"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start nginx
|
||||
msg_ok "Started Service"
|
||||
|
||||
msg_info "Cleaning up"
|
||||
rm -rf /opt/saltrim.zip
|
||||
rm -rf /opt/vue-salt-rim-backup
|
||||
msg_ok "Cleaned"
|
||||
else
|
||||
msg_ok "No update required. Salt Rim is already at v${RELEASE_SALTRIM}"
|
||||
fi
|
||||
|
||||
if [[ ! -f /opt/meilisearch_version.txt ]] || [[ "${RELEASE_MEILISEARCH}" != "$(cat /opt/meilisearch_version.txt)" ]]; then
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop meilisearch
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
msg_info "Updating Meilisearch to ${RELEASE_MEILISEARCH}"
|
||||
cd /opt
|
||||
RELEASE=$(curl -s https://api.github.com/repos/meilisearch/meilisearch/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
|
||||
curl -fsSL https://github.com/meilisearch/meilisearch/releases/latest/download/meilisearch.deb -o meilisearch.deb
|
||||
$STD dpkg -i meilisearch.deb
|
||||
echo "${RELEASE_MEILISEARCH}" >/opt/meilisearch_version.txt
|
||||
msg_ok "Updated Meilisearch to v${RELEASE_MEILISEARCH}"
|
||||
|
||||
msg_info "Starting Service"
|
||||
systemctl start meilisearch
|
||||
msg_ok "Started Service"
|
||||
|
||||
msg_info "Cleaning up"
|
||||
rm -rf "/opt/meilisearch.deb"
|
||||
msg_ok "Cleaned"
|
||||
msg_ok "Updated Meilisearch"
|
||||
else
|
||||
msg_ok "No update required. Meilisearch is already at ${RELEASE_MEILISEARCH}"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL}"
|
75
ct/bluecherry.sh
Normal file
@ -0,0 +1,75 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: Slaviša Arežina (tremor021)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/TwiN/gatus
|
||||
|
||||
APP="bluecherry"
|
||||
var_tags="${var_tags:-video;dvr}"
|
||||
var_cpu="${var_cpu:-4}"
|
||||
var_ram="${var_ram:-4096}"
|
||||
var_disk="${var_disk:-15}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /opt/gatus ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
RELEASE=$(curl -s https://api.github.com/repos/TwiN/gatus/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
|
||||
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
|
||||
msg_info "Updating $APP"
|
||||
|
||||
msg_info "Stopping $APP"
|
||||
systemctl stop gatus
|
||||
msg_ok "Stopped $APP"
|
||||
|
||||
msg_info "Updating $APP to v${RELEASE}"
|
||||
mv /opt/gatus/config/config.yaml /opt
|
||||
rm -rf /opt/gatus/*
|
||||
temp_file=$(mktemp)
|
||||
curl -fsSL "https://github.com/TwiN/gatus/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
|
||||
tar zxf "$temp_file" --strip-components=1 -C /opt/gatus
|
||||
cd /opt/gatus
|
||||
$STD go mod tidy
|
||||
CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o gatus .
|
||||
setcap CAP_NET_RAW+ep gatus
|
||||
mv /opt/config.yaml config
|
||||
echo "${RELEASE}" >/opt/${APP}_version.txt
|
||||
msg_ok "Updated $APP to v${RELEASE}"
|
||||
|
||||
msg_info "Starting $APP"
|
||||
systemctl start gatus
|
||||
msg_ok "Started $APP"
|
||||
|
||||
msg_info "Cleaning Up"
|
||||
rm -f "$temp_file"
|
||||
msg_ok "Cleanup Completed"
|
||||
|
||||
msg_ok "Update Successful"
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at v${RELEASE}"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"
|
76
ct/bookstack.sh
Normal file
@ -0,0 +1,76 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: MickLesk (Canbiz)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/BookStackApp/BookStack
|
||||
|
||||
APP="Bookstack"
|
||||
var_tags="${var_tags:-organizer}"
|
||||
var_cpu="${var_cpu:-1}"
|
||||
var_ram="${var_ram:-1024}"
|
||||
var_disk="${var_disk:-4}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -d /opt/bookstack ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
RELEASE=$(curl -fsSL https://api.github.com/repos/BookStackApp/BookStack/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
|
||||
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
|
||||
msg_info "Stopping Apache2"
|
||||
systemctl stop apache2
|
||||
msg_ok "Services Stopped"
|
||||
|
||||
msg_info "Updating ${APP} to v${RELEASE}"
|
||||
mv /opt/bookstack /opt/bookstack-backup
|
||||
fetch_and_deploy_gh_release BookstackApp/BookStack
|
||||
cp /opt/bookstack-backup/.env /opt/bookstack/.env
|
||||
cp -r /opt/bookstack-backup/public/uploads/* /opt/bookstack/public/uploads/ || true
|
||||
cp -r /opt/bookstack-backup/storage/uploads/* /opt/bookstack/storage/uploads/ || true
|
||||
cp -r /opt/bookstack-backup/themes/* /opt/bookstack/themes/ || true
|
||||
cd /opt/bookstack
|
||||
export COMPOSER_ALLOW_SUPERUSER=1
|
||||
$STD composer install --no-dev
|
||||
$STD php artisan migrate --force
|
||||
chown www-data:www-data -R /opt/bookstack /opt/bookstack/bootstrap/cache /opt/bookstack/public/uploads /opt/bookstack/storage
|
||||
chmod -R 755 /opt/bookstack /opt/bookstack/bootstrap/cache /opt/bookstack/public/uploads /opt/bookstack/storage
|
||||
chmod -R 775 /opt/bookstack/storage /opt/bookstack/bootstrap/cache /opt/bookstack/public/uploads
|
||||
chmod -R 640 /opt/bookstack/.env
|
||||
echo "${RELEASE}" >/opt/${APP}_version.txt
|
||||
msg_ok "Updated ${APP} to v${RELEASE}"
|
||||
|
||||
msg_info "Starting Apache2"
|
||||
systemctl start apache2
|
||||
msg_ok "Started Apache2"
|
||||
|
||||
msg_info "Cleaning Up"
|
||||
rm -rf /opt/bookstack-backup
|
||||
rm -rf "/opt/BookStack-${RELEASE}.zip"
|
||||
msg_ok "Cleaned"
|
||||
msg_ok "Updated Successfully"
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at v${RELEASE}"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL}"
|
@ -1,15 +1,15 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
|
||||
# Source: https://github.com/rybbit-io/rybbit
|
||||
# Author: edoardop13
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/favonia/cloudflare-ddns
|
||||
|
||||
APP="Rybbit"
|
||||
var_tags="${var_tags:-analytics}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-2048}"
|
||||
var_disk="${var_disk:-5}"
|
||||
APP="Cloudflare-DDNS"
|
||||
var_tags="${var_tags:-network}"
|
||||
var_cpu="${var_cpu:-1}"
|
||||
var_ram="${var_ram:-512}"
|
||||
var_disk="${var_disk:-3}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
@ -23,20 +23,15 @@ function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -d /var ]]; then
|
||||
if [[ ! -f /etc/systemd/system/cloudflare-ddns.service ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
msg_info "Updating $APP LXC"
|
||||
$STD apt-get update
|
||||
$STD apt-get -y upgrade
|
||||
msg_ok "Updated $APP LXC"
|
||||
msg_error "There is no update function for ${APP}."
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!"
|
||||
msg_custom "🚀" "${GN}" "${APP} setup has been successfully initialized!"
|
||||
msg_ok "Completed Successfully!\n"
|
248
ct/create_lxc.sh
Normal file
@ -0,0 +1,248 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright (c) 2021-2025 tteck
|
||||
# Author: tteck (tteckster)
|
||||
# Co-Author: MickLesk
|
||||
# License: MIT
|
||||
# https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
|
||||
# This sets verbose mode if the global variable is set to "yes"
|
||||
# if [ "$VERBOSE" == "yes" ]; then set -x; fi
|
||||
|
||||
if command -v curl >/dev/null 2>&1; then
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/core.func)
|
||||
load_functions
|
||||
#echo "(create-lxc.sh) Loaded core.func via curl"
|
||||
elif command -v wget >/dev/null 2>&1; then
|
||||
source <(wget -qO- https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/core.func)
|
||||
load_functions
|
||||
#echo "(create-lxc.sh) Loaded core.func via wget"
|
||||
fi
|
||||
|
||||
# This sets error handling options and defines the error_handler function to handle errors
|
||||
set -Eeuo pipefail
|
||||
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
|
||||
|
||||
# This function handles errors
|
||||
function error_handler() {
|
||||
printf "\e[?25h"
|
||||
local exit_code="$?"
|
||||
local line_number="$1"
|
||||
local command="$2"
|
||||
local error_message="${RD}[ERROR]${CL} in line ${RD}$line_number${CL}: exit code ${RD}$exit_code${CL}: while executing command ${YW}$command${CL}"
|
||||
echo -e "\n$error_message\n"
|
||||
exit 200
|
||||
}
|
||||
|
||||
# This checks for the presence of valid Container Storage and Template Storage locations
|
||||
msg_info "Validating Storage"
|
||||
VALIDCT=$(pvesm status -content rootdir | awk 'NR>1')
|
||||
if [ -z "$VALIDCT" ]; then
|
||||
msg_error "Unable to detect a valid Container Storage location."
|
||||
exit 1
|
||||
fi
|
||||
VALIDTMP=$(pvesm status -content vztmpl | awk 'NR>1')
|
||||
if [ -z "$VALIDTMP" ]; then
|
||||
msg_error "Unable to detect a valid Template Storage location."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# This function is used to select the storage class and determine the corresponding storage content type and label.
|
||||
function select_storage() {
|
||||
local CLASS=$1
|
||||
local CONTENT
|
||||
local CONTENT_LABEL
|
||||
case $CLASS in
|
||||
container)
|
||||
CONTENT='rootdir'
|
||||
CONTENT_LABEL='Container'
|
||||
;;
|
||||
template)
|
||||
CONTENT='vztmpl'
|
||||
CONTENT_LABEL='Container template'
|
||||
;;
|
||||
*) false || {
|
||||
msg_error "Invalid storage class."
|
||||
exit 201
|
||||
} ;;
|
||||
esac
|
||||
|
||||
# This Queries all storage locations
|
||||
local -a MENU
|
||||
while read -r line; do
|
||||
local TAG=$(echo $line | awk '{print $1}')
|
||||
local TYPE=$(echo $line | awk '{printf "%-10s", $2}')
|
||||
local FREE=$(echo $line | numfmt --field 4-6 --from-unit=K --to=iec --format %.2f | awk '{printf( "%9sB", $6)}')
|
||||
local ITEM="Type: $TYPE Free: $FREE "
|
||||
local OFFSET=2
|
||||
if [[ $((${#ITEM} + $OFFSET)) -gt ${MSG_MAX_LENGTH:-} ]]; then
|
||||
local MSG_MAX_LENGTH=$((${#ITEM} + $OFFSET))
|
||||
fi
|
||||
MENU+=("$TAG" "$ITEM" "OFF")
|
||||
done < <(pvesm status -content $CONTENT | awk 'NR>1')
|
||||
|
||||
# Select storage location
|
||||
if [ $((${#MENU[@]} / 3)) -eq 1 ]; then
|
||||
printf ${MENU[0]}
|
||||
else
|
||||
local STORAGE
|
||||
while [ -z "${STORAGE:+x}" ]; do
|
||||
STORAGE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Storage Pools" --radiolist \
|
||||
"Which storage pool you would like to use for the ${CONTENT_LABEL,,}?\nTo make a selection, use the Spacebar.\n" \
|
||||
16 $(($MSG_MAX_LENGTH + 23)) 6 \
|
||||
"${MENU[@]}" 3>&1 1>&2 2>&3) || {
|
||||
msg_error "Menu aborted."
|
||||
exit 202
|
||||
}
|
||||
if [ $? -ne 0 ]; then
|
||||
echo -e "${CROSS}${RD} Menu aborted by user.${CL}"
|
||||
exit 0
|
||||
fi
|
||||
done
|
||||
printf "%s" "$STORAGE"
|
||||
fi
|
||||
}
|
||||
# Test if required variables are set
|
||||
[[ "${CTID:-}" ]] || {
|
||||
msg_error "You need to set 'CTID' variable."
|
||||
exit 203
|
||||
}
|
||||
[[ "${PCT_OSTYPE:-}" ]] || {
|
||||
msg_error "You need to set 'PCT_OSTYPE' variable."
|
||||
exit 204
|
||||
}
|
||||
|
||||
# Test if ID is valid
|
||||
[ "$CTID" -ge "100" ] || {
|
||||
msg_error "ID cannot be less than 100."
|
||||
exit 205
|
||||
}
|
||||
|
||||
# Test if ID is in use
|
||||
if qm status "$CTID" &>/dev/null || pct status "$CTID" &>/dev/null; then
|
||||
echo -e "ID '$CTID' is already in use."
|
||||
unset CTID
|
||||
msg_error "Cannot use ID that is already in use."
|
||||
exit 206
|
||||
fi
|
||||
|
||||
# Get template storage
|
||||
TEMPLATE_STORAGE=$(select_storage template) || exit
|
||||
msg_ok "Using ${BL}$TEMPLATE_STORAGE${CL} ${GN}for Template Storage."
|
||||
|
||||
# Get container storage
|
||||
CONTAINER_STORAGE=$(select_storage container) || exit
|
||||
msg_ok "Using ${BL}$CONTAINER_STORAGE${CL} ${GN}for Container Storage."
|
||||
|
||||
# Update LXC template list
|
||||
msg_info "Updating LXC Template List"
|
||||
#check_network
|
||||
pveam update >/dev/null
|
||||
msg_ok "Updated LXC Template List"
|
||||
|
||||
# Get LXC template string
|
||||
TEMPLATE_SEARCH=${PCT_OSTYPE}-${PCT_OSVERSION:-}
|
||||
mapfile -t TEMPLATES < <(pveam available -section system | sed -n "s/.*\($TEMPLATE_SEARCH.*\)/\1/p" | sort -t - -k 2 -V)
|
||||
[ ${#TEMPLATES[@]} -gt 0 ] || {
|
||||
msg_error "Unable to find a template when searching for '$TEMPLATE_SEARCH'."
|
||||
exit 207
|
||||
}
|
||||
TEMPLATE="${TEMPLATES[-1]}"
|
||||
TEMPLATE_PATH="$(pvesm path $TEMPLATE_STORAGE:vztmpl/$TEMPLATE)"
|
||||
# Without NAS/Mount: TEMPLATE_PATH="/var/lib/vz/template/cache/$TEMPLATE"
|
||||
# Check if template exists, if corrupt remove and redownload
|
||||
if ! pveam list "$TEMPLATE_STORAGE" | grep -q "$TEMPLATE" || ! zstdcat "$TEMPLATE_PATH" | tar -tf - >/dev/null 2>&1; then
|
||||
msg_warn "Template $TEMPLATE not found in storage or seems to be corrupted. Redownloading."
|
||||
[[ -f "$TEMPLATE_PATH" ]] && rm -f "$TEMPLATE_PATH"
|
||||
|
||||
# Download with 3 attempts
|
||||
for attempt in {1..3}; do
|
||||
msg_info "Attempt $attempt: Downloading LXC template..."
|
||||
|
||||
if timeout 120 pveam download "$TEMPLATE_STORAGE" "$TEMPLATE" >/dev/null; then
|
||||
msg_ok "Template download successful."
|
||||
break
|
||||
fi
|
||||
|
||||
if [ $attempt -eq 3 ]; then
|
||||
msg_error "Three failed attempts. Aborting."
|
||||
exit 208
|
||||
fi
|
||||
|
||||
sleep $((attempt * 5))
|
||||
done
|
||||
fi
|
||||
msg_ok "LXC Template is ready to use."
|
||||
|
||||
# Check and fix subuid/subgid
|
||||
grep -q "root:100000:65536" /etc/subuid || echo "root:100000:65536" >>/etc/subuid
|
||||
grep -q "root:100000:65536" /etc/subgid || echo "root:100000:65536" >>/etc/subgid
|
||||
|
||||
# Combine all options
|
||||
PCT_OPTIONS=(${PCT_OPTIONS[@]:-${DEFAULT_PCT_OPTIONS[@]}})
|
||||
[[ " ${PCT_OPTIONS[@]} " =~ " -rootfs " ]] || PCT_OPTIONS+=(-rootfs "$CONTAINER_STORAGE:${PCT_DISK_SIZE:-8}")
|
||||
|
||||
msg_info "Creating LXC Container"
|
||||
if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" &>/dev/null; then
|
||||
msg_error "Container creation failed. Checking if template is corrupted."
|
||||
|
||||
if ! zstdcat "$TEMPLATE_PATH" | tar -tf - >/dev/null 2>&1; then
|
||||
msg_error "Template appears to be corrupted. Removing and re-downloading."
|
||||
rm -f "$TEMPLATE_PATH"
|
||||
|
||||
if ! timeout 120 pveam download "$TEMPLATE_STORAGE" "$TEMPLATE" >/dev/null; then
|
||||
msg_error "Failed to re-download template."
|
||||
exit 208
|
||||
fi
|
||||
|
||||
msg_ok "Re-downloaded LXC Template"
|
||||
|
||||
if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" &>/dev/null; then
|
||||
msg_error "Container creation failed after re-downloading template."
|
||||
exit 200
|
||||
fi
|
||||
else
|
||||
msg_error "Container creation failed, but template is not corrupted."
|
||||
exit 209
|
||||
fi
|
||||
fi
|
||||
: "${UDHCPC_FIX:=}"
|
||||
if [ "$UDHCPC_FIX" == "yes" ]; then
|
||||
# Ensure container is mounted
|
||||
if ! mount | grep -q "/var/lib/lxc/${CTID}/rootfs"; then
|
||||
pct mount "$CTID" >/dev/null 2>&1
|
||||
MOUNTED_HERE=true
|
||||
else
|
||||
MOUNTED_HERE=false
|
||||
fi
|
||||
|
||||
CONFIG_FILE="/var/lib/lxc/${CTID}/rootfs/etc/udhcpc/udhcpc.conf"
|
||||
|
||||
for i in {1..10}; do
|
||||
[ -f "$CONFIG_FILE" ] && break
|
||||
sleep 0.5
|
||||
done
|
||||
|
||||
if [ -f "$CONFIG_FILE" ]; then
|
||||
msg_info "Patching udhcpc.conf for Alpine DNS override"
|
||||
sed -i '/^#*RESOLV_CONF="/d' "$CONFIG_FILE"
|
||||
awk '
|
||||
/^# Do not overwrite \/etc\/resolv\.conf/ {
|
||||
print
|
||||
print "RESOLV_CONF=\"no\""
|
||||
next
|
||||
}
|
||||
{ print }
|
||||
' "$CONFIG_FILE" >"${CONFIG_FILE}.tmp" && mv "${CONFIG_FILE}.tmp" "$CONFIG_FILE"
|
||||
msg_ok "Patched udhcpc.conf (RESOLV_CONF=\"no\")"
|
||||
else
|
||||
msg_error "udhcpc.conf not found in $CONFIG_FILE after waiting"
|
||||
fi
|
||||
|
||||
# Clean up: only unmount if we mounted it here
|
||||
if [ "${MOUNTED_HERE}" = true ]; then
|
||||
pct unmount "$CTID" >/dev/null 2>&1
|
||||
fi
|
||||
fi
|
||||
|
||||
msg_ok "LXC Container ${BL}$CTID${CL} ${GN}was successfully created."
|
36
ct/debian.sh
@ -1,5 +1,5 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 tteck
|
||||
# Author: tteck (tteckster)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
|
||||
@ -7,14 +7,12 @@ source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxV
|
||||
|
||||
APP="Debian"
|
||||
var_tags="${var_tags:-os}"
|
||||
var_cpu="${var_cpu:-4}"
|
||||
var_ram="${var_ram:-4096}"
|
||||
var_disk="${var_disk:-15}"
|
||||
var_cpu="${var_cpu:-1}"
|
||||
var_ram="${var_ram:-512}"
|
||||
var_disk="${var_disk:-2}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
#var_fuse="${var_fuse:-no}"
|
||||
#var_tun="${var_tun:-no}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
@ -22,23 +20,23 @@ color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -d /var ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -d /var ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
msg_info "Updating $APP LXC"
|
||||
$STD apt-get update
|
||||
$STD apt-get -y upgrade
|
||||
msg_ok "Updated $APP LXC"
|
||||
exit
|
||||
fi
|
||||
msg_info "Updating $APP LXC"
|
||||
$STD apt-get update
|
||||
$STD apt-get -y upgrade
|
||||
msg_ok "Updated $APP LXC"
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!"
|
||||
msg_custom "🚀" "${GN}" "${APP} setup has been successfully initialized!"
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
|
@ -1,114 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 tteck
|
||||
# Author: tteck (tteckster) | Co-Author: MickLesk (Canbiz) | Co-Author: CrazyWolf13
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://homarr.dev/
|
||||
|
||||
APP="alpine-homarr"
|
||||
var_tags="${var_tags:-arr;dashboard}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-4096}"
|
||||
var_disk="${var_disk:-8}"
|
||||
var_os="${var_os:-alpine}"
|
||||
var_version="${var_version:-3.21}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
RELEASE=$(curl -fsSL https://api.github.com/repos/homarr-labs/homarr/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
|
||||
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
|
||||
|
||||
msg_info "Stopping Services (Patience)"
|
||||
systemctl stop homarr
|
||||
msg_ok "Services Stopped"
|
||||
|
||||
msg_info "Backup Data"
|
||||
mkdir -p /opt/homarr-data-backup
|
||||
cp /opt/homarr/.env /opt/homarr-data-backup/.env
|
||||
msg_ok "Backup Data"
|
||||
|
||||
msg_info "Updating and rebuilding ${APP} to v${RELEASE} (Patience)"
|
||||
rm /opt/run_homarr.sh
|
||||
cat <<'EOF' >/opt/run_homarr.sh
|
||||
#!/bin/bash
|
||||
set -a
|
||||
source /opt/homarr/.env
|
||||
set +a
|
||||
export DB_DIALECT='sqlite'
|
||||
export AUTH_SECRET=$(openssl rand -base64 32)
|
||||
node /opt/homarr_db/migrations/$DB_DIALECT/migrate.cjs /opt/homarr_db/migrations/$DB_DIALECT
|
||||
for dir in $(find /opt/homarr_db/migrations/migrations -mindepth 1 -maxdepth 1 -type d); do
|
||||
dirname=$(basename "$dir")
|
||||
mkdir -p "/opt/homarr_db/migrations/$dirname"
|
||||
cp -r "$dir"/* "/opt/homarr_db/migrations/$dirname/" 2>/dev/null || true
|
||||
done
|
||||
export HOSTNAME=$(ip route get 1.1.1.1 | grep -oP 'src \K[^ ]+')
|
||||
envsubst '${HOSTNAME}' < /etc/nginx/templates/nginx.conf > /etc/nginx/nginx.conf
|
||||
nginx -g 'daemon off;' &
|
||||
redis-server /opt/homarr/packages/redis/redis.conf &
|
||||
node apps/tasks/tasks.cjs &
|
||||
node apps/websocket/wssServer.cjs &
|
||||
node apps/nextjs/server.js & PID=$!
|
||||
wait $PID
|
||||
EOF
|
||||
chmod +x /opt/run_homarr.sh
|
||||
NODE_VERSION=$(curl -fsSL https://raw.githubusercontent.com/homarr-labs/homarr/dev/package.json | jq -r '.engines.node | split(">=")[1] | split(".")[0]')
|
||||
NODE_MODULE="pnpm@$(curl -fsSL https://raw.githubusercontent.com/homarr-labs/homarr/dev/package.json | jq -r '.packageManager | split("@")[1]')"
|
||||
install_node_and_modules
|
||||
rm -rf /opt/homarr
|
||||
fetch_and_deploy_gh_release "homarr-labs/homarr"
|
||||
mv /opt/homarr-data-backup/.env /opt/homarr/.env
|
||||
cd /opt/homarr
|
||||
echo "test2"
|
||||
export NODE_ENV=""
|
||||
$STD pnpm install --recursive --frozen-lockfile --shamefully-hoist
|
||||
$STD pnpm build
|
||||
cp /opt/homarr/apps/nextjs/next.config.ts .
|
||||
cp /opt/homarr/apps/nextjs/package.json .
|
||||
cp -r /opt/homarr/packages/db/migrations /opt/homarr_db/migrations
|
||||
cp -r /opt/homarr/apps/nextjs/.next/standalone/* /opt/homarr
|
||||
mkdir -p /appdata/redis
|
||||
cp /opt/homarr/packages/redis/redis.conf /opt/homarr/redis.conf
|
||||
rm /etc/nginx/nginx.conf
|
||||
mkdir -p /etc/nginx/templates
|
||||
cp /opt/homarr/nginx.conf /etc/nginx/templates/nginx.conf
|
||||
|
||||
mkdir -p /opt/homarr/apps/cli
|
||||
cp /opt/homarr/packages/cli/cli.cjs /opt/homarr/apps/cli/cli.cjs
|
||||
echo $'#!/bin/bash\ncd /opt/homarr/apps/cli && node ./cli.cjs "$@"' >/usr/bin/homarr
|
||||
chmod +x /usr/bin/homarr
|
||||
|
||||
mkdir /opt/homarr/build
|
||||
cp ./node_modules/better-sqlite3/build/Release/better_sqlite3.node ./build/better_sqlite3.node
|
||||
echo "${RELEASE}" >/opt/${APP}_version.txt
|
||||
msg_ok "Updated ${APP}"
|
||||
|
||||
msg_info "Starting Services"
|
||||
systemctl start homarr
|
||||
msg_ok "Started Services"
|
||||
msg_ok "Updated Successfully"
|
||||
read -p "It's recommended to reboot the LXC after an update, would you like to reboot the LXC now ? (y/n): " choice
|
||||
if [[ "$choice" =~ ^[Yy]$ ]]; then
|
||||
reboot
|
||||
fi
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at v${RELEASE}"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:7575${CL}"
|
@ -1,73 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source:
|
||||
|
||||
APP="Hoodik"
|
||||
# shellcheck disable=SC2034
|
||||
var_tags="${var_tags:-sharing}"
|
||||
var_disk="${var_disk:-7}"
|
||||
var_cpu="${var_cpu:-4}"
|
||||
var_ram="${var_ram:-2048}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -d /opt/hoodik ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
RELEASE=$(curl -fsSL https://api.github.com/repos/hudikhq/hoodik/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
|
||||
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
|
||||
msg_info "Stopping Services"
|
||||
systemctl stop hoodik
|
||||
msg_ok "Services Stopped"
|
||||
|
||||
msg_info "Updating ${APP} to ${RELEASE}"
|
||||
cd /opt
|
||||
if [ -d hoodik_bak ]; then
|
||||
rm -rf hoodik_bak
|
||||
fi
|
||||
mv hoodik hoodik_bak
|
||||
curl -fsSL "https://github.com/hudikhq/hoodik/archive/refs/tags/${RELEASE}.zip"
|
||||
unzip -q ${RELEASE}.zip
|
||||
mv hoodik-${RELEASE} /opt/hoodik
|
||||
cd /opt/hoodik
|
||||
cargo update -q
|
||||
cargo build -q --release
|
||||
msg_ok "Updated Hoodik"
|
||||
|
||||
msg_info "Starting Services"
|
||||
systemctl start hoodik
|
||||
msg_ok "Started Services"
|
||||
|
||||
msg_info "Cleaning Up"
|
||||
rm -R /opt/${RELEASE}.zip
|
||||
rm -R /opt/hoodik_bak
|
||||
msg_ok "Cleaned"
|
||||
msg_ok "Updated Successfully"
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at ${RELEASE}"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8088${CL}"
|
@ -1,66 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: Omar Minaya
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://www.kasmweb.com/docs/latest/index.html
|
||||
|
||||
APP="Kasm"
|
||||
var_tags="${var_tags:-os}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-4192}"
|
||||
var_disk="${var_disk:-30}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-0}"
|
||||
var_fuse="${var_fuse:-yes}"
|
||||
var_tun="${var_tun:-yes}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /opt/kasm ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
RELEASE=$(curl -fsSL 'https://www.kasmweb.com/downloads' | grep -o 'https://kasm-static-content.s3.amazonaws.com/kasm_release_[^"]*\.tar\.gz' | head -n 1 | sed -E 's/.*release_(.*)\.tar\.gz/\1/')
|
||||
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
|
||||
msg_info "Updating ${APP} to v${RELEASE}"
|
||||
temp_file=$(mktemp)
|
||||
curl -fsSL "https://kasm-static-content.s3.amazonaws.com/kasm_release_${RELEASE}.tar.gz" -o "$temp_file"
|
||||
tar zxf "$temp_file"
|
||||
mkdir -p /opt/kasm/backups/
|
||||
chmod 777 /opt/kasm/backups/
|
||||
mv /opt/kasm/1.*/certs/kasm_nginx.crt /opt/kasm/kasm_nginx.crt_bak
|
||||
printf 'y\n' | $STD sudo bash /tmp/kasm_release/upgrade.sh
|
||||
$STD sudo bash /tmp/kasm_release/upgrade.sh
|
||||
echo "${RELEASE}" >/opt/${APP}_version.txt
|
||||
msg_ok "Updated ${APP} to v${RELEASE}"
|
||||
|
||||
msg_info "Cleaning up"
|
||||
rm -f "$temp_file"
|
||||
rm -rf /tmp/kasm_release
|
||||
$STD apt-get -y autoremove
|
||||
$STD apt-get -y autoclean
|
||||
msg_ok "Cleaned"
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at v${RELEASE}"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}https://${IP}${CL}"
|
@ -1,71 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: MickLesk (Canbiz)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
|
||||
## App Default Values
|
||||
APP="Koel"
|
||||
var_tags="${var_tags:-music}"
|
||||
var_disk="${var_disk:-9}"
|
||||
var_cpu="${var_cpu:-3}"
|
||||
var_ram="${var_ram:-3072}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -d /opt/koel ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
RELEASE=$(curl -fsSL https://api.github.com/repos/koel/koel/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
|
||||
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
|
||||
msg_info "Stopping ${APP} Service"
|
||||
systemctl stop nginx
|
||||
msg_ok "Stopped ${APP} Service"
|
||||
|
||||
msg_info "Updating ${APP} to v${RELEASE}"
|
||||
cd /opt
|
||||
curl -fsSL https://github.com/koel/koel/releases/download/${RELEASE}/koel-${RELEASE}.zip
|
||||
unzip -q koel-${RELEASE}.zip
|
||||
cd /opt/koel
|
||||
composer update --no-interaction >/dev/null 2>&1
|
||||
composer install --no-interaction >/dev/null 2>&1
|
||||
php artisan migrate --force >/dev/null 2>&1
|
||||
php artisan cache:clear >/dev/null 2>&1
|
||||
php artisan config:clear >/dev/null 2>&1
|
||||
php artisan view:clear >/dev/null 2>&1
|
||||
php artisan koel:init --no-interaction >/dev/null 2>&1
|
||||
msg_ok "Updated ${APP} to v${RELEASE}"
|
||||
|
||||
msg_info "Starting ${APP} Service"
|
||||
systemctl start nginx
|
||||
msg_ok "Started ${APP} Service"
|
||||
|
||||
msg_info "Cleaning up"
|
||||
rm -rf /opt/koel-${RELEASE}.zip
|
||||
msg_ok "Cleaned"
|
||||
msg_ok "Updated Successfully!\n"
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at v${RELEASE}"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN} ${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:6767${CL}"
|
@ -1,129 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2023 tteck
|
||||
# Author: tteck (tteckster)
|
||||
# License: MIT
|
||||
# https://github.com/tteck/Proxmox/raw/main/LICENSE
|
||||
|
||||
function header_info {
|
||||
clear
|
||||
cat <<"EOF"
|
||||
__ __ __
|
||||
____ ___ / /_/ /_ ____ ____ / /_ _ ____ ______
|
||||
/ __ \/ _ \/ __/ __ \/ __ \/ __ \/ __/ | |/_/ / / /_ /
|
||||
/ / / / __/ /_/ /_/ / /_/ / /_/ / /__ _> </ /_/ / / /_
|
||||
/_/ /_/\___/\__/_.___/\____/\____/\__(_)_/|_|\__, / /___/
|
||||
/____/
|
||||
EOF
|
||||
}
|
||||
header_info
|
||||
echo -e "Loading..."
|
||||
APP="netboot.xyz"
|
||||
var_disk="${var_disk:-2}"
|
||||
var_cpu="${var_cpu:-1}"
|
||||
var_ram="${var_ram:-512}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function default_settings() {
|
||||
CT_TYPE="1"
|
||||
PW=""
|
||||
CT_ID=$NEXTID
|
||||
HN=$NSAPP
|
||||
DISK_SIZE="$var_disk"
|
||||
CORE_COUNT="$var_cpu"
|
||||
RAM_SIZE="$var_ram"
|
||||
BRG="vmbr0"
|
||||
NET="dhcp"
|
||||
GATE=""
|
||||
DISABLEIP6="no"
|
||||
MTU=""
|
||||
SD=""
|
||||
NS=""
|
||||
MAC=""
|
||||
VLAN=""
|
||||
SSH="no"
|
||||
VERB="no"
|
||||
echo_default
|
||||
}
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
if [[ ! -d /opt/netboot.xyz ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
msg_info "Stopping ${APP}"
|
||||
systemctl disable netbootxyz.service &>/dev/null
|
||||
systemctl stop netbootxyz
|
||||
sleep 1
|
||||
msg_ok "Stopped ${APP}"
|
||||
|
||||
msg_info "Backing up Data"
|
||||
cp -R /opt/netboot.xyz/config config-backup
|
||||
cp -R /opt/netboot.xyz/assets assets-backup
|
||||
sleep 1
|
||||
msg_ok "Backed up Data"
|
||||
|
||||
RELEASE=$(curl -fsSLX GET "https://api.github.com/repos/netbootxyz/netboot.xyz/releases/latest" | awk '/tag_name/{print $4;exit}' FS='[""]')
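# Using the double quote as the awk field separator splits the "tag_name" line on quotes,
# so the 4th field is the bare release tag.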
|
||||
msg_info "Updating netboot.xyz to ${RELEASE}"
|
||||
curl --silent -o ${RELEASE}.tar.gz -L "https://github.com/netbootxyz/netboot.xyz/archive/${RELEASE}.tar.gz" &>/dev/null
|
||||
tar xvzf ${RELEASE}.tar.gz &>/dev/null
|
||||
VER=$(curl -fsSL https://api.github.com/repos/netbootxyz/netboot.xyz/releases/latest |
|
||||
grep "tag_name" |
|
||||
awk '{print substr($2, 2, length($2)-3) }')
|
||||
|
||||
if [ ! -d "/opt/netboot.xyz" ]; then
|
||||
mv netboot.xyz-${VER} /opt/netboot.xyz
|
||||
else
|
||||
cp -R netboot.xyz-${VER}/* /opt/netboot.xyz
|
||||
fi
|
||||
|
||||
service_path="/etc/systemd/system/netbootxyz.service"
|
||||
echo "[Unit]
|
||||
Description=netboot.xyz
|
||||
After=network.target
|
||||
[Service]
|
||||
Restart=always
|
||||
RestartSec=5
|
||||
Type=simple
|
||||
User=root
|
||||
WorkingDirectory=/opt/netboot.xyz
|
||||
ExecStart="ansible-playbook" -i inventory site.yml
|
||||
TimeoutStopSec=30
|
||||
[Install]
|
||||
WantedBy=multi-user.target" >$service_path
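# Note: after rewriting the unit file, a "systemctl daemon-reload" may be needed before the
# service is re-enabled below so systemd picks up the changes (not performed explicitly here).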
|
||||
msg_ok "Updated netboot.xyz to ${RELEASE}"
|
||||
|
||||
msg_info "Restoring Data"
|
||||
cp -R config-backup/* /opt/netboot.xyz/config
|
||||
cp -R assets-backup/* /opt/netboot.xyz/assets
|
||||
sleep 1
|
||||
msg_ok "Restored Data"
|
||||
|
||||
msg_info "Cleanup"
|
||||
rm -rf ${RELEASE}.tar.gz
|
||||
rm -rf netboot.xyz-${VER}
|
||||
rm -rf config-backup
|
||||
rm -rf assets-backup
|
||||
sleep 1
|
||||
msg_ok "Cleaned"
|
||||
|
||||
msg_info "Starting ${APP}"
|
||||
systemctl enable --now netbootxyz.service &>/dev/null
|
||||
sleep 2
|
||||
msg_ok "Started ${APP}"
|
||||
msg_ok "Updated Successfully"
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${APP} should be reachable by going to the following URL.
|
||||
${BL}http://${IP}:3000${CL} \n"
|
@ -1,160 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 tteck
|
||||
# Author: tteck (tteckster)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://nginxproxymanager.com/
|
||||
|
||||
APP="Nginx Proxy Manager"
|
||||
var_tags="${var_tags:-proxy}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-1024}"
|
||||
var_disk="${var_disk:-4}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -f /lib/systemd/system/npm.service ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
if ! command -v pnpm &>/dev/null; then
|
||||
msg_info "Installing pnpm"
|
||||
#export NODE_OPTIONS=--openssl-legacy-provider
|
||||
$STD npm install -g pnpm@8.15
|
||||
msg_ok "Installed pnpm"
|
||||
fi
|
||||
RELEASE=$(curl -fsSL https://api.github.com/repos/NginxProxyManager/nginx-proxy-manager/releases/latest |
|
||||
grep "tag_name" |
|
||||
awk '{print substr($2, 3, length($2)-4) }')
|
||||
msg_info "Stopping Services"
|
||||
systemctl stop openresty
|
||||
systemctl stop npm
|
||||
msg_ok "Stopped Services"
|
||||
|
||||
msg_info "Cleaning Old Files"
|
||||
rm -rf /app \
|
||||
/var/www/html \
|
||||
/etc/nginx \
|
||||
/var/log/nginx \
|
||||
/var/lib/nginx \
|
||||
/var/cache/nginx
|
||||
msg_ok "Cleaned Old Files"
|
||||
|
||||
msg_info "Downloading NPM v${RELEASE}"
|
||||
curl -fsSL https://codeload.github.com/NginxProxyManager/nginx-proxy-manager/tar.gz/v${RELEASE} -o - | tar -xz
|
||||
cd nginx-proxy-manager-${RELEASE}
|
||||
msg_ok "Downloaded NPM v${RELEASE}"
|
||||
|
||||
msg_info "Setting up Enviroment"
|
||||
ln -sf /usr/bin/python3 /usr/bin/python
|
||||
ln -sf /usr/bin/certbot /opt/certbot/bin/certbot
|
||||
ln -sf /usr/local/openresty/nginx/sbin/nginx /usr/sbin/nginx
|
||||
ln -sf /usr/local/openresty/nginx/ /etc/nginx
|
||||
sed -i "s|\"version\": \"0.0.0\"|\"version\": \"$RELEASE\"|" backend/package.json
|
||||
sed -i "s|\"version\": \"0.0.0\"|\"version\": \"$RELEASE\"|" frontend/package.json
|
||||
sed -i 's|"fork-me": ".*"|"fork-me": "Proxmox VE Helper-Scripts"|' frontend/js/i18n/messages.json
|
||||
sed -i "s|https://github.com.*source=nginx-proxy-manager|https://helper-scripts.com|g" frontend/js/app/ui/footer/main.ejs
|
||||
sed -i 's+^daemon+#daemon+g' docker/rootfs/etc/nginx/nginx.conf
|
||||
NGINX_CONFS=$(find "$(pwd)" -type f -name "*.conf")
|
||||
for NGINX_CONF in $NGINX_CONFS; do
|
||||
sed -i 's+include conf.d+include /etc/nginx/conf.d+g' "$NGINX_CONF"
|
||||
done
|
||||
mkdir -p /var/www/html /etc/nginx/logs
|
||||
cp -r docker/rootfs/var/www/html/* /var/www/html/
|
||||
cp -r docker/rootfs/etc/nginx/* /etc/nginx/
|
||||
cp docker/rootfs/etc/letsencrypt.ini /etc/letsencrypt.ini
|
||||
cp docker/rootfs/etc/logrotate.d/nginx-proxy-manager /etc/logrotate.d/nginx-proxy-manager
|
||||
ln -sf /etc/nginx/nginx.conf /etc/nginx/conf/nginx.conf
|
||||
rm -f /etc/nginx/conf.d/dev.conf
|
||||
mkdir -p /tmp/nginx/body \
|
||||
/run/nginx \
|
||||
/data/nginx \
|
||||
/data/custom_ssl \
|
||||
/data/logs \
|
||||
/data/access \
|
||||
/data/nginx/default_host \
|
||||
/data/nginx/default_www \
|
||||
/data/nginx/proxy_host \
|
||||
/data/nginx/redirection_host \
|
||||
/data/nginx/stream \
|
||||
/data/nginx/dead_host \
|
||||
/data/nginx/temp \
|
||||
/var/lib/nginx/cache/public \
|
||||
/var/lib/nginx/cache/private \
|
||||
/var/cache/nginx/proxy_temp
|
||||
chmod -R 777 /var/cache/nginx
|
||||
chown root /tmp/nginx
|
||||
echo resolver "$(awk 'BEGIN{ORS=" "} $1=="nameserver" {print ($2 ~ ":")? "["$2"]": $2}' /etc/resolv.conf);" >/etc/nginx/conf.d/include/resolvers.conf
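# The awk above collects every nameserver from /etc/resolv.conf (wrapping IPv6 addresses
# in brackets) into a single "resolver ...;" directive that nginx includes.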
|
||||
if [ ! -f /data/nginx/dummycert.pem ] || [ ! -f /data/nginx/dummykey.pem ]; then
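# Generate a self-signed placeholder certificate only if none exists yet; it serves the
# default site until real certificates are issued.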
|
||||
$STD openssl req -new -newkey rsa:2048 -days 3650 -nodes -x509 -subj "/O=Nginx Proxy Manager/OU=Dummy Certificate/CN=localhost" -keyout /data/nginx/dummykey.pem -out /data/nginx/dummycert.pem
|
||||
fi
|
||||
mkdir -p /app/global /app/frontend/images
|
||||
cp -r backend/* /app
|
||||
cp -r global/* /app/global
|
||||
$STD python3 -m pip install --no-cache-dir certbot-dns-cloudflare
|
||||
msg_ok "Setup Enviroment"
|
||||
|
||||
msg_info "Building Frontend"
|
||||
cd ./frontend
|
||||
$STD pnpm install
|
||||
$STD pnpm upgrade
|
||||
$STD pnpm run build
|
||||
cp -r dist/* /app/frontend
|
||||
cp -r app-images/* /app/frontend/images
|
||||
msg_ok "Built Frontend"
|
||||
|
||||
msg_info "Initializing Backend"
|
||||
$STD rm -rf /app/config/default.json
|
||||
if [ ! -f /app/config/production.json ]; then
|
||||
cat <<'EOF' >/app/config/production.json
|
||||
{
|
||||
"database": {
|
||||
"engine": "knex-native",
|
||||
"knex": {
|
||||
"client": "sqlite3",
|
||||
"connection": {
|
||||
"filename": "/data/database.sqlite"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
EOF
|
||||
fi
|
||||
cd /app
|
||||
$STD pnpm install
|
||||
msg_ok "Initialized Backend"
|
||||
|
||||
msg_info "Starting Services"
|
||||
sed -i 's/user npm/user root/g; s/^pid/#pid/g' /usr/local/openresty/nginx/conf/nginx.conf
|
||||
sed -i 's/su npm npm/su root root/g' /etc/logrotate.d/nginx-proxy-manager
|
||||
sed -i 's/include-system-site-packages = false/include-system-site-packages = true/g' /opt/certbot/pyvenv.cfg
|
||||
systemctl enable -q --now openresty
|
||||
systemctl enable -q --now npm
|
||||
msg_ok "Started Services"
|
||||
|
||||
msg_info "Cleaning up"
|
||||
rm -rf ~/nginx-proxy-manager-*
|
||||
msg_ok "Cleaned"
|
||||
|
||||
msg_ok "Updated Successfully"
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:81${CL}"
|
@ -1,44 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 tteck
|
||||
# Author: tteck (tteckster)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
|
||||
# Source: https://www.debian.org/
|
||||
|
||||
APP="ocis"
|
||||
var_tags="${var_tags:-cloud}"
|
||||
var_cpu="${var_cpu:-4}"
|
||||
var_ram="${var_ram:-4096}"
|
||||
var_disk="${var_disk:-10}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -d /var ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
msg_info "Updating $APP LXC"
|
||||
$STD apt-get update
|
||||
$STD apt-get -y upgrade
|
||||
msg_ok "Updated $APP LXC"
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:9200${CL}"
|
@ -1,73 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 tteck
|
||||
# Author: havardthom
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://openwebui.com/
|
||||
|
||||
APP="Open WebUI"
|
||||
var_tags="${var_tags:-ai;interface}"
|
||||
var_cpu="${var_cpu:-4}"
|
||||
var_ram="${var_ram:-8192}"
|
||||
var_disk="${var_disk:-25}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -d /opt/open-webui ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if [ -x "/usr/bin/ollama" ]; then
|
||||
msg_info "Updating Ollama"
|
||||
OLLAMA_VERSION=$(ollama -v | awk '{print $NF}')
|
||||
RELEASE=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4)}')
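# substr() starts at the 3rd character so both the opening quote and the leading "v" are
# dropped, making the tag directly comparable with the version printed by "ollama -v".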
|
||||
if [ "$OLLAMA_VERSION" != "$RELEASE" ]; then
|
||||
curl -fsSLO https://ollama.com/download/ollama-linux-amd64.tgz
|
||||
tar -C /usr -xzf ollama-linux-amd64.tgz
|
||||
rm -rf ollama-linux-amd64.tgz
|
||||
msg_ok "Ollama updated to version $RELEASE"
|
||||
else
|
||||
msg_ok "Ollama is already up to date."
|
||||
fi
|
||||
fi
|
||||
|
||||
msg_info "Updating ${APP} (Patience)"
|
||||
systemctl stop open-webui.service
|
||||
mkdir -p /opt/openwebui-backup
|
||||
cp -rf /opt/openwebui/backend/data /opt/openwebui-backup
|
||||
cp /opt/openwebui/.env /opt
|
||||
rm -rf /opt/openwebui
|
||||
fetch_and_deploy_gh_release "open-webui/open-webui"
|
||||
cd /opt/openwebui
|
||||
$STD npm install
|
||||
export NODE_OPTIONS="--max-old-space-size=3584"
|
||||
sed -i "s/git rev-parse HEAD/openssl rand -hex 20/g" /opt/openwebui/svelte.config.js
|
||||
$STD npm run build
|
||||
cd ./backend
|
||||
$STD pip install -r requirements.txt -U
|
||||
cp -rf /opt/openwebui-backup/* /opt/openwebui/backend
|
||||
mv /opt/.env /opt/openwebui/
|
||||
systemctl start open-webui.service
|
||||
msg_ok "Updated Successfully"
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"
|
@ -1,64 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source:
|
||||
|
||||
APP="Roundcubemail"
|
||||
var_tags="${var_tags:-mail}"
|
||||
var_disk="${var_disk:-5}"
|
||||
var_cpu="${var_cpu:-1}"
|
||||
var_ram="${var_ram:-1024}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
if [[ ! -d /opt/roundcubemail ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
if (($(df /boot | awk 'NR==2{gsub("%","",$5); print $5}') > 80)); then
|
||||
read -r -p "Warning: Storage is dangerously low, continue anyway? <y/N> " prompt
|
||||
[[ ${prompt,,} =~ ^(y|yes)$ ]] || exit
|
||||
fi
|
||||
RELEASE=$(curl -fsSL https://api.github.com/repos/roundcube/roundcubemail/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
|
||||
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
|
||||
msg_info "Updating ${APP} to ${RELEASE}"
|
||||
cd /opt
|
||||
curl -fsSL "https://github.com/roundcube/roundcubemail/releases/download/${RELEASE}/roundcubemail-${RELEASE}-complete.tar.gz"
|
||||
tar -xf roundcubemail-${RELEASE}-complete.tar.gz
|
||||
mv roundcubemail-${RELEASE} /opt/roundcubemail
|
||||
cd /opt/roundcubemail
|
||||
COMPOSER_ALLOW_SUPERUSER=1 composer install --no-dev
|
||||
chown -R www-data:www-data temp/ logs/
|
||||
msg_ok "Updated ${APP}"
|
||||
|
||||
msg_info "Reload Apache2"
|
||||
systemctl reload apache2
|
||||
msg_ok "Apache2 Reloaded"
|
||||
|
||||
msg_info "Cleaning Up"
|
||||
rm -rf /opt/roundcubemail-${RELEASE}-complete.tar.gz
|
||||
msg_ok "Cleaned"
|
||||
msg_ok "Updated Successfully"
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at ${RELEASE}"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${APP} should be reachable by going to the following URL.
|
||||
${BL}http://${IP}/installer ${CL} \n"
|
@ -1,78 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: MickLesk (Canbiz)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://vikunja.io/
|
||||
|
||||
APP="Vikunja"
|
||||
var_tags="${var_tags:-todo-app}"
|
||||
var_cpu="${var_cpu:-1}"
|
||||
var_ram="${var_ram:-1024}"
|
||||
var_disk="${var_disk:-4}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -d /opt/vikunja ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
if whiptail --backtitle "Vikunja Update" --title "🔄 VERSION SELECTION" --yesno \
|
||||
"Choose the version type to update to:\n\n• STABLE: Recommended for production use\n• UNSTABLE: Latest development version\n\n⚠️ WARNING: Unstable versions may contain bugs,\nbe incomplete, or cause system instability.\nOnly use for testing purposes.\n\nDo you want to use the UNSTABLE version?\n(No = Stable, Yes = Unstable)" 16 70 --defaultno
|
||||
then
|
||||
msg_info "Selecting version"
|
||||
RELEASE="unstable"
|
||||
FILENAME="vikunja-${RELEASE}-x86_64.deb"
|
||||
msg_ok "Selected UNSTABLE version"
|
||||
else
|
||||
msg_info "Selecting version"
|
||||
RELEASE=$(curl -fsSL https://dl.vikunja.io/vikunja/ | grep -oP 'href="/vikunja/\K[0-9]+\.[0-9]+\.[0-9]+' | sort -V | tail -n 1)
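# grep -oP extracts every x.y.z version linked on the download index; sort -V orders them
# semantically so tail -n 1 picks the newest stable release.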
|
||||
FILENAME="vikunja-${RELEASE}-amd64.deb"
|
||||
msg_ok "Selected STABLE version: ${RELEASE}"
|
||||
fi
|
||||
|
||||
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
|
||||
msg_info "Stopping ${APP}"
|
||||
systemctl stop vikunja
|
||||
msg_ok "Stopped ${APP}"
|
||||
msg_info "Updating ${APP} to ${RELEASE}"
|
||||
cd /opt
|
||||
rm -rf /opt/vikunja/vikunja
|
||||
rm -rf "/opt/$FILENAME"
|
||||
curl -fsSL "https://dl.vikunja.io/vikunja/$RELEASE/$FILENAME" -o $(basename "https://dl.vikunja.io/vikunja/$RELEASE/$FILENAME")
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
$STD dpkg -i $FILENAME
|
||||
echo "${RELEASE}" >/opt/${APP}_version.txt
|
||||
msg_ok "Updated ${APP}"
|
||||
msg_info "Starting ${APP}"
|
||||
systemctl start vikunja
|
||||
msg_ok "Started ${APP}"
|
||||
msg_info "Cleaning Up"
|
||||
rm -rf /opt/$FILENAME
|
||||
msg_ok "Cleaned"
|
||||
msg_ok "Updated Successfully"
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at ${RELEASE}"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3456${CL}"
|
@ -1,120 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: ekke85
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/Dispatcharr/Dispatcharr
|
||||
|
||||
APP="Dispatcharr"
|
||||
APP_NAME=${APP,,}
|
||||
var_tags="${var_tags:-media;arr}"
|
||||
var_cpu="${var_cpu:-1}"
|
||||
var_ram="${var_ram:-1024}"
|
||||
var_disk="${var_disk:-8}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d "/opt/dispatcharr" ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
RELEASE=$(curl -fsSL https://api.github.com/repos/Dispatcharr/Dispatcharr/releases/latest | jq -r '.tag_name' | sed 's/^v//')
|
||||
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
|
||||
msg_ok "Starting update"
|
||||
APP_DIR="/opt/dispatcharr"
|
||||
APP_USER="dispatcharr"
|
||||
APP_GROUP="dispatcharr"
|
||||
|
||||
|
||||
|
||||
msg_info "Stopping $APP"
|
||||
systemctl stop dispatcharr-celery
|
||||
systemctl stop dispatcharr-celerybeat
|
||||
systemctl stop dispatcharr-daphne
|
||||
systemctl stop dispatcharr
|
||||
msg_ok "Stopped $APP"
|
||||
|
||||
msg_info "Creating Backup"
|
||||
BACKUP_FILE="/opt/dispatcharr_$(date +%F).tar.gz"
|
||||
msg_info "Source and Database backup"
|
||||
set -o allexport
|
||||
source /etc/$APP_NAME/$APP_NAME.env
|
||||
set +o allexport
|
||||
PGPASSWORD=$POSTGRES_PASSWORD pg_dump -U $POSTGRES_USER -h $POSTGRES_HOST $POSTGRES_DB > /opt/$POSTGRES_DB-`date +%F`.sql
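# Connection details come from the env file sourced above; PGPASSWORD lets pg_dump run
# non-interactively, and the dated filename keeps earlier dumps from being overwritten.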
|
||||
$STD tar -czf "$BACKUP_FILE" /opt/dispatcharr /opt/Dispatcharr_version.txt /opt/$POSTGRES_DB-`date +%F`.sql &>/dev/null
|
||||
msg_ok "Backup Created"
|
||||
|
||||
msg_info "Updating $APP to v${RELEASE}"
|
||||
rm -rf /opt/dispatcharr
|
||||
fetch_and_deploy_gh_release "dispatcharr" "Dispatcharr/Dispatcharr"
|
||||
chown -R "$APP_USER:$APP_GROUP" "$APP_DIR"
|
||||
sed -i 's/program\[\x27channel_id\x27\]/program["channel_id"]/g' "${APP_DIR}/apps/output/views.py"
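# \x27 is a single quote, so this rewrites program['channel_id'] to program["channel_id"]
# in the shipped views.py.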
|
||||
|
||||
msg_ok "Dispatcharr Updated to $RELEASE"
|
||||
|
||||
msg_info "Creating Python Virtual Environment"
|
||||
cd $APP_DIR
|
||||
python3 -m venv env
|
||||
source env/bin/activate
|
||||
$STD pip install --upgrade pip
|
||||
$STD pip install -r requirements.txt
|
||||
$STD pip install gunicorn
|
||||
ln -sf /usr/bin/ffmpeg $APP_DIR/env/bin/ffmpeg
|
||||
msg_ok "Python Environment Setup"
|
||||
|
||||
msg_info "Building Frontend"
|
||||
cd $APP_DIR/frontend
|
||||
$STD npm install --legacy-peer-deps
|
||||
$STD npm run build
|
||||
msg_ok "Built Frontend"
|
||||
|
||||
msg_info "Running Django Migrations"
|
||||
cd $APP_DIR
|
||||
source env/bin/activate
|
||||
set -o allexport
|
||||
source /etc/$APP_NAME/$APP_NAME.env
|
||||
set +o allexport
|
||||
$STD python manage.py migrate --noinput
|
||||
$STD python manage.py collectstatic --noinput
|
||||
msg_ok "Migrations Complete"
|
||||
|
||||
msg_info "Starting $APP"
|
||||
systemctl start dispatcharr-celery
|
||||
systemctl start dispatcharr-celerybeat
|
||||
systemctl start dispatcharr-daphne
|
||||
systemctl start dispatcharr
|
||||
msg_ok "Started $APP"
|
||||
echo "${RELEASE}" > "/opt/${APP}_version.txt"
|
||||
|
||||
msg_info "Cleaning Up"
|
||||
rm -rf /opt/$POSTGRES_DB-`date +%F`.sql
|
||||
msg_ok "Cleanup Completed"
|
||||
|
||||
msg_ok "Update Successful, Backup saved to $BACKUP_FILE"
|
||||
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at v${RELEASE}"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:9191${CL}"
|
@ -1,9 +1,9 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: MickLesk (Canbiz)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/community-scripts/ProxmoxVE
|
||||
# Source:
|
||||
|
||||
APP="Docspell"
|
||||
var_tags="${var_tags:-document}"
|
||||
|
@ -1,16 +1,15 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -s https://raw.githubusercontent.com/vsc55/community-scripts-ProxmoxVED/refs/heads/freepbx/misc/build.func)
|
||||
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: Arian Nasr (arian-nasr)
|
||||
# Updated by: Javier Pastor (vsc55)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
|
||||
# Source: https://www.freepbx.org/
|
||||
|
||||
APP="FreePBX"
|
||||
var_tags="pbx;voip;telephony"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-2048}"
|
||||
var_disk="${var_disk:-10}"
|
||||
var_tags=""
|
||||
var_cpu="${var_cpu:-1}"
|
||||
var_ram="${var_ram:-1024}"
|
||||
var_disk="${var_disk:-20}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
@ -21,43 +20,20 @@ color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -f /lib/systemd/system/freepbx.service ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
# Check if installation is present | -f for file, -d for folder
|
||||
if [[ ! -f /lib/systemd/system/freepbx.service ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
msg_error "Currently we don't provide an update function for this ${APP}."
|
||||
exit
|
||||
fi
|
||||
|
||||
msg_info "Updating $APP LXC"
|
||||
$STD apt-get update
|
||||
$STD apt-get -y upgrade
|
||||
msg_ok "Updated $APP LXC"
|
||||
|
||||
msg_info "Updating $APP Modules"
|
||||
$STD fwconsole ma updateall
|
||||
$STD fwconsole reload
|
||||
msg_ok "Updated $APP Modules"
|
||||
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
|
||||
if whiptail --title "Commercial Modules" --yesno "Remove Commercial modules?" --defaultno 10 50; then
|
||||
export ONLY_OPENSOURCE="yes"
|
||||
|
||||
if whiptail --title "Firewall Module" --yesno "Do you want to KEEP the Firewall module (and sysadmin)?" 10 50; then
|
||||
export REMOVE_FIREWALL="no"
|
||||
else
|
||||
export REMOVE_FIREWALL="yes"
|
||||
fi
|
||||
else
|
||||
export ONLY_OPENSOURCE="no"
|
||||
export REMOVE_FIREWALL="no"
|
||||
fi
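# ONLY_OPENSOURCE and REMOVE_FIREWALL are exported here and are presumably consumed by the
# FreePBX install script during build_container (not shown in this diff).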
|
||||
|
||||
build_container
|
||||
description
|
||||
|
||||
|
@ -1,34 +1,38 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Authors: MickLesk (CanbiZ)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://frigate.video/
|
||||
|
||||
# App Default Values
|
||||
APP="Frigate"
|
||||
var_tags="${var_tags:-nvr}"
|
||||
var_cpu="${var_cpu:-4}"
|
||||
var_ram="${var_ram:-4096}"
|
||||
var_disk="${var_disk:-20}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_version="${var_version:-11}"
|
||||
var_unprivileged="${var_unprivileged:-0}"
|
||||
|
||||
# App Output
|
||||
header_info "$APP"
|
||||
|
||||
# Core
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -f /etc/systemd/system/frigate.service ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
msg_error "To update Frigate, create a new container and transfer your configuration."
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
if [[ ! -f /etc/systemd/system/frigate.service ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
msg_error "To update Frigate, create a new container and transfer your configuration."
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
|
@ -1,101 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: aliaksei135
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/arpanghosh8453/garmin-grafana
|
||||
|
||||
APP="garmin-grafana"
|
||||
var_tags="${var_tags:-sports;visualization}"
|
||||
var_cpu="${var_cpu:-2}"
|
||||
var_ram="${var_ram:-2048}"
|
||||
var_disk="${var_disk:-8}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
# this only updates garmin-grafana, not influxdb or grafana, which are upgraded with apt
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /opt/garmin-grafana/ ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
|
||||
RELEASE=$(curl -fsSL https://api.github.com/repos/arpanghosh8453/garmin-grafana/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
|
||||
if [[ ! -d /opt/garmin-grafana/ ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
|
||||
msg_info "Stopping $APP"
|
||||
systemctl stop garmin-grafana
|
||||
systemctl stop grafana-server
|
||||
systemctl stop influxdb
|
||||
msg_ok "Stopped $APP"
|
||||
|
||||
if [[ ! -f /opt/garmin-grafana/.env ]]; then
|
||||
msg_error "No .env file found in /opt/garmin-grafana/.env"
|
||||
exit
|
||||
fi
|
||||
source /opt/garmin-grafana/.env
|
||||
if [[ -z "${INFLUXDB_USER}" || -z "${INFLUXDB_PASSWORD}" || -z "${INFLUXDB_NAME}" ]]; then
|
||||
msg_error "INFLUXDB_USER, INFLUXDB_PASSWORD, or INFLUXDB_NAME not set in .env file"
|
||||
exit
|
||||
fi
|
||||
|
||||
msg_info "Creating Backup"
|
||||
tar -czf "/opt/${APP}_backup_$(date +%F).tar.gz" /opt/garmin-grafana/.garminconnect /opt/garmin-grafana/.env
|
||||
mv /opt/garmin-grafana/ /opt/garmin-grafana-backup/
|
||||
msg_ok "Backup Created"
|
||||
|
||||
msg_info "Updating $APP to v${RELEASE}"
|
||||
curl -fsSL -o "${RELEASE}.zip" "https://github.com/arpanghosh8453/garmin-grafana/archive/refs/tags/${RELEASE}.zip"
|
||||
unzip -q "${RELEASE}.zip"
|
||||
mv "garmin-grafana-${RELEASE}/" "/opt/garmin-grafana"
|
||||
rm -f "${RELEASE}.zip"
|
||||
$STD uv sync --locked --project /opt/garmin-grafana/
|
||||
# shellcheck disable=SC2016
|
||||
sed -i 's/\${DS_GARMIN_STATS}/garmin_influxdb/g' /opt/garmin-grafana/Grafana_Dashboard/Garmin-Grafana-Dashboard.json
|
||||
sed -i 's/influxdb:8086/localhost:8086/' /opt/garmin-grafana/Grafana_Datasource/influxdb.yaml
|
||||
sed -i "s/influxdb_user/${INFLUXDB_USER}/" /opt/garmin-grafana/Grafana_Datasource/influxdb.yaml
|
||||
sed -i "s/influxdb_secret_password/${INFLUXDB_PASSWORD}/" /opt/garmin-grafana/Grafana_Datasource/influxdb.yaml
|
||||
sed -i "s/GarminStats/${INFLUXDB_NAME}/" /opt/garmin-grafana/Grafana_Datasource/influxdb.yaml
|
||||
# Copy across grafana data
|
||||
cp -r /opt/garmin-grafana/Grafana_Datasource/* /etc/grafana/provisioning/datasources
|
||||
cp -r /opt/garmin-grafana/Grafana_Dashboard/* /etc/grafana/provisioning/dashboards
|
||||
# Copy back the env and token files
|
||||
cp /opt/garmin-grafana-backup/.env /opt/garmin-grafana/.env
|
||||
cp -r /opt/garmin-grafana-backup/.garminconnect /opt/garmin-grafana/.garminconnect
|
||||
msg_ok "Updated $APP to v${RELEASE}"
|
||||
|
||||
msg_info "Starting $APP"
|
||||
systemctl start garmin-grafana
|
||||
systemctl start grafana-server
|
||||
systemctl start influxdb
|
||||
msg_ok "Started $APP"
|
||||
|
||||
msg_info "Cleaning Up"
|
||||
rm -rf /opt/garmin-grafana-backup
|
||||
msg_ok "Cleanup Completed"
|
||||
|
||||
echo "${RELEASE}" >/opt/${APP}_version.txt
|
||||
msg_ok "Update Successful"
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at v${RELEASE}"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"
|
75
ct/gatus.sh
Normal file
75
ct/gatus.sh
Normal file
@ -0,0 +1,75 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: Slaviša Arežina (tremor021)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||
# Source: https://github.com/TwiN/gatus
|
||||
|
||||
APP="gatus"
|
||||
var_tags="${var_tags:-monitoring}"
|
||||
var_cpu="${var_cpu:-1}"
|
||||
var_ram="${var_ram:-1024}"
|
||||
var_disk="${var_disk:-4}"
|
||||
var_os="${var_os:-debian}"
|
||||
var_version="${var_version:-12}"
|
||||
var_unprivileged="${var_unprivileged:-1}"
|
||||
|
||||
header_info "$APP"
|
||||
variables
|
||||
color
|
||||
catch_errors
|
||||
|
||||
function update_script() {
|
||||
header_info
|
||||
check_container_storage
|
||||
check_container_resources
|
||||
|
||||
if [[ ! -d /opt/gatus ]]; then
|
||||
msg_error "No ${APP} Installation Found!"
|
||||
exit
|
||||
fi
|
||||
RELEASE=$(curl -fsSL https://api.github.com/repos/TwiN/gatus/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
|
||||
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
|
||||
msg_info "Updating $APP"
|
||||
|
||||
msg_info "Stopping $APP"
|
||||
systemctl stop gatus
|
||||
msg_ok "Stopped $APP"
|
||||
|
||||
msg_info "Updating $APP to v${RELEASE}"
|
||||
mv /opt/gatus/config/config.yaml /opt
|
||||
rm -rf /opt/gatus/*
|
||||
temp_file=$(mktemp)
|
||||
curl -fsSL "https://github.com/TwiN/gatus/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
|
||||
tar zxf "$temp_file" --strip-components=1 -C /opt/gatus
|
||||
cd /opt/gatus
|
||||
$STD go mod tidy
|
||||
CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o gatus .
|
||||
setcap CAP_NET_RAW+ep gatus
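# CGO_ENABLED=0 produces a statically linked binary, and CAP_NET_RAW lets the unprivileged
# gatus process send ICMP pings for its connectivity checks.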
|
||||
mv /opt/config.yaml config
|
||||
echo "${RELEASE}" >/opt/${APP}_version.txt
|
||||
msg_ok "Updated $APP to v${RELEASE}"
|
||||
|
||||
msg_info "Starting $APP"
|
||||
systemctl start gatus
|
||||
msg_ok "Started $APP"
|
||||
|
||||
msg_info "Cleaning Up"
|
||||
rm -f "$temp_file"
|
||||
msg_ok "Cleanup Completed"
|
||||
|
||||
msg_ok "Update Successful"
|
||||
else
|
||||
msg_ok "No update required. ${APP} is already at v${RELEASE}"
|
||||
fi
|
||||
exit
|
||||
}
|
||||
|
||||
start
|
||||
build_container
|
||||
description
|
||||
|
||||
msg_ok "Completed Successfully!\n"
|
||||
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"
|
@ -1,5 +1,5 @@
|
||||
#!/usr/bin/env bash
|
||||
source <(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
source <(curl -s https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/build.func)
|
||||
# Copyright (c) 2021-2025 community-scripts ORG
|
||||
# Author: MickLesk (Canbiz)
|
||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
6
ct/headers/alpine-bitmagnet
Normal file
6
ct/headers/alpine-bitmagnet
Normal file
@ -0,0 +1,6 @@
|
||||
___ __ _ __ _ __ __
|
||||
/ | / /___ (_)___ ___ / /_ (_) /_____ ___ ____ _____ _____ ___ / /_
|
||||
/ /| | / / __ \/ / __ \/ _ \______/ __ \/ / __/ __ `__ \/ __ `/ __ `/ __ \/ _ \/ __/
|
||||
/ ___ |/ / /_/ / / / / / __/_____/ /_/ / / /_/ / / / / / /_/ / /_/ / / / / __/ /_
|
||||
/_/ |_/_/ .___/_/_/ /_/\___/ /_.___/_/\__/_/ /_/ /_/\__,_/\__, /_/ /_/\___/\__/
|
||||
/_/ /____/
|
6
ct/headers/alpine-syncthing
Normal file
6
ct/headers/alpine-syncthing
Normal file
@ -0,0 +1,6 @@
|
||||
___ __ _ _____ __ __ _
|
||||
/ | / /___ (_)___ ___ / ___/__ ______ _____/ /_/ /_ (_)___ ____ _
|
||||
/ /| | / / __ \/ / __ \/ _ \______\__ \/ / / / __ \/ ___/ __/ __ \/ / __ \/ __ `/
|
||||
/ ___ |/ / /_/ / / / / / __/_____/__/ / /_/ / / / / /__/ /_/ / / / / / / / /_/ /
|
||||
/_/ |_/_/ .___/_/_/ /_/\___/ /____/\__, /_/ /_/\___/\__/_/ /_/_/_/ /_/\__, /
|
||||
/_/ /____/ /____/
|
6
ct/headers/ampache
Normal file
6
ct/headers/ampache
Normal file
@ -0,0 +1,6 @@
|
||||
___ __
|
||||
/ | ____ ___ ____ ____ ______/ /_ ___
|
||||
/ /| | / __ `__ \/ __ \/ __ `/ ___/ __ \/ _ \
|
||||
/ ___ |/ / / / / / /_/ / /_/ / /__/ / / / __/
|
||||
/_/ |_/_/ /_/ /_/ .___/\__,_/\___/_/ /_/\___/
|
||||
/_/
|
6
ct/headers/asterisk
Normal file
6
ct/headers/asterisk
Normal file
@ -0,0 +1,6 @@
|
||||
___ __ _ __
|
||||
/ | _____/ /____ _____(_)____/ /__
|
||||
/ /| | / ___/ __/ _ \/ ___/ / ___/ //_/
|
||||
/ ___ |(__ ) /_/ __/ / / (__ ) ,<
|
||||
/_/ |_/____/\__/\___/_/ /_/____/_/|_|
|
||||
|
6
ct/headers/authentik
Normal file
6
ct/headers/authentik
Normal file
@ -0,0 +1,6 @@
|
||||
___ __ __ __ _ __
|
||||
/ | __ __/ /_/ /_ ___ ____ / /_(_) /__
|
||||
/ /| |/ / / / __/ __ \/ _ \/ __ \/ __/ / //_/
|
||||
/ ___ / /_/ / /_/ / / / __/ / / / /_/ / ,<
|
||||
/_/ |_\__,_/\__/_/ /_/\___/_/ /_/\__/_/_/|_|
|
||||
|
@ -1,6 +0,0 @@
|
||||
___ __ ___ __
|
||||
/ | __ __/ /_____ _________ _/ (_) _____ / /_
|
||||
/ /| |/ / / / __/ __ \/ ___/ __ `/ / / | /| / / _ \/ __ \
|
||||
/ ___ / /_/ / /_/ /_/ / /__/ /_/ / / /| |/ |/ / __/ /_/ /
|
||||
/_/ |_\__,_/\__/\____/\___/\__,_/_/_/ |__/|__/\___/_.___/
|
||||
|
6
ct/headers/babybuddy
Normal file
6
ct/headers/babybuddy
Normal file
@ -0,0 +1,6 @@
|
||||
____ __ ____ __ __
|
||||
/ __ )____ _/ /_ __ __/ __ )__ ______/ /___/ /_ __
|
||||
/ __ / __ `/ __ \/ / / / __ / / / / __ / __ / / / /
|
||||
/ /_/ / /_/ / /_/ / /_/ / /_/ / /_/ / /_/ / /_/ / /_/ /
|
||||
/_____/\__,_/_.___/\__, /_____/\__,_/\__,_/\__,_/\__, /
|
||||
/____/ /____/
|
6
ct/headers/bar-assistant
Normal file
6
ct/headers/bar-assistant
Normal file
@ -0,0 +1,6 @@
|
||||
____ ___ _ __ __
|
||||
/ __ )____ ______ / | __________(_)____/ /_____ _____ / /_
|
||||
/ __ / __ `/ ___/_____/ /| | / ___/ ___/ / ___/ __/ __ `/ __ \/ __/
|
||||
/ /_/ / /_/ / / /_____/ ___ |(__ |__ ) (__ ) /_/ /_/ / / / / /_
|
||||
/_____/\__,_/_/ /_/ |_/____/____/_/____/\__/\__,_/_/ /_/\__/
|
||||
|
6
ct/headers/bluecherry
Normal file
6
ct/headers/bluecherry
Normal file
@ -0,0 +1,6 @@
|
||||
__ __ __
|
||||
/ /_ / /_ _____ _____/ /_ ___ ____________ __
|
||||
/ __ \/ / / / / _ \/ ___/ __ \/ _ \/ ___/ ___/ / / /
|
||||
/ /_/ / / /_/ / __/ /__/ / / / __/ / / / / /_/ /
|
||||
/_.___/_/\__,_/\___/\___/_/ /_/\___/_/ /_/ \__, /
|
||||
/____/
|
6
ct/headers/bookstack
Normal file
6
ct/headers/bookstack
Normal file
@ -0,0 +1,6 @@
|
||||
____ __ __ __
|
||||
/ __ )____ ____ / /_______/ /_____ ______/ /__
|
||||
/ __ / __ \/ __ \/ //_/ ___/ __/ __ `/ ___/ //_/
|
||||
/ /_/ / /_/ / /_/ / ,< (__ ) /_/ /_/ / /__/ ,<
|
||||
/_____/\____/\____/_/|_/____/\__/\__,_/\___/_/|_|
|
||||
|
6
ct/headers/cloudflare-ddns
Normal file
6
ct/headers/cloudflare-ddns
Normal file
@ -0,0 +1,6 @@
|
||||
________ ________ ____ ____ _ _______
|
||||
/ ____/ /___ __ ______/ / __/ /___ _________ / __ \/ __ \/ | / / ___/
|
||||
/ / / / __ \/ / / / __ / /_/ / __ `/ ___/ _ \______/ / / / / / / |/ /\__ \
|
||||
/ /___/ / /_/ / /_/ / /_/ / __/ / /_/ / / / __/_____/ /_/ / /_/ / /| /___/ /
|
||||
\____/_/\____/\__,_/\__,_/_/ /_/\__,_/_/ \___/ /_____/_____/_/ |_//____/
|
||||
|
@ -1,6 +0,0 @@
|
||||
____ _ __ __
|
||||
/ __ \(_)________ ____ _/ /______/ /_ ____ ___________
|
||||
/ / / / / ___/ __ \/ __ `/ __/ ___/ __ \/ __ `/ ___/ ___/
|
||||
/ /_/ / (__ ) /_/ / /_/ / /_/ /__/ / / / /_/ / / / /
|
||||
/_____/_/____/ .___/\__,_/\__/\___/_/ /_/\__,_/_/ /_/
|
||||
/_/
|
@ -1,6 +0,0 @@
|
||||
______ __
|
||||
/ ____/___ / /____
|
||||
/ __/ / __ \/ __/ _ \
|
||||
/ /___/ / / / /_/ __/
|
||||
/_____/_/ /_/\__/\___/
|
||||
|
@ -1,6 +0,0 @@
|
||||
_ ____
|
||||
____ _____ __________ ___ (_)___ ____ __________ _/ __/___ _____ ____ _
|
||||
/ __ `/ __ `/ ___/ __ `__ \/ / __ \______/ __ `/ ___/ __ `/ /_/ __ `/ __ \/ __ `/
|
||||
/ /_/ / /_/ / / / / / / / / / / / /_____/ /_/ / / / /_/ / __/ /_/ / / / / /_/ /
|
||||
\__, /\__,_/_/ /_/ /_/ /_/_/_/ /_/ \__, /_/ \__,_/_/ \__,_/_/ /_/\__,_/
|
||||
/____/ /____/
|
6
ct/headers/gatus
Normal file
6
ct/headers/gatus
Normal file
@ -0,0 +1,6 @@
|
||||
__
|
||||
____ _____ _/ /___ _______
|
||||
/ __ `/ __ `/ __/ / / / ___/
|
||||
/ /_/ / /_/ / /_/ /_/ (__ )
|
||||
\__, /\__,_/\__/\__,_/____/
|
||||
/____/
|
6
ct/headers/ghostfolio
Normal file
6
ct/headers/ghostfolio
Normal file
@ -0,0 +1,6 @@
|
||||
________ __ ____ ___
|
||||
/ ____/ /_ ____ _____/ /_/ __/___ / (_)___
|
||||
/ / __/ __ \/ __ \/ ___/ __/ /_/ __ \/ / / __ \
|
||||
/ /_/ / / / / /_/ (__ ) /_/ __/ /_/ / / / /_/ /
|
||||
\____/_/ /_/\____/____/\__/_/ \____/_/_/\____/
|
||||
|
@ -1,6 +0,0 @@
|
||||
__ __ __
|
||||
/ / / /___ _____ / /______
|
||||
/ /_/ / __ `/ __ \/ //_/ __ \
|
||||
/ __ / /_/ / / / / ,< / /_/ /
|
||||
/_/ /_/\__,_/_/ /_/_/|_|\____/
|
||||
|
6
ct/headers/healthchecks
Normal file
6
ct/headers/healthchecks
Normal file
@ -0,0 +1,6 @@
|
||||
__ ____ __ __ __
|
||||
/ /_ ___ ____ _/ / /_/ /_ _____/ /_ ___ _____/ /_______
|
||||
/ __ \/ _ \/ __ `/ / __/ __ \/ ___/ __ \/ _ \/ ___/ //_/ ___/
|
||||
/ / / / __/ /_/ / / /_/ / / / /__/ / / / __/ /__/ ,< (__ )
|
||||
/_/ /_/\___/\__,_/_/\__/_/ /_/\___/_/ /_/\___/\___/_/|_/____/
|
||||
|
6
ct/headers/homarr
Normal file
6
ct/headers/homarr
Normal file
@ -0,0 +1,6 @@
|
||||
__ __
|
||||
/ / / /___ ____ ___ ____ ___________
|
||||
/ /_/ / __ \/ __ `__ \/ __ `/ ___/ ___/
|
||||
/ __ / /_/ / / / / / / /_/ / / / /
|
||||
/_/ /_/\____/_/ /_/ /_/\__,_/_/ /_/
|
||||
|
@ -1,6 +1,6 @@
|
||||
__ __ ___ _ __ __ ______
|
||||
/ / / /___ ____ ___ ___ / | __________(_)____/ /_____ _____ / /_ / ____/___ ________
|
||||
/ /_/ / __ \/ __ `__ \/ _ \ / /| | / ___/ ___/ / ___/ __/ __ `/ __ \/ __/ / / / __ \/ ___/ _ \
|
||||
/ __ / /_/ / / / / / / __/ / ___ |(__ |__ ) (__ ) /_/ /_/ / / / / /_ / /___/ /_/ / / / __/
|
||||
/_/ /_/\____/_/ /_/ /_/\___/ /_/ |_/____/____/_/____/\__/\__,_/_/ /_/\__/ \____/\____/_/ \___/
|
||||
|
||||
__ __ ___ _ __ __ ______
|
||||
/ / / /___ ____ ___ ___ / | __________(_)____/ /_____ _____ / /_ / ____/___ ________
|
||||
/ /_/ / __ \/ __ `__ \/ _ \ / /| | / ___/ ___/ / ___/ __/ __ `/ __ \/ __/_____/ / / __ \/ ___/ _ \
|
||||
/ __ / /_/ / / / / / / __/ / ___ |(__ |__ ) (__ ) /_/ /_/ / / / / /_/_____/ /___/ /_/ / / / __/
|
||||
/_/ /_/\____/_/ /_/ /_/\___/ /_/ |_/____/____/_/____/\__/\__,_/_/ /_/\__/ \____/\____/_/ \___/
|
||||
|
6
ct/headers/hoodik
Normal file
6
ct/headers/hoodik
Normal file
@ -0,0 +1,6 @@
|
||||
__ __ ___ __
|
||||
/ / / /___ ____ ____/ (_) /__
|
||||
/ /_/ / __ \/ __ \/ __ / / //_/
|
||||
/ __ / /_/ / /_/ / /_/ / / ,<
|
||||
/_/ /_/\____/\____/\__,_/_/_/|_|
|
||||
|
6
ct/headers/immich
Normal file
6
ct/headers/immich
Normal file
@ -0,0 +1,6 @@
|
||||
_ _ __
|
||||
(_)___ ___ ____ ___ (_)____/ /_
|
||||
/ / __ `__ \/ __ `__ \/ / ___/ __ \
|
||||
/ / / / / / / / / / / / / /__/ / / /
|
||||
/_/_/ /_/ /_/_/ /_/ /_/_/\___/_/ /_/
|
||||
|
6
ct/headers/jumpserver
Normal file
6
ct/headers/jumpserver
Normal file
@ -0,0 +1,6 @@
|
||||
__ _____
|
||||
/ /_ ______ ___ ____ / ___/___ ______ _____ _____
|
||||
__ / / / / / __ `__ \/ __ \\__ \/ _ \/ ___/ | / / _ \/ ___/
|
||||
/ /_/ / /_/ / / / / / / /_/ /__/ / __/ / | |/ / __/ /
|
||||
\____/\__,_/_/ /_/ /_/ .___/____/\___/_/ |___/\___/_/
|
||||
/_/
|
@ -1,6 +0,0 @@
|
||||
__ __ __
|
||||
/ //_/___ _____ / /_ ____ _
|
||||
/ ,< / __ `/ __ \/ __ \/ __ `/
|
||||
/ /| / /_/ / / / / /_/ / /_/ /
|
||||
/_/ |_\__,_/_/ /_/_.___/\__,_/
|
||||
|
6
ct/headers/koel
Normal file
6
ct/headers/koel
Normal file
@ -0,0 +1,6 @@
|
||||
__ __ __
|
||||
/ //_/___ ___ / /
|
||||
/ ,< / __ \/ _ \/ /
|
||||
/ /| / /_/ / __/ /
|
||||
/_/ |_\____/\___/_/
|
||||
|
@ -1,6 +0,0 @@
|
||||
__ __ _
|
||||
/ / ___ ____ _____ / /_(_)___ ___ ___
|
||||
/ / / _ \/ __ `/ __ \/ __/ / __ `__ \/ _ \
|
||||
/ /___/ __/ /_/ / / / / /_/ / / / / / / __/
|
||||
/_____/\___/\__,_/_/ /_/\__/_/_/ /_/ /_/\___/
|
||||
|
6
ct/headers/librespeed
Normal file
6
ct/headers/librespeed
Normal file
@ -0,0 +1,6 @@
|
||||
___ __ __
|
||||
/ (_) /_ ________ _________ ___ ___ ____/ /
|
||||
/ / / __ \/ ___/ _ \/ ___/ __ \/ _ \/ _ \/ __ /
|
||||
/ / / /_/ / / / __(__ ) /_/ / __/ __/ /_/ /
|
||||
/_/_/_.___/_/ \___/____/ .___/\___/\___/\__,_/
|
||||
/_/
|
6
ct/headers/matterbridge
Normal file
6
ct/headers/matterbridge
Normal file
@ -0,0 +1,6 @@
|
||||
__ ___ __ __ __ _ __
|
||||
/ |/ /___ _/ /_/ /____ _____/ /_ _____(_)___/ /___ ____
|
||||
/ /|_/ / __ `/ __/ __/ _ \/ ___/ __ \/ ___/ / __ / __ `/ _ \
|
||||
/ / / / /_/ / /_/ /_/ __/ / / /_/ / / / / /_/ / /_/ / __/
|
||||
/_/ /_/\__,_/\__/\__/\___/_/ /_.___/_/ /_/\__,_/\__, /\___/
|
||||
/____/
|
6
ct/headers/mattermost
Normal file
6
ct/headers/mattermost
Normal file
@ -0,0 +1,6 @@
|
||||
__ ___ __ __ __
|
||||
/ |/ /___ _/ /_/ /____ _________ ___ ____ _____/ /_
|
||||
/ /|_/ / __ `/ __/ __/ _ \/ ___/ __ `__ \/ __ \/ ___/ __/
|
||||
/ / / / /_/ / /_/ /_/ __/ / / / / / / / /_/ (__ ) /_
|
||||
/_/ /_/\__,_/\__/\__/\___/_/ /_/ /_/ /_/\____/____/\__/
|
||||
|
6
ct/headers/netbootxyz
Normal file
6
ct/headers/netbootxyz
Normal file
@ -0,0 +1,6 @@
|
||||
__ __ __
|
||||
____ ___ / /_/ /_ ____ ____ / /_ _ ____ ______
|
||||
/ __ \/ _ \/ __/ __ \/ __ \/ __ \/ __/ | |/_/ / / /_ /
|
||||
/ / / / __/ /_/ /_/ / /_/ / /_/ / /__ _> </ /_/ / / /_
|
||||
/_/ /_/\___/\__/_.___/\____/\____/\__(_)_/|_|\__, / /___/
|
||||
/____/
|
6
ct/headers/nginxproxymanager
Normal file
6
ct/headers/nginxproxymanager
Normal file
@ -0,0 +1,6 @@
|
||||
_ __ _ ____ __ ___
|
||||
/ | / /___ _(_)___ _ __ / __ \_________ _ ____ __ / |/ /___ _____ ____ _____ ____ _____
|
||||
/ |/ / __ `/ / __ \| |/_/ / /_/ / ___/ __ \| |/_/ / / / / /|_/ / __ `/ __ \/ __ `/ __ `/ _ \/ ___/
|
||||
/ /| / /_/ / / / / /> < / ____/ / / /_/ /> </ /_/ / / / / / /_/ / / / / /_/ / /_/ / __/ /
|
||||
/_/ |_/\__, /_/_/ /_/_/|_| /_/ /_/ \____/_/|_|\__, / /_/ /_/\__,_/_/ /_/\__,_/\__, /\___/_/
|
||||
/____/ /____/ /____/
|
@ -1,6 +0,0 @@
|
||||
__ __
|
||||
____ ____ / /____ _________ ____ ____ / /__
|
||||
/ __ \/ __ \/ __/ _ \/ ___/ __ \/ __ \/ __ \/ //_/
|
||||
/ / / / /_/ / /_/ __(__ ) / / / /_/ / /_/ / ,<
|
||||
/_/ /_/\____/\__/\___/____/_/ /_/\____/\____/_/|_|
|
||||
|
6
ct/headers/odoo
Normal file
6
ct/headers/odoo
Normal file
@ -0,0 +1,6 @@
|
||||
____ __
|
||||
/ __ \____/ /___ ____
|
||||
/ / / / __ / __ \/ __ \
|
||||
/ /_/ / /_/ / /_/ / /_/ /
|
||||
\____/\__,_/\____/\____/
|
||||
|
@ -1,6 +0,0 @@
|
||||
____ ________ __
|
||||
/ __ \____ ___ ____ / ____/ /___ __ ______/ /
|
||||
/ / / / __ \/ _ \/ __ \/ / / / __ \/ / / / __ /
|
||||
/ /_/ / /_/ / __/ / / / /___/ / /_/ / /_/ / /_/ /
|
||||
\____/ .___/\___/_/ /_/\____/_/\____/\__,_/\__,_/
|
||||
/_/
|
6
ct/headers/openobserve
Normal file
6
ct/headers/openobserve
Normal file
@ -0,0 +1,6 @@
|
||||
____ ____ __
|
||||
/ __ \____ ___ ____ / __ \/ /_ ________ ______ _____
|
||||
/ / / / __ \/ _ \/ __ \/ / / / __ \/ ___/ _ \/ ___/ | / / _ \
|
||||
/ /_/ / /_/ / __/ / / / /_/ / /_/ (__ ) __/ / | |/ / __/
|
||||
\____/ .___/\___/_/ /_/\____/_.___/____/\___/_/ |___/\___/
|
||||
/_/
|
6
ct/headers/openproject
Normal file
6
ct/headers/openproject
Normal file
@ -0,0 +1,6 @@
|
||||
____ ____ _ __
|
||||
/ __ \____ ___ ____ / __ \_________ (_)__ _____/ /_
|
||||
/ / / / __ \/ _ \/ __ \/ /_/ / ___/ __ \ / / _ \/ ___/ __/
|
||||
/ /_/ / /_/ / __/ / / / ____/ / / /_/ / / / __/ /__/ /_
|
||||
\____/ .___/\___/_/ /_/_/ /_/ \____/_/ /\___/\___/\__/
|
||||
/_/ /___/
|
6
ct/headers/paperless-ngx
Normal file
6
ct/headers/paperless-ngx
Normal file
@ -0,0 +1,6 @@
|
||||
____ __
|
||||
/ __ \____ _____ ___ _____/ /__ __________ ____ ____ __ __
|
||||
/ /_/ / __ `/ __ \/ _ \/ ___/ / _ \/ ___/ ___/_____/ __ \/ __ `/ |/_/
|
||||
/ ____/ /_/ / /_/ / __/ / / / __(__ |__ )_____/ / / / /_/ /> <
|
||||
/_/ \__,_/ .___/\___/_/ /_/\___/____/____/ /_/ /_/\__, /_/|_|
|
||||
/_/ /____/
|
6
ct/headers/pixelfed
Normal file
6
ct/headers/pixelfed
Normal file
@ -0,0 +1,6 @@
|
||||
____ _ ______ __
|
||||
/ __ \(_) _____ / / __/__ ____/ /
|
||||
/ /_/ / / |/_/ _ \/ / /_/ _ \/ __ /
|
||||
/ ____/ /> </ __/ / __/ __/ /_/ /
|
||||
/_/ /_/_/|_|\___/_/_/ \___/\__,_/
|
||||
|
6
ct/headers/polaris
Normal file
6
ct/headers/polaris
Normal file
@ -0,0 +1,6 @@
|
||||
____ __ _
|
||||
/ __ \____ / /___ ______(_)____
|
||||
/ /_/ / __ \/ / __ `/ ___/ / ___/
|
||||
/ ____/ /_/ / / /_/ / / / (__ )
|
||||
/_/ \____/_/\__,_/_/ /_/____/
|
||||
|