cleanup

parent 2d550fb4b5
commit 9b7fbdf7d7
@@ -1,60 +0,0 @@
name: Update date_created in JSON files

on:
  # This trigger covers newly opened PRs as well as updates to open PRs
  pull_request:
    types: [opened, synchronize]
  schedule:
    # This trigger fires four times a day to make sure the date stays current
    - cron: "0 0,6,12,18 * * *" # Runs every 6 hours
  workflow_dispatch: # Allows the workflow to be run manually

jobs:
  update-date:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Install yq
        run: |
          sudo apt-get update
          sudo apt-get install -y yq

      - name: Set the current date
        id: set_date
        run: echo "TODAY=$(date -u +%Y-%m-%d)" >> $GITHUB_ENV

      - name: Check for changes in PR
        run: |
          # Fetch the PR branch
          PR_BRANCH="refs/pull/${{ github.event.pull_request.number }}/merge"
          git fetch origin $PR_BRANCH

          # List all JSON files changed in this PR
          CHANGED_JSON_FILES=$(git diff --name-only origin/main...$PR_BRANCH | grep '.json')

          if [ -z "$CHANGED_JSON_FILES" ]; then
            echo "No JSON files changed in this PR."
            exit 0
          fi

          # Walk through every changed JSON file and update the date
          for file in $CHANGED_JSON_FILES; do
            echo "Updating date_created in $file"
            # Set the current date
            yq eval ".date_created = \"${{ env.TODAY }}\"" -i "$file"
            git add "$file"
          done

      - name: Commit and push changes
        run: |
          # Check whether there are changes and commit them
          git config user.name "json-updater-bot"
          git config user.email "github-actions[bot]@users.noreply.github.com"

          git commit -m "Update date_created to ${{ env.TODAY }}" || echo "No changes to commit"

          # Push back to the PR branch
          git push origin $PR_BRANCH
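Note on the yq step above: the whole update reduces to one in-place edit per file. A minimal local sketch, assuming the Go (mikefarah) yq, whose `yq eval ... -i` syntax the step uses (the `yq` package installed via apt may be a different implementation with other flags); the file name is illustrative:

    TODAY=$(date -u +%Y-%m-%d)
    printf '{ "name": "demo", "date_created": "2000-01-01" }\n' > example.json   # illustrative input
    yq eval ".date_created = \"$TODAY\"" -i example.json   # depending on the yq version, -o=json may be needed to keep JSON formatting
    cat example.json                                       # date_created now holds today's date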
60  .github/workflows/backup/shellcheck.yml  vendored
@@ -1,60 +0,0 @@
name: Shellcheck

on:
  push:
    branches:
      - main
  pull_request:
  workflow_dispatch:
  schedule:
    - cron: "5 1 * * *"

jobs:
  shellcheck:
    name: Shellcheck
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      - name: Get changed files
        id: changed-files
        uses: tj-actions/changed-files@v45
        with:
          files: |
            **.sh

      - name: Download ShellCheck
        shell: bash
        env:
          INPUT_VERSION: "v0.10.0"
        run: |
          set -euo pipefail
          if [[ "${{ runner.os }}" == "macOS" ]]; then
            osvariant="darwin"
          else
            osvariant="linux"
          fi

          baseurl="https://github.com/koalaman/shellcheck/releases/download"
          curl -Lso "${{ github.workspace }}/sc.tar.xz" \
            "${baseurl}/${INPUT_VERSION}/shellcheck-${INPUT_VERSION}.${osvariant}.x86_64.tar.xz"

          tar -xf "${{ github.workspace }}/sc.tar.xz" -C "${{ github.workspace }}"
          mv "${{ github.workspace }}/shellcheck-${INPUT_VERSION}/shellcheck" \
            "${{ github.workspace }}/shellcheck"

      - name: Verify ShellCheck binary
        run: |
          ls -l "${{ github.workspace }}/shellcheck"

      - name: Display ShellCheck version
        run: |
          "${{ github.workspace }}/shellcheck" --version

      - name: Run ShellCheck
        if: steps.changed-files.outputs.any_changed == 'true'
        env:
          ALL_CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }}
        run: |
          echo "${ALL_CHANGED_FILES}" | xargs "${{ github.workspace }}/shellcheck"
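Outside of CI, the final step above is roughly this one-liner, shown as a sketch that assumes shellcheck is on PATH and origin/main is the comparison base (the -r flag is an addition so nothing runs when no .sh files changed):

    git diff --name-only origin/main...HEAD | grep -E '\.sh$' | xargs -r shellcheck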
@@ -1,90 +0,0 @@
name: Auto Update JSON-Date

on:
  push:
    branches:
      - main
  workflow_dispatch:

jobs:
  update-json-dates:
    runs-on: ubuntu-latest

    permissions:
      contents: write
      pull-requests: write

    steps:
      - name: Generate a token
        id: generate-token
        uses: actions/create-github-app-token@v2
        with:
          app-id: ${{ vars.APP_ID }}
          private-key: ${{ secrets.APP_PRIVATE_KEY }}
          owner: community-scripts
          repositories: ProxmoxVED

      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Full history for proper detection

      - name: Set up Git
        run: |
          git config --global user.name "GitHub Actions"
          git config --global user.email "github-actions[bot]@users.noreply.github.com"

      - name: Find JSON files with incorrect date_created
        id: find_wrong_json
        run: |
          TODAY=$(date -u +"%Y-%m-%d")
          > incorrect_json_files.txt

          for FILE in json/*.json; do
            if [[ -f "$FILE" ]]; then
              DATE_IN_JSON=$(jq -r '.date_created' "$FILE" 2>/dev/null || echo "")

              if [[ "$DATE_IN_JSON" != "$TODAY" ]]; then
                echo "$FILE" >> incorrect_json_files.txt
              fi
            fi
          done

          if [[ -s incorrect_json_files.txt ]]; then
            echo "CHANGED=true" >> $GITHUB_ENV
          else
            echo "CHANGED=false" >> $GITHUB_ENV
          fi

      - name: Run update script
        if: env.CHANGED == 'true'
        run: |
          chmod +x .github/workflows/scripts/update-json.sh
          while read -r FILE; do
            .github/workflows/scripts/update-json.sh "$FILE"
          done < incorrect_json_files.txt

      - name: Commit and create PR if changes exist
        if: env.CHANGED == 'true'
        run: |
          git add json/*.json
          git commit -m "Auto-update date_created in incorrect JSON files"
          git checkout -b pr-fix-json-dates
          git push origin pr-fix-json-dates --force

          gh pr create --title "[core] Fix incorrect JSON date_created fields" \
            --body "This PR is auto-generated to fix incorrect \`date_created\` fields in JSON files." \
            --head pr-fix-json-dates \
            --base main \
            --label "automated pr"
        env:
          GH_TOKEN: ${{ steps.generate-token.outputs.token }}

      - name: Approve pull request
        if: env.CHANGED == 'true'
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          PR_NUMBER=$(gh pr list --head "pr-fix-json-dates" --json number --jq '.[].number')
          if [ -n "$PR_NUMBER" ]; then
            gh pr review $PR_NUMBER --approve
          fi
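The detection loop in "Find JSON files with incorrect date_created" can be reproduced locally. A sketch, assuming the same json/*.json layout:

    TODAY=$(date -u +%Y-%m-%d)
    for f in json/*.json; do
      # collect files whose date_created is not today's date
      [ "$(jq -r '.date_created // empty' "$f" 2>/dev/null)" = "$TODAY" ] || echo "$f"
    done > incorrect_json_files.txt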
@@ -1,133 +0,0 @@
|
||||
name: Validate script formatting
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request_target:
|
||||
paths:
|
||||
- "**/*.sh"
|
||||
- "**/*.func"
|
||||
|
||||
jobs:
|
||||
shfmt:
|
||||
name: Check changed files
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Get pull request information
|
||||
if: github.event_name == 'pull_request_target'
|
||||
uses: actions/github-script@v7
|
||||
id: pr
|
||||
with:
|
||||
script: |
|
||||
const { data: pullRequest } = await github.rest.pulls.get({
|
||||
...context.repo,
|
||||
pull_number: context.payload.pull_request.number,
|
||||
});
|
||||
return pullRequest;
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # Ensure the full history is fetched for accurate diffing
|
||||
ref: ${{ github.event_name == 'pull_request_target' && fromJSON(steps.pr.outputs.result).merge_commit_sha || '' }}
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
run: |
|
||||
if ${{ github.event_name == 'pull_request_target' }}; then
|
||||
echo "files=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ steps.pr.outputs.result && fromJSON(steps.pr.outputs.result).merge_commit_sha }} | grep -E '\.(sh|func)$' | xargs)" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | grep -E '\.(sh|func)$' | xargs)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Set up Go
|
||||
if: steps.changed-files.outputs.files != ''
|
||||
uses: actions/setup-go@v5
|
||||
|
||||
- name: Install shfmt
|
||||
if: steps.changed-files.outputs.files != ''
|
||||
run: |
|
||||
go install mvdan.cc/sh/v3/cmd/shfmt@latest
|
||||
echo "$GOPATH/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Run shfmt
|
||||
if: steps.changed-files.outputs.files != ''
|
||||
id: shfmt
|
||||
run: |
|
||||
set +e
|
||||
|
||||
|
||||
shfmt_output=$(shfmt -d ${{ steps.changed-files.outputs.files }})
|
||||
if [[ $? -eq 0 ]]; then
|
||||
exit 0
|
||||
else
|
||||
echo "diff=\"$(echo -n "$shfmt_output" | base64 -w 0)\"" >> $GITHUB_OUTPUT
|
||||
printf "%s" "$shfmt_output"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Post comment with results
|
||||
if: always() && steps.changed-files.outputs.files != '' && github.event_name == 'pull_request_target'
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const result = "${{ job.status }}" === "success" ? "success" : "failure";
|
||||
const diff = Buffer.from(
|
||||
${{ steps.shfmt.outputs.diff }},
|
||||
"base64",
|
||||
).toString();
|
||||
const issueNumber = context.payload.pull_request
|
||||
? context.payload.pull_request.number
|
||||
: null;
|
||||
const commentIdentifier = "validate-formatting";
|
||||
let newCommentBody = `<!-- ${commentIdentifier}-start -->\n### Script formatting\n\n`;
|
||||
|
||||
if (result === "failure") {
|
||||
newCommentBody +=
|
||||
`:x: We found issues in the formatting of the following changed files:\n\n\`\`\`diff\n${diff}\n\`\`\`\n`;
|
||||
} else {
|
||||
newCommentBody += `:rocket: All changed shell scripts are formatted correctly!\n`;
|
||||
}
|
||||
|
||||
newCommentBody += `\n\n<!-- ${commentIdentifier}-end -->`;
|
||||
|
||||
if (issueNumber) {
|
||||
const { data: comments } = await github.rest.issues.listComments({
|
||||
...context.repo,
|
||||
issue_number: issueNumber,
|
||||
});
|
||||
|
||||
const existingComment = comments.find(
|
||||
(comment) => comment.user.login === "github-actions[bot]",
|
||||
|
||||
);
|
||||
|
||||
if (existingComment) {
|
||||
if (existingComment.body.includes(commentIdentifier)) {
|
||||
const re = new RegExp(
|
||||
String.raw`<!-- ${commentIdentifier}-start -->[\s\S]*?<!-- ${commentIdentifier}-end -->`,
|
||||
"",
|
||||
);
|
||||
newCommentBody = existingComment.body.replace(re, newCommentBody);
|
||||
} else {
|
||||
newCommentBody = existingComment.body + "\n\n---\n\n" + newCommentBody;
|
||||
}
|
||||
|
||||
await github.rest.issues.updateComment({
|
||||
...context.repo,
|
||||
comment_id: existingComment.id,
|
||||
body: newCommentBody,
|
||||
});
|
||||
} else {
|
||||
await github.rest.issues.createComment({
|
||||
...context.repo,
|
||||
issue_number: issueNumber,
|
||||
body: newCommentBody,
|
||||
});
|
||||
}
|
||||
}
|
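The shfmt job above only reports differences; the two relevant invocations, as a local sketch (assumes shfmt is installed, e.g. via go install mvdan.cc/sh/v3/cmd/shfmt@latest; the file name is illustrative):

    shfmt -d ct/example.sh   # print a diff and exit non-zero when the formatting differs
    shfmt -w ct/example.sh   # rewrite the file in place with the canonical formatting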
234  .github/workflows/backup/validate-scripts.yml.bak  vendored
@@ -1,234 +0,0 @@
|
||||
name: Validate scripts
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request_target:
|
||||
paths:
|
||||
- "ct/*.sh"
|
||||
- "install/*.sh"
|
||||
|
||||
jobs:
|
||||
check-scripts:
|
||||
name: Check changed files
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Debug event payload
|
||||
run: |
|
||||
echo "Event name: ${{ github.event_name }}"
|
||||
echo "Payload: $(cat $GITHUB_EVENT_PATH)"
|
||||
|
||||
- name: Get pull request information
|
||||
if: github.event_name == 'pull_request_target'
|
||||
uses: actions/github-script@v7
|
||||
id: pr
|
||||
with:
|
||||
script: |
|
||||
const { data: pullRequest } = await github.rest.pulls.get({
|
||||
...context.repo,
|
||||
pull_number: context.payload.pull_request.number,
|
||||
});
|
||||
return pullRequest;
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.event_name == 'pull_request_target' && fromJSON(steps.pr.outputs.result).merge_commit_sha || '' }}
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" == "pull_request_target" ]; then
|
||||
echo "files=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ steps.pr.outputs.result && fromJSON(steps.pr.outputs.result).merge_commit_sha }} | grep -E '\.(sh|func)$' | xargs)" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | grep -E '\.(sh|func)$' | xargs)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Check build.func line
|
||||
if: always() && steps.changed-files.outputs.files != ''
|
||||
id: build-func
|
||||
run: |
|
||||
NON_COMPLIANT_FILES=""
|
||||
for FILE in ${{ steps.changed-files.outputs.files }}; do
|
||||
if [[ "$FILE" == ct/* ]] && [[ $(sed -n '2p' "$FILE") != "source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)" ]]; then
|
||||
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$NON_COMPLIANT_FILES" ]; then
|
||||
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
|
||||
echo "Build.func line missing or incorrect in files:"
|
||||
for FILE in $NON_COMPLIANT_FILES; do
|
||||
echo "$FILE"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Check executable permissions
|
||||
if: always() && steps.changed-files.outputs.files != ''
|
||||
id: check-executable
|
||||
run: |
|
||||
NON_COMPLIANT_FILES=""
|
||||
for FILE in ${{ steps.changed-files.outputs.files }}; do
|
||||
if [[ ! -x "$FILE" ]]; then
|
||||
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$NON_COMPLIANT_FILES" ]; then
|
||||
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
|
||||
echo "Files not executable:"
|
||||
for FILE in $NON_COMPLIANT_FILES; do
|
||||
echo "$FILE"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Check copyright
|
||||
if: always() && steps.changed-files.outputs.files != ''
|
||||
id: check-copyright
|
||||
run: |
|
||||
NON_COMPLIANT_FILES=""
|
||||
for FILE in ${{ steps.changed-files.outputs.files }}; do
|
||||
if ! sed -n '3p' "$FILE" | grep -qE "^# Copyright \(c\) [0-9]{4}(-[0-9]{4})? (tteck \| community-scripts ORG|community-scripts ORG|tteck)$"; then
|
||||
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$NON_COMPLIANT_FILES" ]; then
|
||||
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
|
||||
echo "Copyright header missing or not on line 3 in files:"
|
||||
for FILE in $NON_COMPLIANT_FILES; do
|
||||
echo "$FILE"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Check author
|
||||
if: always() && steps.changed-files.outputs.files != ''
|
||||
id: check-author
|
||||
run: |
|
||||
NON_COMPLIANT_FILES=""
|
||||
for FILE in ${{ steps.changed-files.outputs.files }}; do
|
||||
if ! sed -n '4p' "$FILE" | grep -qE "^# Author: .+"; then
|
||||
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$NON_COMPLIANT_FILES" ]; then
|
||||
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
|
||||
echo "Author header missing or invalid on line 4 in files:"
|
||||
for FILE in $NON_COMPLIANT_FILES; do
|
||||
echo "$FILE"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Check license
|
||||
if: always() && steps.changed-files.outputs.files != ''
|
||||
id: check-license
|
||||
run: |
|
||||
NON_COMPLIANT_FILES=""
|
||||
for FILE in ${{ steps.changed-files.outputs.files }}; do
|
||||
if [[ "$(sed -n '5p' "$FILE")" != "# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE" ]]; then
|
||||
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$NON_COMPLIANT_FILES" ]; then
|
||||
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
|
||||
echo "License header missing or not on line 5 in files:"
|
||||
for FILE in $NON_COMPLIANT_FILES; do
|
||||
echo "$FILE"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Check source
|
||||
if: always() && steps.changed-files.outputs.files != ''
|
||||
id: check-source
|
||||
run: |
|
||||
NON_COMPLIANT_FILES=""
|
||||
for FILE in ${{ steps.changed-files.outputs.files }}; do
|
||||
if ! sed -n '6p' "$FILE" | grep -qE "^# Source: .+"; then
|
||||
NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$NON_COMPLIANT_FILES" ]; then
|
||||
echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
|
||||
echo "Source header missing or not on line 6 in files:"
|
||||
for FILE in $NON_COMPLIANT_FILES; do
|
||||
echo "$FILE"
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Post results and comment
|
||||
if: always() && steps.changed-files.outputs.files != '' && github.event_name == 'pull_request_target'
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const result = '${{ job.status }}' === 'success' ? 'success' : 'failure';
|
||||
const nonCompliantFiles = {
|
||||
'Invalid build.func source': "${{ steps.build-func.outputs.files || '' }}",
|
||||
'Not executable': "${{ steps.check-executable.outputs.files || '' }}",
|
||||
'Copyright header line missing or invalid': "${{ steps.check-copyright.outputs.files || '' }}",
|
||||
'Author header line missing or invalid': "${{ steps.check-author.outputs.files || '' }}",
|
||||
'License header line missing or invalid': "${{ steps.check-license.outputs.files || '' }}",
|
||||
'Source header line missing or invalid': "${{ steps.check-source.outputs.files || '' }}"
|
||||
};
|
||||
|
||||
const issueNumber = context.payload.pull_request ? context.payload.pull_request.number : null;
|
||||
const commentIdentifier = 'validate-scripts';
|
||||
let newCommentBody = `<!-- ${commentIdentifier}-start -->\n### Script validation\n\n`;
|
||||
|
||||
if (result === 'failure') {
|
||||
newCommentBody += ':x: We found issues in the following changed files:\n\n';
|
||||
for (const [check, files] of Object.entries(nonCompliantFiles)) {
|
||||
if (files) {
|
||||
newCommentBody += `**${check}:**\n`;
|
||||
files.trim().split(' ').forEach(file => {
|
||||
newCommentBody += `- ${file}: ${check}\n`;
|
||||
});
|
||||
newCommentBody += `\n`;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
newCommentBody += `:rocket: All changed shell scripts passed validation!\n`;
|
||||
}
|
||||
|
||||
newCommentBody += `\n\n<!-- ${commentIdentifier}-end -->`;
|
||||
|
||||
if (issueNumber) {
|
||||
const { data: comments } = await github.rest.issues.listComments({
|
||||
...context.repo,
|
||||
issue_number: issueNumber
|
||||
});
|
||||
|
||||
const existingComment = comments.find(comment =>
|
||||
comment.body.includes(`<!-- ${commentIdentifier}-start -->`) &&
|
||||
comment.user.login === 'github-actions[bot]'
|
||||
);
|
||||
|
||||
if (existingComment) {
|
||||
const re = new RegExp(String.raw`<!-- ${commentIdentifier}-start -->[\\s\\S]*?<!-- ${commentIdentifier}-end -->`, "m");
|
||||
newCommentBody = existingComment.body.replace(re, newCommentBody);
|
||||
|
||||
await github.rest.issues.updateComment({
|
||||
...context.repo,
|
||||
comment_id: existingComment.id,
|
||||
body: newCommentBody
|
||||
});
|
||||
} else {
|
||||
await github.rest.issues.createComment({
|
||||
...context.repo,
|
||||
issue_number: issueNumber,
|
||||
body: newCommentBody
|
||||
});
|
||||
}
|
||||
}
|
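Each check in the job above pins one specific header line of a script. A condensed local sketch of the same probes, with an illustrative file name:

    FILE=ct/example.sh
    sed -n '2p' "$FILE"                                                     # must be the build.func source line
    sed -n '3p' "$FILE" | grep -qE '^# Copyright \(c\) [0-9]{4}' && echo "copyright: ok"
    sed -n '4p' "$FILE" | grep -qE '^# Author: .+' && echo "author: ok"
    sed -n '6p' "$FILE" | grep -qE '^# Source: .+' && echo "source: ok"
    [ -x "$FILE" ] && echo "executable: ok"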
286  .github/workflows/changelog-pr.yaml  vendored
@@ -1,286 +0,0 @@
|
||||
name: Create Changelog Pull Request
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["main"]
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
update-changelog-pull-request:
|
||||
if: github.repository == 'community-scripts/ProxmoxVED'
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CONFIG_PATH: .github/changelog-pr-config.json
|
||||
BRANCH_NAME: github-action-update-changelog
|
||||
AUTOMATED_PR_LABEL: "automated pr"
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Generate a token for PR creation
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@v2
|
||||
with:
|
||||
app-id: ${{ vars.APP_ID }}
|
||||
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||
owner: community-scripts
|
||||
repositories: ProxmoxVED
|
||||
|
||||
- name: Generate a token for PR approval and merge
|
||||
id: generate-token-merge
|
||||
uses: actions/create-github-app-token@v2
|
||||
with:
|
||||
app-id: ${{ secrets.APP_ID_APPROVE_AND_MERGE }}
|
||||
private-key: ${{ secrets.APP_KEY_APPROVE_AND_MERGE }}
|
||||
owner: community-scripts
|
||||
repositories: ProxmoxVED
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get latest dates in changelog
|
||||
run: |
|
||||
DATES=$(grep -E '^## [0-9]{4}-[0-9]{2}-[0-9]{2}' CHANGELOG.md | head -n 2 | awk '{print $2}')
|
||||
|
||||
LATEST_DATE=$(echo "$DATES" | sed -n '1p')
|
||||
SECOND_LATEST_DATE=$(echo "$DATES" | sed -n '2p')
|
||||
TODAY=$(date -u +%Y-%m-%d)
|
||||
|
||||
echo "TODAY=$TODAY" >> $GITHUB_ENV
|
||||
if [[ "$LATEST_DATE" == "$TODAY" ]]; then
|
||||
echo "LATEST_DATE=$SECOND_LATEST_DATE" >> $GITHUB_ENV
|
||||
else
|
||||
echo "LATEST_DATE=$LATEST_DATE" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Get categorized pull requests
|
||||
id: get-categorized-prs
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
async function main() {
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
|
||||
const configPath = path.resolve(process.env.CONFIG_PATH);
|
||||
const fileContent = await fs.readFile(configPath, 'utf-8');
|
||||
const changelogConfig = JSON.parse(fileContent);
|
||||
|
||||
const categorizedPRs = changelogConfig.map(obj => ({
|
||||
...obj,
|
||||
notes: [],
|
||||
subCategories: obj.subCategories ?? (
|
||||
obj.labels.includes("update script") ? [
|
||||
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
|
||||
{ title: "✨ New Features", labels: ["feature"], notes: [] },
|
||||
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
|
||||
{ title: "🔧 Refactor", labels: ["refactor"], notes: [] },
|
||||
] :
|
||||
obj.labels.includes("maintenance") ? [
|
||||
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
|
||||
{ title: "✨ New Features", labels: ["feature"], notes: [] },
|
||||
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
|
||||
{ title: "📡 API", labels: ["api"], notes: [] },
|
||||
{ title: "Github", labels: ["github"], notes: [] },
|
||||
{ title: "📝 Documentation", labels: ["documentation"], notes: [] },
|
||||
{ title: "🔧 Refactor", labels: ["refactor"], notes: [] }
|
||||
] :
|
||||
obj.labels.includes("website") ? [
|
||||
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
|
||||
{ title: "✨ New Features", labels: ["feature"], notes: [] },
|
||||
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
|
||||
{ title: "Script Information", labels: ["json"], notes: [] }
|
||||
] : []
|
||||
)
|
||||
}));
|
||||
|
||||
const latestDateInChangelog = new Date(process.env.LATEST_DATE);
|
||||
latestDateInChangelog.setUTCHours(23, 59, 59, 999);
|
||||
|
||||
const { data: pulls } = await github.rest.pulls.list({
|
||||
owner: context.repo.owner,
|
||||
repo: "ProxmoxVE",
|
||||
base: "main",
|
||||
state: "closed",
|
||||
sort: "updated",
|
||||
direction: "desc",
|
||||
per_page: 100,
|
||||
});
|
||||
|
||||
const filteredPRs = pulls.filter(pr =>
|
||||
pr.merged_at &&
|
||||
new Date(pr.merged_at) > latestDateInChangelog &&
|
||||
!pr.labels.some(label =>
|
||||
["invalid", "wontdo", process.env.AUTOMATED_PR_LABEL].includes(label.name.toLowerCase())
|
||||
)
|
||||
);
|
||||
|
||||
for (const pr of filteredPRs) {
|
||||
const prLabels = pr.labels.map(label => label.name.toLowerCase());
|
||||
if (pr.user.login.includes("push-app-to-main[bot]")) {
|
||||
|
||||
const scriptName = pr.title;
|
||||
try {
|
||||
const { data: relatedIssues } = await github.rest.issues.listForRepo({
|
||||
owner: context.repo.owner,
|
||||
repo: "ProxmoxVED",
|
||||
state: "all",
|
||||
labels: ["Started Migration To ProxmoxVE"]
|
||||
});
|
||||
|
||||
const matchingIssue = relatedIssues.find(issue =>
|
||||
issue.title.toLowerCase().includes(scriptName.toLowerCase())
|
||||
);
|
||||
|
||||
if (matchingIssue) {
|
||||
const issueAuthor = matchingIssue.user.login;
|
||||
const issueAuthorUrl = `https://github.com/${issueAuthor}`;
|
||||
prNote = `- ${pr.title} [@${issueAuthor}](${issueAuthorUrl}) ([#${pr.number}](${pr.html_url}))`;
|
||||
}
|
||||
else {
|
||||
prNote = `- ${pr.title} ([#${pr.number}](${pr.html_url}))`;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Error fetching related issues: ${error}`);
|
||||
prNote = `- ${pr.title} ([#${pr.number}](${pr.html_url}))`;
|
||||
}
|
||||
}else{
|
||||
prNote = `- ${pr.title} [@${pr.user.login}](https://github.com/${pr.user.login}) ([#${pr.number}](${pr.html_url}))`;
|
||||
}
|
||||
|
||||
|
||||
if (prLabels.includes("new script")) {
|
||||
const newScriptCategory = categorizedPRs.find(category =>
|
||||
category.title === "New Scripts" || category.labels.includes("new script"));
|
||||
if (newScriptCategory) {
|
||||
newScriptCategory.notes.push(prNote);
|
||||
}
|
||||
} else {
|
||||
|
||||
let categorized = false;
|
||||
const priorityCategories = categorizedPRs.slice();
|
||||
for (const category of priorityCategories) {
|
||||
if (categorized) break;
|
||||
if (category.labels.some(label => prLabels.includes(label))) {
|
||||
if (category.subCategories && category.subCategories.length > 0) {
|
||||
const subCategory = category.subCategories.find(sub =>
|
||||
sub.labels.some(label => prLabels.includes(label))
|
||||
);
|
||||
|
||||
if (subCategory) {
|
||||
subCategory.notes.push(prNote);
|
||||
} else {
|
||||
category.notes.push(prNote);
|
||||
}
|
||||
} else {
|
||||
category.notes.push(prNote);
|
||||
}
|
||||
categorized = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return categorizedPRs;
|
||||
}
|
||||
|
||||
return await main();
|
||||
|
||||
- name: Update CHANGELOG.md
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
|
||||
const today = process.env.TODAY;
|
||||
const latestDateInChangelog = process.env.LATEST_DATE;
|
||||
const changelogPath = path.resolve('CHANGELOG.md');
|
||||
const categorizedPRs = ${{ steps.get-categorized-prs.outputs.result }};
|
||||
|
||||
let newReleaseNotes = `## ${today}\n\n`;
|
||||
for (const { title, notes, subCategories } of categorizedPRs) {
|
||||
const hasSubcategories = subCategories && subCategories.length > 0;
|
||||
const hasMainNotes = notes.length > 0;
|
||||
const hasSubNotes = hasSubcategories && subCategories.some(sub => sub.notes && sub.notes.length > 0);
|
||||
|
||||
if (hasMainNotes || hasSubNotes) {
|
||||
newReleaseNotes += `### ${title}\n\n`;
|
||||
}
|
||||
|
||||
if (hasMainNotes) {
|
||||
newReleaseNotes += ` ${notes.join("\n")}\n\n`;
|
||||
}
|
||||
if (hasSubcategories) {
|
||||
for (const { title: subTitle, notes: subNotes } of subCategories) {
|
||||
if (subNotes && subNotes.length > 0) {
|
||||
newReleaseNotes += ` - #### ${subTitle}\n\n`;
|
||||
newReleaseNotes += ` ${subNotes.join("\n ")}\n\n`;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
const changelogContent = await fs.readFile(changelogPath, 'utf-8');
|
||||
const changelogIncludesTodaysReleaseNotes = changelogContent.includes(`\n## ${today}`);
|
||||
|
||||
const regex = changelogIncludesTodaysReleaseNotes
|
||||
? new RegExp(`## ${today}.*(?=## ${latestDateInChangelog})`, "gs")
|
||||
: new RegExp(`(?=## ${latestDateInChangelog})`, "gs");
|
||||
|
||||
const newChangelogContent = changelogContent.replace(regex, newReleaseNotes);
|
||||
await fs.writeFile(changelogPath, newChangelogContent);
|
||||
|
||||
- name: Check for changes
|
||||
id: verify-diff
|
||||
run: |
|
||||
git diff --quiet . || echo "changed=true" >> $GITHUB_ENV
|
||||
|
||||
- name: Commit and push changes
|
||||
if: env.changed == 'true'
|
||||
run: |
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git add CHANGELOG.md
|
||||
git commit -m "Update CHANGELOG.md"
|
||||
git checkout -b $BRANCH_NAME || git checkout $BRANCH_NAME
|
||||
git push origin $BRANCH_NAME --force
|
||||
|
||||
- name: Create pull request if not exists
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
|
||||
run: |
|
||||
PR_EXISTS=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
|
||||
if [ -z "$PR_EXISTS" ]; then
|
||||
gh pr create --title "[Github Action] Update CHANGELOG.md" \
|
||||
--body "This PR is auto-generated by a Github Action to update the CHANGELOG.md file." \
|
||||
--head $BRANCH_NAME \
|
||||
--base main \
|
||||
--label "$AUTOMATED_PR_LABEL"
|
||||
fi
|
||||
|
||||
- name: Approve pull request
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
|
||||
if [ -n "$PR_NUMBER" ]; then
|
||||
gh pr review $PR_NUMBER --approve
|
||||
fi
|
||||
|
||||
- name: Approve pull request and merge
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.generate-token-merge.outputs.token }}
|
||||
run: |
|
||||
git config --global user.name "github-actions-automerge[bot]"
|
||||
git config --global user.email "github-actions-automerge[bot]@users.noreply.github.com"
|
||||
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
|
||||
if [ -n "$PR_NUMBER" ]; then
|
||||
gh pr review $PR_NUMBER --approve
|
||||
gh pr merge $PR_NUMBER --squash --admin
|
||||
fi
|
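The "Get latest dates in changelog" step only needs the two most recent date headings; the same extraction runs stand-alone:

    DATES=$(grep -E '^## [0-9]{4}-[0-9]{2}-[0-9]{2}' CHANGELOG.md | head -n 2 | awk '{print $2}')
    echo "$DATES" | sed -n '1p'   # most recent date heading
    echo "$DATES" | sed -n '2p'   # the one before it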
164  .github/workflows/close-discussion.yaml  vendored
@@ -1,164 +0,0 @@
|
||||
name: Close Discussion on PR Merge
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
discussions: write
|
||||
|
||||
jobs:
|
||||
close-discussion:
|
||||
if: github.repository == 'community-scripts/ProxmoxVED'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set Up Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
|
||||
- name: Install Dependencies
|
||||
run: npm install zx @octokit/graphql
|
||||
|
||||
- name: Close Discussion
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_SHA: ${{ github.sha }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
run: |
|
||||
npx zx << 'EOF'
|
||||
import { graphql } from "@octokit/graphql";
|
||||
|
||||
(async function () {
|
||||
try {
|
||||
const token = process.env.GITHUB_TOKEN;
|
||||
const commitSha = process.env.GITHUB_SHA;
|
||||
const [owner, repo] = process.env.GITHUB_REPOSITORY.split("/");
|
||||
|
||||
if (!token || !commitSha || !owner || !repo) {
|
||||
console.log("Missing required environment variables.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const graphqlWithAuth = graphql.defaults({
|
||||
headers: { authorization: `Bearer ${token}` },
|
||||
});
|
||||
|
||||
// Find PR from commit SHA
|
||||
const searchQuery = `
|
||||
query($owner: String!, $repo: String!, $sha: GitObjectID!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
object(oid: $sha) {
|
||||
... on Commit {
|
||||
associatedPullRequests(first: 1) {
|
||||
nodes {
|
||||
number
|
||||
body
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const prResult = await graphqlWithAuth(searchQuery, {
|
||||
owner,
|
||||
repo,
|
||||
sha: commitSha,
|
||||
});
|
||||
|
||||
const pr = prResult.repository.object.associatedPullRequests.nodes[0];
|
||||
if (!pr) {
|
||||
console.log("No PR found for this commit.");
|
||||
return;
|
||||
}
|
||||
|
||||
const prNumber = pr.number;
|
||||
const prBody = pr.body;
|
||||
|
||||
const match = prBody.match(/#(\d+)/);
|
||||
if (!match) {
|
||||
console.log("No discussion ID found in PR body.");
|
||||
return;
|
||||
}
|
||||
|
||||
const discussionNumber = match[1];
|
||||
console.log(`Extracted Discussion Number: ${discussionNumber}`);
|
||||
|
||||
// Fetch GraphQL discussion ID
|
||||
const discussionQuery = `
|
||||
query($owner: String!, $repo: String!, $number: Int!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
discussion(number: $number) {
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
              // Fetch the discussion's GraphQL node ID (declared outside the try
              // block so the comment mutation further down can still reference it)
              let discussionQLId;
              try {
                const discussionResponse = await graphqlWithAuth(discussionQuery, {
                  owner,
                  repo,
                  number: parseInt(discussionNumber, 10),
                });

                discussionQLId = discussionResponse.repository.discussion.id;
                if (!discussionQLId) {
                  console.log("Failed to fetch discussion GraphQL ID.");
                  return;
                }
              } catch (error) {
                console.error("Discussion not found or error occurred while fetching discussion:", error);
                return;
              }
|
||||
|
||||
// Post comment
|
||||
const commentMutation = `
|
||||
mutation($discussionId: ID!, $body: String!) {
|
||||
addDiscussionComment(input: { discussionId: $discussionId, body: $body }) {
|
||||
comment { id body }
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const commentResponse = await graphqlWithAuth(commentMutation, {
|
||||
discussionId: discussionQLId,
|
||||
body: `Merged with PR #${prNumber}`,
|
||||
});
|
||||
|
||||
const commentId = commentResponse.addDiscussionComment.comment.id;
|
||||
if (!commentId) {
|
||||
console.log("Failed to post the comment.");
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Comment Posted Successfully! Comment ID: ${commentId}`);
|
||||
|
||||
// Mark comment as answer
|
||||
const markAnswerMutation = `
|
||||
mutation($id: ID!) {
|
||||
markDiscussionCommentAsAnswer(input: { id: $id }) {
|
||||
discussion { id title }
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
await graphqlWithAuth(markAnswerMutation, { id: commentId });
|
||||
|
||||
console.log("Comment marked as answer successfully!");
|
||||
|
||||
} catch (error) {
|
||||
console.error("Error:", error);
|
||||
process.exit(1);
|
||||
}
|
||||
})();
|
||||
EOF
|
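The discussion to close is recovered from the PR body by the first "#<number>" match (prBody.match(/#(\d+)/) above). A shell equivalent with an illustrative body:

    PR_BODY='Fixes the installer, closes discussion #123'   # illustrative
    DISCUSSION_NUMBER=$(echo "$PR_BODY" | grep -oE '#[0-9]+' | head -n 1 | tr -d '#')
    echo "$DISCUSSION_NUMBER"   # -> 123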
53  .github/workflows/close-ttek-issue.yaml  vendored
@@ -1,53 +0,0 @@
name: Auto-Close tteck Issues

on:
  issues:
    types: [opened]

jobs:
  close_tteck_issues:
    if: github.repository == 'community-scripts/ProxmoxVED'
    runs-on: ubuntu-latest
    steps:
      - name: Auto-close if tteck script detected
        uses: actions/github-script@v7
        with:
          script: |
            const issue = context.payload.issue;
            const content = `${issue.title}\n${issue.body}`;
            const issueNumber = issue.number;

            // Check for tteck script mention
            if (content.includes("tteck") || content.includes("tteck/Proxmox")) {
              const message = `Hello, it looks like you are referencing the **old tteck repo**.

              This repository is no longer used for active scripts.
              **Please update your bookmarks** and use: [https://helper-scripts.com](https://helper-scripts.com)

              Also make sure your Bash command starts with:
              \`\`\`bash
              bash <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/ct/...)
              \`\`\`

              This issue is being closed automatically.`;

              await github.rest.issues.createComment({
                ...context.repo,
                issue_number: issueNumber,
                body: message
              });

              // Optionally apply a label like "not planned"
              await github.rest.issues.addLabels({
                ...context.repo,
                issue_number: issueNumber,
                labels: ["not planned"]
              });

              // Close the issue
              await github.rest.issues.update({
                ...context.repo,
                issue_number: issueNumber,
                state: "closed"
              });
            }
228  .github/workflows/live/changelog-pr.yml  vendored
@@ -1,228 +0,0 @@
|
||||
name: Create Changelog Pull Request
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["main"]
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
update-changelog-pull-request:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CONFIG_PATH: .github/changelog-pr-config.json
|
||||
BRANCH_NAME: github-action-update-changelog
|
||||
AUTOMATED_PR_LABEL: "automated pr"
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@v2
|
||||
with:
|
||||
app-id: ${{ vars.APP_ID }}
|
||||
private-key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||
owner: community-scripts
|
||||
repositories: ProxmoxVED
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get latest dates in changelog
|
||||
run: |
|
||||
DATES=$(grep -E '^## [0-9]{4}-[0-9]{2}-[0-9]{2}' CHANGELOG.md | head -n 2 | awk '{print $2}')
|
||||
|
||||
LATEST_DATE=$(echo "$DATES" | sed -n '1p')
|
||||
SECOND_LATEST_DATE=$(echo "$DATES" | sed -n '2p')
|
||||
TODAY=$(date -u +%Y-%m-%d)
|
||||
|
||||
echo "TODAY=$TODAY" >> $GITHUB_ENV
|
||||
if [[ "$LATEST_DATE" == "$TODAY" ]]; then
|
||||
echo "LATEST_DATE=$SECOND_LATEST_DATE" >> $GITHUB_ENV
|
||||
else
|
||||
echo "LATEST_DATE=$LATEST_DATE" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Get categorized pull requests
|
||||
id: get-categorized-prs
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
|
||||
const configPath = path.resolve(process.env.CONFIG_PATH);
|
||||
const fileContent = await fs.readFile(configPath, 'utf-8');
|
||||
const changelogConfig = JSON.parse(fileContent);
|
||||
|
||||
const categorizedPRs = changelogConfig.map(obj => ({
|
||||
...obj,
|
||||
notes: [],
|
||||
subCategories: obj.subCategories ?? (
|
||||
obj.labels.includes("update script") ? [
|
||||
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
|
||||
{ title: "✨ New Features", labels: ["feature"], notes: [] },
|
||||
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] }
|
||||
] :
|
||||
obj.labels.includes("maintenance") ? [
|
||||
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
|
||||
{ title: "✨ New Features", labels: ["feature"], notes: [] },
|
||||
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
|
||||
{ title: "📡 API", labels: ["api"], notes: [] },
|
||||
{ title: "Github", labels: ["github"], notes: [] }
|
||||
] :
|
||||
obj.labels.includes("website") ? [
|
||||
{ title: "🐞 Bug Fixes", labels: ["bugfix"], notes: [] },
|
||||
{ title: "✨ New Features", labels: ["feature"], notes: [] },
|
||||
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
|
||||
{ title: "Script Information", labels: ["json"], notes: [] }
|
||||
] : []
|
||||
)
|
||||
}));
|
||||
|
||||
const latestDateInChangelog = new Date(process.env.LATEST_DATE);
|
||||
latestDateInChangelog.setUTCHours(23, 59, 59, 999);
|
||||
|
||||
const { data: pulls } = await github.rest.pulls.list({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
base: "main",
|
||||
state: "closed",
|
||||
sort: "updated",
|
||||
direction: "desc",
|
||||
per_page: 100,
|
||||
});
|
||||
|
||||
pulls.filter(pr =>
|
||||
pr.merged_at &&
|
||||
new Date(pr.merged_at) > latestDateInChangelog &&
|
||||
!pr.labels.some(label =>
|
||||
["invalid", "wontdo", process.env.AUTOMATED_PR_LABEL].includes(label.name.toLowerCase())
|
||||
)
|
||||
).forEach(pr => {
|
||||
|
||||
const prLabels = pr.labels.map(label => label.name.toLowerCase());
|
||||
const prNote = `- ${pr.title} [@${pr.user.login}](https://github.com/${pr.user.login}) ([#${pr.number}](${pr.html_url}))`;
|
||||
|
||||
const updateScriptsCategory = categorizedPRs.find(category =>
|
||||
category.labels.some(label => prLabels.includes(label))
|
||||
);
|
||||
|
||||
if (updateScriptsCategory) {
|
||||
|
||||
const subCategory = updateScriptsCategory.subCategories.find(sub =>
|
||||
sub.labels.some(label => prLabels.includes(label))
|
||||
);
|
||||
|
||||
if (subCategory) {
|
||||
subCategory.notes.push(prNote);
|
||||
} else {
|
||||
updateScriptsCategory.notes.push(prNote);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
console.log(JSON.stringify(categorizedPRs, null, 2));
|
||||
|
||||
return categorizedPRs;
|
||||
|
||||
|
||||
- name: Update CHANGELOG.md
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
|
||||
const today = process.env.TODAY;
|
||||
const latestDateInChangelog = process.env.LATEST_DATE;
|
||||
const changelogPath = path.resolve('CHANGELOG.md');
|
||||
const categorizedPRs = ${{ steps.get-categorized-prs.outputs.result }};
|
||||
|
||||
console.log(JSON.stringify(categorizedPRs, null, 2));
|
||||
|
||||
|
||||
let newReleaseNotes = `## ${today}\n\n`;
|
||||
for (const { title, notes, subCategories } of categorizedPRs) {
|
||||
const hasSubcategories = subCategories && subCategories.length > 0;
|
||||
const hasMainNotes = notes.length > 0;
|
||||
const hasSubNotes = hasSubcategories && subCategories.some(sub => sub.notes && sub.notes.length > 0);
|
||||
|
||||
|
||||
if (hasMainNotes || hasSubNotes) {
|
||||
newReleaseNotes += `### ${title}\n\n`;
|
||||
}
|
||||
|
||||
if (hasMainNotes) {
|
||||
newReleaseNotes += ` ${notes.join("\n")}\n\n`;
|
||||
}
|
||||
if (hasSubcategories) {
|
||||
for (const { title: subTitle, notes: subNotes } of subCategories) {
|
||||
if (subNotes && subNotes.length > 0) {
|
||||
newReleaseNotes += ` - #### ${subTitle}\n\n`;
|
||||
newReleaseNotes += ` ${subNotes.join("\n ")}\n\n`;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const changelogContent = await fs.readFile(changelogPath, 'utf-8');
|
||||
const changelogIncludesTodaysReleaseNotes = changelogContent.includes(`\n## ${today}`);
|
||||
|
||||
const regex = changelogIncludesTodaysReleaseNotes
|
||||
? new RegExp(`## ${today}.*(?=## ${latestDateInChangelog})`, "gs")
|
||||
: new RegExp(`(?=## ${latestDateInChangelog})`, "gs");
|
||||
|
||||
const newChangelogContent = changelogContent.replace(regex, newReleaseNotes);
|
||||
await fs.writeFile(changelogPath, newChangelogContent);
|
||||
|
||||
- name: Check for changes
|
||||
id: verify-diff
|
||||
run: |
|
||||
git diff --quiet . || echo "changed=true" >> $GITHUB_ENV
|
||||
|
||||
- name: Commit and push changes
|
||||
if: env.changed == 'true'
|
||||
run: |
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git add CHANGELOG.md
|
||||
git commit -m "Update CHANGELOG.md"
|
||||
git checkout -b $BRANCH_NAME || git checkout $BRANCH_NAME
|
||||
git push origin $BRANCH_NAME --force
|
||||
|
||||
- name: Create pull request if not exists
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
|
||||
run: |
|
||||
PR_EXISTS=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
|
||||
if [ -z "$PR_EXISTS" ]; then
|
||||
gh pr create --title "[Github Action] Update CHANGELOG.md" \
|
||||
--body "This PR is auto-generated by a Github Action to update the CHANGELOG.md file." \
|
||||
--head $BRANCH_NAME \
|
||||
--base main \
|
||||
--label "$AUTOMATED_PR_LABEL"
|
||||
fi
|
||||
|
||||
- name: Approve pull request
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
|
||||
if [ -n "$PR_NUMBER" ]; then
|
||||
gh pr review $PR_NUMBER --approve
|
||||
fi
|
||||
|
||||
- name: Re-approve pull request after update
|
||||
if: env.changed == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
PR_NUMBER=$(gh pr list --head "${BRANCH_NAME}" --json number --jq '.[].number')
|
||||
if [ -n "$PR_NUMBER" ]; then
|
||||
gh pr review $PR_NUMBER --approve
|
||||
fi
|
122  .github/workflows/live/close-discussion.yml  vendored
@@ -1,122 +0,0 @@
|
||||
name: Close Discussion on PR Merge
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [closed]
|
||||
|
||||
jobs:
|
||||
close-discussion:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set Up Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Install Dependencies
|
||||
run: npm install zx @octokit/graphql
|
||||
|
||||
- name: Close Discussion
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
PR_BODY: ${{ github.event.pull_request.body }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
REPO_OWNER: ${{ github.repository_owner }}
|
||||
REPO_NAME: ${{ github.event.repository.name }}
|
||||
run: |
|
||||
npx zx << 'EOF'
|
||||
import { graphql } from "@octokit/graphql";
|
||||
(async function() {
|
||||
try {
|
||||
const token = process.env.GITHUB_TOKEN;
|
||||
const prBody = process.env.PR_BODY;
|
||||
const prNumber = process.env.PR_NUMBER;
|
||||
const owner = process.env.REPO_OWNER;
|
||||
const repo = process.env.REPO_NAME;
|
||||
|
||||
if (!token || !prBody || !prNumber || !owner || !repo) {
|
||||
console.log("Missing required environment variables.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const match = prBody.match(/#(\d+)/);
|
||||
if (!match) {
|
||||
console.log("No discussion ID found in PR body.");
|
||||
return;
|
||||
}
|
||||
const discussionNumber = match[1];
|
||||
|
||||
console.log(`Extracted Discussion Number: ${discussionNumber}`);
|
||||
console.log(`PR Number: ${prNumber}`);
|
||||
console.log(`Repository: ${owner}/${repo}`);
|
||||
|
||||
const graphqlWithAuth = graphql.defaults({
|
||||
headers: { authorization: `Bearer ${token}` },
|
||||
});
|
||||
|
||||
const discussionQuery = `
|
||||
query($owner: String!, $repo: String!, $number: Int!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
discussion(number: $number) {
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const discussionResponse = await graphqlWithAuth(discussionQuery, {
|
||||
owner,
|
||||
repo,
|
||||
number: parseInt(discussionNumber, 10),
|
||||
});
|
||||
|
||||
const discussionQLId = discussionResponse.repository.discussion.id;
|
||||
if (!discussionQLId) {
|
||||
console.log("Failed to fetch discussion GraphQL ID.");
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`GraphQL Discussion ID: ${discussionQLId}`);
|
||||
|
||||
const commentMutation = `
|
||||
mutation($discussionId: ID!, $body: String!) {
|
||||
addDiscussionComment(input: { discussionId: $discussionId, body: $body }) {
|
||||
comment { id body }
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
const commentResponse = await graphqlWithAuth(commentMutation, {
|
||||
discussionId: discussionQLId,
|
||||
body: `Merged with PR #${prNumber}`,
|
||||
});
|
||||
|
||||
const commentId = commentResponse.addDiscussionComment.comment.id;
|
||||
if (!commentId) {
|
||||
console.log("Failed to post the comment.");
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Comment Posted Successfully! Comment ID: ${commentId}`);
|
||||
|
||||
const markAnswerMutation = `
|
||||
mutation($id: ID!) {
|
||||
markDiscussionCommentAsAnswer(input: { id: $id }) {
|
||||
discussion { id title }
|
||||
}
|
||||
}
|
||||
`;
|
||||
|
||||
await graphqlWithAuth(markAnswerMutation, { id: commentId });
|
||||
|
||||
console.log("Comment marked as answer successfully!");
|
||||
|
||||
} catch (error) {
|
||||
console.error("Error:", error);
|
||||
return;
|
||||
}
|
||||
})();
|
||||
EOF
|
@@ -1,37 +0,0 @@
name: Build and Publish Docker Image

on:
  push:
    branches:
      - main
    paths:
      - '.github/runner/docker/**'
  schedule:
    - cron: '0 0 * * *'

jobs:
  build:
    runs-on: ubuntu-latest # To ensure it always builds, we use the GitHub runner with all the right tooling

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Log in to GHCR
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build Docker image
        run: |
          repo_name=${{ github.repository }} # Get repository name
          repo_name_lower=$(echo $repo_name | tr '[:upper:]' '[:lower:]') # Convert to lowercase
          docker build -t ghcr.io/$repo_name_lower/gh-runner-self:latest -f .github/runner/docker/gh-runner-self.dockerfile .

      - name: Push Docker image to GHCR
        run: |
          repo_name=${{ github.repository }} # Get repository name
          repo_name_lower=$(echo $repo_name | tr '[:upper:]' '[:lower:]') # Convert to lowercase
          docker push ghcr.io/$repo_name_lower/gh-runner-self:latest
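Both image steps lowercase ${{ github.repository }} because registry image names must be lowercase. The conversion in isolation, with an illustrative repository value:

    repo_name="Community-Scripts/ProxmoxVED"   # illustrative value of ${{ github.repository }}
    repo_name_lower=$(echo "$repo_name" | tr '[:upper:]' '[:lower:]')
    echo "ghcr.io/$repo_name_lower/gh-runner-self:latest"
    # -> ghcr.io/community-scripts/proxmoxved/gh-runner-self:latest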
28  .github/workflows/live/delete-json-branch.yml  vendored
@@ -1,28 +0,0 @@
name: Delete JSON date PR Branch

on:
  pull_request:
    types: [closed]
    branches:
      - main

jobs:
  delete_branch:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the code
        uses: actions/checkout@v3

      - name: Delete PR Update Branch
        if: github.event.pull_request.merged == true && startsWith(github.event.pull_request.head.ref, 'pr-update-json-')
        run: |
          PR_BRANCH="${{ github.event.pull_request.head.ref }}"
          echo "Deleting branch $PR_BRANCH..."

          # Avoid deleting the default branch (e.g., main)
          if [[ "$PR_BRANCH" != "main" ]]; then
            git push origin --delete "$PR_BRANCH"
          else
            echo "Skipping deletion of the main branch"
          fi
57  .github/workflows/live/github-release.yml  vendored
@@ -1,57 +0,0 @@
name: Create Daily Release

on:
  schedule:
    - cron: '1 0 * * *' # Runs daily at 00:01 UTC
  workflow_dispatch:

jobs:
  create-daily-release:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Extract first 5000 characters from CHANGELOG.md
        run: head -c 5000 CHANGELOG.md > changelog_cropped.md

      - name: Debugging - Show extracted changelog
        run: |
          echo "=== CHANGELOG EXCERPT ==="
          cat changelog_cropped.md
          echo "========================="

      - name: Parse CHANGELOG.md and create release
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          YESTERDAY=$(date -u --date="yesterday" +%Y-%m-%d)
          echo "Checking for changes on: $YESTERDAY"

          # Ensure yesterday's date exists in the changelog
          if ! grep -q "## $YESTERDAY" changelog_cropped.md; then
            echo "No entry found for $YESTERDAY, skipping release."
            exit 0
          fi

          # Extract section for yesterday's date
          awk -v date="## $YESTERDAY" '
            $0 ~ date {found=1; next}
            found && /^## [0-9]{4}-[0-9]{2}-[0-9]{2}/ {exit}
            found
          ' changelog_cropped.md > changelog_tmp.md

          echo "=== Extracted Changelog ==="
          cat changelog_tmp.md
          echo "==========================="

          # Skip if no content was found
          if [ ! -s changelog_tmp.md ]; then
            echo "No changes found for $YESTERDAY, skipping release."
            exit 0
          fi

          # Create GitHub release
          gh release create "$YESTERDAY" -t "$YESTERDAY" -F changelog_tmp.md
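The awk block above keeps everything between yesterday's heading and the next date heading. A self-contained demonstration on a toy changelog:

    printf '%s\n' '## 2024-05-02' '- newer entry' '## 2024-05-01' '- entry A' '- entry B' '## 2024-04-30' '- older entry' > changelog_sample.md
    awk -v date="## 2024-05-01" '
      $0 ~ date {found=1; next}
      found && /^## [0-9]{4}-[0-9]{2}-[0-9]{2}/ {exit}
      found
    ' changelog_sample.md
    # prints only "- entry A" and "- entry B"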
177  .github/workflows/live/script-test.yml  vendored
@@ -1,177 +0,0 @@
|
||||
name: Run Scripts on PVE Node for testing
|
||||
permissions:
|
||||
pull-requests: write
|
||||
on:
|
||||
pull_request_target:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- 'install/**.sh'
|
||||
- 'ct/**.sh'
|
||||
|
||||
jobs:
|
||||
run-install-script:
|
||||
runs-on: pvenode
|
||||
steps:
|
||||
- name: Checkout PR branch
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Add Git safe directory
|
||||
run: |
|
||||
git config --global --add safe.directory /__w/ProxmoxVED/ProxmoxVE
|
||||
|
||||
- name: Set up GH_TOKEN
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
echo "GH_TOKEN=${GH_TOKEN}" >> $GITHUB_ENV
|
||||
|
||||
- name: Get Changed Files
|
||||
run: |
|
||||
CHANGED_FILES=$(gh pr diff ${{ github.event.pull_request.number }} --repo ${{ github.repository }} --name-only)
|
||||
CHANGED_FILES=$(echo "$CHANGED_FILES" | tr '\n' ' ')
|
||||
echo "Changed files: $CHANGED_FILES"
|
||||
echo "SCRIPT=$CHANGED_FILES" >> $GITHUB_ENV
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
|
||||
- name: Get scripts
|
||||
id: check-install-script
|
||||
run: |
|
||||
ALL_FILES=()
|
||||
ADDED_FILES=()
|
||||
for FILE in ${{ env.SCRIPT }}; do
|
||||
if [[ $FILE =~ ^install/.*-install\.sh$ ]] || [[ $FILE =~ ^ct/.*\.sh$ ]]; then
|
||||
STRIPPED_NAME=$(basename "$FILE" | sed 's/-install//' | sed 's/\.sh$//')
|
||||
if [[ ! " ${ADDED_FILES[@]} " =~ " $STRIPPED_NAME " ]]; then
|
||||
ALL_FILES+=("$FILE")
|
||||
ADDED_FILES+=("$STRIPPED_NAME") # Mark this base file as added (without the path)
|
||||
fi
|
||||
fi
|
||||
done
|
||||
ALL_FILES=$(echo "${ALL_FILES[@]}" | xargs)
|
||||
echo "$ALL_FILES"
|
||||
echo "ALL_FILES=$ALL_FILES" >> $GITHUB_ENV
|
||||
|
||||
- name: Run scripts
|
||||
id: run-install
|
||||
continue-on-error: true
|
||||
run: |
|
||||
set +e
|
||||
#run for each files in /ct
|
||||
for FILE in ${{ env.ALL_FILES }}; do
|
||||
STRIPPED_NAME=$(basename "$FILE" | sed 's/-install//' | sed 's/\.sh$//')
|
||||
echo "Running Test for: $STRIPPED_NAME"
|
||||
if grep -E -q 'read\s+-r\s+-p\s+".*"\s+\w+' "$FILE"; then
|
||||
echo "The script contains an interactive prompt. Skipping execution."
|
||||
continue
|
||||
fi
|
||||
if [[ $FILE =~ ^install/.*-install\.sh$ ]]; then
|
||||
CT_SCRIPT="ct/$STRIPPED_NAME.sh"
|
||||
if [[ ! -f $CT_SCRIPT ]]; then
|
||||
echo "No CT script found for $STRIPPED_NAME"
|
||||
ERROR_MSG="No CT script found for $FILE"
|
||||
echo "$ERROR_MSG" > result_$STRIPPED_NAME.log
|
||||
continue
|
||||
fi
|
||||
if grep -E -q 'read\s+-r\s+-p\s+".*"\s+\w+' "install/$STRIPPED_NAME-install.sh"; then
|
||||
echo "The script contains an interactive prompt. Skipping execution."
|
||||
continue
|
||||
fi
|
||||
echo "Found CT script for $STRIPPED_NAME"
|
||||
chmod +x "$CT_SCRIPT"
|
||||
RUNNING_FILE=$CT_SCRIPT
|
||||
elif [[ $FILE =~ ^ct/.*\.sh$ ]]; then
|
||||
INSTALL_SCRIPT="install/$STRIPPED_NAME-install.sh"
|
||||
if [[ ! -f $INSTALL_SCRIPT ]]; then
|
||||
echo "No install script found for $STRIPPED_NAME"
|
||||
ERROR_MSG="No install script found for $FILE"
|
||||
echo "$ERROR_MSG" > result_$STRIPPED_NAME.log
|
||||
continue
|
||||
fi
|
||||
echo "Found install script for $STRIPPED_NAME"
|
||||
chmod +x "$INSTALL_SCRIPT"
|
||||
RUNNING_FILE=$FILE
|
||||
if grep -E -q 'read\s+-r\s+-p\s+".*"\s+\w+' "ct/$STRIPPED_NAME.sh"; then
|
||||
echo "The script contains an interactive prompt. Skipping execution."
|
||||
continue
|
||||
fi
|
||||
fi
            git remote add community-scripts https://github.com/community-scripts/ProxmoxVE.git
            git fetch community-scripts
            rm -f .github/workflows/scripts/app-test/pr-build.func || true
            rm -f .github/workflows/scripts/app-test/pr-install.func || true
            rm -f .github/workflows/scripts/app-test/pr-alpine-install.func || true
            rm -f .github/workflows/scripts/app-test/pr-create-lxc.sh || true
            git checkout community-scripts/main -- .github/workflows/scripts/app-test/pr-build.func
            git checkout community-scripts/main -- .github/workflows/scripts/app-test/pr-install.func
            git checkout community-scripts/main -- .github/workflows/scripts/app-test/pr-alpine-install.func
            git checkout community-scripts/main -- .github/workflows/scripts/app-test/pr-create-lxc.sh
            chmod +x $RUNNING_FILE
            chmod +x .github/workflows/scripts/app-test/pr-create-lxc.sh
            chmod +x .github/workflows/scripts/app-test/pr-install.func
            chmod +x .github/workflows/scripts/app-test/pr-alpine-install.func
            chmod +x .github/workflows/scripts/app-test/pr-build.func
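            # Rewrite the script to source the local pr-build.func copy instead of the remote
            # ProxmoxVED build.func it normally pulls via curl.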
            sed -i 's|source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)|source .github/workflows/scripts/app-test/pr-build.func|g' "$RUNNING_FILE"
            echo "Executing $RUNNING_FILE"
            ERROR_MSG=$(./$RUNNING_FILE 2>&1 > /dev/null)
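            # 2>&1 before > /dev/null sends stderr to the command substitution and discards
            # stdout, so ERROR_MSG ends up holding only the script's error output.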
            echo "Finished running $FILE"
            if [ -n "$ERROR_MSG" ]; then
              echo "ERROR in $STRIPPED_NAME: $ERROR_MSG"
              echo "$ERROR_MSG" > result_$STRIPPED_NAME.log
            fi
          done
          set -e # Restore exit-on-error

      - name: Cleanup PVE Node
        run: |
          containers=$(pct list | tail -n +2 | awk '{print $0 " " $4}' | awk '{print $1}')
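          # pct list: skip the header row and keep only the first column (the container VMID);
          # the intermediate awk that appends the status column is effectively a no-op here.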
          for container_id in $containers; do
            status=$(pct status $container_id | awk '{print $2}')
            if [[ $status == "running" ]]; then
              pct stop $container_id
              pct destroy $container_id
            fi
          done

      - name: Post error comments
        run: |
          ERROR="false"
          SEARCH_LINE=".github/workflows/scripts/app-test/pr-build.func: line 255:"

          # Get all existing comments on the PR
          EXISTING_COMMENTS=$(gh pr view ${{ github.event.pull_request.number }} --repo ${{ github.repository }} --json comments --jq '.comments[].body')

          for FILE in ${{ env.ALL_FILES }}; do
            STRIPPED_NAME=$(basename "$FILE" | sed 's/-install//' | sed 's/\.sh$//')
            if [[ ! -f result_$STRIPPED_NAME.log ]]; then
              continue
            fi
            ERROR_MSG=$(cat result_$STRIPPED_NAME.log)

            if [ -n "$ERROR_MSG" ]; then
              CLEANED_ERROR_MSG=$(echo "$ERROR_MSG" | sed "s|$SEARCH_LINE.*||")
              COMMENT_BODY=":warning: The script _**$FILE**_ failed with the following message: <br> <div><strong>${CLEANED_ERROR_MSG}</strong></div>"

              # Check if the comment already exists
              if echo "$EXISTING_COMMENTS" | grep -qF "$COMMENT_BODY"; then
                echo "Skipping duplicate comment for $FILE"
              else
                echo "Posting error message for $FILE"
                gh pr comment ${{ github.event.pull_request.number }} \
                  --repo ${{ github.repository }} \
                  --body "$COMMENT_BODY"
                ERROR="true"
              fi
            fi
          done

          echo "ERROR=$ERROR" >> $GITHUB_ENV
243
.github/workflows/live/script_format.yml
vendored
@@ -1,243 +0,0 @@
name: Script Format Check
permissions:
  pull-requests: write
on:
  pull_request_target:
    branches:
      - main
    paths:
      - 'install/*.sh'
      - 'ct/*.sh'

jobs:
  run-install-script:
    runs-on: pvenode
    steps:
      - name: Checkout PR branch (supports forks)
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.ref }}
          repository: ${{ github.event.pull_request.head.repo.full_name }}
          fetch-depth: 0

      - name: Add Git safe directory
        run: |
          git config --global --add safe.directory /__w/ProxmoxVED/ProxmoxVE

      - name: Set up GH_TOKEN
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          echo "GH_TOKEN=${GH_TOKEN}" >> $GITHUB_ENV

      - name: Get Changed Files
        run: |
          CHANGED_FILES=$(gh pr diff ${{ github.event.pull_request.number }} --repo ${{ github.repository }} --name-only)
          CHANGED_FILES=$(echo "$CHANGED_FILES" | tr '\n' ' ')
          echo "Changed files: $CHANGED_FILES"
          echo "SCRIPT=$CHANGED_FILES" >> $GITHUB_ENV
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Check scripts
        id: run-install
        continue-on-error: true
        run: |
          for FILE in ${{ env.SCRIPT }}; do
            STRIPPED_NAME=$(basename "$FILE" | sed 's/-install//' | sed 's/\.sh$//')
            echo "Running Test for: $STRIPPED_NAME"
            FILE_STRIPPED="${FILE##*/}"
            LOG_FILE="result_$FILE_STRIPPED.log"

            if [[ $FILE =~ ^ct/.*\.sh$ ]]; then

              FIRST_LINE=$(sed -n '1p' "$FILE")
              [[ "$FIRST_LINE" != "#!/usr/bin/env bash" ]] && echo "Line 1 was $FIRST_LINE | Should be: #!/usr/bin/env bash" >> "$LOG_FILE"
              SECOND_LINE=$(sed -n '2p' "$FILE")
              [[ "$SECOND_LINE" != "source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)" ]] &&
                echo "Line 2 was $SECOND_LINE | Should be: source <(curl -s https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)" >> "$LOG_FILE"
              THIRD_LINE=$(sed -n '3p' "$FILE")
              if ! [[ "$THIRD_LINE" =~ ^#\ Copyright\ \(c\)\ [0-9]{4}-[0-9]{4}\ community-scripts\ ORG$ || "$THIRD_LINE" =~ ^Copyright\ \(c\)\ [0-9]{4}-[0-9]{4}\ tteck$ ]]; then
                echo "Line 3 was $THIRD_LINE | Should be: # Copyright (c) 2021-2025 community-scripts ORG" >> "$LOG_FILE"
              fi

              EXPECTED_AUTHOR="# Author:"
              EXPECTED_LICENSE="# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE"
              EXPECTED_SOURCE="# Source:"
              EXPECTED_EMPTY=""

              for i in {4..7}; do
                LINE=$(sed -n "${i}p" "$FILE")

                case $i in
                  4)
                    [[ $LINE == $EXPECTED_AUTHOR* ]] || printf "Line %d was: '%s' | Should start with: '%s'\n" "$i" "$LINE" "$EXPECTED_AUTHOR" >> $LOG_FILE
                    ;;
                  5)
                    [[ "$LINE" == "$EXPECTED_LICENSE" ]] || printf "Line %d was: '%s' | Should be: '%s'\n" "$i" "$LINE" "$EXPECTED_LICENSE" >> $LOG_FILE
                    ;;
                  6)
                    [[ $LINE == $EXPECTED_SOURCE* ]] || printf "Line %d was: '%s' | Should start with: '%s'\n" "$i" "$LINE" "$EXPECTED_SOURCE" >> $LOG_FILE
                    ;;
                  7)
                    [[ -z $LINE ]] || printf "Line %d was: '%s' | Should be empty\n" "$i" "$LINE" >> $LOG_FILE
                    ;;
                esac
              done
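
              # Lines 8-15 must declare the container defaults in this fixed order; the numeric
              # fields, var_os, and var_unprivileged are additionally type-checked below.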
              EXPECTED_PREFIXES=(
                "APP="
                "var_tags="
                "var_cpu="          # Must be a number
                "var_ram="          # Must be a number
                "var_disk="         # Must be a number
                "var_os="           # Must be debian, alpine, or ubuntu
                "var_version="
                "var_unprivileged=" # Must be 0 or 1
              )

              for i in {8..15}; do
                LINE=$(sed -n "${i}p" "$FILE")
                INDEX=$((i - 8))

                case $INDEX in
                  2|3|4) # var_cpu, var_ram, var_disk (must be numbers)
                    if [[ "$LINE" =~ ^${EXPECTED_PREFIXES[$INDEX]}([0-9]+)$ ]]; then
                      continue # Valid
                    else
                      echo "Line $i was '$LINE' | Should be: '${EXPECTED_PREFIXES[$INDEX]}<NUMBER>'" >> "$LOG_FILE"
                    fi
                    ;;
                  5) # var_os (must be debian, alpine, or ubuntu)
                    if [[ "$LINE" =~ ^var_os=(debian|alpine|ubuntu)$ ]]; then
                      continue # Valid
                    else
                      echo "Line $i was '$LINE' | Should be: 'var_os=[debian|alpine|ubuntu]'" >> "$LOG_FILE"
                    fi
                    ;;
                  7) # var_unprivileged (must be 0 or 1)
                    if [[ "$LINE" =~ ^var_unprivileged=[01]$ ]]; then
                      continue # Valid
                    else
                      echo "Line $i was '$LINE' | Should be: 'var_unprivileged=[0|1]'" >> "$LOG_FILE"
                    fi
                    ;;
                  *) # Other lines (must start with expected prefix)
                    if [[ "$LINE" == ${EXPECTED_PREFIXES[$INDEX]}* ]]; then
                      continue # Valid
                    else
                      echo "Line $i was '$LINE' | Should start with '${EXPECTED_PREFIXES[$INDEX]}'" >> "$LOG_FILE"
                    fi
                    ;;
                esac
              done

              for i in {16..20}; do
                LINE=$(sed -n "${i}p" "$FILE")
                EXPECTED=(
                  "header_info \"$APP\""
                  "variables"
                  "color"
                  "catch_errors"
                  "function update_script() {"
                )
                [[ "$LINE" != "${EXPECTED[$((i-16))]}" ]] && echo "Line $i was $LINE | Should be: ${EXPECTED[$((i-16))]}" >> "$LOG_FILE"
              done
              cat "$LOG_FILE"

            elif [[ $FILE =~ ^install/.*-install\.sh$ ]]; then

              FIRST_LINE=$(sed -n '1p' "$FILE")
              [[ "$FIRST_LINE" != "#!/usr/bin/env bash" ]] && echo "Line 1 was $FIRST_LINE | Should be: #!/usr/bin/env bash" >> "$LOG_FILE"

              SECOND_LINE=$(sed -n '2p' "$FILE")
              [[ -n "$SECOND_LINE" ]] && echo "Line 2 should be empty" >> "$LOG_FILE"

              THIRD_LINE=$(sed -n '3p' "$FILE")
              if ! [[ "$THIRD_LINE" =~ ^#\ Copyright\ \(c\)\ [0-9]{4}-[0-9]{4}\ community-scripts\ ORG$ || "$THIRD_LINE" =~ ^Copyright\ \(c\)\ [0-9]{4}-[0-9]{4}\ tteck$ ]]; then
                echo "Line 3 was $THIRD_LINE | Should be: # Copyright (c) 2021-2025 community-scripts ORG" >> "$LOG_FILE"
              fi

              EXPECTED_AUTHOR="# Author:"
              EXPECTED_LICENSE="# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE"
              EXPECTED_SOURCE="# Source:"
              EXPECTED_EMPTY=""

              for i in {4..7}; do
                LINE=$(sed -n "${i}p" "$FILE")

                case $i in
                  4)
                    [[ $LINE == $EXPECTED_AUTHOR* ]] || printf "Line %d was: '%s' | Should start with: '%s'\n" "$i" "$LINE" "$EXPECTED_AUTHOR" >> $LOG_FILE
                    ;;
                  5)
                    [[ "$LINE" == "$EXPECTED_LICENSE" ]] || printf "Line %d was: '%s' | Should be: '%s'\n" "$i" "$LINE" "$EXPECTED_LICENSE" >> $LOG_FILE
                    ;;
                  6)
                    [[ $LINE == $EXPECTED_SOURCE* ]] || printf "Line %d was: '%s' | Should start with: '%s'\n" "$i" "$LINE" "$EXPECTED_SOURCE" >> $LOG_FILE
                    ;;
                  7)
                    [[ -z $LINE ]] || printf "Line %d was: '%s' | Should be empty\n" "$i" "$LINE" >> $LOG_FILE
                    ;;
                esac
              done

              [[ "$(sed -n '8p' "$FILE")" != 'source /dev/stdin <<< "$FUNCTIONS_FILE_PATH"' ]] && echo 'Line 8 should be: source /dev/stdin <<< "$FUNCTIONS_FILE_PATH"' >> "$LOG_FILE"

              for i in {9..14}; do
                LINE=$(sed -n "${i}p" "$FILE")
                EXPECTED=(
                  "color"
                  "verb_ip6"
                  "catch_errors"
                  "setting_up_container"
                  "network_check"
                  "update_os"
                )
                [[ "$LINE" != "${EXPECTED[$((i-9))]}" ]] && echo "Line $i was $LINE | Should be: ${EXPECTED[$((i-9))]}" >> "$LOG_FILE"
              done

              [[ -n "$(sed -n '15p' "$FILE")" ]] && echo "Line 15 should be empty" >> "$LOG_FILE"
              [[ "$(sed -n '16p' "$FILE")" != 'msg_info "Installing Dependencies"' ]] && echo 'Line 16 should be: msg_info "Installing Dependencies"' >> "$LOG_FILE"

              LAST_3_LINES=$(tail -n 3 "$FILE")
              [[ "$LAST_3_LINES" != *"$STD apt-get -y autoremove"* ]] && echo 'Third to last line should be: $STD apt-get -y autoremove' >> "$LOG_FILE"
              [[ "$LAST_3_LINES" != *"$STD apt-get -y autoclean"* ]] && echo 'Second to last line should be: $STD apt-get -y autoclean' >> "$LOG_FILE"
              [[ "$LAST_3_LINES" != *'msg_ok "Cleaned"'* ]] && echo 'Last line should be: msg_ok "Cleaned"' >> "$LOG_FILE"
              cat "$LOG_FILE"
            fi

          done

      - name: Post error comments
        run: |
          ERROR="false"
          for FILE in ${{ env.SCRIPT }}; do
            FILE_STRIPPED="${FILE##*/}"
            LOG_FILE="result_$FILE_STRIPPED.log"
            echo $LOG_FILE
            if [[ ! -f $LOG_FILE ]]; then
              continue
            fi
            ERROR_MSG=$(cat $LOG_FILE)

            if [ -n "$ERROR_MSG" ]; then
              echo "Posting error message for $FILE"
              echo ${ERROR_MSG}
              gh pr comment ${{ github.event.pull_request.number }} \
                --repo ${{ github.repository }} \
                --body ":warning: The script _**$FILE**_ has the following formatting errors: <br> <div><strong>${ERROR_MSG}</strong></div>"

              ERROR="true"
            fi
          done
          echo "ERROR=$ERROR" >> $GITHUB_ENV
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Fail if error
        if: ${{ env.ERROR == 'true' }}
        run: exit 1
133
.github/workflows/live/update-json-date.yml
vendored
@@ -1,133 +0,0 @@
name: Update JSON Date

on:
  push:
    branches:
      - main
    paths:
      - 'json/**.json'
  workflow_dispatch:

jobs:
  update-app-files:
    runs-on: ubuntu-latest

    permissions:
      contents: write
      pull-requests: write

    steps:
      - name: Generate a token
        id: generate-token
        uses: actions/create-github-app-token@v2
        with:
          app-id: ${{ vars.APP_ID }}
          private-key: ${{ secrets.APP_PRIVATE_KEY }}
          owner: community-scripts
          repositories: ProxmoxVED

      - name: Generate dynamic branch name
        id: timestamp
        run: echo "BRANCH_NAME=pr-update-json-$(date +'%Y%m%d%H%M%S')" >> $GITHUB_ENV

      - name: Set up GH_TOKEN
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          echo "GH_TOKEN=${GH_TOKEN}" >> $GITHUB_ENV

      - name: Checkout Repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 2 # Ensure we have the last two commits

      - name: Get Previous Commit
        id: prev_commit
        run: |
          PREV_COMMIT=$(git rev-parse HEAD^)
          echo "Previous commit: $PREV_COMMIT"
          echo "prev_commit=$PREV_COMMIT" >> $GITHUB_ENV

      - name: Get Newly Added JSON Files
        id: new_json_files
        run: |
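          # --diff-filter=A keeps only files added between the previous commit and HEAD,
          # and the grep narrows that to JSON files under json/.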
          git diff --name-only --diff-filter=A ${{ env.prev_commit }} HEAD | grep '^json/.*\.json$' > new_files.txt || true
          echo "New files detected:"
          cat new_files.txt || echo "No new files."

      - name: Disable file mode changes
        run: git config core.fileMode false

      - name: Set up Git
        run: |
          git config --global user.name "GitHub Actions"
          git config --global user.email "github-actions[bot]@users.noreply.github.com"

      - name: Change JSON Date
        id: change-json-date
        run: |
          current_date=$(date +"%Y-%m-%d")
          while IFS= read -r file; do
            # Skip empty lines
            [[ -z "$file" ]] && continue

            if [[ -f "$file" ]]; then
              echo "Processing $file..."
              current_json_date=$(jq -r '.date_created // empty' "$file")
              if [[ -z "$current_json_date" || "$current_json_date" != "$current_date" ]]; then
                echo "Updating $file with date $current_date"
                jq --arg date "$current_date" '.date_created = $date' "$file" > temp.json && mv temp.json "$file"
              else
                echo "Date in $file is already up to date."
              fi
            else
              echo "Warning: File $file not found!"
            fi
          done < new_files.txt
          rm new_files.txt

      - name: Check if there are any changes
        run: |
          echo "Checking for changes..."
          git add -A # Include untracked files
          git status
          if git diff --cached --quiet; then
            echo "No changes detected."
            echo "changed=false" >> "$GITHUB_ENV"
          else
            echo "Changes detected:"
            git diff --stat --cached
            echo "changed=true" >> "$GITHUB_ENV"
          fi

      # Step 7: Commit and create PR if changes exist
      - name: Commit and create PR if changes exist
        if: env.changed == 'true'
        run: |
          git commit -m "Update date in json"
          git checkout -b ${{ env.BRANCH_NAME }}
          git push origin ${{ env.BRANCH_NAME }}

          gh pr create --title "[core] update date in json" \
            --body "This PR is auto-generated by a GitHub Action to update the date in json." \
            --head ${{ env.BRANCH_NAME }} \
            --base main \
            --label "automated pr"
        env:
          GH_TOKEN: ${{ steps.generate-token.outputs.token }}

      - name: Approve pull request
        if: env.changed == 'true'
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          PR_NUMBER=$(gh pr list --head "${{ env.BRANCH_NAME }}" --json number --jq '.[].number')
          if [ -n "$PR_NUMBER" ]; then
            gh pr review $PR_NUMBER --approve
          fi

      - name: No changes detected
        if: env.changed == 'false'
        run: echo "No changes to commit. Workflow completed successfully."
161
.github/workflows/live/validate-filenames.yml
vendored
@@ -1,161 +0,0 @@
name: Validate filenames

on:
  pull_request_target:
    paths:
      - "ct/*.sh"
      - "install/*.sh"
      - "json/*.json"

jobs:
  check-files:
    name: Check changed files
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write

    steps:
      - name: Get pull request information
        if: github.event_name == 'pull_request_target'
        uses: actions/github-script@v7
        id: pr
        with:
          script: |
            const { data: pullRequest } = await github.rest.pulls.get({
              ...context.repo,
              pull_number: context.payload.pull_request.number,
            });
            return pullRequest;

      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Ensure the full history is fetched for accurate diffing
          ref: ${{ github.event_name == 'pull_request_target' && fromJSON(steps.pr.outputs.result).merge_commit_sha || '' }}

      - name: Get changed files
        id: changed-files
        run: |
          if ${{ github.event_name == 'pull_request_target' }}; then
            echo "files=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ steps.pr.outputs.result && fromJSON(steps.pr.outputs.result).merge_commit_sha }} | xargs)" >> $GITHUB_OUTPUT
          else
            echo "files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | xargs)" >> $GITHUB_OUTPUT
          fi

      - name: "Validate filenames in ct and install directory"
        if: always() && steps.changed-files.outputs.files != ''
        id: check-scripts
        run: |
          CHANGED_FILES=$(printf "%s\n" ${{ steps.changed-files.outputs.files }} | { grep -E '^(ct|install)/.*\.sh$' || true; })

          NON_COMPLIANT_FILES=""
          for FILE in $CHANGED_FILES; do
            # Explicitly skip "misc/create_lxc.sh"
            if [[ "$FILE" == "misc/create_lxc.sh" ]]; then
              continue
            fi
            BASENAME=$(echo "$(basename "${FILE%.*}")")
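            # Filenames must be lowercase kebab-case: only a-z, 0-9, and dashes are allowed.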
            if [[ ! "$BASENAME" =~ ^[a-z0-9-]+$ ]]; then
              NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
            fi
          done

          if [ -n "$NON_COMPLIANT_FILES" ]; then
            echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
            echo "Non-compliant filenames found, change to lowercase:"
            for FILE in $NON_COMPLIANT_FILES; do
              echo "$FILE"
            done
            exit 1
          fi

      - name: "Validate filenames in json directory."
        if: always() && steps.changed-files.outputs.files != ''
        id: check-json
        run: |
          CHANGED_FILES=$(printf "%s\n" ${{ steps.changed-files.outputs.files }} | { grep -E '^json/.*\.json$' || true; })

          NON_COMPLIANT_FILES=""
          for FILE in $CHANGED_FILES; do
            BASENAME=$(echo "$(basename "${FILE%.*}")")
            if [[ ! "$BASENAME" =~ ^[a-z0-9-]+$ ]]; then
              NON_COMPLIANT_FILES="$NON_COMPLIANT_FILES $FILE"
            fi
          done

          if [ -n "$NON_COMPLIANT_FILES" ]; then
            echo "files=$NON_COMPLIANT_FILES" >> $GITHUB_OUTPUT
            echo "Non-compliant filenames found, change to lowercase:"
            for FILE in $NON_COMPLIANT_FILES; do
              echo "$FILE"
            done
            exit 1
          fi

      - name: Post results and comment
        if: always() && steps.check-scripts.outputs.files != '' && steps.check-json.outputs.files != '' && github.event_name == 'pull_request_target'
        uses: actions/github-script@v7
        with:
          script: |
            const result = "${{ job.status }}" === "success" ? "success" : "failure";
            const nonCompliantFiles = {
              script: "${{ steps.check-scripts.outputs.files }}",
              JSON: "${{ steps.check-json.outputs.files }}",
            };

            const issueNumber = context.payload.pull_request
              ? context.payload.pull_request.number
              : null;
            const commentIdentifier = "validate-filenames";
            let newCommentBody = `<!-- ${commentIdentifier}-start -->\n### Filename validation\n\n`;
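            // The start/end markers let this step find and replace its own section in an
            // existing bot comment instead of stacking a new comment on every run.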

            if (result === "failure") {
              newCommentBody += ":x: We found issues in the following changed files:\n\n";
              for (const [check, files] of Object.entries(nonCompliantFiles)) {
                if (files) {
                  newCommentBody += `**${check.charAt(0).toUpperCase() + check.slice(1)} filename invalid:**\n${files
                    .trim()
                    .split(" ")
                    .map((file) => `- ${file}`)
                    .join("\n")}\n\n`;
                }
              }
              newCommentBody +=
                "Please change the filenames to lowercase and use only alphanumeric characters and dashes.\n";
            } else {
              newCommentBody += `:rocket: All files passed filename validation!\n`;
            }

            newCommentBody += `\n\n<!-- ${commentIdentifier}-end -->`;

            if (issueNumber) {
              const { data: comments } = await github.rest.issues.listComments({
                ...context.repo,
                issue_number: issueNumber,
              });

              const existingComment = comments.find(
                (comment) => comment.user.login === "github-actions[bot]",
              );

              if (existingComment) {
                if (existingComment.body.includes(commentIdentifier)) {
                  const re = new RegExp(String.raw`<!-- ${commentIdentifier}-start -->[\s\S]*?<!-- ${commentIdentifier}-end -->`, "");
                  newCommentBody = existingComment.body.replace(re, newCommentBody);
                } else {
                  newCommentBody = existingComment.body + '\n\n---\n\n' + newCommentBody;
                }

                await github.rest.issues.updateComment({
                  ...context.repo,
                  comment_id: existingComment.id,
                  body: newCommentBody,
                });
              } else {
                await github.rest.issues.createComment({
                  ...context.repo,
                  issue_number: issueNumber,
                  body: newCommentBody,
                });
              }
            }