Mirror of https://github.com/ivuorinen/actions.git (synced 2026-02-03 17:42:40 +00:00)

Compare commits — 57 commits
Commits in this comparison (short SHAs only; the author and date columns were not preserved by the mirror):

f371da218e, 175a9f5356, b3299e0670, fb37d38f17, 80621c08b4, 77429988fd, f5cedd5870,
0b0e96a2ed, 3b71d19480, 51861a9b40, f98ae7cd7d, cc842575b9, cbfddb2433, 5664cdbfbf,
e740f9d893, a247b78178, 56ff9a511c, 81310f9bd7, 95b8856c3f, e69ddbc1e2, 28e81adc2b,
fb25736f7e, 54886c3fd5, fd030b418f, 96c305c557, 5b4e9c8e11, 2d0bff84ad, 98f260793c,
09ae7517d6, 61ebe619a8, a1d55ac125, db86bb2f0d, 5e7b2fbc11, 43126631c2, f6ed49a6dd,
23ac5dbca3, a8031d3922, 30149dd950, 3a3cdcdefe, 7d28006a83, 4008db6517, 7aa206a02a,
8481bbb5cd, 4c0068e6e7, 5cecfe7cbe, 0288a1c8b8, 44a11e9773, a52399cf74, 803165db8f,
d69ed9e999, 8eea6f781b, 4889586a94, e02ca4d843, 13ef0db9ba, c366e99ee3, fbbb487332,
abe24f8570
Changed file (path not preserved in the mirror; composite action):

@@ -17,7 +17,7 @@ runs:
   using: composite
   steps:
     - name: Install uv
-      uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+      uses: astral-sh/setup-uv@803947b9bd8e9f986429fa0c5a41c367cd732b41 # v7.2.1
       with:
         enable-cache: true

@@ -31,7 +31,7 @@ runs:
       run: uv sync --frozen

     - name: Setup Node.js
-      uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+      uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
       with:
         node-version: '24'
         cache: npm
.github/codeql/codeql-config.yml (16 lines changed)

@@ -15,3 +15,19 @@ paths-ignore:
 # Use security and quality query suite
 queries:
   - uses: security-and-quality
+
+# Suppress specific false positives
+# These findings have been manually reviewed and determined to be false positives
+# with appropriate security controls in place
+query-filters:
+  # docker-publish: Code injection in validated context
+  # False positive: User input is validated and sanitized before use
+  # - Only relative paths and trusted git URLs are allowed
+  # - Absolute paths and arbitrary URLs are rejected
+  # - Path traversal attempts are blocked
+  # - Custom contexts require explicit opt-in via use-custom-context: true
+  # - Wraps docker/build-push-action (trusted Docker-maintained action)
+  # - Action is designed for trusted workflows only (documented in action.yml)
+  - exclude:
+      id: js/actions/code-injection
+      kind: problem
.github/workflows/action-security.yml (2 lines changed)

@@ -48,7 +48,7 @@ jobs:

       - name: Notify on Critical Issues
         if: failure() && steps.security-scan.outputs.critical_issues != '0'
-        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
         with:
           script: |-
             const { repo, owner } = context.repo;
.github/workflows/build-testing-image.yml (4 lines changed)

@@ -38,7 +38,7 @@ jobs:
         uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0

       - name: Log in to GitHub Container Registry
         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
@@ -49,7 +49,7 @@ jobs:

       - name: Extract metadata
         id: meta
-        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
+        uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
         with:
           images: ghcr.io/${{ github.repository_owner }}/actions
           tags: |
.github/workflows/codeql-new.yml (1 line changed)

@@ -42,4 +42,5 @@ jobs:
         with:
           language: ${{ matrix.language }}
           queries: security-and-quality
+          config-file: .github/codeql/codeql-config.yml
           token: ${{ github.token }}
.github/workflows/issue-stats.yml (2 lines changed)

@@ -30,7 +30,7 @@ jobs:
           echo "last_month=$first_day..$last_day" >> "$GITHUB_ENV"

       - name: Run issue-metrics tool
-        uses: github/issue-metrics@78b1d469a1b1c94945b15bd71dedcb1928667f49 # v3.25.3
+        uses: github/issue-metrics@67526e7bd8100b870f10b1c120780a8375777b43 # v3.25.5
        env:
           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           SEARCH_QUERY: 'repo:ivuorinen/actions is:issue created:${{ env.last_month }} -reason:"not planned"'
.github/workflows/new-release.yml (57 lines changed)

@@ -21,28 +21,45 @@ jobs:

     steps:
       - uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
+        with:
+          fetch-depth: 0 # Fetch all history and tags for comparison

-      - name: Create tag if necessary
-        uses: fregante/daily-version-action@fb1a60b7c4daf1410cd755e360ebec3901e58588 # v2.1.3
+      - name: Create daily release
         id: daily-version
-        with:
-          prefix: v
+        run: |
+          set -eu

-      - name: Create changelog text
-        if: steps.daily-version.outputs.created
-        id: changelog
-        uses: loopwerk/tag-changelog@941366edb8920e2071eae0449031830984b9f26e # v1.3.0
-        with:
-          token: ${{ secrets.GITHUB_TOKEN }}
-          config_file: .github/tag-changelog-config.js
+          VERSION="v$(date '+%Y.%m.%d')"
+          printf '%s\n' "version=$VERSION" >> "$GITHUB_OUTPUT"

-      - name: Create release
-        if: steps.daily-version.outputs.created
-        uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1.20.0
+          # Check if release already exists
+          if gh release view "$VERSION" >/dev/null 2>&1; then
+            printf '%s\n' "created=false" >> "$GITHUB_OUTPUT"
+            printf '%s\n' "Release $VERSION already exists - skipping"
+            exit 0
+          fi
+
+          # Get the most recent tag
+          PREVIOUS_TAG=$(git tag --sort=-version:refname | head -1)
+
+          # Check if there are any changes since the previous tag
+          if [ -n "$PREVIOUS_TAG" ]; then
+            CHANGES=$(git rev-list "$PREVIOUS_TAG"..HEAD --count)
+            if [ "$CHANGES" -eq 0 ]; then
+              printf '%s\n' "created=false" >> "$GITHUB_OUTPUT"
+              printf '%s\n' "No changes since $PREVIOUS_TAG - skipping release"
+              exit 0
+            fi
+            printf '%s\n' "Found $CHANGES commit(s) since $PREVIOUS_TAG"
+          fi
+
+          # Create release with auto-generated changelog (also creates tag)
+          gh release create "$VERSION" \
+            --title "Release $VERSION" \
+            --generate-notes \
+            --target main
+
+          printf '%s\n' "created=true" >> "$GITHUB_OUTPUT"
+          printf '%s\n' "Created release $VERSION"
         env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          tag: ${{ steps.daily-version.outputs.version }}
-          name: Release ${{ steps.daily-version.outputs.version }}
-          body: ${{ steps.changelog.outputs.changes }}
-          allowUpdates: true
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/pr-lint.yml (4 lines changed)

@@ -74,14 +74,14 @@ jobs:

       - name: Upload SARIF Report
         if: always() && hashFiles('megalinter-reports/sarif/*.sarif')
-        uses: github/codeql-action/upload-sarif@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+        uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
         with:
           sarif_file: megalinter-reports/sarif
           category: megalinter

       - name: Check Results
         if: always()
-        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
         with:
           script: |
             const status = '${{ steps.pr-lint.outputs.validation_status }}';
.github/workflows/release.yml (2 lines changed)

@@ -17,6 +17,6 @@ jobs:
       contents: write
     steps:
       - uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
-      - uses: softprops/action-gh-release@5be0e66d93ac7ed76da52eca8bb058f665c3a5fe # v2.4.2
+      - uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2.5.0
         with:
           generate_release_notes: true
.github/workflows/stale.yml (2 lines changed)

@@ -25,7 +25,7 @@ jobs:

     steps:
       - name: 🚀 Run stale
-        uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
+        uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           days-before-stale: 30
.github/workflows/test-actions.yml (10 lines changed)

@@ -73,14 +73,14 @@ jobs:
         if: always()

       - name: Upload SARIF file
-        uses: github/codeql-action/upload-sarif@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+        uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
         if: always() && hashFiles('_tests/reports/test-results.sarif') != ''
         with:
           sarif_file: _tests/reports/test-results.sarif
           category: github-actions-tests

       - name: Upload unit test results
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
         if: always()
         with:
           name: unit-test-results
@@ -133,7 +133,7 @@ jobs:
           fi

       - name: Upload integration test results
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
         if: always() && steps.check-integration-reports.outputs.reports-found == 'true'
         with:
           name: integration-test-results
@@ -167,7 +167,7 @@ jobs:
         run: make test-coverage

       - name: Upload coverage report
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
         with:
           name: coverage-report
           path: _tests/coverage/
@@ -263,7 +263,7 @@ jobs:

     steps:
       - name: Download test results
-        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+        uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
         with:
           pattern: '*-test-results'
           merge-multiple: true
.github/workflows/version-maintenance.yml (31 lines changed)

@@ -40,6 +40,29 @@ jobs:
             printf '%s\n' "major=v$current_year" >> "$GITHUB_OUTPUT"
           fi

+      - name: Ensure Major Version Tag Exists
+        id: ensure-tag
+        shell: sh
+        env:
+          MAJOR_VERSION: ${{ steps.version.outputs.major }}
+        run: |
+          set -eu
+
+          git fetch --tags --force
+
+          if git rev-list -n 1 "$MAJOR_VERSION" >/dev/null 2>&1; then
+            echo "Tag $MAJOR_VERSION already exists"
+            printf '%s\n' "created=false" >> "$GITHUB_OUTPUT"
+          else
+            echo "Tag $MAJOR_VERSION not found, creating..."
+            git config user.name "github-actions[bot]"
+            git config user.email "github-actions[bot]@users.noreply.github.com"
+            git tag -a "$MAJOR_VERSION" -m "Major version $MAJOR_VERSION"
+            git push origin "$MAJOR_VERSION"
+            echo "Created and pushed tag $MAJOR_VERSION"
+            printf '%s\n' "created=true" >> "$GITHUB_OUTPUT"
+          fi
+
       - name: Run Action Versioning
         id: action-versioning
         uses: ./action-versioning
@@ -49,7 +72,7 @@ jobs:

       - name: Create Pull Request
         if: steps.action-versioning.outputs.updated == 'true'
-        uses: peter-evans/create-pull-request@84ae59a2cdc2258d6fa0732dd66352dddae2a412 # v7.0.9
+        uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
           commit-message: 'chore: update action references to ${{ steps.version.outputs.major }}'
@@ -68,8 +91,6 @@ jobs:
             ```bash
             make check-version-refs
             ```
-
-            🤖 Auto-generated by version-maintenance workflow
           branch: automated/version-update-${{ steps.version.outputs.major }}
           delete-branch: true
           labels: |
@@ -78,7 +99,7 @@ jobs:

       - name: Check for Annual Bump
         if: steps.action-versioning.outputs.needs-annual-bump == 'true'
-        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
         with:
           script: |
             const currentYear = new Date().getFullYear();
@@ -120,8 +141,6 @@ jobs:
             \`\`\`bash
             make check-version-refs
             \`\`\`
-
-            🤖 Auto-generated by version-maintenance workflow
             `,
             labels: ['maintenance', 'high-priority']
             });
Changed file (path not preserved in the mirror; markdownlint JSON configuration):

@@ -9,5 +9,6 @@
     "siblings_only": true
   },
   "MD033": false,
-  "MD041": false
+  "MD041": false,
+  "MD060": false
 }
Changed file (path not preserved in the mirror; pre-commit configuration):

@@ -14,7 +14,7 @@ repos:
         types: [markdown, python, yaml]
         files: ^(docs/.*|README\.md|CONTRIBUTING\.md|CHANGELOG\.md|.*\.py|.*\.ya?ml)$
   - repo: https://github.com/astral-sh/uv-pre-commit
-    rev: 0.9.11
+    rev: 0.9.28
     hooks:
       - id: uv-lock
       - id: uv-sync
@@ -44,7 +44,7 @@ repos:
         args: [--autofix, --no-sort-keys]

   - repo: https://github.com/DavidAnson/markdownlint-cli2
-    rev: v0.19.1
+    rev: v0.20.0
     hooks:
       - id: markdownlint-cli2
         args: [--fix]
@@ -55,7 +55,7 @@ repos:
       - id: yamllint

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.14.6
+    rev: v0.14.14
     hooks:
       # Run the linter with auto-fix
       - id: ruff-check
@@ -78,24 +78,19 @@ repos:
         exclude: '^_tests/.*\.sh$'

   - repo: https://github.com/rhysd/actionlint
-    rev: v1.7.9
+    rev: v1.7.10
     hooks:
       - id: actionlint
         args: ['-shellcheck=']

-  - repo: https://github.com/renovatebot/pre-commit-hooks
-    rev: 42.19.3
-    hooks:
-      - id: renovate-config-validator
-
   - repo: https://github.com/bridgecrewio/checkov.git
-    rev: '3.2.495'
+    rev: '3.2.500'
     hooks:
       - id: checkov
         args:
           - '--quiet'

   - repo: https://github.com/gitleaks/gitleaks
-    rev: v8.29.1
+    rev: v8.30.0
     hooks:
       - id: gitleaks
Changed file (path not preserved in the mirror):

@@ -1 +1 @@
-3.14.0
+3.14.2
Changed file (path not preserved in the mirror; Python validation script):

@@ -21,6 +21,9 @@ import sys

 import yaml  # pylint: disable=import-error

+# Default value for unknown action names (matches shared.validation_core.DEFAULT_UNKNOWN)
+_DEFAULT_UNKNOWN = "Unknown"
+

 class ActionValidator:
     """Handles validation of GitHub Action inputs using Python regex engine."""
@@ -86,7 +89,7 @@ class ActionValidator:
             return True, ""

         # Check for environment variable reference (e.g., $GITHUB_TOKEN)
-        if re.match(r"^\$[A-Za-z_][A-Za-z0-9_]*$", token):
+        if re.match(r"^\$[A-Za-z_]\w*$", token, re.ASCII):
             return True, ""

         # Check against all known token patterns
@@ -330,16 +333,16 @@ def get_action_name(action_file: str) -> str:
         action_file: Path to the action.yml file

     Returns:
-        Action name or "Unknown" if not found
+        Action name or _DEFAULT_UNKNOWN if not found
     """
     try:
         with Path(action_file).open(encoding="utf-8") as f:
             data = yaml.safe_load(f)

-        return data.get("name", "Unknown")
+        return data.get("name", _DEFAULT_UNKNOWN)

     except Exception:
-        return "Unknown"
+        return _DEFAULT_UNKNOWN


 def _show_usage():
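The regex change above swaps the explicit trailing character class for `\w` with `re.ASCII`, which keeps the match ASCII-only. A minimal standalone sketch (not part of the repository) showing that the old and new patterns accept the same environment-variable names:

```python
import re

OLD = re.compile(r"^\$[A-Za-z_][A-Za-z0-9_]*$")
# With re.ASCII, \w is restricted to [A-Za-z0-9_], so the patterns stay equivalent.
NEW = re.compile(r"^\$[A-Za-z_]\w*$", re.ASCII)

samples = ["$GITHUB_TOKEN", "$MY_VAR_1", "$1BAD", "$VÄR", "plain"]
for token in samples:
    assert bool(OLD.match(token)) == bool(NEW.match(token)), token
print("old and new patterns agree on all samples")
```

Without the `re.ASCII` flag, `\w` would also match non-ASCII word characters such as "Ä", which is why the flag accompanies the shorter pattern.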
Changed file (path not preserved in the mirror; shared validation core module, Python):

@@ -25,6 +25,9 @@ from typing import Any

 import yaml  # pylint: disable=import-error

+# Default value for unknown items (used by ActionFileParser)
+DEFAULT_UNKNOWN = "Unknown"
+

 class ValidationCore:
     """Core validation functionality with standardized patterns and functions."""
@@ -497,9 +500,9 @@ class ActionFileParser:
         """Get the action name from an action.yml file."""
         try:
             data = ActionFileParser.load_action_file(action_file)
-            return data.get("name", "Unknown")
+            return data.get("name", DEFAULT_UNKNOWN)
         except (OSError, ValueError, yaml.YAMLError, AttributeError):
-            return "Unknown"
+            return DEFAULT_UNKNOWN

     @staticmethod
     def get_action_inputs(action_file: str) -> list[str]:
Changed file (path not preserved in the mirror; version-bump shell script):

@@ -76,11 +76,7 @@ if ! git diff --quiet; then
   git commit -m "chore: bump major version from $OLD_VERSION to $NEW_VERSION

 This commit updates all internal action references from $OLD_VERSION
-to $NEW_VERSION.
-
-🤖 Generated with [Claude Code](https://claude.com/claude-code)
-
-Co-Authored-By: Claude <noreply@anthropic.com>"
+to $NEW_VERSION."

   printf '%b' "${GREEN}✅ Committed version bump${NC}\n"
 else
Changed file (path not preserved in the mirror; composite action shell steps):

@@ -95,7 +95,7 @@ runs:
          find . -maxdepth 2 -name "action.yml" -path "*/action.yml" ! -path "./_*" ! -path "./.github/*" -exec grep -h "uses: ivuorinen/actions/" {} \; > "$temp_file"

          while IFS= read -r line; do
-           current_sha=$(echo "$line" | grep -oE '@[a-f0-9]{40}' | sed 's/@//')
+           current_sha=$(printf '%s' "$line" | grep -oE '@[a-f0-9]{40}' | sed 's/@//')

            if [ "$current_sha" != "$TAG_SHA" ]; then
              echo "Found outdated reference: $current_sha (should be $TAG_SHA)"
@@ -153,11 +153,7 @@ runs:
          git commit -m "chore: update action references to $MAJOR_VERSION ($TAG_SHA)" \
            -m "" \
            -m "This commit updates all internal action references to point to the latest" \
-           -m "$MAJOR_VERSION tag SHA." \
-           -m "" \
-           -m "🤖 Generated with [Claude Code](https://claude.com/claude-code)" \
-           -m "" \
-           -m "Co-Authored-By: Claude <noreply@anthropic.com>"
+           -m "$MAJOR_VERSION tag SHA."

          commit_sha=$(git rev-parse HEAD)
          printf '%s\n' "sha=$commit_sha" >> "$GITHUB_OUTPUT"
Changed file (path not preserved in the mirror; composite action):

@@ -77,7 +77,7 @@ runs:
       if: steps.check-files.outputs.files_found == 'true'
       uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
       with:
-        python-version: '3.11'
+        python-version: '3.14'
         cache: 'pip'

     - name: Install ansible-lint
@@ -122,7 +122,7 @@ runs:

     - name: Commit Fixes
       if: steps.check-files.outputs.files_found == 'true'
-      uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
+      uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
       with:
         commit_message: 'style: apply ansible lint fixes'
         commit_user_name: ${{ inputs.username }}
@@ -130,6 +130,6 @@ runs:

     - name: Upload SARIF Report
       if: steps.check-files.outputs.files_found == 'true'
-      uses: github/codeql-action/upload-sarif@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+      uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
       with:
         sarif_file: ansible-lint.sarif
Changed file (path not preserved in the mirror; composite action):

@@ -181,9 +181,9 @@ runs:
           echo "Detected package manager: $package_manager"

     - name: Setup Node.js
-      uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+      uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
       with:
-        node-version: '22'
+        node-version: '24'

     - name: Enable Corepack
       shell: sh
@@ -212,13 +212,13 @@ runs:

     - name: Setup Bun
       if: steps.detect-pm.outputs.package-manager == 'bun'
-      uses: oven-sh/setup-bun@735343b667d3e6f658f44d0eca948eb6282f2b76 # v2.0.2
+      uses: oven-sh/setup-bun@3d267786b128fe76c2f16a390aa2448b815359f3 # v2.1.2
       with:
         bun-version: latest

     - name: Cache Node Dependencies
       id: cache
-      uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+      uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
       with:
         path: node_modules
         key: ${{ runner.os }}-biome-lint-${{ inputs.mode }}-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}
@@ -331,7 +331,7 @@ runs:

     - name: Upload SARIF Report
       if: inputs.mode == 'check' && always()
-      uses: github/codeql-action/upload-sarif@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+      uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
       with:
         sarif_file: biome-report.sarif

@@ -365,7 +365,7 @@ runs:

     - name: Commit and Push Fixes
       if: inputs.mode == 'fix' && success()
-      uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
+      uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
       with:
         commit_message: 'style: autofix Biome violations'
         commit_user_name: ${{ inputs.username }}
Changed file (path not preserved in the mirror; CodeQL action input validator, Python):

@@ -81,21 +81,13 @@ class CustomValidator(BaseValidator):

         # Validate threads
         if inputs.get("threads"):
-            result = self.codeql_validator.validate_threads(inputs["threads"])
-            for error in self.codeql_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.codeql_validator.clear_errors()
-            valid &= result
+            valid &= self.validate_with(
+                self.codeql_validator, "validate_threads", inputs["threads"]
+            )

         # Validate RAM
         if inputs.get("ram"):
-            result = self.codeql_validator.validate_ram(inputs["ram"])
-            for error in self.codeql_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.codeql_validator.clear_errors()
-            valid &= result
+            valid &= self.validate_with(self.codeql_validator, "validate_ram", inputs["ram"])

         # Validate debug mode
         if inputs.get("debug"):
@@ -226,19 +218,10 @@ class CustomValidator(BaseValidator):
         Returns:
             True if valid, False otherwise
         """
-        # Check for empty queries first
         if not queries or not queries.strip():
             self.add_error("CodeQL queries cannot be empty")
             return False
-
-        # Use the CodeQL validator
-        result = self.codeql_validator.validate_codeql_queries(queries)
-        # Copy any errors from codeql validator
-        for error in self.codeql_validator.errors:
-            if error not in self.errors:
-                self.add_error(error)
-        self.codeql_validator.clear_errors()
-        return result
+        return self.validate_with(self.codeql_validator, "validate_codeql_queries", queries)

     def validate_categories(self, categories: str) -> bool:
         """Validate CodeQL categories.
@@ -249,14 +232,7 @@ class CustomValidator(BaseValidator):
         Returns:
             True if valid, False otherwise
         """
-        # Use the CodeQL validator
-        result = self.codeql_validator.validate_category_format(categories)
-        # Copy any errors from codeql validator
-        for error in self.codeql_validator.errors:
-            if error not in self.errors:
-                self.add_error(error)
-        self.codeql_validator.clear_errors()
-        return result
+        return self.validate_with(self.codeql_validator, "validate_category_format", categories)

     def validate_category(self, category: str) -> bool:
         """Validate CodeQL category (singular).
@@ -267,14 +243,7 @@ class CustomValidator(BaseValidator):
         Returns:
             True if valid, False otherwise
         """
-        # Use the CodeQL validator
-        result = self.codeql_validator.validate_category_format(category)
-        # Copy any errors from codeql validator
-        for error in self.codeql_validator.errors:
-            if error not in self.errors:
-                self.add_error(error)
-        self.codeql_validator.clear_errors()
-        return result
+        return self.validate_with(self.codeql_validator, "validate_category_format", category)

     def validate_config_file(self, config_file: str) -> bool:
         """Validate CodeQL configuration file path.
@@ -287,21 +256,11 @@ class CustomValidator(BaseValidator):
         """
         if not config_file or not config_file.strip():
             return True

-        # Allow GitHub Actions expressions
         if self.is_github_expression(config_file):
             return True
-
-        # Use FileValidator for yaml file validation
-        result = self.file_validator.validate_yaml_file(config_file, "config-file")
-
-        # Copy any errors from file validator
-        for error in self.file_validator.errors:
-            if error not in self.errors:
-                self.add_error(error)
-        self.file_validator.clear_errors()
-
-        return result
+        return self.validate_with(
+            self.file_validator, "validate_yaml_file", config_file, "config-file"
+        )

     def validate_database(self, database: str) -> bool:
         """Validate CodeQL database path.
@@ -312,25 +271,13 @@ class CustomValidator(BaseValidator):
         Returns:
             True if valid, False otherwise
         """
-        # Allow GitHub Actions expressions
         if self.is_github_expression(database):
             return True

-        # Use FileValidator for path validation
-        result = self.file_validator.validate_file_path(database, "database")
-
-        # Copy any errors from file validator
-        for error in self.file_validator.errors:
-            if error not in self.errors:
-                self.add_error(error)
-        self.file_validator.clear_errors()
+        result = self.validate_with(self.file_validator, "validate_file_path", database, "database")

         # Database paths often contain the language
         # e.g., "codeql-database/javascript" or "/tmp/codeql_databases/python"
-        # Just validate it's a reasonable path after basic validation
         if result and database.startswith("/tmp/"):  # noqa: S108
             return True

         return result
@@ -342,20 +289,9 @@ class CustomValidator(BaseValidator):
         Returns:
             True if valid, False otherwise
         """
-        # Allow GitHub Actions expressions
         if self.is_github_expression(debug):
             return True
-
-        # Use BooleanValidator
-        result = self.boolean_validator.validate_boolean(debug, "debug")
-
-        # Copy any errors from boolean validator
-        for error in self.boolean_validator.errors:
-            if error not in self.errors:
-                self.add_error(error)
-        self.boolean_validator.clear_errors()
-
-        return result
+        return self.validate_with(self.boolean_validator, "validate_boolean", debug, "debug")

     def validate_upload_database(self, upload: str) -> bool:
         """Validate upload-database setting.
@@ -366,20 +302,11 @@ class CustomValidator(BaseValidator):
         Returns:
             True if valid, False otherwise
         """
-        # Allow GitHub Actions expressions
         if self.is_github_expression(upload):
             return True
-
-        # Use BooleanValidator
-        result = self.boolean_validator.validate_boolean(upload, "upload-database")
-
-        # Copy any errors from boolean validator
-        for error in self.boolean_validator.errors:
-            if error not in self.errors:
-                self.add_error(error)
-        self.boolean_validator.clear_errors()
-
-        return result
+        return self.validate_with(
+            self.boolean_validator, "validate_boolean", upload, "upload-database"
+        )

     def validate_upload_sarif(self, upload: str) -> bool:
         """Validate upload-sarif setting.
@@ -390,20 +317,11 @@ class CustomValidator(BaseValidator):
         Returns:
             True if valid, False otherwise
         """
-        # Allow GitHub Actions expressions
         if self.is_github_expression(upload):
             return True
-
-        # Use BooleanValidator
-        result = self.boolean_validator.validate_boolean(upload, "upload-sarif")
-
-        # Copy any errors from boolean validator
-        for error in self.boolean_validator.errors:
-            if error not in self.errors:
-                self.add_error(error)
-        self.boolean_validator.clear_errors()
-
-        return result
+        return self.validate_with(
+            self.boolean_validator, "validate_boolean", upload, "upload-sarif"
+        )

     def validate_packs(self, packs: str) -> bool:
         """Validate CodeQL packs.
@@ -487,16 +405,9 @@ class CustomValidator(BaseValidator):
         Returns:
             True if valid, False otherwise
         """
-        # Use the TokenValidator for proper validation
-        result = self.token_validator.validate_github_token(token, required=False)
-
-        # Copy any errors from token validator
-        for error in self.token_validator.errors:
-            if error not in self.errors:
-                self.add_error(error)
-        self.token_validator.clear_errors()
-
-        return result
+        return self.validate_with(
+            self.token_validator, "validate_github_token", token, required=False
+        )

     def validate_token(self, token: str) -> bool:
         """Validate GitHub token.
@@ -507,21 +418,12 @@ class CustomValidator(BaseValidator):
         Returns:
             True if valid, False otherwise
         """
-        # Check for empty token
         if not token or not token.strip():
             self.add_error("Input 'token' is missing or empty")
             return False

-        # Use the TokenValidator for proper validation
-        result = self.token_validator.validate_github_token(token, required=True)
-
-        # Copy any errors from token validator
-        for error in self.token_validator.errors:
-            if error not in self.errors:
-                self.add_error(error)
-        self.token_validator.clear_errors()
-
-        return result
+        return self.validate_with(
+            self.token_validator, "validate_github_token", token, required=True
+        )

     def validate_working_directory(self, directory: str) -> bool:
         """Validate working directory path.
@@ -532,20 +434,11 @@ class CustomValidator(BaseValidator):
         Returns:
             True if valid, False otherwise
         """
-        # Allow GitHub Actions expressions
         if self.is_github_expression(directory):
             return True
-
-        # Use FileValidator for path validation
-        result = self.file_validator.validate_file_path(directory, "working-directory")
-
-        # Copy any errors from file validator
-        for error in self.file_validator.errors:
-            if error not in self.errors:
-                self.add_error(error)
-        self.file_validator.clear_errors()
-
-        return result
+        return self.validate_with(
+            self.file_validator, "validate_file_path", directory, "working-directory"
+        )

     def validate_upload_results(self, value: str) -> bool:
         """Validate upload-results boolean value.
@@ -556,27 +449,14 @@ class CustomValidator(BaseValidator):
         Returns:
             True if valid, False otherwise
         """
-        # Check for empty
         if not value or not value.strip():
             self.add_error("upload-results cannot be empty")
             return False

-        # Allow GitHub Actions expressions
         if self.is_github_expression(value):
             return True

-        # Check for uppercase TRUE/FALSE first
         if value in ["TRUE", "FALSE"]:
             self.add_error("Must be lowercase 'true' or 'false'")
             return False
-
-        # Use BooleanValidator for normal validation
-        result = self.boolean_validator.validate_boolean(value, "upload-results")
-
-        # Copy any errors from boolean validator
-        for error in self.boolean_validator.errors:
-            if error not in self.errors:
-                self.add_error(error)
-        self.boolean_validator.clear_errors()
-
-        return result
+        return self.validate_with(
+            self.boolean_validator, "validate_boolean", value, "upload-results"
+        )
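Throughout these validator changes, the repeated copy-errors-and-clear boilerplate is replaced by a single `self.validate_with(...)` call on the base class. The helper itself is not part of this comparison; the sketch below is only an illustration of what such a method plausibly does (delegate to a sub-validator method, merge its errors into the caller, reset the sub-validator, and return the boolean result), not the repository's actual implementation.

```python
class BaseValidator:
    """Minimal stand-in for the repository's BaseValidator (assumed interface)."""

    def __init__(self) -> None:
        self.errors: list[str] = []

    def add_error(self, message: str) -> None:
        # Keep error messages unique, matching the removed "if error not in self.errors" checks.
        if message not in self.errors:
            self.errors.append(message)

    def clear_errors(self) -> None:
        self.errors.clear()

    def validate_with(self, validator: "BaseValidator", method_name: str, *args, **kwargs) -> bool:
        # Delegate to a sub-validator method, then fold its errors into this
        # validator and reset the sub-validator, mirroring the removed boilerplate.
        result = bool(getattr(validator, method_name)(*args, **kwargs))
        for error in validator.errors:
            self.add_error(error)
        validator.clear_errors()
        return result
```

With a helper shaped like this, each call site collapses to `valid &= self.validate_with(sub_validator, "method_name", value, ...)` while error propagation stays identical.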
Changed file (path not preserved in the mirror; CodeQL composite action):

@@ -186,7 +186,7 @@ runs:
         echo "Using build mode: $build_mode"

     - name: Initialize CodeQL
-      uses: github/codeql-action/init@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+      uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
       with:
         languages: ${{ inputs.language }}
         queries: ${{ inputs.queries }}
@@ -199,12 +199,12 @@ runs:
         threads: ${{ inputs.threads }}

     - name: Autobuild
-      uses: github/codeql-action/autobuild@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+      uses: github/codeql-action/autobuild@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
       if: ${{ steps.set-build-mode.outputs.build-mode == 'autobuild' }}

     - name: Perform CodeQL Analysis
       id: analysis
-      uses: github/codeql-action/analyze@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+      uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
       with:
         category: ${{ steps.set-category.outputs.category }}
         upload: ${{ inputs.upload-results }}
Changed file (path not preserved in the mirror; image-compression action input validator, Python):

@@ -36,47 +36,35 @@ class CustomValidator(BaseValidator):

         # Validate optional inputs
         if inputs.get("image-quality"):
-            result = self.numeric_validator.validate_numeric_range(
-                inputs["image-quality"], min_val=0, max_val=100
-            )
-            for error in self.numeric_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.numeric_validator.clear_errors()
-            if not result:
-                valid = False
+            valid &= self.validate_with(
+                self.numeric_validator,
+                "validate_numeric_range",
+                inputs["image-quality"],
+                min_val=0,
+                max_val=100,
+            )

         if inputs.get("png-quality"):
-            result = self.numeric_validator.validate_numeric_range(
-                inputs["png-quality"], min_val=0, max_val=100
-            )
-            for error in self.numeric_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.numeric_validator.clear_errors()
-            if not result:
-                valid = False
+            valid &= self.validate_with(
+                self.numeric_validator,
+                "validate_numeric_range",
+                inputs["png-quality"],
+                min_val=0,
+                max_val=100,
+            )

         if inputs.get("directory"):
-            result = self.file_validator.validate_file_path(inputs["directory"], "directory")
-            for error in self.file_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.file_validator.clear_errors()
-            if not result:
-                valid = False
+            valid &= self.validate_with(
+                self.file_validator, "validate_file_path", inputs["directory"], "directory"
+            )

         if inputs.get("ignore-paths"):
-            # Validate for injection
-            result = self.security_validator.validate_no_injection(
-                inputs["ignore-paths"], "ignore-paths"
-            )
-            for error in self.security_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.security_validator.clear_errors()
-            if not result:
-                valid = False
+            valid &= self.validate_with(
+                self.security_validator,
+                "validate_no_injection",
+                inputs["ignore-paths"],
+                "ignore-paths",
+            )

         return valid
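The rewritten checks fold each result into `valid` with `&=`. Because Python booleans are integers, `valid &= check()` turns `valid` false as soon as any check fails while still running the remaining checks, so all errors are collected. A tiny standalone illustration of that accumulation pattern (not repository code):

```python
def run_checks() -> bool:
    # Each check runs even after an earlier failure, mirroring the validators'
    # "valid &= self.validate_with(...)" accumulation.
    checks = [lambda: True, lambda: False, lambda: True]
    valid = True
    for check in checks:
        valid &= check()
    return bool(valid)

assert run_checks() is False
```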
Changed file (path not preserved in the mirror; composite action):

@@ -163,7 +163,7 @@ runs:

     - name: Create New Pull Request If Needed
       if: steps.calibre.outputs.markdown != ''
-      uses: peter-evans/create-pull-request@84ae59a2cdc2258d6fa0732dd66352dddae2a412 # v7.0.9
+      uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
       with:
         token: ${{ inputs.token }}
         title: 'chore: compress images'
Changed file (path not preserved in the mirror; composite action):

@@ -148,7 +148,7 @@ runs:
           echo "Final detected .NET version: $detected_version" >&2

     - name: Setup .NET SDK
-      uses: actions/setup-dotnet@2016bd2012dba4e32de620c46fe006a3ac9f0602 # v5.0.1
+      uses: actions/setup-dotnet@baa11fbfe1d6520db94683bd5c7a3818018e4309 # v5.1.0
       with:
         dotnet-version: ${{ steps.detect-dotnet-version.outputs.detected-version }}
         cache: true
@@ -203,7 +203,7 @@ runs:

     - name: Upload Test Results
       if: always()
-      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+      uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
       with:
         name: csharp-test-results
         path: |
Changed file (path not preserved in the mirror; composite action):

@@ -164,7 +164,7 @@ runs:
           echo "Final detected .NET version: $detected_version" >&2

     - name: Setup .NET SDK
-      uses: actions/setup-dotnet@2016bd2012dba4e32de620c46fe006a3ac9f0602 # v5.0.1
+      uses: actions/setup-dotnet@baa11fbfe1d6520db94683bd5c7a3818018e4309 # v5.1.0
       with:
         dotnet-version: ${{ steps.detect-dotnet-version.outputs.detected-version }}
         cache: true
@@ -206,6 +206,6 @@ runs:
         fi

     - name: Upload SARIF Report
-      uses: github/codeql-action/upload-sarif@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+      uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
       with:
         sarif_file: dotnet-format.sarif
Changed file (path not preserved in the mirror; composite action):

@@ -162,7 +162,7 @@ runs:
           echo "Final detected .NET version: $detected_version" >&2

     - name: Setup .NET SDK
-      uses: actions/setup-dotnet@2016bd2012dba4e32de620c46fe006a3ac9f0602 # v5.0.1
+      uses: actions/setup-dotnet@baa11fbfe1d6520db94683bd5c7a3818018e4309 # v5.1.0
       with:
         dotnet-version: ${{ inputs.dotnet-version || steps.detect-dotnet-version.outputs.detected-version }}
         cache: true
Changed file (path not preserved in the mirror; Docker build/publish action input validator, Python):

@@ -65,35 +65,24 @@ class CustomValidator(BaseValidator):

         # Validate image name
         if inputs.get("image-name"):
-            result = self.docker_validator.validate_image_name(inputs["image-name"], "image-name")
-            # Propagate errors from docker validator
-            for error in self.docker_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.docker_validator.clear_errors()
-            valid &= result
+            valid &= self.validate_with(
+                self.docker_validator, "validate_image_name", inputs["image-name"], "image-name"
+            )

         # Validate tag (singular - as per action.yml)
         if inputs.get("tag"):
-            result = self.docker_validator.validate_docker_tag(inputs["tag"], "tag")
-            # Propagate errors
-            for error in self.docker_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.docker_validator.clear_errors()
-            valid &= result
+            valid &= self.validate_with(
+                self.docker_validator, "validate_docker_tag", inputs["tag"], "tag"
+            )

         # Validate architectures/platforms
         if inputs.get("architectures"):
-            result = self.docker_validator.validate_architectures(
-                inputs["architectures"], "architectures"
+            valid &= self.validate_with(
+                self.docker_validator,
+                "validate_architectures",
+                inputs["architectures"],
+                "architectures",
             )
-            # Propagate errors
-            for error in self.docker_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.docker_validator.clear_errors()
-            valid &= result

         # Validate build arguments
         if inputs.get("build-args"):
@@ -101,12 +90,9 @@ class CustomValidator(BaseValidator):

         # Validate push flag
         if inputs.get("push"):
-            result = self.boolean_validator.validate_optional_boolean(inputs["push"], "push")
-            for error in self.boolean_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.boolean_validator.clear_errors()
-            valid &= result
+            valid &= self.validate_with(
+                self.boolean_validator, "validate_optional_boolean", inputs["push"], "push"
+            )

         # Validate cache settings
         if inputs.get("cache-from"):
@@ -117,22 +103,35 @@ class CustomValidator(BaseValidator):

         # Validate cache-mode
         if inputs.get("cache-mode"):
-            valid &= self.validate_cache_mode(inputs["cache-mode"])
+            valid &= self.validate_enum(
+                inputs["cache-mode"],
+                "cache-mode",
+                ["min", "max", "inline"],
+                case_sensitive=True,
+            )

         # Validate buildx-version
         if inputs.get("buildx-version"):
-            valid &= self.validate_buildx_version(inputs["buildx-version"])
+            version = inputs["buildx-version"]
+            # Allow 'latest' as special value
+            if version != "latest" and not self.is_github_expression(version):
+                valid &= self.validate_with(
+                    self.version_validator,
+                    "validate_semantic_version",
+                    version,
+                    "buildx-version",
+                )

         # Validate parallel-builds
         if inputs.get("parallel-builds"):
-            result = self.numeric_validator.validate_numeric_range(
-                inputs["parallel-builds"], min_val=0, max_val=16, name="parallel-builds"
+            valid &= self.validate_with(
+                self.numeric_validator,
+                "validate_numeric_range",
+                inputs["parallel-builds"],
+                min_val=0,
+                max_val=16,
+                name="parallel-builds",
             )
-            for error in self.numeric_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.numeric_validator.clear_errors()
-            valid &= result

         # Validate boolean flags
         for bool_input in [
@@ -144,29 +143,32 @@ class CustomValidator(BaseValidator):
             "auto-detect-platforms",
         ]:
             if inputs.get(bool_input):
-                result = self.boolean_validator.validate_optional_boolean(
-                    inputs[bool_input], bool_input
+                valid &= self.validate_with(
+                    self.boolean_validator,
+                    "validate_optional_boolean",
+                    inputs[bool_input],
+                    bool_input,
                 )
-                for error in self.boolean_validator.errors:
-                    if error not in self.errors:
-                        self.add_error(error)
-                self.boolean_validator.clear_errors()
-                valid &= result

         # Validate sbom-format
         if inputs.get("sbom-format"):
-            valid &= self.validate_sbom_format(inputs["sbom-format"])
+            valid &= self.validate_enum(
+                inputs["sbom-format"],
+                "sbom-format",
+                ["spdx-json", "cyclonedx-json", "syft-json"],
+                case_sensitive=True,
+            )

         # Validate max-retries
         if inputs.get("max-retries"):
-            result = self.numeric_validator.validate_numeric_range(
-                inputs["max-retries"], min_val=0, max_val=10, name="max-retries"
+            valid &= self.validate_with(
+                self.numeric_validator,
+                "validate_numeric_range",
+                inputs["max-retries"],
+                min_val=0,
+                max_val=10,
+                name="max-retries",
             )
-            for error in self.numeric_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.numeric_validator.clear_errors()
-            valid &= result

         return valid

@@ -209,19 +211,11 @@ class CustomValidator(BaseValidator):
         Returns:
             True if valid, False otherwise
         """
-        # Allow GitHub Actions expressions
         if self.is_github_expression(dockerfile):
             return True
-
-        # Use file validator for path validation
-        result = self.file_validator.validate_file_path(dockerfile, "dockerfile")
-        # Propagate errors
-        for error in self.file_validator.errors:
-            if error not in self.errors:
-                self.add_error(error)
-        self.file_validator.clear_errors()
-
-        return result
+        return self.validate_with(
+            self.file_validator, "validate_file_path", dockerfile, "dockerfile"
+        )

     def validate_context(self, context: str) -> bool:
         """Validate build context path.
@@ -245,10 +239,9 @@ class CustomValidator(BaseValidator):
         # We allow path traversal for context as Docker needs to access parent directories

(The comparison view is truncated here in the mirror.)
|
# We allow path traversal for context as Docker needs to access parent directories
|
||||||
# Only check for command injection patterns like ; | ` $()
|
# Only check for command injection patterns like ; | ` $()
|
||||||
dangerous_chars = [";", "|", "`", "$(", "&&", "||"]
|
dangerous_chars = [";", "|", "`", "$(", "&&", "||"]
|
||||||
for char in dangerous_chars:
|
if any(char in context for char in dangerous_chars):
|
||||||
if char in context:
|
self.add_error(f"Command injection detected in context: {context}")
|
||||||
self.add_error(f"Command injection detected in context: {context}")
|
return False
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -261,15 +254,9 @@ class CustomValidator(BaseValidator):
|
|||||||
Returns:
|
Returns:
|
||||||
True if valid, False otherwise
|
True if valid, False otherwise
|
||||||
"""
|
"""
|
||||||
# Use docker validator for architectures
|
return self.validate_with(
|
||||||
result = self.docker_validator.validate_architectures(platforms, "platforms")
|
self.docker_validator, "validate_architectures", platforms, "platforms"
|
||||||
# Propagate errors
|
)
|
||||||
for error in self.docker_validator.errors:
|
|
||||||
if error not in self.errors:
|
|
||||||
self.add_error(error)
|
|
||||||
self.docker_validator.clear_errors()
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
def validate_build_args(self, build_args: str) -> bool:
|
def validate_build_args(self, build_args: str) -> bool:
|
||||||
"""Validate build arguments.
|
"""Validate build arguments.
|
||||||
@@ -353,78 +340,3 @@ class CustomValidator(BaseValidator):
|
|||||||
|
|
||||||
# Check for security issues
|
# Check for security issues
|
||||||
return self.validate_security_patterns(cache_to, "cache-to")
|
return self.validate_security_patterns(cache_to, "cache-to")
|
||||||
|
|
||||||
def validate_cache_mode(self, cache_mode: str) -> bool:
|
|
||||||
"""Validate cache mode.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
cache_mode: Cache mode value
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
True if valid, False otherwise
|
|
||||||
"""
|
|
||||||
# Allow GitHub Actions expressions
|
|
||||||
if self.is_github_expression(cache_mode):
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Valid cache modes
|
|
||||||
valid_modes = ["min", "max", "inline"]
|
|
||||||
if cache_mode.lower() not in valid_modes:
|
|
||||||
self.add_error(f"Invalid cache-mode: {cache_mode}. Must be one of: min, max, inline")
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
def validate_buildx_version(self, version: str) -> bool:
|
|
||||||
"""Validate buildx version.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
version: Buildx version
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
True if valid, False otherwise
|
|
||||||
"""
|
|
||||||
# Allow GitHub Actions expressions
|
|
||||||
if self.is_github_expression(version):
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Allow 'latest'
|
|
||||||
if version == "latest":
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Check for security issues (semicolon injection etc)
|
|
||||||
if not self.validate_security_patterns(version, "buildx-version"):
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Basic version format validation (e.g., 0.12.0, v0.12.0)
|
|
||||||
import re
|
|
||||||
|
|
||||||
if not re.match(r"^v?\d+\.\d+(\.\d+)?$", version):
|
|
||||||
self.add_error(f"Invalid buildx-version format: {version}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
def validate_sbom_format(self, sbom_format: str) -> bool:
|
|
||||||
"""Validate SBOM format.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
sbom_format: SBOM format value
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
True if valid, False otherwise
|
|
||||||
"""
|
|
||||||
# Allow GitHub Actions expressions
|
|
||||||
if self.is_github_expression(sbom_format):
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Valid SBOM formats
|
|
||||||
valid_formats = ["spdx-json", "cyclonedx-json", "syft-json"]
|
|
||||||
if sbom_format.lower() not in valid_formats:
|
|
||||||
self.add_error(
|
|
||||||
f"Invalid sbom-format: {sbom_format}. "
|
|
||||||
"Must be one of: spdx-json, cyclonedx-json, syft-json"
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
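The hunks above replace the repeated propagate-and-clear boilerplate with a single self.validate_with call. That helper lives on the shared base class and is not shown in this compare view; the sketch below is only an illustration of what it plausibly does, reconstructed from the removed boilerplate, and the class name and signature are assumptions rather than the repository's code.

class SketchBaseValidator:
    """Minimal stand-in illustrating what validate_with presumably does."""

    def __init__(self):
        self.errors = []

    def add_error(self, message):
        self.errors.append(message)

    def clear_errors(self):
        self.errors = []

    def validate_with(self, validator, method_name, *args, **kwargs):
        # Run the named method on the sub-validator, fold its errors into this
        # validator's list without duplicates, reset it, and return the result.
        result = getattr(validator, method_name)(*args, **kwargs)
        for error in validator.errors:
            if error not in self.errors:
                self.add_error(error)
        validator.clear_errors()
        return result

Used as self.validate_with(self.docker_validator, "validate_docker_tag", inputs["tag"], "tag"), it returns the sub-validator's boolean so callers can AND it into valid, which is exactly the shape the refactored hunks rely on.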
@@ -175,7 +175,7 @@ runs:

     - name: Set up Docker Buildx
       id: buildx
-      uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+      uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
       with:
         version: ${{ inputs.buildx-version }}
         platforms: ${{ inputs.architectures }}
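The buildx-version input forwarded to this step is checked in the validator hunks against the pattern ^v?\d+\.\d+(\.\d+)?$, with 'latest' special-cased before the regex runs. A quick illustration of what that pattern accepts and rejects; the values are examples, not taken from the repository:

import re

BUILDX_VERSION = re.compile(r"^v?\d+\.\d+(\.\d+)?$")

# Accepted: bare or v-prefixed two- and three-part versions
for accepted in ("0.12.0", "v0.12.0", "0.12"):
    assert BUILDX_VERSION.match(accepted)
# Rejected: 'latest' is handled earlier as a special case, pre-releases never match
for rejected in ("latest", "0.12.0-rc1", "v0"):
    assert not BUILDX_VERSION.match(rejected)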
@@ -11,6 +11,7 @@ This validator handles Docker publish-specific validation including:
 from __future__ import annotations

 from pathlib import Path
+import re
 import sys

 # Add validate-inputs directory to path to import validators

@@ -58,12 +59,9 @@ class CustomValidator(BaseValidator):

         # Validate platforms
         if inputs.get("platforms"):
-            result = self.docker_validator.validate_architectures(inputs["platforms"], "platforms")
-            for error in self.docker_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.docker_validator.clear_errors()
-            valid &= result
+            valid &= self.validate_with(
+                self.docker_validator, "validate_architectures", inputs["platforms"], "platforms"
+            )

         # Validate boolean flags
         for bool_input in [

@@ -74,18 +72,18 @@ class CustomValidator(BaseValidator):
             "verbose",
         ]:
             if inputs.get(bool_input):
-                result = self.boolean_validator.validate_optional_boolean(
-                    inputs[bool_input], bool_input
+                valid &= self.validate_with(
+                    self.boolean_validator,
+                    "validate_optional_boolean",
+                    inputs[bool_input],
+                    bool_input,
                 )
-                for error in self.boolean_validator.errors:
-                    if error not in self.errors:
-                        self.add_error(error)
-                self.boolean_validator.clear_errors()
-                valid &= result

         # Validate cache-mode
         if inputs.get("cache-mode"):
-            valid &= self.validate_cache_mode(inputs["cache-mode"])
+            valid &= self.validate_enum(
+                inputs["cache-mode"], "cache-mode", ["min", "max", "inline"]
+            )

         # Validate buildx-version
         if inputs.get("buildx-version"):

@@ -96,24 +94,18 @@ class CustomValidator(BaseValidator):
             valid &= self.validate_username(inputs["dockerhub-username"])

         if inputs.get("dockerhub-password"):
-            # Use token validator for password/token
-            result = self.token_validator.validate_docker_token(
-                inputs["dockerhub-password"], "dockerhub-password"
+            valid &= self.validate_with(
+                self.token_validator,
+                "validate_docker_token",
+                inputs["dockerhub-password"],
+                "dockerhub-password",
             )
-            for error in self.token_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.token_validator.clear_errors()
-            valid &= result

         # Validate github-token
         if inputs.get("github-token"):
-            result = self.token_validator.validate_github_token(inputs["github-token"])
-            for error in self.token_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.token_validator.clear_errors()
-            valid &= result
+            valid &= self.validate_with(
+                self.token_validator, "validate_github_token", inputs["github-token"]
+            )

         return valid

@@ -156,40 +148,7 @@ class CustomValidator(BaseValidator):
         Returns:
             True if valid, False otherwise
         """
-        # Allow GitHub Actions expressions
-        if self.is_github_expression(registry):
-            return True
-
-        # Valid registry values according to action description
-        valid_registries = ["dockerhub", "github", "both"]
-        if registry.lower() not in valid_registries:
-            self.add_error(
-                f"Invalid registry: {registry}. Must be one of: dockerhub, github, or both"
-            )
-            return False
-
-        return True
-
-    def validate_cache_mode(self, cache_mode: str) -> bool:
-        """Validate cache mode.
-
-        Args:
-            cache_mode: Cache mode value
-
-        Returns:
-            True if valid, False otherwise
-        """
-        # Allow GitHub Actions expressions
-        if self.is_github_expression(cache_mode):
-            return True
-
-        # Valid cache modes
-        valid_modes = ["min", "max", "inline"]
-        if cache_mode.lower() not in valid_modes:
-            self.add_error(f"Invalid cache-mode: {cache_mode}. Must be one of: min, max, inline")
-            return False
-
-        return True
+        return self.validate_enum(registry, "registry", ["dockerhub", "github", "both"])

     def validate_buildx_version(self, version: str) -> bool:
         """Validate buildx version.

@@ -213,8 +172,6 @@ class CustomValidator(BaseValidator):
             return False

         # Basic version format validation
-        import re
-
         if not re.match(r"^v?\d+\.\d+(\.\d+)?$", version):
             self.add_error(f"Invalid buildx-version format: {version}")
             return False

@@ -244,8 +201,6 @@ class CustomValidator(BaseValidator):
             return False

         # Docker Hub username rules: lowercase letters, digits, periods, hyphens, underscores
-        import re
-
         if not re.match(r"^[a-z0-9._-]+$", username.lower()):
             self.add_error(f"Invalid Docker Hub username format: {username}")
             return False
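Several checks above (cache-mode, registry, and later access, sbom-format and report-format) now delegate to a shared self.validate_enum helper whose definition sits outside this compare view. Below is a minimal free-function sketch of the assumed behaviour, based on the inline checks it replaces; the real helper is a method on the base class and also short-circuits on ${{ ... }} expressions, so treat the signature as an assumption.

def validate_enum(value, name, allowed, errors, case_sensitive=False):
    # Free-function form of the assumed helper, for illustration only.
    candidate = value if case_sensitive else value.lower()
    if candidate not in allowed:
        errors.append(f"Invalid {name}: {value}. Must be one of: {', '.join(allowed)}")
        return False
    return True

errors = []
assert validate_enum("min", "cache-mode", ["min", "max", "inline"], errors, case_sensitive=True)
assert not validate_enum("harbor", "registry", ["dockerhub", "github", "both"], errors)
assert errors == ["Invalid registry: harbor. Must be one of: dockerhub, github, both"]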
@@ -112,7 +112,7 @@ runs:
           dockerhub|github|both)
             ;;
           *)
-            echo "::error::Invalid registry value. Must be 'dockerhub', 'github', or 'both'"
+            printf '%s\n' "::error::Invalid registry value. Must be 'dockerhub', 'github', or 'both'"
             exit 1
             ;;
         esac

@@ -120,7 +120,7 @@ runs:
         # Validate Docker Hub credentials if needed
         if [ "$INPUT_REGISTRY" = "dockerhub" ] || [ "$INPUT_REGISTRY" = "both" ]; then
           if [ -z "$INPUT_DOCKERHUB_USERNAME" ] || [ -z "$INPUT_DOCKERHUB_TOKEN" ]; then
-            echo "::error::Docker Hub username and token are required when publishing to Docker Hub"
+            printf '%s\n' "::error::Docker Hub username and token are required when publishing to Docker Hub"
             exit 1
           fi
         fi

@@ -129,49 +129,80 @@ runs:
         if [ "$INPUT_REGISTRY" = "github" ] || [ "$INPUT_REGISTRY" = "both" ]; then
           token="${INPUT_TOKEN:-${GITHUB_TOKEN:-}}"
           if [ -z "$token" ]; then
-            echo "::error::GitHub token is required when publishing to GitHub Packages"
+            printf '%s\n' "::error::GitHub token is required when publishing to GitHub Packages"
             exit 1
           fi
         fi

         # Validate context input for security
         INPUT_CONTEXT="${INPUT_CONTEXT:-.}"

         case "$INPUT_CONTEXT" in
           .|./*|*/*)
             # Relative paths are allowed
+            # Check for path traversal attempts
+            case "$INPUT_CONTEXT" in
+              */../*|../*|*/..)
+                printf '%s\n' "::error::Context path contains path traversal: '$INPUT_CONTEXT'"
+                exit 1
+                ;;
+            esac
             ;;
           /*)
-            echo "::error::Context cannot be an absolute path: '$INPUT_CONTEXT'"
-            echo "::error::Use relative paths (e.g., '.', './app') to prevent code injection"
+            printf '%s\n' "::error::Context cannot be an absolute path: '$INPUT_CONTEXT'"
+            printf '%s\n' "::error::Use relative paths (e.g., '.', './app')"
             exit 1
             ;;
-          *://*)
-            echo "::warning::Context is a remote URL: '$INPUT_CONTEXT'"
-            echo "::warning::Ensure this URL is from a trusted source to prevent code injection"
+          git://*|git@*|https://*.git|https://github.com/*|https://gitlab.com/*)
+            # Allow trusted git repository URLs
+            printf '%s\n' "::notice::Using git repository URL for context"
+            ;;
+          http://*|https://*)
+            printf '%s\n' "::error::Context cannot be an arbitrary HTTP URL: '$INPUT_CONTEXT'"
+            printf '%s\n' "::error::Only git repository URLs are allowed for remote contexts"
+            exit 1
+            ;;
+          *)
+            printf '%s\n' "::error::Invalid context format: '$INPUT_CONTEXT'"
+            printf '%s\n' "::error::Must be a relative path or git repository URL"
+            exit 1
             ;;
         esac

         # Validate dockerfile input for security
         INPUT_DOCKERFILE="${INPUT_DOCKERFILE:-Dockerfile}"

         case "$INPUT_DOCKERFILE" in
           Dockerfile|*/Dockerfile|*.dockerfile|*/*.dockerfile)
             # Common dockerfile patterns are allowed
+            # Check for path traversal attempts
+            case "$INPUT_DOCKERFILE" in
+              */../*|../*|*/..)
+                printf '%s\n' "::error::Dockerfile path contains path traversal: '$INPUT_DOCKERFILE'"
+                exit 1
+                ;;
+            esac
             ;;
           /*)
-            echo "::error::Dockerfile path cannot be absolute: '$INPUT_DOCKERFILE'"
-            echo "::error::Use relative paths (e.g., 'Dockerfile', './docker/Dockerfile')"
+            printf '%s\n' "::error::Dockerfile path cannot be absolute: '$INPUT_DOCKERFILE'"
+            printf '%s\n' "::error::Use relative paths (e.g., 'Dockerfile', './docker/Dockerfile')"
             exit 1
             ;;
           *://*)
-            echo "::error::Dockerfile path cannot be a URL: '$INPUT_DOCKERFILE'"
+            printf '%s\n' "::error::Dockerfile path cannot be a URL: '$INPUT_DOCKERFILE'"
+            exit 1
+            ;;
+          *)
+            printf '%s\n' "::error::Invalid Dockerfile format: '$INPUT_DOCKERFILE'"
+            printf '%s\n' "::error::Must be 'Dockerfile', '*/Dockerfile', '*.dockerfile', or '*/*.dockerfile'"
             exit 1
             ;;
         esac

-        echo "Input validation completed successfully"
+        printf '%s\n' "Input validation completed successfully"

     - name: Set up Docker Buildx
-      uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+      uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0

     - name: Determine Image Names and Tags
       id: meta

@@ -223,14 +254,14 @@ runs:
         # Output results
         printf 'image-name=%s\n' "$base_name" >> "$GITHUB_OUTPUT"
         {
-          echo 'tags<<EOF'
-          echo "$tags"
-          echo 'EOF'
+          printf '%s\n' 'tags<<EOF'
+          printf '%s\n' "$tags"
+          printf '%s\n' 'EOF'
         } >> "$GITHUB_OUTPUT"

-        echo "Image name: $base_name"
-        echo "Tags:"
-        echo "$tags"
+        printf 'Image name: %s\n' "$base_name"
+        printf '%s\n' "Tags:"
+        printf '%s\n' "$tags"

     - name: Login to Docker Hub
       if: inputs.registry == 'dockerhub' || inputs.registry == 'both'
@@ -288,9 +288,9 @@ runs:
         echo "Detected package manager: $package_manager"

     - name: Setup Node.js
-      uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+      uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
       with:
-        node-version: '22'
+        node-version: '24'

     - name: Enable Corepack
       shell: sh

@@ -319,13 +319,13 @@ runs:

     - name: Setup Bun
       if: steps.detect-pm.outputs.package-manager == 'bun'
-      uses: oven-sh/setup-bun@735343b667d3e6f658f44d0eca948eb6282f2b76 # v2.0.2
+      uses: oven-sh/setup-bun@3d267786b128fe76c2f16a390aa2448b815359f3 # v2.1.2
       with:
         bun-version: latest

     - name: Cache Node Dependencies
       id: cache
-      uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+      uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
       with:
         path: node_modules
         key: ${{ runner.os }}-eslint-lint-${{ inputs.mode }}-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}

@@ -457,7 +457,7 @@ runs:

     - name: Upload SARIF Report
       if: inputs.mode == 'check' && inputs.report-format == 'sarif' && always()
-      uses: github/codeql-action/upload-sarif@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+      uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
       with:
         sarif_file: ${{ inputs.working-directory }}/eslint-results.sarif

@@ -508,7 +508,7 @@ runs:

     - name: Commit and Push Fixes
       if: inputs.mode == 'fix' && success()
-      uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
+      uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
       with:
         commit_message: 'style: autofix ESLint violations'
         commit_user_name: ${{ inputs.username }}
@@ -159,7 +159,7 @@ runs:
         echo "Final detected Go version: $detected_version" >&2

     - name: Setup Go
-      uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+      uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
       with:
         go-version: ${{ steps.detect-go-version.outputs.detected-version }}
         cache: true

@@ -253,7 +253,7 @@ runs:

     - name: Upload Build Artifacts
       if: always()
-      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+      uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
       with:
         name: go-build-artifacts
         path: |
@@ -37,105 +37,78 @@ class CustomValidator(BaseValidator):

         # Validate working-directory if provided
         if inputs.get("working-directory"):
-            result = self.file_validator.validate_file_path(
-                inputs["working-directory"], "working-directory"
+            valid &= self.validate_with(
+                self.file_validator,
+                "validate_file_path",
+                inputs["working-directory"],
+                "working-directory",
             )
-            for error in self.file_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.file_validator.clear_errors()
-            if not result:
-                valid = False

         # Validate golangci-lint-version if provided
         if inputs.get("golangci-lint-version"):
             value = inputs["golangci-lint-version"]
-            # Accept 'latest' or version format
             if value != "latest" and not self.is_github_expression(value):
-                result = self.version_validator.validate_semantic_version(
-                    value, "golangci-lint-version"
+                valid &= self.validate_with(
+                    self.version_validator,
+                    "validate_semantic_version",
+                    value,
+                    "golangci-lint-version",
                 )
-                for error in self.version_validator.errors:
-                    if error not in self.errors:
-                        self.add_error(error)
-                self.version_validator.clear_errors()
-                if not result:
-                    valid = False

         # Validate go-version if provided
         if inputs.get("go-version"):
             value = inputs["go-version"]
-            # Accept 'stable', 'oldstable' or version format
             if value not in ["stable", "oldstable"] and not self.is_github_expression(value):
-                result = self.version_validator.validate_go_version(value, "go-version")
-                for error in self.version_validator.errors:
-                    if error not in self.errors:
-                        self.add_error(error)
-                self.version_validator.clear_errors()
-                if not result:
-                    valid = False
+                valid &= self.validate_with(
+                    self.version_validator, "validate_go_version", value, "go-version"
+                )

         # Validate config-file if provided
         if inputs.get("config-file"):
-            result = self.file_validator.validate_file_path(inputs["config-file"], "config-file")
-            for error in self.file_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.file_validator.clear_errors()
-            if not result:
-                valid = False
+            valid &= self.validate_with(
+                self.file_validator, "validate_file_path", inputs["config-file"], "config-file"
+            )

         # Validate timeout if provided
         if inputs.get("timeout"):
             value = inputs["timeout"]
-            # Validate timeout format (e.g., 5m, 1h, 30s)
-            if not self.is_github_expression(value):
-                timeout_pattern = r"^\d+[smh]$"
-                if not re.match(timeout_pattern, value):
-                    self.add_error(
-                        f"Invalid timeout format: {value}. Expected format like '5m', '1h', '30s'"
-                    )
-                    valid = False
+            if not self.is_github_expression(value) and not re.match(r"^\d+[smh]$", value):
+                self.add_error(
+                    f"Invalid timeout format: {value}. Expected format like '5m', '1h', '30s'"
+                )
+                valid = False

         # Validate boolean inputs
         for field in ["cache", "fail-on-error", "only-new-issues", "disable-all"]:
             if inputs.get(field):
-                result = self.boolean_validator.validate_boolean(inputs[field], field)
-                for error in self.boolean_validator.errors:
-                    if error not in self.errors:
-                        self.add_error(error)
-                self.boolean_validator.clear_errors()
-                if not result:
-                    valid = False
+                valid &= self.validate_with(
+                    self.boolean_validator, "validate_boolean", inputs[field], field
+                )

         # Validate report-format
         if inputs.get("report-format"):
-            value = inputs["report-format"]
-            valid_formats = ["json", "sarif", "github-actions", "colored-line-number", "tab"]
-            if value not in valid_formats and not self.is_github_expression(value):
-                self.add_error(
-                    f"Invalid report format: {value}. Must be one of: {', '.join(valid_formats)}"
-                )
-                valid = False
+            valid &= self.validate_enum(
+                inputs["report-format"],
+                "report-format",
+                ["json", "sarif", "github-actions", "colored-line-number", "tab"],
+                case_sensitive=True,
+            )

         # Validate max-retries
         if inputs.get("max-retries"):
-            result = self.numeric_validator.validate_numeric_range(
-                inputs["max-retries"], min_val=1, max_val=10, name="max-retries"
+            valid &= self.validate_with(
+                self.numeric_validator,
+                "validate_numeric_range",
+                inputs["max-retries"],
+                min_val=1,
+                max_val=10,
+                name="max-retries",
             )
-            for error in self.numeric_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.numeric_validator.clear_errors()
-            if not result:
-                valid = False

         # Validate enable-linters and disable-linters
         for field in ["enable-linters", "disable-linters"]:
             if inputs.get(field):
                 value = inputs[field]

-                # First check format - must be comma-separated without spaces
                 if not self.is_github_expression(value):
                     if " " in value:
                         self.add_error(f"Invalid {field} format: spaces not allowed in linter list")

@@ -145,15 +118,9 @@ class CustomValidator(BaseValidator):
                             f"Invalid {field} format: must be comma-separated list of linters"
                         )
                         valid = False
-
-                    # Then check for injection
-                    result = self.security_validator.validate_no_injection(value, field)
-                    for error in self.security_validator.errors:
-                        if error not in self.errors:
-                            self.add_error(error)
-                    self.security_validator.clear_errors()
-                    if not result:
-                        valid = False
+                    valid &= self.validate_with(
+                        self.security_validator, "validate_no_injection", value, field
+                    )

         return valid
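The tightened timeout check above keeps the ^\d+[smh]$ pattern, so a bare number, a fractional value, or an uppercase unit is still rejected. For illustration, with example values only:

import re

TIMEOUT = re.compile(r"^\d+[smh]$")

for accepted in ("30s", "5m", "1h"):
    assert TIMEOUT.match(accepted)
for rejected in ("90", "1.5h", "5 m", "5M"):
    assert not TIMEOUT.match(rejected)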
@@ -205,7 +205,7 @@ runs:
         validate_linter_list "$DISABLE_LINTERS" "disable-linters"

     - name: Setup Go
-      uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+      uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
       with:
         go-version: ${{ inputs.go-version }}
         cache: true

@@ -218,7 +218,7 @@ runs:
     - name: Cache golangci-lint
       id: cache
       if: inputs.cache == 'true'
-      uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+      uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
       with:
         path: |
           ~/.cache/golangci-lint

@@ -414,7 +414,7 @@ runs:

     - name: Upload Lint Results
       if: always() && inputs.report-format == 'sarif'
-      uses: github/codeql-action/upload-sarif@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+      uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
       with:
         sarif_file: ${{ inputs.working-directory }}/reports/golangci-lint.sarif
         category: golangci-lint
@@ -42,109 +42,40 @@ class CustomValidator(BaseValidator):
             self.add_error("Input 'npm_token' is required")
             valid = False
         elif inputs["npm_token"]:
-            token = inputs["npm_token"]
-            # Check for NPM classic token format first
-            if token.startswith("npm_"):
-                # NPM classic token format: npm_ followed by 36+ alphanumeric characters
-                if not re.match(r"^npm_[a-zA-Z0-9]{36,}$", token):
-                    self.add_error("Invalid NPM token format")
-                    valid = False
-                # Also check for injection
-                result = self.security_validator.validate_no_injection(token, "npm_token")
-                for error in self.security_validator.errors:
-                    if error not in self.errors:
-                        self.add_error(error)
-                self.security_validator.clear_errors()
-                if not result:
-                    valid = False
-            else:
-                # Otherwise validate as GitHub token
-                result = self.token_validator.validate_github_token(token, required=True)
-                for error in self.token_validator.errors:
-                    if error not in self.errors:
-                        self.add_error(error)
-                self.token_validator.clear_errors()
-                if not result:
-                    valid = False
+            valid &= self._validate_npm_token(inputs["npm_token"])

         # Validate registry-url
         if inputs.get("registry-url"):
-            url = inputs["registry-url"]
-            if not self.is_github_expression(url):
-                # Must be http or https URL
-                if not url.startswith(("http://", "https://")):
-                    self.add_error("Registry URL must use http or https protocol")
-                    valid = False
-                else:
-                    # Validate URL format
-                    result = self.network_validator.validate_url(url, "registry-url")
-                    for error in self.network_validator.errors:
-                        if error not in self.errors:
-                            self.add_error(error)
-                    self.network_validator.clear_errors()
-                    if not result:
-                        valid = False
+            valid &= self._validate_registry_url(inputs["registry-url"])

         # Validate scope
         if inputs.get("scope"):
-            scope = inputs["scope"]
-            if not self.is_github_expression(scope):
-                # Scope must start with @ and contain only valid characters
-                if not scope.startswith("@"):
-                    self.add_error("Scope must start with @ symbol")
-                    valid = False
-                elif not re.match(r"^@[a-z0-9][a-z0-9\-_.]*$", scope):
-                    self.add_error(
-                        "Invalid scope format: must be @org-name with lowercase "
-                        "letters, numbers, hyphens, dots, and underscores"
-                    )
-                    valid = False
-
-                # Check for injection
-                result = self.security_validator.validate_no_injection(scope, "scope")
-                for error in self.security_validator.errors:
-                    if error not in self.errors:
-                        self.add_error(error)
-                self.security_validator.clear_errors()
-                if not result:
-                    valid = False
+            valid &= self._validate_scope(inputs["scope"])

         # Validate access
         if inputs.get("access"):
-            access = inputs["access"]
-            if not self.is_github_expression(access):
-                valid_access = ["public", "restricted", "private"]
-                if access and access not in valid_access:
-                    self.add_error(
-                        f"Invalid access level: {access}. Must be one of: {', '.join(valid_access)}"
-                    )
-                    valid = False
+            valid &= self.validate_enum(
+                inputs["access"], "access", ["public", "restricted", "private"]
+            )

         # Validate boolean inputs (only always-auth and include-merged-tags are strict)
         for field in ["always-auth", "include-merged-tags"]:
             if inputs.get(field):
-                result = self.boolean_validator.validate_boolean(inputs[field], field)
-                for error in self.boolean_validator.errors:
-                    if error not in self.errors:
-                        self.add_error(error)
-                self.boolean_validator.clear_errors()
-                if not result:
-                    valid = False
+                valid &= self.validate_with(
+                    self.boolean_validator, "validate_boolean", inputs[field], field
+                )

         # provenance and dry-run accept any value (npm handles them)
         # No validation needed for these

         # Validate package-version
         if inputs.get("package-version"):
-            result = self.version_validator.validate_semantic_version(
-                inputs["package-version"], "package-version"
+            valid &= self.validate_with(
+                self.version_validator,
+                "validate_semantic_version",
+                inputs["package-version"],
+                "package-version",
             )
-            for error in self.version_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.version_validator.clear_errors()
-            if not result:
-                valid = False

         # Validate tag
         if inputs.get("tag"):

@@ -161,16 +92,57 @@ class CustomValidator(BaseValidator):
         # Validate working-directory and ignore-scripts as file paths
         for field in ["working-directory", "ignore-scripts"]:
             if inputs.get(field):
-                result = self.file_validator.validate_path(inputs[field], field)
-                for error in self.file_validator.errors:
-                    if error not in self.errors:
-                        self.add_error(error)
-                self.file_validator.clear_errors()
-                if not result:
-                    valid = False
+                valid &= self.validate_with(
+                    self.file_validator, "validate_path", inputs[field], field
+                )

         return valid

+    def _validate_npm_token(self, token: str) -> bool:
+        """Validate NPM token format."""
+        # Check for NPM classic token format first
+        if token.startswith("npm_"):
+            # NPM classic token format: npm_ followed by 36+ alphanumeric characters
+            if not re.match(r"^npm_[a-zA-Z0-9]{36,}$", token):
+                self.add_error("Invalid NPM token format")
+                return False
+            # Also check for injection
+            return self.validate_with(
+                self.security_validator, "validate_no_injection", token, "npm_token"
+            )
+        # Otherwise validate as GitHub token
+        return self.validate_with(
+            self.token_validator, "validate_github_token", token, required=True
+        )
+
+    def _validate_registry_url(self, url: str) -> bool:
+        """Validate registry URL format."""
+        if self.is_github_expression(url):
+            return True
+        # Must be http or https URL
+        if not url.startswith(("http://", "https://")):
+            self.add_error("Registry URL must use http or https protocol")
+            return False
+        # Validate URL format
+        return self.validate_with(self.network_validator, "validate_url", url, "registry-url")
+
+    def _validate_scope(self, scope: str) -> bool:
+        """Validate NPM scope format."""
+        if self.is_github_expression(scope):
+            return True
+        # Scope must start with @ and contain only valid characters
+        if not scope.startswith("@"):
+            self.add_error("Scope must start with @ symbol")
+            return False
+        if not re.match(r"^@[a-z0-9][a-z0-9\-_.]*$", scope):
+            self.add_error(
+                "Invalid scope format: must be @org-name with lowercase "
+                "letters, numbers, hyphens, dots, and underscores"
+            )
+            return False
+        # Check for injection
+        return self.validate_with(self.security_validator, "validate_no_injection", scope, "scope")
+
     def get_required_inputs(self) -> list[str]:
         """Get list of required inputs."""
         return ["npm_token"]
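The new _validate_npm_token and _validate_scope helpers keep the same patterns as the inlined code they replace. A small illustration of those two regexes with made-up values:

import re

NPM_CLASSIC_TOKEN = re.compile(r"^npm_[a-zA-Z0-9]{36,}$")
NPM_SCOPE = re.compile(r"^@[a-z0-9][a-z0-9\-_.]*$")

assert NPM_CLASSIC_TOKEN.match("npm_" + "a" * 36)   # dummy value, not a real token
assert not NPM_CLASSIC_TOKEN.match("npm_tooshort")
assert NPM_SCOPE.match("@my-org")
assert not NPM_SCOPE.match("my-org")                # scopes must start with '@'
assert not NPM_SCOPE.match("@My-Org")               # uppercase is rejected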
@@ -121,9 +121,9 @@ runs:
         echo "Detected package manager: $package_manager"

     - name: Setup Node.js
-      uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+      uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
       with:
-        node-version: '22'
+        node-version: '24'

     - name: Enable Corepack
       shell: sh

@@ -152,13 +152,13 @@ runs:

     - name: Setup Bun
       if: steps.detect-pm.outputs.package-manager == 'bun'
-      uses: oven-sh/setup-bun@735343b667d3e6f658f44d0eca948eb6282f2b76 # v2.0.2
+      uses: oven-sh/setup-bun@3d267786b128fe76c2f16a390aa2448b815359f3 # v2.1.2
       with:
         bun-version: latest

     - name: Cache Node Dependencies
       id: cache
-      uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+      uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
       with:
         path: node_modules
         key: ${{ runner.os }}-npm-publish-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}
49  package-lock.json  (generated)

@@ -13,7 +13,7 @@
         "js-yaml": "^4.1.0",
         "markdown-table": "^3.0.3",
         "markdown-table-formatter": "^1.6.0",
-        "markdownlint-cli2": "^0.19.0",
+        "markdownlint-cli2": "^0.20.0",
         "prettier": "^3.3.3",
         "yaml-lint": "^1.7.0"
       },

@@ -661,6 +661,19 @@
         "node": "6.* || 8.* || >= 10.*"
       }
     },
+    "node_modules/get-east-asian-width": {
+      "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz",
+      "integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">=18"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "node_modules/glob": {
       "version": "10.5.0",
       "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz",

@@ -1051,9 +1064,9 @@
       }
     },
     "node_modules/markdownlint": {
-      "version": "0.39.0",
-      "resolved": "https://registry.npmjs.org/markdownlint/-/markdownlint-0.39.0.tgz",
-      "integrity": "sha512-Xt/oY7bAiHwukL1iru2np5LIkhwD19Y7frlsiDILK62v3jucXCD6JXlZlwMG12HZOR+roHIVuJZrfCkOhp6k3g==",
+      "version": "0.40.0",
+      "resolved": "https://registry.npmjs.org/markdownlint/-/markdownlint-0.40.0.tgz",
+      "integrity": "sha512-UKybllYNheWac61Ia7T6fzuQNDZimFIpCg2w6hHjgV1Qu0w1TV0LlSgryUGzM0bkKQCBhy2FDhEELB73Kb0kAg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {

@@ -1064,7 +1077,8 @@
         "micromark-extension-gfm-footnote": "2.1.0",
         "micromark-extension-gfm-table": "2.1.1",
         "micromark-extension-math": "3.1.0",
-        "micromark-util-types": "2.0.2"
+        "micromark-util-types": "2.0.2",
+        "string-width": "8.1.0"
       },
       "engines": {
         "node": ">=20"

@@ -1074,9 +1088,9 @@
       }
     },
     "node_modules/markdownlint-cli2": {
-      "version": "0.19.0",
-      "resolved": "https://registry.npmjs.org/markdownlint-cli2/-/markdownlint-cli2-0.19.0.tgz",
-      "integrity": "sha512-0+g7Fi/Y3qfvwfhJr77CpC/dEEoc4k7SvumlnL1tb68O+7fjKtIUG7aKzNUQIMXTVi8x63jcfXg4swz/ZYKyCw==",
+      "version": "0.20.0",
+      "resolved": "https://registry.npmjs.org/markdownlint-cli2/-/markdownlint-cli2-0.20.0.tgz",
+      "integrity": "sha512-esPk+8Qvx/f0bzI7YelUeZp+jCtFOk3KjZ7s9iBQZ6HlymSXoTtWGiIRZP05/9Oy2ehIoIjenVwndxGtxOIJYQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {

@@ -1084,7 +1098,7 @@
         "js-yaml": "4.1.1",
         "jsonc-parser": "3.3.1",
         "markdown-it": "14.1.0",
-        "markdownlint": "0.39.0",
+        "markdownlint": "0.40.0",
         "markdownlint-cli2-formatter-default": "0.0.6",
         "micromatch": "4.0.8"
       },

@@ -1111,6 +1125,23 @@
         "markdownlint-cli2": ">=0.0.4"
       }
     },
+    "node_modules/markdownlint/node_modules/string-width": {
+      "version": "8.1.0",
+      "resolved": "https://registry.npmjs.org/string-width/-/string-width-8.1.0.tgz",
+      "integrity": "sha512-Kxl3KJGb/gxkaUMOjRsQ8IrXiGW75O4E3RPjFIINOVH8AMl2SQ/yWdTzWwF3FevIX9LcMAjJW+GRwAlAbTSXdg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "get-east-asian-width": "^1.3.0",
+        "strip-ansi": "^7.1.0"
+      },
+      "engines": {
+        "node": ">=20"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "node_modules/mdurl": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz",
@@ -24,7 +24,7 @@
     "js-yaml": "^4.1.0",
     "markdown-table": "^3.0.3",
     "markdown-table-formatter": "^1.6.0",
-    "markdownlint-cli2": "^0.19.0",
+    "markdownlint-cli2": "^0.20.0",
     "prettier": "^3.3.3",
     "yaml-lint": "^1.7.0"
   },
@@ -33,59 +33,31 @@ class CustomValidator(BaseValidator):
         # Validate token (optional)
         if inputs.get("token"):
             token = inputs["token"]
-            result = self.token_validator.validate_github_token(token)
-            for error in self.token_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.token_validator.clear_errors()
-            if not result:
-                valid = False
+            valid &= self.validate_with(self.token_validator, "validate_github_token", token)

             # Also check for variable expansion
             if not self.is_github_expression(token):
-                result = self.security_validator.validate_no_injection(token, "token")
-                for error in self.security_validator.errors:
-                    if error not in self.errors:
-                        self.add_error(error)
-                self.security_validator.clear_errors()
-                if not result:
-                    valid = False
+                valid &= self.validate_with(
+                    self.security_validator, "validate_no_injection", token, "token"
+                )

         # Validate email (optional, empty means use default)
-        if "email" in inputs and inputs["email"] and inputs["email"] != "":
+        if inputs.get("email"):
             email = inputs["email"]
-            result = self.network_validator.validate_email(email, "email")
-            for error in self.network_validator.errors:
-                if error not in self.errors:
-                    self.add_error(error)
-            self.network_validator.clear_errors()
-            if not result:
-                valid = False
+            valid &= self.validate_with(self.network_validator, "validate_email", email, "email")

             # Also check for shell metacharacters (but allow @ and .)
             if not self.is_github_expression(email):
-                # Only check for dangerous shell metacharacters, not @ or .
                 dangerous_chars = [";", "&", "|", "`", "$", "(", ")", "<", ">", "\n", "\r"]
-                for char in dangerous_chars:
-                    if char in email:
-                        self.add_error(f"email: Contains dangerous character '{char}'")
-                        valid = False
-                        break
+                if any(char in email for char in dangerous_chars):
+                    self.add_error("email: Contains dangerous shell metacharacter")
+                    valid = False

         # Validate username (optional)
         if inputs.get("username"):
             username = inputs["username"]
             if not self.is_github_expression(username):
-                # Check for injection
-                result = self.security_validator.validate_no_injection(username, "username")
-                for error in self.security_validator.errors:
-                    if error not in self.errors:
-                        self.add_error(error)
-                self.security_validator.clear_errors()
-                if not result:
-                    valid = False
+                valid &= self.validate_with(
+                    self.security_validator, "validate_no_injection", username, "username"
+                )

-                # Check username length (GitHub usernames are max 39 characters)
                 if len(username) > 39:
                     self.add_error("Username is too long (max 39 characters)")
                     valid = False
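The email check above now uses a single any() pass over the metacharacter list and reports one generic message instead of naming the offending character. A tiny illustration of the same check; the addresses are examples:

DANGEROUS_CHARS = [";", "&", "|", "`", "$", "(", ")", "<", ">", "\n", "\r"]

def has_shell_metacharacters(value: str) -> bool:
    # Same any() test the refactored validator uses; '@' and '.' stay allowed.
    return any(char in value for char in DANGEROUS_CHARS)

assert not has_shell_metacharacters("user@example.com")
assert has_shell_metacharacters("user@example.com;id")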
@@ -319,7 +319,7 @@ runs:

     - name: Setup PHP
       id: setup-php
-      uses: shivammathur/setup-php@bf6b4fbd49ca58e4608c9c89fba0b8d90bd2a39f # 2.35.5
+      uses: shivammathur/setup-php@44454db4f0199b8b9685a5d763dc37cbf79108e1 # 2.36.0
       with:
         php-version: ${{ steps.detect-php-version.outputs.detected-version }}
         extensions: ${{ inputs.extensions }}

@@ -356,7 +356,7 @@ runs:

     - name: Cache Composer packages
       id: composer-cache
-      uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+      uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
       with:
         path: |
           vendor
@@ -54,13 +54,9 @@ runs:
       uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
       with:
         token: ${{ inputs.token || github.token }}
-        ref: ${{ github.event.pull_request.head.ref || github.head_ref || github.ref_name }}
+        ref: ${{ github.event.pull_request.head.sha || github.sha }}
         persist-credentials: false
-
-        # If you use VALIDATE_ALL_CODEBASE = true, you can remove this line to
-        # improve performance
-        fetch-depth: 0

     # ╭──────────────────────────────────────────────────────────╮
     # │ Install packages for linting │
     # ╰──────────────────────────────────────────────────────────╯

@@ -74,6 +70,29 @@ runs:

         if [ -f package.json ]; then
           printf '%s\n' "found=true" >> "$GITHUB_OUTPUT"
+
+          # Check if packageManager field is set (for corepack)
+          if command -v jq >/dev/null 2>&1; then
+            has_package_manager=$(jq -r '.packageManager // empty' package.json 2>/dev/null || printf '')
+            if [ -n "$has_package_manager" ]; then
+              printf '%s\n' "has-package-manager=true" >> "$GITHUB_OUTPUT"
+              printf 'Found packageManager field: %s\n' "$has_package_manager"
+            else
+              printf '%s\n' "has-package-manager=false" >> "$GITHUB_OUTPUT"
+            fi
+          else
+            # Fallback: check with grep if jq not available
+            # Use robust pattern to verify non-empty value
+            if grep -q '"packageManager"[[:space:]]*:[[:space:]]*"[^"]\+"' package.json 2>/dev/null; then
+              printf '%s\n' "has-package-manager=true" >> "$GITHUB_OUTPUT"
+              printf '%s\n' "Found packageManager field in package.json"
+            else
+              printf '%s\n' "has-package-manager=false" >> "$GITHUB_OUTPUT"
+            fi
+          fi
+        else
+          # Explicitly set has-package-manager to false when package.json doesn't exist
+          printf '%s\n' "has-package-manager=false" >> "$GITHUB_OUTPUT"
         fi

     - name: Detect Package Manager

@@ -99,30 +118,35 @@ runs:

     - name: Setup Node.js
       if: steps.detect-node.outputs.found == 'true'
-      uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+      uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
       with:
-        node-version: '22'
+        node-version: '24'

     - name: Enable Corepack
-      if: steps.detect-node.outputs.found == 'true'
+      if: steps.detect-node.outputs.found == 'true' && steps.detect-node.outputs.has-package-manager == 'true'
       shell: sh
       run: |
         set -eu
         corepack enable
+        printf '%s\n' "Corepack enabled - package manager will be installed automatically from package.json"

-    - name: Install Package Manager
-      if: steps.detect-node.outputs.found == 'true'
+    - name: Install Package Manager (Fallback)
+      if: steps.detect-node.outputs.found == 'true' && steps.detect-node.outputs.has-package-manager == 'false'
       shell: sh
       env:
         PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
       run: |
         set -eu

+        printf 'No packageManager field found, using detected package manager: %s\n' "$PACKAGE_MANAGER"
+
         case "$PACKAGE_MANAGER" in
           pnpm)
+            corepack enable
             corepack prepare pnpm@latest --activate
             ;;
           yarn)
+            corepack enable
             corepack prepare yarn@stable --activate
             ;;
           bun|npm)

@@ -132,14 +156,14 @@ runs:

     - name: Setup Bun
       if: steps.detect-node.outputs.found == 'true' && steps.detect-pm.outputs.package-manager == 'bun'
-      uses: oven-sh/setup-bun@735343b667d3e6f658f44d0eca948eb6282f2b76 # v2.0.2
+      uses: oven-sh/setup-bun@3d267786b128fe76c2f16a390aa2448b815359f3 # v2.1.2
       with:
         bun-version: latest
|
||||||
|
|
||||||
- name: Cache Node Dependencies
|
- name: Cache Node Dependencies
|
||||||
if: steps.detect-node.outputs.found == 'true'
|
if: steps.detect-node.outputs.found == 'true'
|
||||||
id: node-cache
|
id: node-cache
|
||||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||||
with:
|
with:
|
||||||
path: node_modules
|
path: node_modules
|
||||||
key: ${{ runner.os }}-pr-lint-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}
|
key: ${{ runner.os }}-pr-lint-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}
|
||||||
@@ -161,9 +185,14 @@ runs:
|
|||||||
pnpm install --frozen-lockfile
|
pnpm install --frozen-lockfile
|
||||||
;;
|
;;
|
||||||
"yarn")
|
"yarn")
|
||||||
if [ -f ".yarnrc.yml" ]; then
|
# Detect Yarn version by checking actual version output
|
||||||
|
# Yarn 2+ (Berry) uses --immutable, Yarn 1.x (Classic) uses --frozen-lockfile
|
||||||
|
yarn_version=$(yarn --version 2>/dev/null || printf '1.0.0')
|
||||||
|
if printf '%s' "$yarn_version" | grep -q '^[2-9]'; then
|
||||||
|
# Yarn 2+ (Berry) - use --immutable
|
||||||
yarn install --immutable
|
yarn install --immutable
|
||||||
else
|
else
|
||||||
|
# Yarn 1.x (Classic) - use --frozen-lockfile
|
||||||
yarn install --frozen-lockfile
|
yarn install --frozen-lockfile
|
||||||
fi
|
fi
|
||||||
;;
|
;;
|
||||||
@@ -306,7 +335,7 @@ runs:
|
|||||||
|
|
||||||
- name: Setup PHP
|
- name: Setup PHP
|
||||||
if: steps.detect-php.outputs.found == 'true'
|
if: steps.detect-php.outputs.found == 'true'
|
||||||
uses: shivammathur/setup-php@bf6b4fbd49ca58e4608c9c89fba0b8d90bd2a39f # 2.35.5
|
uses: shivammathur/setup-php@44454db4f0199b8b9685a5d763dc37cbf79108e1 # 2.36.0
|
||||||
with:
|
with:
|
||||||
php-version: ${{ steps.php-version.outputs.detected-version }}
|
php-version: ${{ steps.php-version.outputs.detected-version }}
|
||||||
tools: composer
|
tools: composer
|
||||||
@@ -323,7 +352,7 @@ runs:
|
|||||||
set -eu
|
set -eu
|
||||||
|
|
||||||
matcher_path=$(printf '%s' "$RUNNER_TOOL_CACHE/php.json" | tr -d '\n\r')
|
matcher_path=$(printf '%s' "$RUNNER_TOOL_CACHE/php.json" | tr -d '\n\r')
|
||||||
echo "::add-matcher::$matcher_path"
|
printf '%s\n' "::add-matcher::$matcher_path"
|
||||||
|
|
||||||
- name: Install PHP dependencies
|
- name: Install PHP dependencies
|
||||||
if: steps.detect-php.outputs.found == 'true'
|
if: steps.detect-php.outputs.found == 'true'
|
||||||
@@ -349,7 +378,7 @@ runs:
|
|||||||
id: python-version
|
id: python-version
|
||||||
shell: sh
|
shell: sh
|
||||||
env:
|
env:
|
||||||
DEFAULT_VERSION: '3.11'
|
DEFAULT_VERSION: '3.14'
|
||||||
run: |
|
run: |
|
||||||
set -eu
|
set -eu
|
||||||
|
|
||||||
@@ -486,7 +515,7 @@ runs:
|
|||||||
id: go-version
|
id: go-version
|
||||||
shell: sh
|
shell: sh
|
||||||
env:
|
env:
|
||||||
DEFAULT_VERSION: '1.24'
|
DEFAULT_VERSION: '1.25'
|
||||||
run: |
|
run: |
|
||||||
set -eu
|
set -eu
|
||||||
|
|
||||||
@@ -592,7 +621,7 @@ runs:
|
|||||||
|
|
||||||
- name: Setup Go
|
- name: Setup Go
|
||||||
if: steps.detect-go.outputs.found == 'true'
|
if: steps.detect-go.outputs.found == 'true'
|
||||||
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
|
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
|
||||||
with:
|
with:
|
||||||
go-version: ${{ steps.go-version.outputs.detected-version }}
|
go-version: ${{ steps.go-version.outputs.detected-version }}
|
||||||
cache: true
|
cache: true
|
||||||
@@ -603,7 +632,7 @@ runs:
|
|||||||
- name: MegaLinter
|
- name: MegaLinter
|
||||||
# You can override MegaLinter flavor used to have faster performances
|
# You can override MegaLinter flavor used to have faster performances
|
||||||
# More info at https://megalinter.io/latest/flavors/
|
# More info at https://megalinter.io/latest/flavors/
|
||||||
uses: oxsecurity/megalinter/flavors/cupcake@62c799d895af9bcbca5eacfebca29d527f125a57 # v9.1.0
|
uses: oxsecurity/megalinter/flavors/cupcake@42bb470545e359597e7f12156947c436e4e3fb9a # v9.3.0
|
||||||
id: ml
|
id: ml
|
||||||
|
|
||||||
# All available variables are described in documentation
|
# All available variables are described in documentation
|
||||||
@@ -621,11 +650,7 @@ runs:
|
|||||||
# github.event_name == 'push' &&
|
# github.event_name == 'push' &&
|
||||||
# contains(fromJSON('["refs/heads/main", "refs/heads/master"]'), github.ref)
|
# contains(fromJSON('["refs/heads/main", "refs/heads/master"]'), github.ref)
|
||||||
# }}
|
# }}
|
||||||
VALIDATE_ALL_CODEBASE: >-
|
VALIDATE_ALL_CODEBASE: false
|
||||||
${{
|
|
||||||
github.event_name == 'push' &&
|
|
||||||
contains(fromJSON('["refs/heads/main", "refs/heads/master"]'), github.ref)
|
|
||||||
}}
|
|
||||||
|
|
||||||
GITHUB_TOKEN: ${{ inputs.token || github.token }}
|
GITHUB_TOKEN: ${{ inputs.token || github.token }}
|
||||||
|
|
||||||
@@ -649,125 +674,13 @@ runs:
|
|||||||
# Uncomment to disable copy-paste and spell checks
|
# Uncomment to disable copy-paste and spell checks
|
||||||
DISABLE: COPYPASTE,SPELL
|
DISABLE: COPYPASTE,SPELL
|
||||||
|
|
||||||
# Export env vars to make them available for subsequent expressions
|
|
||||||
- name: Export Apply Fixes Variables
|
|
||||||
shell: sh
|
|
||||||
run: |
|
|
||||||
echo "APPLY_FIXES_EVENT=pull_request" >> "$GITHUB_ENV"
|
|
||||||
echo "APPLY_FIXES_MODE=commit" >> "$GITHUB_ENV"
|
|
||||||
|
|
||||||
# Upload MegaLinter artifacts
|
# Upload MegaLinter artifacts
|
||||||
- name: Archive production artifacts
|
- name: Archive production artifacts
|
||||||
if: success() || failure()
|
if: success() || failure()
|
||||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||||
with:
|
with:
|
||||||
name: MegaLinter reports
|
name: MegaLinter reports
|
||||||
include-hidden-files: 'true'
|
include-hidden-files: 'true'
|
||||||
path: |
|
path: |
|
||||||
megalinter-reports
|
megalinter-reports
|
||||||
mega-linter.log
|
mega-linter.log
|
||||||
|
|
||||||
# Set APPLY_FIXES_IF var for use in future steps
|
|
||||||
- name: Set APPLY_FIXES_IF var
|
|
||||||
shell: sh
|
|
||||||
env:
|
|
||||||
APPLY_FIXES_CONDITION: >-
|
|
||||||
${{
|
|
||||||
steps.ml.outputs.has_updated_sources == 1 &&
|
|
||||||
(env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) &&
|
|
||||||
(github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository)
|
|
||||||
}}
|
|
||||||
run: |
|
|
||||||
set -eu
|
|
||||||
|
|
||||||
# Sanitize by removing newlines to prevent env var injection
|
|
||||||
sanitized_condition="$(echo "$APPLY_FIXES_CONDITION" | tr -d '\n\r')"
|
|
||||||
printf 'APPLY_FIXES_IF=%s\n' "$sanitized_condition" >> "${GITHUB_ENV}"
|
|
||||||
|
|
||||||
# Set APPLY_FIXES_IF_* vars for use in future steps
|
|
||||||
- name: Set APPLY_FIXES_IF_* vars
|
|
||||||
shell: sh
|
|
||||||
env:
|
|
||||||
APPLY_FIXES_IF_PR_CONDITION: ${{ env.APPLY_FIXES_IF == 'true' && env.APPLY_FIXES_MODE == 'pull_request' }}
|
|
||||||
APPLY_FIXES_IF_COMMIT_CONDITION: ${{ env.APPLY_FIXES_IF == 'true' && env.APPLY_FIXES_MODE == 'commit' && (!contains(fromJSON('["refs/heads/main", "refs/heads/master"]'), github.ref)) }}
|
|
||||||
run: |
|
|
||||||
set -eu
|
|
||||||
|
|
||||||
# Sanitize by removing newlines to prevent env var injection
|
|
||||||
sanitized_pr="$(echo "$APPLY_FIXES_IF_PR_CONDITION" | tr -d '\n\r')"
|
|
||||||
sanitized_commit="$(echo "$APPLY_FIXES_IF_COMMIT_CONDITION" | tr -d '\n\r')"
|
|
||||||
|
|
||||||
printf 'APPLY_FIXES_IF_PR=%s\n' "$sanitized_pr" >> "${GITHUB_ENV}"
|
|
||||||
printf 'APPLY_FIXES_IF_COMMIT=%s\n' "$sanitized_commit" >> "${GITHUB_ENV}"
|
|
||||||
|
|
||||||
# Create pull request if applicable
|
|
||||||
# (for now works only on PR from same repository, not from forks)
|
|
||||||
- name: Create Pull Request with applied fixes
|
|
||||||
uses: peter-evans/create-pull-request@84ae59a2cdc2258d6fa0732dd66352dddae2a412 # v7.0.9
|
|
||||||
id: cpr
|
|
||||||
if: env.APPLY_FIXES_IF_PR == 'true'
|
|
||||||
with:
|
|
||||||
token: ${{ inputs.token || github.token }}
|
|
||||||
commit-message: 'style: apply linter fixes'
|
|
||||||
title: 'style: apply linter fixes'
|
|
||||||
labels: bot
|
|
||||||
|
|
||||||
- name: Create PR output
|
|
||||||
if: env.APPLY_FIXES_IF_PR == 'true'
|
|
||||||
shell: sh
|
|
||||||
env:
|
|
||||||
PR_NUMBER: ${{ steps.cpr.outputs.pull-request-number }}
|
|
||||||
PR_URL: ${{ steps.cpr.outputs.pull-request-url }}
|
|
||||||
run: |
|
|
||||||
set -eu
|
|
||||||
|
|
||||||
printf 'PR Number - %s\n' "$PR_NUMBER"
|
|
||||||
printf 'PR URL - %s\n' "$PR_URL"
|
|
||||||
|
|
||||||
# Push new commit if applicable
|
|
||||||
# (for now works only on PR from same repository, not from forks)
|
|
||||||
- name: Prepare commit
|
|
||||||
if: env.APPLY_FIXES_IF_COMMIT == 'true'
|
|
||||||
shell: sh
|
|
||||||
env:
|
|
||||||
BRANCH_REF: >-
|
|
||||||
${{
|
|
||||||
github.event.pull_request.head.ref ||
|
|
||||||
github.head_ref ||
|
|
||||||
github.ref_name
|
|
||||||
}}
|
|
||||||
run: |
|
|
||||||
set -eu
|
|
||||||
|
|
||||||
# Fix .git directory ownership after MegaLinter container execution
|
|
||||||
current_uid=$(id -u)
|
|
||||||
sudo chown -Rc "$current_uid" .git/
|
|
||||||
|
|
||||||
# Ensure we're on the correct branch (not in detached HEAD state)
|
|
||||||
# This is necessary because MegaLinter may leave the repo in a detached HEAD state
|
|
||||||
current_branch=$(git rev-parse --abbrev-ref HEAD)
|
|
||||||
if [ "$current_branch" = "HEAD" ]; then
|
|
||||||
echo "Repository is in detached HEAD state, checking out $BRANCH_REF"
|
|
||||||
# Validate branch reference to prevent command injection
|
|
||||||
if ! git check-ref-format --branch "$BRANCH_REF"; then
|
|
||||||
echo "::error::Invalid branch reference format: $BRANCH_REF"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
git checkout "$BRANCH_REF"
|
|
||||||
else
|
|
||||||
echo "Repository is on branch: $current_branch"
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Commit and push applied linter fixes
|
|
||||||
uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
|
|
||||||
if: env.APPLY_FIXES_IF_COMMIT == 'true'
|
|
||||||
with:
|
|
||||||
branch: >-
|
|
||||||
${{
|
|
||||||
github.event.pull_request.head.ref ||
|
|
||||||
github.head_ref ||
|
|
||||||
github.ref
|
|
||||||
}}
|
|
||||||
commit_message: 'style: apply linter fixes'
|
|
||||||
commit_user_name: ${{ inputs.username }}
|
|
||||||
commit_user_email: ${{ inputs.email }}
|
|
||||||
|
|||||||
@@ -34,74 +34,45 @@ class CustomValidator(BaseValidator):
|
|||||||
|
|
||||||
# Validate pre-commit-config if provided
|
# Validate pre-commit-config if provided
|
||||||
if "pre-commit-config" in inputs:
|
if "pre-commit-config" in inputs:
|
||||||
result = self.file_validator.validate_file_path(
|
valid &= self.validate_with(
|
||||||
inputs["pre-commit-config"], "pre-commit-config"
|
self.file_validator,
|
||||||
|
"validate_file_path",
|
||||||
|
inputs["pre-commit-config"],
|
||||||
|
"pre-commit-config",
|
||||||
)
|
)
|
||||||
for error in self.file_validator.errors:
|
|
||||||
if error not in self.errors:
|
|
||||||
self.add_error(error)
|
|
||||||
self.file_validator.clear_errors()
|
|
||||||
if not result:
|
|
||||||
valid = False
|
|
||||||
|
|
||||||
# Validate base-branch if provided (just check for injection)
|
# Validate base-branch if provided (just check for injection)
|
||||||
if inputs.get("base-branch"):
|
if inputs.get("base-branch"):
|
||||||
# Check for dangerous characters that could cause shell injection
|
valid &= self.validate_with(
|
||||||
result = self.security_validator.validate_no_injection(
|
self.security_validator,
|
||||||
inputs["base-branch"], "base-branch"
|
"validate_no_injection",
|
||||||
|
inputs["base-branch"],
|
||||||
|
"base-branch",
|
||||||
)
|
)
|
||||||
for error in self.security_validator.errors:
|
|
||||||
if error not in self.errors:
|
|
||||||
self.add_error(error)
|
|
||||||
self.security_validator.clear_errors()
|
|
||||||
if not result:
|
|
||||||
valid = False
|
|
||||||
|
|
||||||
# Validate token if provided
|
# Validate token if provided
|
||||||
if inputs.get("token"):
|
if inputs.get("token"):
|
||||||
result = self.token_validator.validate_github_token(inputs["token"])
|
valid &= self.validate_with(
|
||||||
for error in self.token_validator.errors:
|
self.token_validator, "validate_github_token", inputs["token"]
|
||||||
if error not in self.errors:
|
)
|
||||||
self.add_error(error)
|
|
||||||
self.token_validator.clear_errors()
|
|
||||||
if not result:
|
|
||||||
valid = False
|
|
||||||
|
|
||||||
# Validate commit_user if provided (allow spaces for Git usernames)
|
# Validate commit_user if provided (allow spaces for Git usernames)
|
||||||
# Check both underscore and hyphen versions since inputs can have either
|
commit_user_key = self.get_key_variant(inputs, "commit_user", "commit-user")
|
||||||
commit_user_key = (
|
|
||||||
"commit_user"
|
|
||||||
if "commit_user" in inputs
|
|
||||||
else "commit-user"
|
|
||||||
if "commit-user" in inputs
|
|
||||||
else None
|
|
||||||
)
|
|
||||||
if commit_user_key and inputs[commit_user_key]:
|
if commit_user_key and inputs[commit_user_key]:
|
||||||
# Check for dangerous injection patterns
|
|
||||||
value = inputs[commit_user_key]
|
value = inputs[commit_user_key]
|
||||||
if any(char in value for char in [";", "&", "|", "`", "$", "(", ")", "\n", "\r"]):
|
if any(c in value for c in [";", "&", "|", "`", "$", "(", ")", "\n", "\r"]):
|
||||||
self.add_error(f"{commit_user_key}: Contains potentially dangerous characters")
|
self.add_error(f"{commit_user_key}: Contains potentially dangerous characters")
|
||||||
valid = False
|
valid = False
|
||||||
|
|
||||||
# Validate commit_email if provided
|
# Validate commit_email if provided
|
||||||
# Check both underscore and hyphen versions
|
commit_email_key = self.get_key_variant(inputs, "commit_email", "commit-email")
|
||||||
commit_email_key = (
|
|
||||||
"commit_email"
|
|
||||||
if "commit_email" in inputs
|
|
||||||
else "commit-email"
|
|
||||||
if "commit-email" in inputs
|
|
||||||
else None
|
|
||||||
)
|
|
||||||
if commit_email_key and inputs[commit_email_key]:
|
if commit_email_key and inputs[commit_email_key]:
|
||||||
result = self.network_validator.validate_email(
|
valid &= self.validate_with(
|
||||||
inputs[commit_email_key], commit_email_key
|
self.network_validator,
|
||||||
|
"validate_email",
|
||||||
|
inputs[commit_email_key],
|
||||||
|
commit_email_key,
|
||||||
)
|
)
|
||||||
for error in self.network_validator.errors:
|
|
||||||
if error not in self.errors:
|
|
||||||
self.add_error(error)
|
|
||||||
self.network_validator.clear_errors()
|
|
||||||
if not result:
|
|
||||||
valid = False
|
|
||||||
|
|
||||||
return valid
|
return valid
|
||||||
|
|
||||||
|
|||||||
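The commit_user/commit_email handling above now resolves whichever spelling of the input is present through get_key_variant. A standalone sketch of that lookup, mirroring the helper added to BaseValidator later in this compare:

    # Standalone sketch of the get_key_variant lookup used above.
    def get_key_variant(inputs: dict[str, str], *variants: str) -> str | None:
        for key in variants:
            if key in inputs:
                return key
        return None

    inputs = {"commit-user": "github-actions[bot]"}
    key = get_key_variant(inputs, "commit_user", "commit-user")
    print(key)                           # commit-user
    print(inputs[key] if key else None)  # github-actions[bot]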
@@ -83,7 +83,7 @@ runs:
 - name: Push pre-commit fixes
 id: push-fixes
 if: always() # Push changes even when pre-commit fails
-uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
+uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
 with:
 commit_message: 'style(pre-commit): autofix'
 commit_user_name: ${{ inputs.commit_user }}

@@ -274,9 +274,9 @@ runs:
 echo "Detected package manager: $package_manager"

 - name: Setup Node.js
-uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
 with:
-node-version: '22'
+node-version: '24'

 - name: Enable Corepack
 shell: sh

@@ -305,13 +305,13 @@ runs:

 - name: Setup Bun
 if: steps.detect-pm.outputs.package-manager == 'bun'
-uses: oven-sh/setup-bun@735343b667d3e6f658f44d0eca948eb6282f2b76 # v2.0.2
+uses: oven-sh/setup-bun@3d267786b128fe76c2f16a390aa2448b815359f3 # v2.1.2
 with:
 bun-version: latest

 - name: Cache Node Dependencies
 id: cache
-uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
 with:
 path: node_modules
 key: ${{ runner.os }}-prettier-lint-${{ inputs.mode }}-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}

@@ -468,7 +468,7 @@ runs:

 - name: Commit and Push Fixes
 if: inputs.mode == 'fix' && success()
-uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
+uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
 with:
 commit_message: 'style: autofix Prettier formatting'
 commit_user_name: ${{ inputs.username }}

@@ -31,68 +31,42 @@ class CustomValidator(BaseValidator):
 valid = True

 # Validate python-version if provided
-if "python-version" in inputs or "python_version" in inputs:
-key = "python-version" if "python-version" in inputs else "python_version"
-value = inputs[key]
-# Empty string should fail validation
-if value == "":
+version_key = self.get_key_variant(inputs, "python-version", "python_version")
+if version_key:
+value = inputs[version_key]
+if not value:
 self.add_error("Python version cannot be empty")
 valid = False
-elif value:
-result = self.version_validator.validate_python_version(value, key)
-# Propagate errors from the version validator
-for error in self.version_validator.errors:
-if error not in self.errors:
-self.add_error(error)
-self.version_validator.clear_errors()
-if not result:
-valid = False
+else:
+valid &= self.validate_with(
+self.version_validator, "validate_python_version", value, version_key
+)

 # Validate username
-if "username" in inputs:
+if inputs.get("username"):
 username = inputs["username"]
-if username:
-# Check username length (GitHub usernames are max 39 characters)
-if len(username) > 39:
-self.add_error("Username is too long (max 39 characters)")
-valid = False
-# Check for command injection patterns
-if ";" in username or "`" in username or "$" in username:
-self.add_error("Username contains potentially dangerous characters")
-valid = False
+if len(username) > 39:
+self.add_error("Username is too long (max 39 characters)")
+valid = False
+if ";" in username or "`" in username or "$" in username:
+self.add_error("Username contains potentially dangerous characters")
+valid = False

 # Validate email
-if "email" in inputs:
-email = inputs["email"]
-if email:
-result = self.network_validator.validate_email(email, "email")
-for error in self.network_validator.errors:
-if error not in self.errors:
-self.add_error(error)
-self.network_validator.clear_errors()
-if not result:
-valid = False
+if inputs.get("email"):
+valid &= self.validate_with(
+self.network_validator, "validate_email", inputs["email"], "email"
+)

 # Validate token
-if "token" in inputs:
+if inputs.get("token"):
 token = inputs["token"]
-if token:
-# Check for variable expansion (but allow GitHub Actions expressions)
-if "${" in token and not token.startswith("${{ ") and not token.endswith(" }}"):
-self.add_error("Token contains potentially dangerous variable expansion")
-valid = False
-else:
-result = self.token_validator.validate_github_token(token)
-for error in self.token_validator.errors:
-if error not in self.errors:
-self.add_error(error)
-self.token_validator.clear_errors()
-if not result:
-valid = False
+# Check for variable expansion (but allow GitHub Actions expressions)
+if "${" in token and not token.startswith("${{ ") and not token.endswith(" }}"):
+self.add_error("Token contains potentially dangerous variable expansion")
+valid = False
+else:
+valid &= self.validate_with(self.token_validator, "validate_github_token", token)

 return valid
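The token branch above keeps accepting GitHub Actions expressions such as ${{ github.token }} while rejecting other ${...} expansion before delegating to the token validator. An isolated sketch of that guard on a couple of illustrative values:

    # Isolated sketch of the variable-expansion guard shown in the diff above.
    def token_expansion_is_dangerous(token: str) -> bool:
        return "${" in token and not token.startswith("${{ ") and not token.endswith(" }}")

    print(token_expansion_is_dangerous("${{ github.token }}"))  # False - GitHub expression is allowed
    print(token_expansion_is_dangerous("${HOME}/token"))        # True  - shell-style expansion is flagged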
@@ -361,7 +361,7 @@ runs:

 - name: Commit Fixes
 if: ${{ fromJSON(steps.fix.outputs.fixed_count) > 0 }}
-uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
+uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
 with:
 commit_message: 'style: apply python lint fixes'
 commit_user_name: ${{ inputs.username }}

@@ -370,7 +370,7 @@ runs:

 - name: Upload SARIF Report
 if: steps.check-files.outputs.result == 'found'
-uses: github/codeql-action/upload-sarif@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
 with:
 sarif_file: ${{ inputs.working-directory }}/reports/flake8.sarif
 category: 'python-lint'

@@ -99,7 +99,7 @@ runs:

 - name: Run actionlint
 if: steps.check-configs.outputs.run_actionlint == 'true'
-uses: raven-actions/actionlint@3a24062651993d40fed1019b58ac6fbdfbf276cc # v2.0.1
+uses: raven-actions/actionlint@e01d1ea33dd6a5ed517d95b4c0c357560ac6f518 # v2.1.1
 with:
 cache: true
 fail-on-error: true

@@ -161,21 +161,21 @@ runs:

 - name: Upload Trivy results
 if: steps.verify-sarif.outputs.has_trivy == 'true'
-uses: github/codeql-action/upload-sarif@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
 with:
 sarif_file: 'trivy-results.sarif'
 category: 'trivy'

 - name: Upload Gitleaks results
 if: steps.verify-sarif.outputs.has_gitleaks == 'true'
-uses: github/codeql-action/upload-sarif@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
 with:
 sarif_file: 'gitleaks-report.sarif'
 category: 'gitleaks'

 - name: Archive security reports
 if: always()
-uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
 with:
 name: security-reports-${{ github.run_id }}
 path: |

@@ -52,7 +52,7 @@ runs:

 - name: 🚀 Run stale
 id: stale
-uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
+uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
 with:
 repo-token: ${{ inputs.token || github.token }}
 days-before-stale: ${{ inputs.days-before-stale }}

@@ -78,16 +78,9 @@ class CustomValidator(BaseValidator):

 # Validate token if provided
 if "token" in inputs:
-token_valid = self.token_validator.validate_github_token(
-inputs["token"],
-required=False, # Token is optional, defaults to ${{ github.token }}
+valid &= self.validate_with(
+self.token_validator, "validate_github_token", inputs["token"], required=False
 )
-# Copy any errors from token validator
-for error in self.token_validator.errors:
-if error not in self.errors:
-self.add_error(error)
-self.token_validator.clear_errors()
-valid &= token_valid

 return valid

@@ -100,27 +93,15 @@ class CustomValidator(BaseValidator):
 Returns:
 True if valid, False otherwise
 """
-# Allow GitHub Actions expressions
 if self.is_github_expression(path):
 return True

-# First check basic file path security
-result = self.file_validator.validate_file_path(path, "labels")
-# Copy any errors from file validator
-for error in self.file_validator.errors:
-if error not in self.errors:
-self.add_error(error)
-self.file_validator.clear_errors()
+result = self.validate_with(self.file_validator, "validate_file_path", path, "labels")

 if not result:
 return False

-# Check file extension
 if not (path.endswith(".yml") or path.endswith(".yaml")):
 self.add_error(f'Invalid labels file: "{path}". Must be a .yml or .yaml file')
 return False

-# Additional custom validation could go here
-# For example, checking if the file exists, validating YAML structure, etc.

 return True

@@ -30,54 +30,32 @@ class CustomValidator(BaseValidator):
 """Validate terraform-lint-fix action inputs."""
 valid = True

-# Validate terraform-version if provided
-if "terraform-version" in inputs:
-value = inputs["terraform-version"]
-# Empty string is OK - uses default
-if value == "":
-pass # Allow empty, will use default
-elif value:
-result = self.version_validator.validate_terraform_version(
-value, "terraform-version"
-)
-# Propagate errors from the version validator
-for error in self.version_validator.errors:
-if error not in self.errors:
-self.add_error(error)
-self.version_validator.clear_errors()
-if not result:
-valid = False
-# Validate token if provided
-if "token" in inputs:
-value = inputs["token"]
-if value == "":
-# Empty token is OK - uses default
-pass
-elif value:
-result = self.token_validator.validate_github_token(value, required=False)
-for error in self.token_validator.errors:
-if error not in self.errors:
-self.add_error(error)
-self.token_validator.clear_errors()
-if not result:
-valid = False
+# Validate terraform-version if provided (empty is OK - uses default)
+if inputs.get("terraform-version"):
+valid &= self.validate_with(
+self.version_validator,
+"validate_terraform_version",
+inputs["terraform-version"],
+"terraform-version",
+)
+
+# Validate token if provided (empty is OK - uses default)
+if inputs.get("token"):
+valid &= self.validate_with(
+self.token_validator,
+"validate_github_token",
+inputs["token"],
+required=False,
+)

 # Validate working-directory if provided
-if "working-directory" in inputs:
-value = inputs["working-directory"]
-if value:
-result = self.file_validator.validate_file_path(value, "working-directory")
-for error in self.file_validator.errors:
-if error not in self.errors:
-self.add_error(error)
-self.file_validator.clear_errors()
-if not result:
-valid = False
+if inputs.get("working-directory"):
+valid &= self.validate_with(
+self.file_validator,
+"validate_file_path",
+inputs["working-directory"],
+"working-directory",
+)

 return valid

@@ -247,7 +247,7 @@ runs:

 - name: Commit Fixes
 if: steps.check-files.outputs.found == 'true' && inputs.auto-fix == 'true' && fromJSON(steps.fix.outputs.fixed_count) > 0
-uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
+uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
 with:
 commit_message: 'style: apply terraform formatting fixes'
 commit_user_name: ${{ inputs.username }}

@@ -256,7 +256,7 @@ runs:

 - name: Upload SARIF Report
 if: steps.check-files.outputs.found == 'true' && inputs.format == 'sarif'
-uses: github/codeql-action/upload-sarif@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
+uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
 with:
 sarif_file: ${{ env.VALIDATED_WORKING_DIR }}/reports/tflint.sarif
 category: terraform-lint

@@ -27,57 +27,45 @@ class CustomValidator(BaseValidator):
 self.boolean_validator = BooleanValidator()
 self.file_validator = FileValidator()

-def validate_inputs(self, inputs: dict[str, str]) -> bool: # pylint: disable=too-many-branches
+def validate_inputs(self, inputs: dict[str, str]) -> bool:
 """Validate validate-inputs action inputs."""
 valid = True

 # Validate action/action-type input
-if "action" in inputs or "action-type" in inputs:
-action_input = inputs.get("action") or inputs.get("action-type", "")
-# Check for empty action
+action_key = self.get_key_variant(inputs, "action", "action-type")
+if action_key:
+action_input = inputs[action_key]
 if action_input == "":
 self.add_error("Action name cannot be empty")
 valid = False
-# Allow GitHub expressions
-elif action_input.startswith("${{") and action_input.endswith("}}"):
-pass # GitHub expressions are valid
-# Check for dangerous characters
-elif any(
-char in action_input
-for char in [";", "`", "$", "&", "|", ">", "<", "\n", "\r", "/"]
-):
-self.add_error(f"Invalid characters in action name: {action_input}")
-valid = False
-# Validate action name format (should be lowercase with hyphens or underscores)
-elif action_input and not re.match(r"^[a-z][a-z0-9_-]*[a-z0-9]$", action_input):
-self.add_error(f"Invalid action name format: {action_input}")
-valid = False
+elif not self.is_github_expression(action_input):
+# Only validate non-GitHub expressions
+if any(
+char in action_input
+for char in [";", "`", "$", "&", "|", ">", "<", "\n", "\r", "/"]
+):
+self.add_error(f"Invalid characters in action name: {action_input}")
+valid = False
+elif action_input and not re.match(r"^[a-z][a-z0-9_-]*[a-z0-9]$", action_input):
+self.add_error(f"Invalid action name format: {action_input}")
+valid = False

 # Validate rules-file if provided
 if inputs.get("rules-file"):
-result = self.file_validator.validate_file_path(inputs["rules-file"], "rules-file")
-for error in self.file_validator.errors:
-if error not in self.errors:
-self.add_error(error)
-self.file_validator.clear_errors()
-if not result:
-valid = False
+valid &= self.validate_with(
+self.file_validator, "validate_file_path", inputs["rules-file"], "rules-file"
+)

 # Validate fail-on-error boolean
 if "fail-on-error" in inputs:
 value = inputs["fail-on-error"]
-# Reject empty string
 if value == "":
 self.add_error("fail-on-error cannot be empty")
 valid = False
 elif value:
-result = self.boolean_validator.validate_boolean(value, "fail-on-error")
-for error in self.boolean_validator.errors:
-if error not in self.errors:
-self.add_error(error)
-self.boolean_validator.clear_errors()
-if not result:
-valid = False
+valid &= self.validate_with(
+self.boolean_validator, "validate_boolean", value, "fail-on-error"
+)

 return valid
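The action-name rule above reduces to a dangerous-character check followed by the regex ^[a-z][a-z0-9_-]*[a-z0-9]$. A quick sketch of how that pattern classifies a few sample names (the names themselves are illustrative):

    import re

    # The action-name format pattern from the validate-inputs validator above.
    ACTION_NAME = re.compile(r"^[a-z][a-z0-9_-]*[a-z0-9]$")

    for name in ("node-setup", "terraform_lint_fix", "Docker-Publish", "x", "lint-"):
        print(f"{name}: {bool(ACTION_NAME.match(name))}")
    # node-setup and terraform_lint_fix match; Docker-Publish (uppercase),
    # x (needs at least two characters) and lint- (cannot end with a hyphen) do not.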
@@ -213,6 +213,10 @@ outputs:
 runs:
 using: composite
 steps:
+- name: Install Python dependencies
+shell: bash
+run: pip install pyyaml==6.0.3
+
 - name: Validate Action Inputs with Python
 id: validate
 shell: bash
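The new step above installs a pinned pyyaml 6.0.3 before the validation script runs. A minimal sketch of the kind of loading that dependency enables; the file name and structure here are assumptions for illustration, not the action's actual schema:

    import yaml  # provided by the pinned pyyaml==6.0.3 install step above

    # Illustrative only: read a YAML rules file and list its top-level keys.
    with open("validation-rules.yml", encoding="utf-8") as fh:
        rules = yaml.safe_load(fh) or {}
    print(sorted(rules))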
@@ -895,7 +895,7 @@ optional_inputs:
 self.validator._validate_multi_value_enum("test", "input", valid_values=["only_one"])
 raise AssertionError("Should raise ValueError for single value")
 except ValueError as e:
-assert "at least 2 valid values" in str(e)
+assert ">= 2 values" in str(e)

 # Should raise ValueError if more than max_values
 try:

@@ -906,7 +906,7 @@ optional_inputs:
 )
 raise AssertionError("Should raise ValueError for 11 values")
 except ValueError as e:
-assert "at most 10 valid values" in str(e)
+assert "<= 10 values" in str(e)

 def test_validate_exit_code_list_valid(self):
 """Test exit code list validation with valid values."""

@@ -227,3 +227,82 @@ class BaseValidator(ABC):
 or ("${{" in value and "}}" in value)
 or (value.strip().startswith("${{") and value.strip().endswith("}}"))
 )

+def propagate_errors(self, validator: BaseValidator, result: bool) -> bool:
+"""Copy errors from another validator and return result.
+
+Args:
+validator: The validator to copy errors from
+result: The validation result to return
+
+Returns:
+The result parameter unchanged
+"""
+for error in validator.errors:
+if error not in self.errors:
+self.add_error(error)
+validator.clear_errors()
+return result
+
+def validate_with(
+self, validator: BaseValidator, method: str, *args: Any, **kwargs: Any
+) -> bool:
+"""Call validator method and propagate errors.
+
+Args:
+validator: The validator instance to use
+method: The method name to call on the validator
+*args: Positional arguments to pass to the method
+**kwargs: Keyword arguments to pass to the method
+
+Returns:
+The validation result
+"""
+result = getattr(validator, method)(*args, **kwargs)
+return self.propagate_errors(validator, result)
+
+def validate_enum(
+self,
+value: str,
+name: str,
+valid_values: list[str],
+*,
+case_sensitive: bool = False,
+) -> bool:
+"""Validate value is one of allowed options.
+
+Args:
+value: The value to validate
+name: The name of the input for error messages
+valid_values: List of allowed values
+case_sensitive: Whether comparison should be case sensitive
+
+Returns:
+True if value is valid or empty/GitHub expression, False otherwise
+"""
+if not value or self.is_github_expression(value):
+return True
+check = value if case_sensitive else value.lower()
+allowed = valid_values if case_sensitive else [v.lower() for v in valid_values]
+if check not in allowed:
+self.add_error(f"Invalid {name}: {value}. Must be one of: {', '.join(valid_values)}")
+return False
+return True
+
+@staticmethod
+def get_key_variant(inputs: dict[str, str], *variants: str) -> str | None:
+"""Get first matching key variant from inputs.
+
+Useful for inputs that may use underscore or hyphen variants.
+
+Args:
+inputs: Dictionary of inputs to check
+*variants: Key variants to search for in order
+
+Returns:
+The first matching key, or None if no match
+"""
+for key in variants:
+if key in inputs:
+return key
+return None
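The BaseValidator additions above introduce propagate_errors, validate_with, validate_enum, and get_key_variant. A short standalone sketch exercising the documented validate_enum behaviour (empty values and GitHub expressions pass through, anything outside the list fails); the expression check here is a simplified stand-in for BaseValidator.is_github_expression:

    # Standalone sketch mirroring the validate_enum behaviour documented above.
    def is_github_expression(value: str) -> bool:
        stripped = value.strip()
        return stripped.startswith("${{") and stripped.endswith("}}")

    def validate_enum(value, name, valid_values, *, case_sensitive=False):
        errors: list[str] = []
        if not value or is_github_expression(value):
            return True, errors
        check = value if case_sensitive else value.lower()
        allowed = valid_values if case_sensitive else [v.lower() for v in valid_values]
        if check not in allowed:
            errors.append(f"Invalid {name}: {value}. Must be one of: {', '.join(valid_values)}")
            return False, errors
        return True, errors

    print(validate_enum("Check", "mode", ["check", "fix"]))               # (True, []) - case-insensitive match
    print(validate_enum("${{ inputs.mode }}", "mode", ["check", "fix"]))  # (True, []) - expression passes through
    print(validate_enum("deploy", "mode", ["check", "fix"]))              # (False, ['Invalid mode: deploy. ...'])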
@@ -5,6 +5,7 @@ This validator automatically applies validation based on input naming conventions

 from __future__ import annotations

+import re
 from pathlib import Path
 from typing import Any

@@ -424,7 +425,10 @@ class ConventionBasedValidator(BaseValidator):
 if error not in self.errors:
 self.add_error(error)
 # Clear the module's errors after copying
-validator_module.errors = []
+if hasattr(validator_module, "clear_errors"):
+validator_module.clear_errors()
+else:
+validator_module.errors = []

 return result
 # Method not found, skip validation

@@ -629,7 +633,8 @@ class ConventionBasedValidator(BaseValidator):
 Args:
 value: The comma-separated list value
 input_name: The input name for error messages
-item_pattern: Regex pattern each item must match (default: alphanumeric+hyphens+underscores)
+item_pattern: Regex pattern each item must match
+(default: alphanumeric+hyphens+underscores)
 valid_items: Optional list of valid items for enum-style validation
 check_injection: Whether to check for shell injection patterns
 item_name: Descriptive name for items in error messages (e.g., "linter", "extension")

@@ -654,8 +659,6 @@ class ConventionBasedValidator(BaseValidator):
 ... )
 True
 """
-import re

 if not value or value.strip() == "":
 return True # Optional

@@ -895,14 +898,12 @@ class ConventionBasedValidator(BaseValidator):

 # Validate valid_values count
 if len(valid_values) < min_values:
-raise ValueError(
-f"Multi-value enum requires at least {min_values} valid values, got {len(valid_values)}"
-)
+msg = f"Multi-value enum needs >= {min_values} values, got {len(valid_values)}"
+raise ValueError(msg)

 if len(valid_values) > max_values:
-raise ValueError(
-f"Multi-value enum supports at most {max_values} valid values, got {len(valid_values)}"
-)
+msg = f"Multi-value enum allows <= {max_values} values, got {len(valid_values)}"
+raise ValueError(msg)

 if not value or value.strip() == "":
 return True # Optional
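The reworded bounds messages above line up with the updated tests earlier in this compare (">= 2 values" / "<= 10 values"). The guard in isolation, with the 2/10 defaults taken from those tests:

    # Isolated sketch of the valid_values bounds check shown above.
    def check_enum_bounds(valid_values: list[str], min_values: int = 2, max_values: int = 10) -> None:
        if len(valid_values) < min_values:
            msg = f"Multi-value enum needs >= {min_values} values, got {len(valid_values)}"
            raise ValueError(msg)
        if len(valid_values) > max_values:
            msg = f"Multi-value enum allows <= {max_values} values, got {len(valid_values)}"
            raise ValueError(msg)

    try:
        check_enum_bounds(["only_one"])
    except ValueError as exc:
        print(exc)  # Multi-value enum needs >= 2 values, got 1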
@@ -1024,8 +1025,6 @@ class ConventionBasedValidator(BaseValidator):
 Returns:
 True if valid, False otherwise
 """
-import re

 if not value or value.strip() == "":
 return True # Optional

@@ -1123,8 +1122,6 @@ class ConventionBasedValidator(BaseValidator):
 Valid: "0", "0,1,2", "5,10,15", "0,130", ""
 Invalid: "256", "0,256", "-1", "0,abc", "0,,1"
 """
-import re

 if not value or value.strip() == "":
 return True # Optional

@@ -1169,8 +1166,10 @@ class ConventionBasedValidator(BaseValidator):
 Args:
 value: The key-value list value (comma-separated KEY=VALUE pairs)
 input_name: The input name for error messages
-key_pattern: Regex pattern for key validation (default: alphanumeric+underscores+hyphens)
-check_injection: Whether to check for shell injection patterns in values (default: True)
+key_pattern: Regex pattern for key validation
+(default: alphanumeric+underscores+hyphens)
+check_injection: Whether to check for shell injection patterns
+in values (default: True)

 Returns:
 True if valid, False otherwise

@@ -1179,7 +1178,6 @@ class ConventionBasedValidator(BaseValidator):
 Valid: "KEY=value", "KEY1=value1,KEY2=value2", "BUILD_ARG=hello", ""
 Invalid: "KEY", "=value", "KEY=", "KEY=value,", "KEY=val;whoami"
 """
-import re

 if not value or value.strip() == "":
 return True # Optional

@@ -1260,8 +1258,6 @@ class ConventionBasedValidator(BaseValidator):
 Returns:
 bool: True if valid, False otherwise
 """
-import re

 if not value or value.strip() == "":
 return True # Optional

@@ -1412,8 +1408,6 @@ class ConventionBasedValidator(BaseValidator):
 Returns:
 bool: True if valid, False otherwise
 """
-import re

 if not value or value.strip() == "":
 return True # Optional

@@ -12,7 +12,8 @@ class TokenValidator(BaseValidator):
 """Validator for various authentication tokens."""

 # Token patterns for different token types (based on official GitHub documentation)
-# https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/about-authentication-to-github#githubs-token-formats
+# See: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/
+# about-authentication-to-github#githubs-token-formats
 # Note: The lengths include the prefix
 TOKEN_PATTERNS: ClassVar[dict[str, str]] = {
 # Personal access token (classic):