Mirror of https://github.com/ivuorinen/actions.git (synced 2026-01-26 11:34:00 +00:00)

Compare commits: v2025.11.2...v2025.12.2 (52 commits)
| SHA1 |
|---|
| 54886c3fd5 |
| fd030b418f |
| 96c305c557 |
| 5b4e9c8e11 |
| 2d0bff84ad |
| 98f260793c |
| 09ae7517d6 |
| 61ebe619a8 |
| a1d55ac125 |
| db86bb2f0d |
| 5e7b2fbc11 |
| 43126631c2 |
| f6ed49a6dd |
| 23ac5dbca3 |
| a8031d3922 |
| 30149dd950 |
| 3a3cdcdefe |
| 7d28006a83 |
| 4008db6517 |
| 7aa206a02a |
| 8481bbb5cd |
| 4c0068e6e7 |
| 5cecfe7cbe |
| 0288a1c8b8 |
| 44a11e9773 |
| a52399cf74 |
| 803165db8f |
| d69ed9e999 |
| 8eea6f781b |
| 4889586a94 |
| e02ca4d843 |
| 13ef0db9ba |
| c366e99ee3 |
| fbbb487332 |
| abe24f8570 |
| 9aa16a8164 |
| e58465e5d3 |
| 9fe05efeec |
| 449669120c |
| d9098ddead |
| f37d940c72 |
| eea547998d |
| 49159fc895 |
| 89fd0f3627 |
| 83cf08ff76 |
| 90ab7c645c |
| d05e898ea9 |
| 650ebb87b8 |
| 13316bd827 |
| 350fd30043 |
| 587853a9cd |
| 6cde6d088d |
@@ -17,12 +17,12 @@ runs:
using: composite
steps:
- name: Install uv
uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
with:
enable-cache: true

- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version-file: pyproject.toml

@@ -31,7 +31,7 @@ runs:
run: uv sync --frozen

- name: Setup Node.js
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: '24'
cache: npm
16  .github/codeql/codeql-config.yml  vendored
@@ -15,3 +15,19 @@ paths-ignore:
# Use security and quality query suite
queries:
- uses: security-and-quality

# Suppress specific false positives
# These findings have been manually reviewed and determined to be false positives
# with appropriate security controls in place
query-filters:
# docker-publish: Code injection in validated context
# False positive: User input is validated and sanitized before use
# - Only relative paths and trusted git URLs are allowed
# - Absolute paths and arbitrary URLs are rejected
# - Path traversal attempts are blocked
# - Custom contexts require explicit opt-in via use-custom-context: true
# - Wraps docker/build-push-action (trusted Docker-maintained action)
# - Action is designed for trusted workflows only (documented in action.yml)
- exclude:
id: js/actions/code-injection
kind: problem
1  .github/tag-changelog-config.js  vendored
@@ -1,6 +1,7 @@
module.exports = {
types: [
{ types: ['feat', 'feature', 'Feat'], label: '🎉 New Features' },
{ types: ['security'], label: '🔐 Security' },
{ types: ['fix', 'bugfix', 'Fix'], label: '🐛 Bugfixes' },
{ types: ['improvements', 'enhancement'], label: '🔨 Improvements' },
{ types: ['perf'], label: '🏎️ Performance Improvements' },
208  .github/workflows/action-security.yml  vendored
@@ -39,212 +39,30 @@ jobs:
with:
fetch-depth: 0

- name: Check Required Configurations
id: check-configs
shell: sh
run: |
# Initialize all flags as false
{
echo "run_gitleaks=false"
echo "run_trivy=true"
} >> "$GITHUB_OUTPUT"

# Check Gitleaks configuration and license
if [ -f ".gitleaks.toml" ] && [ -n "${{ secrets.GITLEAKS_LICENSE }}" ]; then
echo "Gitleaks config and license found"
printf '%s\n' "run_gitleaks=true" >> "$GITHUB_OUTPUT"
else
echo "::warning::Gitleaks config or license missing - skipping Gitleaks scan"
fi

- name: Run actionlint
uses: raven-actions/actionlint@3a24062651993d40fed1019b58ac6fbdfbf276cc # v2.0.1
- name: Run Security Scan
id: security-scan
uses: ./security-scan
with:
cache: true
fail-on-error: true
shellcheck: false

- name: Run Gitleaks
if: steps.check-configs.outputs.run_gitleaks == 'true'
uses: gitleaks/gitleaks-action@ff98106e4c7b2bc287b24eaf42907196329070c7 # v2.3.9
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITLEAKS_LICENSE: ${{ secrets.GITLEAKS_LICENSE }}
with:
config-path: .gitleaks.toml
report-format: sarif
report-path: gitleaks-report.sarif

- name: Run Trivy vulnerability scanner
uses: aquasecurity/trivy-action@a11da62073708815958ea6d84f5650c78a3ef85b # master
with:
scan-type: 'fs'
scanners: 'vuln,config,secret'
format: 'sarif'
output: 'trivy-results.sarif'
severity: 'CRITICAL,HIGH'
timeout: '10m'

- name: Verify SARIF files
id: verify-sarif
shell: sh
run: |
# Initialize outputs
{
echo "has_trivy=false"
echo "has_gitleaks=false"
} >> "$GITHUB_OUTPUT"

# Check Trivy results
if [ -f "trivy-results.sarif" ]; then
if jq -e . </dev/null 2>&1 <"trivy-results.sarif"; then
printf '%s\n' "has_trivy=true" >> "$GITHUB_OUTPUT"
else
echo "::warning::Trivy SARIF file exists but is not valid JSON"
fi
fi

# Check Gitleaks results if it ran
if [ "${{ steps.check-configs.outputs.run_gitleaks }}" = "true" ]; then
if [ -f "gitleaks-report.sarif" ]; then
if jq -e . </dev/null 2>&1 <"gitleaks-report.sarif"; then
printf '%s\n' "has_gitleaks=true" >> "$GITHUB_OUTPUT"
else
echo "::warning::Gitleaks SARIF file exists but is not valid JSON"
fi
fi
fi

- name: Upload Trivy results
if: steps.verify-sarif.outputs.has_trivy == 'true'
uses: github/codeql-action/upload-sarif@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
with:
sarif_file: 'trivy-results.sarif'
category: 'trivy'

- name: Upload Gitleaks results
if: steps.verify-sarif.outputs.has_gitleaks == 'true'
uses: github/codeql-action/upload-sarif@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
with:
sarif_file: 'gitleaks-report.sarif'
category: 'gitleaks'

- name: Archive security reports
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: security-reports-${{ github.run_id }}
path: |
${{ steps.verify-sarif.outputs.has_trivy == 'true' && 'trivy-results.sarif' || '' }}
${{ steps.verify-sarif.outputs.has_gitleaks == 'true' && 'gitleaks-report.sarif' || '' }}
retention-days: 30

- name: Analyze Results
if: always()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
script: |
const fs = require('fs');

try {
let totalIssues = 0;
let criticalIssues = 0;

const analyzeSarif = (file, tool) => {
if (!fs.existsSync(file)) {
console.log(`No results file found for ${tool}`);
return null;
}

try {
const sarif = JSON.parse(fs.readFileSync(file, 'utf8'));
return sarif.runs.reduce((acc, run) => {
if (!run.results) return acc;

const critical = run.results.filter(r =>
r.level === 'error' ||
r.level === 'critical' ||
(r.ruleId || '').toLowerCase().includes('critical')
).length;

return {
total: acc.total + run.results.length,
critical: acc.critical + critical
};
}, { total: 0, critical: 0 });
} catch (error) {
console.log(`Error analyzing ${tool} results: ${error.message}`);
return null;
}
};

// Only analyze results from tools that ran successfully
const results = {
trivy: ${{ steps.verify-sarif.outputs.has_trivy }} ?
analyzeSarif('trivy-results.sarif', 'trivy') : null,
gitleaks: ${{ steps.verify-sarif.outputs.has_gitleaks }} ?
analyzeSarif('gitleaks-report.sarif', 'gitleaks') : null
};

// Aggregate results
Object.entries(results).forEach(([tool, result]) => {
if (result) {
totalIssues += result.total;
criticalIssues += result.critical;
console.log(`${tool}: ${result.total} total, ${result.critical} critical issues`);
}
});

// Create summary
const summary = `## Security Scan Summary

- Total Issues Found: ${totalIssues}
- Critical Issues: ${criticalIssues}

### Tool Breakdown
${Object.entries(results)
.filter(([_, r]) => r)
.map(([tool, r]) =>
`- ${tool}: ${r.total} total, ${r.critical} critical`
).join('\n')}

### Tools Run Status
- Trivy: ${{ steps.verify-sarif.outputs.has_trivy }}
- Gitleaks: ${{ steps.check-configs.outputs.run_gitleaks }}
`;

// Set output
core.setOutput('total_issues', totalIssues);
core.setOutput('critical_issues', criticalIssues);

// Add job summary
await core.summary
.addRaw(summary)
.write();

// Fail if critical issues found
if (criticalIssues > 0) {
core.setFailed(`Found ${criticalIssues} critical security issues`);
}
} catch (error) {
core.setFailed(`Analysis failed: ${error.message}`);
}
gitleaks-license: ${{ secrets.GITLEAKS_LICENSE }}
token: ${{ secrets.GITHUB_TOKEN }}

- name: Notify on Critical Issues
if: failure()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
if: failure() && steps.security-scan.outputs.critical_issues != '0'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |-
const { repo, owner } = context.repo;
const critical = core.getInput('critical_issues');
const critical = '${{ steps.security-scan.outputs.critical_issues }}';
const total = '${{ steps.security-scan.outputs.total_issues }}';

const body = `🚨 Critical security issues found in GitHub Actions

${critical} critical security issues were found during the security scan.
${critical} critical security issues (out of ${total} total) were found during the security scan.

### Scan Results
- Trivy: ${{ steps.verify-sarif.outputs.has_trivy == 'true' && 'Completed' || 'Skipped/Failed' }}
- Gitleaks: ${{ steps.check-configs.outputs.run_gitleaks == 'true' && 'Completed' || 'Skipped' }}
- Actionlint: Completed
- Trivy: ${{ steps.security-scan.outputs.has_trivy_results == 'true' && 'Completed' || 'Skipped/Failed' }}
- Gitleaks: ${{ steps.security-scan.outputs.has_gitleaks_results == 'true' && 'Completed' || 'Skipped' }}

[View detailed scan results](https://github.com/${owner}/${repo}/actions/runs/${context.runId})
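The reworked workflow above now delegates scanning to the repository's own `security-scan` composite action. A minimal sketch of calling that action from a caller workflow follows; the trigger and permissions block are illustrative assumptions, while the `./security-scan` path and its `gitleaks-license`/`token` inputs come from the diff above.

```yaml
# Hypothetical caller workflow; only the ./security-scan step and its inputs are taken from the diff above.
name: security
on: [push]
permissions:
  contents: read
  security-events: write # assumed: SARIF upload inside the action needs this
jobs:
  scan:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
        with:
          fetch-depth: 0
      - name: Run Security Scan
        id: security-scan
        uses: ./security-scan
        with:
          gitleaks-license: ${{ secrets.GITLEAKS_LICENSE }}
          token: ${{ secrets.GITHUB_TOKEN }}
```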
4  .github/workflows/build-testing-image.yml  vendored
@@ -38,7 +38,7 @@ jobs:
uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0

- name: Log in to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
@@ -49,7 +49,7 @@ jobs:

- name: Extract metadata
id: meta
uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
with:
images: ghcr.io/${{ github.repository_owner }}/actions
tags: |
1  .github/workflows/codeql-new.yml  vendored
@@ -42,4 +42,5 @@ jobs:
with:
language: ${{ matrix.language }}
queries: security-and-quality
config-file: .github/codeql/codeql-config.yml
token: ${{ github.token }}
51  .github/workflows/codeql.yml  vendored
@@ -1,51 +0,0 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
name: 'CodeQL'

on:
push:
branches:
- 'main'
pull_request:
branches:
- 'main'
schedule:
- cron: '30 1 * * 0' # Run at 1:30 AM UTC every Sunday
merge_group:

permissions:
actions: read
contents: read

jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
security-events: write

strategy:
fail-fast: false
matrix:
language:
- 'actions'
- 'javascript'
- 'python'

steps: # Add languages used in your actions
- name: Checkout repository
uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta

- name: Initialize CodeQL
uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
with:
languages: ${{ matrix.language }}
queries: security-and-quality

- name: Autobuild
uses: github/codeql-action/autobuild@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
with:
category: '/language:${{matrix.language}}'
2  .github/workflows/issue-stats.yml  vendored
@@ -30,7 +30,7 @@ jobs:
echo "last_month=$first_day..$last_day" >> "$GITHUB_ENV"

- name: Run issue-metrics tool
uses: github/issue-metrics@637a24e71b78bc10881e61972b19ea9ff736e14a # v3.25.2
uses: github/issue-metrics@67526e7bd8100b870f10b1c120780a8375777b43 # v3.25.5
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SEARCH_QUERY: 'repo:ivuorinen/actions is:issue created:${{ env.last_month }} -reason:"not planned"'
39  .github/workflows/new-release.yml  vendored
@@ -22,25 +22,28 @@ jobs:
steps:
- uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta

- name: Create tag if necessary
uses: fregante/daily-version-action@fb1a60b7c4daf1410cd755e360ebec3901e58588 # v2.1.3
- name: Create daily release
id: daily-version
run: |
set -eu

- name: Create changelog text
if: steps.daily-version.outputs.created
id: changelog
uses: loopwerk/tag-changelog@941366edb8920e2071eae0449031830984b9f26e # v1.3.0
with:
token: ${{ secrets.GITHUB_TOKEN }}
config_file: .github/tag-changelog-config.js
VERSION="v$(date '+%Y.%m.%d')"
printf '%s\n' "version=$VERSION" >> "$GITHUB_OUTPUT"

- name: Create release
if: steps.daily-version.outputs.created
uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1.20.0
# Check if release already exists
if gh release view "$VERSION" >/dev/null 2>&1; then
printf '%s\n' "created=false" >> "$GITHUB_OUTPUT"
printf '%s\n' "Release $VERSION already exists - skipping"
exit 0
fi

# Create release with auto-generated changelog (also creates tag)
gh release create "$VERSION" \
--title "Release $VERSION" \
--generate-notes \
--target main

printf '%s\n' "created=true" >> "$GITHUB_OUTPUT"
printf '%s\n' "Created release $VERSION"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag: ${{ steps.daily-version.outputs.version }}
name: Release ${{ steps.daily-version.outputs.version }}
body: ${{ steps.changelog.outputs.changes }}
allowUpdates: true
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
114  .github/workflows/pr-lint.yml  vendored
@@ -24,17 +24,9 @@ on:
merge_group:

env:
# Apply linter fixes configuration
APPLY_FIXES: all
APPLY_FIXES_EVENT: pull_request
APPLY_FIXES_MODE: commit

# Disable linters that do not work or conflict
# MegaLinter configuration - these override the action's defaults
DISABLE_LINTERS: REPOSITORY_DEVSKIM

# Additional settings
VALIDATE_ALL_CODEBASE: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
GITHUB_TOKEN: ${{ secrets.FIXIMUS_TOKEN || secrets.GITHUB_TOKEN }}

# Report configuration
REPORT_OUTPUT_FOLDER: megalinter-reports
@@ -72,111 +64,27 @@ jobs:
token: ${{ secrets.FIXIMUS_TOKEN || secrets.GITHUB_TOKEN }}
fetch-depth: 0

- name: MegaLinter
id: ml
uses: oxsecurity/megalinter/flavors/cupcake@62c799d895af9bcbca5eacfebca29d527f125a57 # v9.1.0

- name: Check MegaLinter Results
id: check-results
if: always()
shell: sh
run: |
printf '%s\n' "status=success" >> "$GITHUB_OUTPUT"

if [ -f "${{ env.REPORT_OUTPUT_FOLDER }}/megalinter.log" ]; then
if grep -q "ERROR\|CRITICAL" "${{ env.REPORT_OUTPUT_FOLDER }}/megalinter.log"; then
echo "Linting errors found"
printf '%s\n' "status=failure" >> "$GITHUB_OUTPUT"
fi
else
echo "::warning::MegaLinter log file not found"
fi

- name: Upload Reports
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
- name: Run MegaLinter
id: pr-lint
uses: ./pr-lint
with:
name: MegaLinter reports
path: |
megalinter-reports
mega-linter.log
retention-days: 30
token: ${{ secrets.FIXIMUS_TOKEN || secrets.GITHUB_TOKEN }}
username: fiximus
email: github-bot@ivuorinen.net

- name: Upload SARIF Report
if: always() && hashFiles('megalinter-reports/sarif/*.sarif')
uses: github/codeql-action/upload-sarif@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
sarif_file: megalinter-reports/sarif
category: megalinter

- name: Prepare Git for Fixes
if: steps.ml.outputs.has_updated_sources == 1
shell: sh
run: |
sudo chown -Rc $UID .git/
git config --global user.name "fiximus"
git config --global user.email "github-bot@ivuorinen.net"

- name: Create Pull Request
if: |
steps.ml.outputs.has_updated_sources == 1 &&
(env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) &&
env.APPLY_FIXES_MODE == 'pull_request' &&
(github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) &&
!contains(github.event.head_commit.message, 'skip fix')
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
id: cpr
with:
token: ${{ secrets.FIXIMUS_TOKEN || secrets.GITHUB_TOKEN }}
commit-message: '[MegaLinter] Apply linters automatic fixes'
title: '[MegaLinter] Apply linters automatic fixes'
labels: bot
branch: megalinter/fixes-${{ github.ref_name }}
branch-suffix: timestamp
delete-branch: true
body: |
## MegaLinter Fixes

MegaLinter has identified and fixed code style issues.

### 🔍 Changes Made
- Automated code style fixes
- Formatting improvements
- Lint error corrections

### 📝 Notes
- Please review the changes carefully
- Run tests before merging
- Verify formatting matches project standards

> Generated automatically by MegaLinter

- name: Commit Fixes
if: |
steps.ml.outputs.has_updated_sources == 1 &&
(env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) &&
env.APPLY_FIXES_MODE == 'commit' &&
github.ref != 'refs/heads/main' &&
(github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) &&
!contains(github.event.head_commit.message, 'skip fix')
uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
with:
token: ${{ secrets.FIXIMUS_TOKEN || secrets.GITHUB_TOKEN }}
branch: ${{ github.event.pull_request.head.ref || github.head_ref || github.ref }}
commit_message: |
style: apply MegaLinter fixes

[skip ci]
commit_user_name: fiximus
commit_user_email: github-bot@ivuorinen.net
push_options: --force

- name: Create Status Check
- name: Check Results
if: always()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const status = '${{ steps.check-results.outputs.status }}';
const status = '${{ steps.pr-lint.outputs.validation_status }}';
const conclusion = status === 'success' ? 'success' : 'failure';

const summary = `## MegaLinter Results
2  .github/workflows/release.yml  vendored
@@ -17,6 +17,6 @@ jobs:
contents: write
steps:
- uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
- uses: softprops/action-gh-release@5be0e66d93ac7ed76da52eca8bb058f665c3a5fe # v2.4.2
- uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2.5.0
with:
generate_release_notes: true
2  .github/workflows/security-suite.yml  vendored
@@ -53,7 +53,7 @@ jobs:
# Record the base commit for diffing without checking it out
# Keep PR head checked out so scanners analyze the new changes
BASE_REF="refs/remotes/origin-base/${{ github.event.pull_request.base.ref }}"
echo "BASE_REF=${BASE_REF}" >> $GITHUB_ENV
echo "BASE_REF=${BASE_REF}" >> "$GITHUB_ENV"
echo "Base ref: ${BASE_REF}"
git log -1 --oneline "${BASE_REF}"
2  .github/workflows/stale.yml  vendored
@@ -25,7 +25,7 @@ jobs:

steps:
- name: 🚀 Run stale
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 30
14  .github/workflows/test-actions.yml  vendored
@@ -73,14 +73,14 @@ jobs:
if: always()

- name: Upload SARIF file
uses: github/codeql-action/upload-sarif@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
if: always() && hashFiles('_tests/reports/test-results.sarif') != ''
with:
sarif_file: _tests/reports/test-results.sarif
category: github-actions-tests

- name: Upload unit test results
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
if: always()
with:
name: unit-test-results
@@ -125,15 +125,15 @@ jobs:
shell: sh
run: |
if [ -d "_tests/reports/integration" ] && [ -n "$(find _tests/reports/integration -type f 2>/dev/null)" ]; then
printf '%s\n' "reports-found=true" >> $GITHUB_OUTPUT
printf '%s\n' "reports-found=true" >> "$GITHUB_OUTPUT"
echo "Integration test reports found"
else
printf '%s\n' "reports-found=false" >> $GITHUB_OUTPUT
printf '%s\n' "reports-found=false" >> "$GITHUB_OUTPUT"
echo "No integration test reports found"
fi

- name: Upload integration test results
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
if: always() && steps.check-integration-reports.outputs.reports-found == 'true'
with:
name: integration-test-results
@@ -167,7 +167,7 @@ jobs:
run: make test-coverage

- name: Upload coverage report
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: coverage-report
path: _tests/coverage/
@@ -263,7 +263,7 @@ jobs:

steps:
- name: Download test results
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
pattern: '*-test-results'
merge-multiple: true
8  .github/workflows/version-maintenance.yml  vendored
@@ -49,7 +49,7 @@ jobs:

- name: Create Pull Request
if: steps.action-versioning.outputs.updated == 'true'
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: 'chore: update action references to ${{ steps.version.outputs.major }}'
@@ -68,8 +68,6 @@ jobs:
```bash
make check-version-refs
```

🤖 Auto-generated by version-maintenance workflow
branch: automated/version-update-${{ steps.version.outputs.major }}
delete-branch: true
labels: |
@@ -78,7 +76,7 @@ jobs:

- name: Check for Annual Bump
if: steps.action-versioning.outputs.needs-annual-bump == 'true'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const currentYear = new Date().getFullYear();
@@ -120,8 +118,6 @@ jobs:
\`\`\`bash
make check-version-refs
\`\`\`

🤖 Auto-generated by version-maintenance workflow
`,
labels: ['maintenance', 'high-priority']
});
@@ -14,7 +14,7 @@ repos:
types: [markdown, python, yaml]
files: ^(docs/.*|README\.md|CONTRIBUTING\.md|CHANGELOG\.md|.*\.py|.*\.ya?ml)$
- repo: https://github.com/astral-sh/uv-pre-commit
rev: 0.9.11
rev: 0.9.18
hooks:
- id: uv-lock
- id: uv-sync
@@ -44,7 +44,7 @@ repos:
args: [--autofix, --no-sort-keys]

- repo: https://github.com/DavidAnson/markdownlint-cli2
rev: v0.19.0
rev: v0.20.0
hooks:
- id: markdownlint-cli2
args: [--fix]
@@ -55,7 +55,7 @@ repos:
- id: yamllint

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.14.5
rev: v0.14.10
hooks:
# Run the linter with auto-fix
- id: ruff-check
@@ -78,24 +78,19 @@ repos:
exclude: '^_tests/.*\.sh$'

- repo: https://github.com/rhysd/actionlint
rev: v1.7.8
rev: v1.7.9
hooks:
- id: actionlint
args: ['-shellcheck=']

- repo: https://github.com/renovatebot/pre-commit-hooks
rev: 42.19.0
hooks:
- id: renovate-config-validator

- repo: https://github.com/bridgecrewio/checkov.git
rev: '3.2.494'
rev: '3.2.495'
hooks:
- id: checkov
args:
- '--quiet'

- repo: https://github.com/gitleaks/gitleaks
rev: v8.29.0
rev: v8.30.0
hooks:
- id: gitleaks

@@ -1 +1 @@
3.14.0
3.14.2
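Pin bumps like the ones above are the kind of change `pre-commit autoupdate` produces; a minimal sketch of refreshing and exercising the hooks locally, assuming pre-commit is installed, is:

```sh
# Refresh the rev: pins in .pre-commit-config.yaml to the latest tags,
# then run every configured hook against the whole repository.
pre-commit autoupdate
pre-commit run --all-files
```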
@@ -5,13 +5,14 @@
- **Path**: /Users/ivuorinen/Code/ivuorinen/actions
- **Branch**: main
- **External Usage**: `ivuorinen/actions/<action-name>@main`
- **Total Actions**: 43 self-contained actions
- **Total Actions**: 44 self-contained actions
- **Dogfooding**: Workflows use local actions (pr-lint, codeql-analysis, security-scan)

## Structure

```text
/
├── <action-dirs>/ # 43 self-contained actions
├── <action-dirs>/ # 44 self-contained actions
│ ├── action.yml # Action definition
│ ├── README.md # Auto-generated
│ └── CustomValidator.py # Optional validator
@@ -25,12 +26,14 @@
└── Makefile # Build automation
```

## Action Categories (43 total)
## Action Categories (44 total)

**Setup (7)**: node-setup, set-git-config, php-version-detect, python-version-detect, python-version-detect-v2, go-version-detect, dotnet-version-detect

**Linting (13)**: ansible-lint-fix, biome-check/fix, csharp-lint-check, eslint-check/fix, go-lint, pr-lint, pre-commit, prettier-check/fix, python-lint-fix, terraform-lint-fix

**Security (1)**: security-scan (actionlint, Gitleaks, Trivy scanning)

**Build (3)**: csharp-build, go-build, docker-build

**Publishing (5)**: npm-publish, docker-publish, docker-publish-gh, docker-publish-hub, csharp-publish
@@ -85,3 +88,28 @@ make test # All tests (pytest + ShellSpec)
- ✅ Convention-based validation
- ✅ Test generation system
- ✅ Full backward compatibility

## Dogfooding Strategy

The repository actively dogfoods its own actions in workflows:

**Fully Dogfooded Workflows**:

- **pr-lint.yml**: Uses `./pr-lint` (was 204 lines, now 112 lines - 45% reduction)
- **action-security.yml**: Uses `./security-scan` (was 264 lines, now 82 lines - 69% reduction)
- **codeql-new.yml**: Uses `./codeql-analysis`
- **sync-labels.yml**: Uses `./sync-labels`
- **version-maintenance.yml**: Uses `./action-versioning`

**Intentionally External**:

- **build-testing-image.yml**: Uses docker/\* actions directly (needs metadata extraction)
- Core GitHub actions (checkout, upload-artifact, setup-\*) kept for standardization

**Benefits**:

- Early detection of action issues
- Real-world testing of actions
- Reduced workflow duplication
- Improved maintainability
- Better documentation through usage examples
19  README.md
@@ -22,9 +22,9 @@ Each action is fully self-contained and can be used independently in any GitHub

## 📚 Action Catalog

This repository contains **25 reusable GitHub Actions** for CI/CD automation.
This repository contains **26 reusable GitHub Actions** for CI/CD automation.

### Quick Reference (25 Actions)
### Quick Reference (26 Actions)

| Icon | Action | Category | Description | Key Features |
|:----:|:-----------------------------------------------------|:-----------|:----------------------------------------------------------------|:---------------------------------------------|
@@ -34,7 +34,7 @@ This repository contains **25 reusable GitHub Actions** for CI/CD automation.
| 🛡️ | [`codeql-analysis`][codeql-analysis] | Repository | Run CodeQL security analysis for a single language with conf... | Auto-detection, Token auth, Outputs |
| 🖼️ | [`compress-images`][compress-images] | Repository | Compress images on demand (workflow_dispatch), and at 11pm e... | Token auth, Outputs |
| 📝 | [`csharp-build`][csharp-build] | Build | Builds and tests C# projects. | Caching, Auto-detection, Token auth, Outputs |
| 📝 | [`csharp-lint-check`][csharp-lint-check] | Linting | Runs linters like StyleCop or dotnet-format for C# code styl... | Auto-detection, Token auth, Outputs |
| 📝 | [`csharp-lint-check`][csharp-lint-check] | Linting | Runs linters like StyleCop or dotnet-format for C# code styl... | Caching, Auto-detection, Token auth, Outputs |
| 📦 | [`csharp-publish`][csharp-publish] | Publishing | Publishes a C# project to GitHub Packages. | Caching, Auto-detection, Token auth, Outputs |
| 📦 | [`docker-build`][docker-build] | Build | Builds a Docker image for multiple architectures with enhanc... | Caching, Auto-detection, Token auth, Outputs |
| ☁️ | [`docker-publish`][docker-publish] | Publishing | Simple wrapper to publish Docker images to GitHub Packages a... | Token auth, Outputs |
@@ -49,6 +49,7 @@ This repository contains **25 reusable GitHub Actions** for CI/CD automation.
| ✅ | [`prettier-lint`][prettier-lint] | Linting | Run Prettier in check or fix mode with advanced configuratio... | Caching, Auto-detection, Token auth, Outputs |
| 📝 | [`python-lint-fix`][python-lint-fix] | Linting | Lints and fixes Python files, commits changes, and uploads S... | Caching, Auto-detection, Token auth, Outputs |
| 📦 | [`release-monthly`][release-monthly] | Repository | Creates a release for the current month, incrementing patch ... | Token auth, Outputs |
| 🛡️ | [`security-scan`][security-scan] | Security | Comprehensive security scanning for GitHub Actions including... | Caching, Token auth, Outputs |
| 📦 | [`stale`][stale] | Repository | A GitHub Action to close stale issues and pull requests. | Token auth, Outputs |
| 🏷️ | [`sync-labels`][sync-labels] | Repository | Sync labels from a YAML file to a GitHub repository | Token auth, Outputs |
| 🖥️ | [`terraform-lint-fix`][terraform-lint-fix] | Linting | Lints and fixes Terraform files with advanced validation and... | Token auth, Outputs |
@@ -74,7 +75,7 @@ This repository contains **25 reusable GitHub Actions** for CI/CD automation.
|:-----------------------------------------------|:------------------------------------------------------|:---------------------------------------------|:---------------------------------------------|
| 📦 [`ansible-lint-fix`][ansible-lint-fix] | Lints and fixes Ansible playbooks, commits changes... | Ansible, YAML | Caching, Token auth, Outputs |
| ✅ [`biome-lint`][biome-lint] | Run Biome linter in check or fix mode | JavaScript, TypeScript, JSON | Caching, Auto-detection, Token auth, Outputs |
| 📝 [`csharp-lint-check`][csharp-lint-check] | Runs linters like StyleCop or dotnet-format for C#... | C#, .NET | Auto-detection, Token auth, Outputs |
| 📝 [`csharp-lint-check`][csharp-lint-check] | Runs linters like StyleCop or dotnet-format for C#... | C#, .NET | Caching, Auto-detection, Token auth, Outputs |
| ✅ [`eslint-lint`][eslint-lint] | Run ESLint in check or fix mode with advanced conf... | JavaScript, TypeScript | Caching, Auto-detection, Token auth, Outputs |
| 📝 [`go-lint`][go-lint] | Run golangci-lint with advanced configuration, cac... | Go | Caching, Token auth, Outputs |
| ✅ [`pr-lint`][pr-lint] | Runs MegaLinter against pull requests | Conventional Commits | Caching, Auto-detection, Token auth, Outputs |
@@ -115,6 +116,12 @@ This repository contains **25 reusable GitHub Actions** for CI/CD automation.
| 📦 [`stale`][stale] | A GitHub Action to close stale issues and pull req... | GitHub Actions | Token auth, Outputs |
| 🏷️ [`sync-labels`][sync-labels] | Sync labels from a YAML file to a GitHub repositor... | YAML, GitHub | Token auth, Outputs |

#### 🛡️ Security (1 action)

| Action | Description | Languages | Features |
|:-------------------------------------|:------------------------------------------------------|:----------|:-----------------------------|
| 🛡️ [`security-scan`][security-scan] | Comprehensive security scanning for GitHub Actions... | - | Caching, Token auth, Outputs |

#### ✅ Validation (1 action)

| Action | Description | Languages | Features |
@@ -131,7 +138,7 @@ This repository contains **25 reusable GitHub Actions** for CI/CD automation.
| [`codeql-analysis`][codeql-analysis] | - | ✅ | ✅ | ✅ |
| [`compress-images`][compress-images] | - | - | ✅ | ✅ |
| [`csharp-build`][csharp-build] | ✅ | ✅ | ✅ | ✅ |
| [`csharp-lint-check`][csharp-lint-check] | - | ✅ | ✅ | ✅ |
| [`csharp-lint-check`][csharp-lint-check] | ✅ | ✅ | ✅ | ✅ |
| [`csharp-publish`][csharp-publish] | ✅ | ✅ | ✅ | ✅ |
| [`docker-build`][docker-build] | ✅ | ✅ | ✅ | ✅ |
| [`docker-publish`][docker-publish] | - | - | ✅ | ✅ |
@@ -146,6 +153,7 @@ This repository contains **25 reusable GitHub Actions** for CI/CD automation.
| [`prettier-lint`][prettier-lint] | ✅ | ✅ | ✅ | ✅ |
| [`python-lint-fix`][python-lint-fix] | ✅ | ✅ | ✅ | ✅ |
| [`release-monthly`][release-monthly] | - | - | ✅ | ✅ |
| [`security-scan`][security-scan] | ✅ | - | ✅ | ✅ |
| [`stale`][stale] | - | - | ✅ | ✅ |
| [`sync-labels`][sync-labels] | - | - | ✅ | ✅ |
| [`terraform-lint-fix`][terraform-lint-fix] | - | - | ✅ | ✅ |
@@ -224,6 +232,7 @@ All actions can be used independently in your workflows:
[prettier-lint]: prettier-lint/README.md
[python-lint-fix]: python-lint-fix/README.md
[release-monthly]: release-monthly/README.md
[security-scan]: security-scan/README.md
[stale]: stale/README.md
[sync-labels]: sync-labels/README.md
[terraform-lint-fix]: terraform-lint-fix/README.md
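The README catalog above documents external use of these actions; a minimal sketch of consuming one from another repository, in the `ivuorinen/actions/<action-name>@main` form the project describes, might look like this (the workflow name, trigger, choice of `sync-labels`, and the `token` input name are illustrative assumptions):

```yaml
name: labels
on:
  push:
    branches: [main]
jobs:
  sync:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
      - uses: ivuorinen/actions/sync-labels@main # external usage form from the docs
        with:
          token: ${{ secrets.GITHUB_TOKEN }} # assumption: input name per the action's README
```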
@@ -6,8 +6,8 @@ set -euo pipefail

# Source setup utilities
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# shellcheck source=_tests/framework/setup.sh
# shellcheck disable=SC1091
source "${SCRIPT_DIR}/setup.sh"

# Action testing utilities
@@ -57,6 +57,13 @@ get_action_name() {
uv run "$script_dir/../shared/validation_core.py" --name "$action_file"
}

get_action_runs_using() {
local action_file="$1"
local script_dir
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
uv run "$script_dir/../shared/validation_core.py" --runs-using "$action_file"
}

# Check if an input is required in an action.yml file
is_input_required() {
local action_file="$1"
@@ -69,7 +76,7 @@ is_input_required() {
required_status=$(uv run "$script_dir/../shared/validation_core.py" --property "$action_file" "$input_name" "required")

# Return 0 (success) if input is required, 1 (failure) if optional
[[ $required_status == "required" ]]
[[ "$required_status" == "required" ]]
}

# Test input validation using Python validation module
@@ -363,5 +370,5 @@ run_action_tests() {
}

# Export all functions
export -f validate_action_yml get_action_inputs get_action_outputs get_action_name is_input_required
export -f validate_action_yml get_action_inputs get_action_outputs get_action_name get_action_runs_using is_input_required
export -f test_input_validation test_action_outputs test_external_usage measure_action_time run_action_tests
@@ -21,6 +21,9 @@ import sys

import yaml  # pylint: disable=import-error

# Default value for unknown action names (matches shared.validation_core.DEFAULT_UNKNOWN)
_DEFAULT_UNKNOWN = "Unknown"


class ActionValidator:
"""Handles validation of GitHub Action inputs using Python regex engine."""
@@ -86,7 +89,7 @@
return True, ""

# Check for environment variable reference (e.g., $GITHUB_TOKEN)
if re.match(r"^\$[A-Za-z_][A-Za-z0-9_]*$", token):
if re.match(r"^\$[A-Za-z_]\w*$", token, re.ASCII):
return True, ""

# Check against all known token patterns
@@ -330,16 +333,16 @@ def get_action_name(action_file: str) -> str:
action_file: Path to the action.yml file

Returns:
Action name or "Unknown" if not found
Action name or _DEFAULT_UNKNOWN if not found
"""
try:
with Path(action_file).open(encoding="utf-8") as f:
data = yaml.safe_load(f)

return data.get("name", "Unknown")
return data.get("name", _DEFAULT_UNKNOWN)

except Exception:
return "Unknown"
return _DEFAULT_UNKNOWN


def _show_usage():
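The tightened pattern above swaps the explicit trailing character class for `\w` under `re.ASCII`, which keeps matching exactly the same `$ENV_VAR`-style references; a small standalone sketch (not the repository's code) to check the equivalence:

```python
import re

# Old and new environment-variable reference patterns; re.ASCII limits \w to [A-Za-z0-9_].
OLD = re.compile(r"^\$[A-Za-z_][A-Za-z0-9_]*$")
NEW = re.compile(r"^\$[A-Za-z_]\w*$", re.ASCII)

for token in ("$GITHUB_TOKEN", "$_secret_1", "$1bad", "plain-text", "$"):
    assert bool(OLD.match(token)) == bool(NEW.match(token))
```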
@@ -25,6 +25,9 @@ from typing import Any

import yaml  # pylint: disable=import-error

# Default value for unknown items (used by ActionFileParser)
DEFAULT_UNKNOWN = "Unknown"


class ValidationCore:
"""Core validation functionality with standardized patterns and functions."""
@@ -497,9 +500,9 @@ class ActionFileParser:
"""Get the action name from an action.yml file."""
try:
data = ActionFileParser.load_action_file(action_file)
return data.get("name", "Unknown")
return data.get("name", DEFAULT_UNKNOWN)
except (OSError, ValueError, yaml.YAMLError, AttributeError):
return "Unknown"
return DEFAULT_UNKNOWN

@staticmethod
def get_action_inputs(action_file: str) -> list[str]:
@@ -521,6 +524,16 @@ class ActionFileParser:
except (OSError, ValueError, yaml.YAMLError, AttributeError):
return []

@staticmethod
def get_action_runs_using(action_file: str) -> str:
"""Get the runs.using value from an action.yml file."""
try:
data = ActionFileParser.load_action_file(action_file)
runs = data.get("runs", {})
return runs.get("using", "unknown")
except (OSError, ValueError, yaml.YAMLError, AttributeError):
return "unknown"

@staticmethod
def _get_required_property(input_data: dict, property_name: str) -> str:
"""Get the required/optional property."""
@@ -787,6 +800,11 @@ Examples:
mode_group.add_argument("--inputs", metavar="ACTION_FILE", help="List action inputs")
mode_group.add_argument("--outputs", metavar="ACTION_FILE", help="List action outputs")
mode_group.add_argument("--name", metavar="ACTION_FILE", help="Get action name")
mode_group.add_argument(
"--runs-using",
metavar="ACTION_FILE",
help="Get action runs.using value",
)
mode_group.add_argument(
"--validate-yaml",
metavar="YAML_FILE",
@@ -834,6 +852,12 @@ def _handle_name_command(args):
print(name)


def _handle_runs_using_command(args):
"""Handle the runs-using command."""
runs_using = ActionFileParser.get_action_runs_using(args.runs_using)
print(runs_using)


def _handle_validate_yaml_command(args):
"""Handle the validate-yaml command."""
try:
@@ -853,6 +877,7 @@ def _execute_command(args):
"inputs": _handle_inputs_command,
"outputs": _handle_outputs_command,
"name": _handle_name_command,
"runs_using": _handle_runs_using_command,
"validate_yaml": _handle_validate_yaml_command,
}
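With `--runs-using` added to the mode group and wired into the command table above, the parser can be exercised directly from the shell; a minimal sketch, assuming the script lives at `_tests/shared/validation_core.py` as the shell wrapper's relative path suggests:

```sh
# Print the runs.using value of an action definition
# (prints "composite" for the security-scan action).
uv run _tests/shared/validation_core.py --runs-using security-scan/action.yml
```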
116  _tests/unit/security-scan/validation.spec.sh  Executable file
@@ -0,0 +1,116 @@
#!/usr/bin/env shellspec
# Unit tests for security-scan action validation and logic
# Framework is automatically loaded via spec_helper.sh

Describe "security-scan action"
  ACTION_DIR="security-scan"
  ACTION_FILE="$ACTION_DIR/action.yml"

  Context "when validating token input"
    It "accepts valid GitHub token"
      When call validate_input_python "security-scan" "token" "ghp_123456789012345678901234567890123456"
      The status should be success
    End

    It "rejects injection in token"
      When call validate_input_python "security-scan" "token" "token; rm -rf /"
      The status should be failure
    End

    It "accepts empty token (optional)"
      When call validate_input_python "security-scan" "token" ""
      The status should be success
    End
  End

  Context "when validating actionlint-enabled input"
    It "accepts true value"
      When call validate_input_python "security-scan" "actionlint-enabled" "true"
      The status should be success
    End

    It "accepts false value"
      When call validate_input_python "security-scan" "actionlint-enabled" "false"
      The status should be success
    End

    It "rejects non-boolean value"
      When call validate_input_python "security-scan" "actionlint-enabled" "maybe"
      The status should be failure
    End
  End

  Context "when checking action.yml structure"
    It "has valid YAML syntax"
      When call validate_action_yml_quiet "$ACTION_FILE"
      The status should be success
    End

    It "has correct action name"
      name=$(get_action_name "$ACTION_FILE")
      When call echo "$name"
      The output should equal "Security Scan"
    End

    It "defines all expected inputs"
      inputs=$(get_action_inputs "$ACTION_FILE")
      When call echo "$inputs"
      The output should include "gitleaks-license"
      The output should include "gitleaks-config"
      The output should include "trivy-severity"
      The output should include "trivy-scanners"
      The output should include "trivy-timeout"
      The output should include "actionlint-enabled"
      The output should include "token"
    End

    It "defines all expected outputs"
      outputs=$(get_action_outputs "$ACTION_FILE")
      When call echo "$outputs"
      The output should include "has_trivy_results"
      The output should include "has_gitleaks_results"
      The output should include "total_issues"
      The output should include "critical_issues"
    End

    It "uses composite run type"
      run_type=$(get_action_runs_using "$ACTION_FILE")
      When call echo "$run_type"
      The output should equal "composite"
    End
  End

  Context "when validating inputs per conventions"
    It "validates token against github_token convention"
      When call validate_input_python "security-scan" "token" "ghp_123456789012345678901234567890123456"
      The status should be success
    End

    It "validates actionlint-enabled as boolean"
      When call validate_input_python "security-scan" "actionlint-enabled" "true"
      The status should be success
    End

    It "rejects invalid boolean for actionlint-enabled"
      When call validate_input_python "security-scan" "actionlint-enabled" "1"
      The status should be failure
    End
  End

  Context "when testing optional inputs"
    It "accepts empty gitleaks-license"
      When call validate_input_python "security-scan" "gitleaks-license" ""
      The status should be success
    End

    It "accepts empty token"
      When call validate_input_python "security-scan" "token" ""
      The status should be success
    End

    It "accepts valid gitleaks-license value"
      When call validate_input_python "security-scan" "gitleaks-license" "license-key-123"
      The status should be success
    End
  End
End
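The spec above is a plain ShellSpec suite; a minimal sketch of running just this file locally, assuming ShellSpec is installed (the project's `make test` target wraps the same runner per the docs above):

```sh
# Run only the security-scan unit specs; omit the path to run the whole suite.
shellspec _tests/unit/security-scan/validation.spec.sh
```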
@@ -76,11 +76,7 @@ if ! git diff --quiet; then
git commit -m "chore: bump major version from $OLD_VERSION to $NEW_VERSION

This commit updates all internal action references from $OLD_VERSION
to $NEW_VERSION.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>"
to $NEW_VERSION."

printf '%b' "${GREEN}✅ Committed version bump${NC}\n"
else
@@ -95,7 +95,7 @@ runs:
find . -maxdepth 2 -name "action.yml" -path "*/action.yml" ! -path "./_*" ! -path "./.github/*" -exec grep -h "uses: ivuorinen/actions/" {} \; > "$temp_file"

while IFS= read -r line; do
current_sha=$(echo "$line" | grep -oE '@[a-f0-9]{40}' | sed 's/@//')
current_sha=$(printf '%s' "$line" | grep -oE '@[a-f0-9]{40}' | sed 's/@//')

if [ "$current_sha" != "$TAG_SHA" ]; then
echo "Found outdated reference: $current_sha (should be $TAG_SHA)"
@@ -153,11 +153,7 @@ runs:
git commit -m "chore: update action references to $MAJOR_VERSION ($TAG_SHA)" \
-m "" \
-m "This commit updates all internal action references to point to the latest" \
-m "$MAJOR_VERSION tag SHA." \
-m "" \
-m "🤖 Generated with [Claude Code](https://claude.com/claude-code)" \
-m "" \
-m "Co-Authored-By: Claude <noreply@anthropic.com>"
-m "$MAJOR_VERSION tag SHA."

commit_sha=$(git rev-parse HEAD)
printf '%s\n' "sha=$commit_sha" >> "$GITHUB_OUTPUT"
@@ -45,7 +45,7 @@ runs:
steps:
- name: Validate Inputs
id: validate
uses: ivuorinen/actions/validate-inputs@8fb52522ab00fe73cf181ef299e56066f0b2c8d8
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
with:
action-type: 'ansible-lint-fix'
token: ${{ inputs.token }}
@@ -75,15 +75,15 @@ runs:

- name: Setup Python
if: steps.check-files.outputs.files_found == 'true'
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: '3.11'
python-version: '3.14'
cache: 'pip'

- name: Install ansible-lint
id: install-ansible-lint
if: steps.check-files.outputs.files_found == 'true'
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3.0.4
with:
timeout_minutes: 5
max_attempts: ${{ inputs.max-retries }}
@@ -122,7 +122,7 @@ runs:

- name: Commit Fixes
if: steps.check-files.outputs.files_found == 'true'
uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
with:
commit_message: 'style: apply ansible lint fixes'
commit_user_name: ${{ inputs.username }}
@@ -130,6 +130,6 @@ runs:

- name: Upload SARIF Report
if: steps.check-files.outputs.files_found == 'true'
uses: github/codeql-action/upload-sarif@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
sarif_file: ansible-lint.sarif
@@ -181,9 +181,9 @@ runs:
echo "Detected package manager: $package_manager"

- name: Setup Node.js
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: '22'
node-version: '24'

- name: Enable Corepack
shell: sh
@@ -218,7 +218,7 @@ runs:

- name: Cache Node Dependencies
id: cache
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: node_modules
key: ${{ runner.os }}-biome-lint-${{ inputs.mode }}-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}
@@ -331,7 +331,7 @@ runs:

- name: Upload SARIF Report
if: inputs.mode == 'check' && always()
uses: github/codeql-action/upload-sarif@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
sarif_file: biome-report.sarif
@@ -365,7 +365,7 @@ runs:

- name: Commit and Push Fixes
if: inputs.mode == 'fix' && success()
uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
with:
commit_message: 'style: autofix Biome violations'
commit_user_name: ${{ inputs.username }}
@@ -28,7 +28,8 @@ conventions:
mode: mode_enum
token: github_token
username: username
overrides: {}
overrides:
mode: mode_enum
statistics:
total_inputs: 6
validated_inputs: 6
@@ -81,21 +81,13 @@ class CustomValidator(BaseValidator):

# Validate threads
if inputs.get("threads"):
result = self.codeql_validator.validate_threads(inputs["threads"])
for error in self.codeql_validator.errors:
if error not in self.errors:
self.add_error(error)
self.codeql_validator.clear_errors()
valid &= result
valid &= self.validate_with(
self.codeql_validator, "validate_threads", inputs["threads"]
)

# Validate RAM
if inputs.get("ram"):
result = self.codeql_validator.validate_ram(inputs["ram"])
for error in self.codeql_validator.errors:
if error not in self.errors:
self.add_error(error)
self.codeql_validator.clear_errors()
valid &= result
valid &= self.validate_with(self.codeql_validator, "validate_ram", inputs["ram"])

# Validate debug mode
if inputs.get("debug"):
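The hunk above collapses the repeated copy-errors-and-clear boilerplate into a single `validate_with` call on the base class. The helper's body is not shown in this diff; a minimal sketch of what such a method could look like, assuming `BaseValidator` keeps an `errors` list with `add_error`/`clear_errors`, is:

```python
class BaseValidator:
    """Hypothetical sketch of the shared validator base; not the repository's actual implementation."""

    def __init__(self) -> None:
        self.errors: list[str] = []

    def add_error(self, error: str) -> None:
        self.errors.append(error)

    def clear_errors(self) -> None:
        self.errors.clear()

    def validate_with(self, validator: "BaseValidator", method: str, *args) -> bool:
        # Run the named method on a sub-validator, merge its errors into this
        # validator without duplicates, then reset the sub-validator's error list.
        result: bool = getattr(validator, method)(*args)
        for error in validator.errors:
            if error not in self.errors:
                self.add_error(error)
        validator.clear_errors()
        return result
```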
@@ -226,19 +218,10 @@ class CustomValidator(BaseValidator):
Returns:
True if valid, False otherwise
"""
# Check for empty queries first
if not queries or not queries.strip():
self.add_error("CodeQL queries cannot be empty")
return False

# Use the CodeQL validator
result = self.codeql_validator.validate_codeql_queries(queries)
# Copy any errors from codeql validator
for error in self.codeql_validator.errors:
if error not in self.errors:
self.add_error(error)
self.codeql_validator.clear_errors()
return result
return self.validate_with(self.codeql_validator, "validate_codeql_queries", queries)

def validate_categories(self, categories: str) -> bool:
"""Validate CodeQL categories.
@@ -249,14 +232,7 @@ class CustomValidator(BaseValidator):
Returns:
True if valid, False otherwise
"""
# Use the CodeQL validator
result = self.codeql_validator.validate_category_format(categories)
# Copy any errors from codeql validator
for error in self.codeql_validator.errors:
if error not in self.errors:
self.add_error(error)
self.codeql_validator.clear_errors()
return result
return self.validate_with(self.codeql_validator, "validate_category_format", categories)

def validate_category(self, category: str) -> bool:
"""Validate CodeQL category (singular).
@@ -267,14 +243,7 @@ class CustomValidator(BaseValidator):
Returns:
True if valid, False otherwise
"""
# Use the CodeQL validator
result = self.codeql_validator.validate_category_format(category)
# Copy any errors from codeql validator
for error in self.codeql_validator.errors:
if error not in self.errors:
self.add_error(error)
self.codeql_validator.clear_errors()
return result
return self.validate_with(self.codeql_validator, "validate_category_format", category)

def validate_config_file(self, config_file: str) -> bool:
"""Validate CodeQL configuration file path.
@@ -287,21 +256,11 @@ class CustomValidator(BaseValidator):
"""
if not config_file or not config_file.strip():
return True

# Allow GitHub Actions expressions
if self.is_github_expression(config_file):
return True

# Use FileValidator for yaml file validation
result = self.file_validator.validate_yaml_file(config_file, "config-file")

# Copy any errors from file validator
for error in self.file_validator.errors:
if error not in self.errors:
self.add_error(error)
self.file_validator.clear_errors()

return result
return self.validate_with(
self.file_validator, "validate_yaml_file", config_file, "config-file"
)

def validate_database(self, database: str) -> bool:
"""Validate CodeQL database path.
@@ -312,25 +271,13 @@ class CustomValidator(BaseValidator):
Returns:
True if valid, False otherwise
"""
# Allow GitHub Actions expressions
if self.is_github_expression(database):
return True

# Use FileValidator for path validation
result = self.file_validator.validate_file_path(database, "database")

# Copy any errors from file validator
for error in self.file_validator.errors:
if error not in self.errors:
self.add_error(error)
self.file_validator.clear_errors()

result = self.validate_with(self.file_validator, "validate_file_path", database, "database")
# Database paths often contain the language
# e.g., "codeql-database/javascript" or "/tmp/codeql_databases/python"
# Just validate it's a reasonable path after basic validation
if result and database.startswith("/tmp/"):  # noqa: S108
return True

return result

def validate_debug(self, debug: str) -> bool:
@@ -342,20 +289,9 @@ class CustomValidator(BaseValidator):
Returns:
True if valid, False otherwise
"""
# Allow GitHub Actions expressions
if self.is_github_expression(debug):
return True

# Use BooleanValidator
result = self.boolean_validator.validate_boolean(debug, "debug")

# Copy any errors from boolean validator
for error in self.boolean_validator.errors:
if error not in self.errors:
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(self.boolean_validator, "validate_boolean", debug, "debug")
|
||||
|
||||
def validate_upload_database(self, upload: str) -> bool:
|
||||
"""Validate upload-database setting.
|
||||
@@ -366,20 +302,11 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(upload):
|
||||
return True
|
||||
|
||||
# Use BooleanValidator
|
||||
result = self.boolean_validator.validate_boolean(upload, "upload-database")
|
||||
|
||||
# Copy any errors from boolean validator
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.boolean_validator, "validate_boolean", upload, "upload-database"
|
||||
)
|
||||
|
||||
def validate_upload_sarif(self, upload: str) -> bool:
|
||||
"""Validate upload-sarif setting.
|
||||
@@ -390,20 +317,11 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(upload):
|
||||
return True
|
||||
|
||||
# Use BooleanValidator
|
||||
result = self.boolean_validator.validate_boolean(upload, "upload-sarif")
|
||||
|
||||
# Copy any errors from boolean validator
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.boolean_validator, "validate_boolean", upload, "upload-sarif"
|
||||
)
|
||||
|
||||
def validate_packs(self, packs: str) -> bool:
|
||||
"""Validate CodeQL packs.
|
||||
@@ -487,16 +405,9 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Use the TokenValidator for proper validation
|
||||
result = self.token_validator.validate_github_token(token, required=False)
|
||||
|
||||
# Copy any errors from token validator
|
||||
for error in self.token_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.token_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.token_validator, "validate_github_token", token, required=False
|
||||
)
|
||||
|
||||
def validate_token(self, token: str) -> bool:
|
||||
"""Validate GitHub token.
|
||||
@@ -507,21 +418,12 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Check for empty token
|
||||
if not token or not token.strip():
|
||||
self.add_error("Input 'token' is missing or empty")
|
||||
return False
|
||||
|
||||
# Use the TokenValidator for proper validation
|
||||
result = self.token_validator.validate_github_token(token, required=True)
|
||||
|
||||
# Copy any errors from token validator
|
||||
for error in self.token_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.token_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.token_validator, "validate_github_token", token, required=True
|
||||
)
|
||||
|
||||
def validate_working_directory(self, directory: str) -> bool:
|
||||
"""Validate working directory path.
|
||||
@@ -532,20 +434,11 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(directory):
|
||||
return True
|
||||
|
||||
# Use FileValidator for path validation
|
||||
result = self.file_validator.validate_file_path(directory, "working-directory")
|
||||
|
||||
# Copy any errors from file validator
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.file_validator, "validate_file_path", directory, "working-directory"
|
||||
)
|
||||
|
||||
def validate_upload_results(self, value: str) -> bool:
|
||||
"""Validate upload-results boolean value.
|
||||
@@ -556,27 +449,14 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Check for empty
|
||||
if not value or not value.strip():
|
||||
self.add_error("upload-results cannot be empty")
|
||||
return False
|
||||
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(value):
|
||||
return True
|
||||
|
||||
# Check for uppercase TRUE/FALSE first
|
||||
if value in ["TRUE", "FALSE"]:
|
||||
self.add_error("Must be lowercase 'true' or 'false'")
|
||||
return False
|
||||
|
||||
# Use BooleanValidator for normal validation
|
||||
result = self.boolean_validator.validate_boolean(value, "upload-results")
|
||||
|
||||
# Copy any errors from boolean validator
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.boolean_validator, "validate_boolean", value, "upload-results"
|
||||
)
|
||||
|
||||
@@ -107,7 +107,7 @@ runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Validate inputs
|
||||
uses: ivuorinen/actions/validate-inputs@8fb52522ab00fe73cf181ef299e56066f0b2c8d8
|
||||
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
|
||||
with:
|
||||
action-type: codeql-analysis
|
||||
language: ${{ inputs.language }}
|
||||
@@ -186,7 +186,7 @@ runs:
|
||||
echo "Using build mode: $build_mode"
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
|
||||
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
with:
|
||||
languages: ${{ inputs.language }}
|
||||
queries: ${{ inputs.queries }}
|
||||
@@ -199,12 +199,12 @@ runs:
|
||||
threads: ${{ inputs.threads }}
|
||||
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
|
||||
uses: github/codeql-action/autobuild@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
if: ${{ steps.set-build-mode.outputs.build-mode == 'autobuild' }}
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
id: analysis
|
||||
uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
|
||||
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
with:
|
||||
category: ${{ steps.set-category.outputs.category }}
|
||||
upload: ${{ inputs.upload-results }}
|
||||
|
||||
@@ -42,7 +42,7 @@ conventions:
|
||||
packs: codeql_packs
|
||||
queries: codeql_queries
|
||||
ram: numeric_range_256_32768
|
||||
skip-queries: codeql_queries
|
||||
skip-queries: boolean
|
||||
source-root: file_path
|
||||
threads: numeric_range_1_128
|
||||
token: github_token
|
||||
@@ -51,6 +51,7 @@ overrides:
|
||||
build-mode: codeql_build_mode
|
||||
category: category_format
|
||||
config: codeql_config
|
||||
language: codeql_language
|
||||
output: file_path
|
||||
packs: codeql_packs
|
||||
queries: codeql_queries
|
||||
|
||||
@@ -36,47 +36,35 @@ class CustomValidator(BaseValidator):
|
||||
|
||||
# Validate optional inputs
|
||||
if inputs.get("image-quality"):
|
||||
result = self.numeric_validator.validate_numeric_range(
|
||||
inputs["image-quality"], min_val=0, max_val=100
|
||||
valid &= self.validate_with(
|
||||
self.numeric_validator,
|
||||
"validate_numeric_range",
|
||||
inputs["image-quality"],
|
||||
min_val=0,
|
||||
max_val=100,
|
||||
)
|
||||
for error in self.numeric_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.numeric_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
if inputs.get("png-quality"):
|
||||
result = self.numeric_validator.validate_numeric_range(
|
||||
inputs["png-quality"], min_val=0, max_val=100
|
||||
valid &= self.validate_with(
|
||||
self.numeric_validator,
|
||||
"validate_numeric_range",
|
||||
inputs["png-quality"],
|
||||
min_val=0,
|
||||
max_val=100,
|
||||
)
|
||||
for error in self.numeric_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.numeric_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
if inputs.get("directory"):
|
||||
result = self.file_validator.validate_file_path(inputs["directory"], "directory")
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.file_validator, "validate_file_path", inputs["directory"], "directory"
|
||||
)
|
||||
|
||||
if inputs.get("ignore-paths"):
|
||||
# Validate for injection
|
||||
result = self.security_validator.validate_no_injection(
|
||||
inputs["ignore-paths"], "ignore-paths"
|
||||
valid &= self.validate_with(
|
||||
self.security_validator,
|
||||
"validate_no_injection",
|
||||
inputs["ignore-paths"],
|
||||
"ignore-paths",
|
||||
)
|
||||
for error in self.security_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.security_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
return valid
|
||||
|
||||
|
||||
@@ -163,7 +163,7 @@ runs:
|
||||
|
||||
- name: Create New Pull Request If Needed
|
||||
if: steps.calibre.outputs.markdown != ''
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
|
||||
with:
|
||||
token: ${{ inputs.token }}
|
||||
title: 'chore: compress images'
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
# Validation rules for compress-images action
|
||||
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
|
||||
# Schema version: 1.0
|
||||
# Coverage: 86% (6/7 inputs)
|
||||
# Coverage: 100% (7/7 inputs)
|
||||
#
|
||||
# This file defines validation rules for the compress-images GitHub Action.
|
||||
# Rules are automatically applied by validate-inputs action when this
|
||||
@@ -24,6 +24,7 @@ optional_inputs:
|
||||
- working-directory
|
||||
conventions:
|
||||
email: email
|
||||
ignore-paths: path_list
|
||||
image-quality: numeric_range_0_100
|
||||
png-quality: numeric_range_0_100
|
||||
token: github_token
|
||||
@@ -32,10 +33,10 @@ conventions:
|
||||
overrides: {}
|
||||
statistics:
|
||||
total_inputs: 7
|
||||
validated_inputs: 6
|
||||
validated_inputs: 7
|
||||
skipped_inputs: 0
|
||||
coverage_percentage: 86
|
||||
validation_coverage: 86
|
||||
coverage_percentage: 100
|
||||
validation_coverage: 100
|
||||
auto_detected: true
|
||||
manual_review_required: false
|
||||
quality_indicators:
|
||||
|
||||
@@ -148,14 +148,14 @@ runs:
|
||||
echo "Final detected .NET version: $detected_version" >&2
|
||||
|
||||
- name: Setup .NET SDK
|
||||
uses: actions/setup-dotnet@d4c94342e560b34958eacfc5d055d21461ed1c5d # v5.0.0
|
||||
uses: actions/setup-dotnet@2016bd2012dba4e32de620c46fe006a3ac9f0602 # v5.0.1
|
||||
with:
|
||||
dotnet-version: ${{ steps.detect-dotnet-version.outputs.detected-version }}
|
||||
cache: true
|
||||
cache-dependency-path: '**/packages.lock.json'
|
||||
|
||||
- name: Restore Dependencies
|
||||
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3
|
||||
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3.0.4
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: ${{ inputs.max-retries }}
|
||||
@@ -203,7 +203,7 @@ runs:
|
||||
|
||||
- name: Upload Test Results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: csharp-test-results
|
||||
path: |
|
||||
|
||||
@@ -164,7 +164,7 @@ runs:
|
||||
echo "Final detected .NET version: $detected_version" >&2
|
||||
|
||||
- name: Setup .NET SDK
|
||||
uses: actions/setup-dotnet@d4c94342e560b34958eacfc5d055d21461ed1c5d # v5.0.0
|
||||
uses: actions/setup-dotnet@2016bd2012dba4e32de620c46fe006a3ac9f0602 # v5.0.1
|
||||
with:
|
||||
dotnet-version: ${{ steps.detect-dotnet-version.outputs.detected-version }}
|
||||
cache: true
|
||||
@@ -206,6 +206,6 @@ runs:
|
||||
fi
|
||||
|
||||
- name: Upload SARIF Report
|
||||
uses: github/codeql-action/upload-sarif@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
|
||||
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
with:
|
||||
sarif_file: dotnet-format.sarif
|
||||
|
||||
@@ -55,7 +55,7 @@ runs:
|
||||
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
uses: ivuorinen/actions/validate-inputs@8fb52522ab00fe73cf181ef299e56066f0b2c8d8
|
||||
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
|
||||
with:
|
||||
action-type: 'csharp-publish'
|
||||
token: ${{ inputs.token }}
|
||||
@@ -162,14 +162,14 @@ runs:
|
||||
echo "Final detected .NET version: $detected_version" >&2
|
||||
|
||||
- name: Setup .NET SDK
|
||||
uses: actions/setup-dotnet@d4c94342e560b34958eacfc5d055d21461ed1c5d # v5.0.0
|
||||
uses: actions/setup-dotnet@2016bd2012dba4e32de620c46fe006a3ac9f0602 # v5.0.1
|
||||
with:
|
||||
dotnet-version: ${{ inputs.dotnet-version || steps.detect-dotnet-version.outputs.detected-version }}
|
||||
cache: true
|
||||
cache-dependency-path: '**/packages.lock.json'
|
||||
|
||||
- name: Restore Dependencies
|
||||
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3
|
||||
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3.0.4
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: ${{ inputs.max-retries }}
|
||||
|
||||
@@ -65,35 +65,24 @@ class CustomValidator(BaseValidator):
|
||||
|
||||
# Validate image name
|
||||
if inputs.get("image-name"):
|
||||
result = self.docker_validator.validate_image_name(inputs["image-name"], "image-name")
|
||||
# Propagate errors from docker validator
|
||||
for error in self.docker_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.docker_validator.clear_errors()
|
||||
valid &= result
|
||||
valid &= self.validate_with(
|
||||
self.docker_validator, "validate_image_name", inputs["image-name"], "image-name"
|
||||
)
|
||||
|
||||
# Validate tag (singular - as per action.yml)
|
||||
if inputs.get("tag"):
|
||||
result = self.docker_validator.validate_docker_tag(inputs["tag"], "tag")
|
||||
# Propagate errors
|
||||
for error in self.docker_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.docker_validator.clear_errors()
|
||||
valid &= result
|
||||
valid &= self.validate_with(
|
||||
self.docker_validator, "validate_docker_tag", inputs["tag"], "tag"
|
||||
)
|
||||
|
||||
# Validate architectures/platforms
|
||||
if inputs.get("architectures"):
|
||||
result = self.docker_validator.validate_architectures(
|
||||
inputs["architectures"], "architectures"
|
||||
valid &= self.validate_with(
|
||||
self.docker_validator,
|
||||
"validate_architectures",
|
||||
inputs["architectures"],
|
||||
"architectures",
|
||||
)
|
||||
# Propagate errors
|
||||
for error in self.docker_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.docker_validator.clear_errors()
|
||||
valid &= result
|
||||
|
||||
# Validate build arguments
|
||||
if inputs.get("build-args"):
|
||||
@@ -101,12 +90,9 @@ class CustomValidator(BaseValidator):

# Validate push flag
if inputs.get("push"):
result = self.boolean_validator.validate_optional_boolean(inputs["push"], "push")
for error in self.boolean_validator.errors:
if error not in self.errors:
self.add_error(error)
self.boolean_validator.clear_errors()
valid &= result
valid &= self.validate_with(
self.boolean_validator, "validate_optional_boolean", inputs["push"], "push"
)

# Validate cache settings
if inputs.get("cache-from"):
@@ -117,22 +103,35 @@ class CustomValidator(BaseValidator):

# Validate cache-mode
if inputs.get("cache-mode"):
valid &= self.validate_cache_mode(inputs["cache-mode"])
valid &= self.validate_enum(
inputs["cache-mode"],
"cache-mode",
["min", "max", "inline"],
case_sensitive=True,
)
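
cache-mode (and, further down, sbom-format) now goes through a shared `validate_enum` helper instead of a bespoke per-field method. The helper is not shown in this diff; a minimal sketch consistent with the call sites (value, input name, allowed choices, optional `case_sensitive` flag), assuming the error API of the BaseValidator sketched earlier:

```python
class EnumValidationMixin:
    # Hypothetical sketch, inferred from the call sites in this diff; assumes the
    # add_error/is_github_expression API used elsewhere in these validators.
    def validate_enum(self, value: str, name: str, allowed: list[str],
                      case_sensitive: bool = False) -> bool:
        if self.is_github_expression(value):
            return True  # defer ${{ ... }} expressions to runtime
        candidate = value if case_sensitive else value.lower()
        choices = allowed if case_sensitive else [a.lower() for a in allowed]
        if candidate not in choices:
            self.add_error(f"Invalid {name}: {value}. Must be one of: {', '.join(allowed)}")
            return False
        return True
```

With `case_sensitive=True` the old `cache_mode.lower()` normalisation goes away, so uppercase values such as `MAX` are presumably rejected rather than silently accepted.
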
# Validate buildx-version
if inputs.get("buildx-version"):
valid &= self.validate_buildx_version(inputs["buildx-version"])
version = inputs["buildx-version"]
# Allow 'latest' as special value
if version != "latest" and not self.is_github_expression(version):
valid &= self.validate_with(
self.version_validator,
"validate_semantic_version",
version,
"buildx-version",
)

# Validate parallel-builds
if inputs.get("parallel-builds"):
result = self.numeric_validator.validate_numeric_range(
inputs["parallel-builds"], min_val=0, max_val=16, name="parallel-builds"
valid &= self.validate_with(
self.numeric_validator,
"validate_numeric_range",
inputs["parallel-builds"],
min_val=0,
max_val=16,
name="parallel-builds",
)
for error in self.numeric_validator.errors:
if error not in self.errors:
self.add_error(error)
self.numeric_validator.clear_errors()
valid &= result

# Validate boolean flags
for bool_input in [
@@ -144,29 +143,32 @@ class CustomValidator(BaseValidator):
|
||||
"auto-detect-platforms",
|
||||
]:
|
||||
if inputs.get(bool_input):
|
||||
result = self.boolean_validator.validate_optional_boolean(
|
||||
inputs[bool_input], bool_input
|
||||
valid &= self.validate_with(
|
||||
self.boolean_validator,
|
||||
"validate_optional_boolean",
|
||||
inputs[bool_input],
|
||||
bool_input,
|
||||
)
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
valid &= result
|
||||
|
||||
# Validate sbom-format
|
||||
if inputs.get("sbom-format"):
|
||||
valid &= self.validate_sbom_format(inputs["sbom-format"])
|
||||
valid &= self.validate_enum(
|
||||
inputs["sbom-format"],
|
||||
"sbom-format",
|
||||
["spdx-json", "cyclonedx-json", "syft-json"],
|
||||
case_sensitive=True,
|
||||
)
|
||||
|
||||
# Validate max-retries
|
||||
if inputs.get("max-retries"):
|
||||
result = self.numeric_validator.validate_numeric_range(
|
||||
inputs["max-retries"], min_val=0, max_val=10, name="max-retries"
|
||||
valid &= self.validate_with(
|
||||
self.numeric_validator,
|
||||
"validate_numeric_range",
|
||||
inputs["max-retries"],
|
||||
min_val=0,
|
||||
max_val=10,
|
||||
name="max-retries",
|
||||
)
|
||||
for error in self.numeric_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.numeric_validator.clear_errors()
|
||||
valid &= result
|
||||
|
||||
return valid
|
||||
|
||||
@@ -209,19 +211,11 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(dockerfile):
|
||||
return True
|
||||
|
||||
# Use file validator for path validation
|
||||
result = self.file_validator.validate_file_path(dockerfile, "dockerfile")
|
||||
# Propagate errors
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.file_validator, "validate_file_path", dockerfile, "dockerfile"
|
||||
)
|
||||
|
||||
def validate_context(self, context: str) -> bool:
"""Validate build context path.
@@ -245,10 +239,9 @@ class CustomValidator(BaseValidator):
# We allow path traversal for context as Docker needs to access parent directories
# Only check for command injection patterns like ; | ` $()
dangerous_chars = [";", "|", "`", "$(", "&&", "||"]
for char in dangerous_chars:
if char in context:
self.add_error(f"Command injection detected in context: {context}")
return False
if any(char in context for char in dangerous_chars):
self.add_error(f"Command injection detected in context: {context}")
return False

return True
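
The per-character loop is collapsed into a single `any()` expression with identical behaviour. For illustration only (not part of the diff):

```python
# Illustrative only: the any() form is equivalent to the removed loop.
dangerous_chars = [";", "|", "`", "$(", "&&", "||"]

assert any(c in "./app; rm -rf /" for c in dangerous_chars)  # injection attempt caught
assert not any(c in "./app" for c in dangerous_chars)  # ordinary relative path passes
```
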
@@ -261,15 +254,9 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Use docker validator for architectures
|
||||
result = self.docker_validator.validate_architectures(platforms, "platforms")
|
||||
# Propagate errors
|
||||
for error in self.docker_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.docker_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.docker_validator, "validate_architectures", platforms, "platforms"
|
||||
)
|
||||
|
||||
def validate_build_args(self, build_args: str) -> bool:
|
||||
"""Validate build arguments.
|
||||
@@ -353,78 +340,3 @@ class CustomValidator(BaseValidator):
|
||||
|
||||
# Check for security issues
|
||||
return self.validate_security_patterns(cache_to, "cache-to")
|
||||
|
||||
def validate_cache_mode(self, cache_mode: str) -> bool:
|
||||
"""Validate cache mode.
|
||||
|
||||
Args:
|
||||
cache_mode: Cache mode value
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(cache_mode):
|
||||
return True
|
||||
|
||||
# Valid cache modes
|
||||
valid_modes = ["min", "max", "inline"]
|
||||
if cache_mode.lower() not in valid_modes:
|
||||
self.add_error(f"Invalid cache-mode: {cache_mode}. Must be one of: min, max, inline")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def validate_buildx_version(self, version: str) -> bool:
|
||||
"""Validate buildx version.
|
||||
|
||||
Args:
|
||||
version: Buildx version
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(version):
|
||||
return True
|
||||
|
||||
# Allow 'latest'
|
||||
if version == "latest":
|
||||
return True
|
||||
|
||||
# Check for security issues (semicolon injection etc)
|
||||
if not self.validate_security_patterns(version, "buildx-version"):
|
||||
return False
|
||||
|
||||
# Basic version format validation (e.g., 0.12.0, v0.12.0)
|
||||
import re
|
||||
|
||||
if not re.match(r"^v?\d+\.\d+(\.\d+)?$", version):
|
||||
self.add_error(f"Invalid buildx-version format: {version}")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def validate_sbom_format(self, sbom_format: str) -> bool:
|
||||
"""Validate SBOM format.
|
||||
|
||||
Args:
|
||||
sbom_format: SBOM format value
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(sbom_format):
|
||||
return True
|
||||
|
||||
# Valid SBOM formats
|
||||
valid_formats = ["spdx-json", "cyclonedx-json", "syft-json"]
|
||||
if sbom_format.lower() not in valid_formats:
|
||||
self.add_error(
|
||||
f"Invalid sbom-format: {sbom_format}. "
|
||||
"Must be one of: spdx-json, cyclonedx-json, syft-json"
|
||||
)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@@ -147,7 +147,7 @@ runs:
|
||||
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
uses: ivuorinen/actions/validate-inputs@8fb52522ab00fe73cf181ef299e56066f0b2c8d8
|
||||
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
|
||||
with:
|
||||
action-type: 'docker-build'
|
||||
image-name: ${{ inputs.image-name }}
|
||||
@@ -175,7 +175,7 @@ runs:
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
with:
|
||||
version: ${{ inputs.buildx-version }}
|
||||
platforms: ${{ inputs.architectures }}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
# Validation rules for docker-build action
|
||||
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
|
||||
# Schema version: 1.0
|
||||
# Coverage: 63% (17/27 inputs)
|
||||
# Coverage: 100% (27/27 inputs)
|
||||
#
|
||||
# This file defines validation rules for the docker-build GitHub Action.
|
||||
# Rules are automatically applied by validate-inputs action when this
|
||||
@@ -45,17 +45,27 @@ optional_inputs:
|
||||
conventions:
|
||||
architectures: docker_architectures
|
||||
auto-detect-platforms: docker_architectures
|
||||
build-args: key_value_list
|
||||
build-contexts: key_value_list
|
||||
buildkit-version: semantic_version
|
||||
buildx-version: semantic_version
|
||||
cache-mode: boolean
|
||||
cache-export: cache_config
|
||||
cache-from: cache_config
|
||||
cache-import: cache_config
|
||||
cache-mode: cache_mode
|
||||
context: file_path
|
||||
dockerfile: file_path
|
||||
dry-run: boolean
|
||||
image-name: docker_image_name
|
||||
max-retries: numeric_range_1_10
|
||||
network: network_mode
|
||||
parallel-builds: numeric_range_0_16
|
||||
platform-build-args: json_format
|
||||
platform-fallback: docker_architectures
|
||||
sbom-format: report_format
|
||||
push: boolean
|
||||
sbom-format: sbom_format
|
||||
scan-image: boolean
|
||||
secrets: key_value_list
|
||||
sign-image: boolean
|
||||
tag: docker_tag
|
||||
token: github_token
|
||||
@@ -65,12 +75,12 @@ overrides:
|
||||
sbom-format: sbom_format
|
||||
statistics:
|
||||
total_inputs: 27
|
||||
validated_inputs: 17
|
||||
validated_inputs: 27
|
||||
skipped_inputs: 0
|
||||
coverage_percentage: 63
|
||||
validation_coverage: 63
|
||||
coverage_percentage: 100
|
||||
validation_coverage: 100
|
||||
auto_detected: true
|
||||
manual_review_required: true
|
||||
manual_review_required: false
|
||||
quality_indicators:
|
||||
has_required_inputs: true
|
||||
has_token_validation: true
|
||||
|
||||
@@ -11,6 +11,7 @@ This validator handles Docker publish-specific validation including:
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
import re
|
||||
import sys
|
||||
|
||||
# Add validate-inputs directory to path to import validators
|
||||
@@ -58,12 +59,9 @@ class CustomValidator(BaseValidator):
|
||||
|
||||
# Validate platforms
|
||||
if inputs.get("platforms"):
|
||||
result = self.docker_validator.validate_architectures(inputs["platforms"], "platforms")
|
||||
for error in self.docker_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.docker_validator.clear_errors()
|
||||
valid &= result
|
||||
valid &= self.validate_with(
|
||||
self.docker_validator, "validate_architectures", inputs["platforms"], "platforms"
|
||||
)
|
||||
|
||||
# Validate boolean flags
|
||||
for bool_input in [
|
||||
@@ -74,18 +72,18 @@ class CustomValidator(BaseValidator):
|
||||
"verbose",
|
||||
]:
|
||||
if inputs.get(bool_input):
|
||||
result = self.boolean_validator.validate_optional_boolean(
|
||||
inputs[bool_input], bool_input
|
||||
valid &= self.validate_with(
|
||||
self.boolean_validator,
|
||||
"validate_optional_boolean",
|
||||
inputs[bool_input],
|
||||
bool_input,
|
||||
)
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
valid &= result
|
||||
|
||||
# Validate cache-mode
|
||||
if inputs.get("cache-mode"):
|
||||
valid &= self.validate_cache_mode(inputs["cache-mode"])
|
||||
valid &= self.validate_enum(
|
||||
inputs["cache-mode"], "cache-mode", ["min", "max", "inline"]
|
||||
)
|
||||
|
||||
# Validate buildx-version
|
||||
if inputs.get("buildx-version"):
|
||||
@@ -96,24 +94,18 @@ class CustomValidator(BaseValidator):
|
||||
valid &= self.validate_username(inputs["dockerhub-username"])
|
||||
|
||||
if inputs.get("dockerhub-password"):
|
||||
# Use token validator for password/token
|
||||
result = self.token_validator.validate_docker_token(
|
||||
inputs["dockerhub-password"], "dockerhub-password"
|
||||
valid &= self.validate_with(
|
||||
self.token_validator,
|
||||
"validate_docker_token",
|
||||
inputs["dockerhub-password"],
|
||||
"dockerhub-password",
|
||||
)
|
||||
for error in self.token_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.token_validator.clear_errors()
|
||||
valid &= result
|
||||
|
||||
# Validate github-token
|
||||
if inputs.get("github-token"):
|
||||
result = self.token_validator.validate_github_token(inputs["github-token"])
|
||||
for error in self.token_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.token_validator.clear_errors()
|
||||
valid &= result
|
||||
valid &= self.validate_with(
|
||||
self.token_validator, "validate_github_token", inputs["github-token"]
|
||||
)
|
||||
|
||||
return valid
|
||||
|
||||
@@ -156,40 +148,7 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(registry):
|
||||
return True
|
||||
|
||||
# Valid registry values according to action description
|
||||
valid_registries = ["dockerhub", "github", "both"]
|
||||
if registry.lower() not in valid_registries:
|
||||
self.add_error(
|
||||
f"Invalid registry: {registry}. Must be one of: dockerhub, github, or both"
|
||||
)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def validate_cache_mode(self, cache_mode: str) -> bool:
|
||||
"""Validate cache mode.
|
||||
|
||||
Args:
|
||||
cache_mode: Cache mode value
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(cache_mode):
|
||||
return True
|
||||
|
||||
# Valid cache modes
|
||||
valid_modes = ["min", "max", "inline"]
|
||||
if cache_mode.lower() not in valid_modes:
|
||||
self.add_error(f"Invalid cache-mode: {cache_mode}. Must be one of: min, max, inline")
|
||||
return False
|
||||
|
||||
return True
|
||||
return self.validate_enum(registry, "registry", ["dockerhub", "github", "both"])
|
||||
|
||||
def validate_buildx_version(self, version: str) -> bool:
|
||||
"""Validate buildx version.
|
||||
@@ -213,8 +172,6 @@ class CustomValidator(BaseValidator):
|
||||
return False
|
||||
|
||||
# Basic version format validation
|
||||
import re
|
||||
|
||||
if not re.match(r"^v?\d+\.\d+(\.\d+)?$", version):
|
||||
self.add_error(f"Invalid buildx-version format: {version}")
|
||||
return False
|
||||
@@ -244,8 +201,6 @@ class CustomValidator(BaseValidator):
|
||||
return False
|
||||
|
||||
# Docker Hub username rules: lowercase letters, digits, periods, hyphens, underscores
|
||||
import re
|
||||
|
||||
if not re.match(r"^[a-z0-9._-]+$", username.lower()):
|
||||
self.add_error(f"Invalid Docker Hub username format: {username}")
|
||||
return False
|
||||
|
||||
@@ -112,7 +112,7 @@ runs:
dockerhub|github|both)
;;
*)
echo "::error::Invalid registry value. Must be 'dockerhub', 'github', or 'both'"
printf '%s\n' "::error::Invalid registry value. Must be 'dockerhub', 'github', or 'both'"
exit 1
;;
esac
@@ -120,7 +120,7 @@ runs:
# Validate Docker Hub credentials if needed
if [ "$INPUT_REGISTRY" = "dockerhub" ] || [ "$INPUT_REGISTRY" = "both" ]; then
if [ -z "$INPUT_DOCKERHUB_USERNAME" ] || [ -z "$INPUT_DOCKERHUB_TOKEN" ]; then
echo "::error::Docker Hub username and token are required when publishing to Docker Hub"
printf '%s\n' "::error::Docker Hub username and token are required when publishing to Docker Hub"
exit 1
fi
fi
@@ -129,49 +129,80 @@ runs:
|
||||
if [ "$INPUT_REGISTRY" = "github" ] || [ "$INPUT_REGISTRY" = "both" ]; then
|
||||
token="${INPUT_TOKEN:-${GITHUB_TOKEN:-}}"
|
||||
if [ -z "$token" ]; then
|
||||
echo "::error::GitHub token is required when publishing to GitHub Packages"
|
||||
printf '%s\n' "::error::GitHub token is required when publishing to GitHub Packages"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Validate context input for security
|
||||
INPUT_CONTEXT="${INPUT_CONTEXT:-.}"
|
||||
|
||||
case "$INPUT_CONTEXT" in
|
||||
.|./*|*/*)
|
||||
# Relative paths are allowed
|
||||
# Check for path traversal attempts
|
||||
case "$INPUT_CONTEXT" in
|
||||
*/../*|../*|*/..)
|
||||
printf '%s\n' "::error::Context path contains path traversal: '$INPUT_CONTEXT'"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
/*)
|
||||
echo "::error::Context cannot be an absolute path: '$INPUT_CONTEXT'"
|
||||
echo "::error::Use relative paths (e.g., '.', './app') to prevent code injection"
|
||||
printf '%s\n' "::error::Context cannot be an absolute path: '$INPUT_CONTEXT'"
|
||||
printf '%s\n' "::error::Use relative paths (e.g., '.', './app')"
|
||||
exit 1
|
||||
;;
|
||||
*://*)
|
||||
echo "::warning::Context is a remote URL: '$INPUT_CONTEXT'"
|
||||
echo "::warning::Ensure this URL is from a trusted source to prevent code injection"
|
||||
git://*|git@*|https://*.git|https://github.com/*|https://gitlab.com/*)
|
||||
# Allow trusted git repository URLs
|
||||
printf '%s\n' "::notice::Using git repository URL for context"
|
||||
;;
|
||||
http://*|https://*)
|
||||
printf '%s\n' "::error::Context cannot be an arbitrary HTTP URL: '$INPUT_CONTEXT'"
|
||||
printf '%s\n' "::error::Only git repository URLs are allowed for remote contexts"
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
printf '%s\n' "::error::Invalid context format: '$INPUT_CONTEXT'"
|
||||
printf '%s\n' "::error::Must be a relative path or git repository URL"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Validate dockerfile input for security
|
||||
INPUT_DOCKERFILE="${INPUT_DOCKERFILE:-Dockerfile}"
|
||||
|
||||
case "$INPUT_DOCKERFILE" in
|
||||
Dockerfile|*/Dockerfile|*.dockerfile|*/*.dockerfile)
|
||||
# Common dockerfile patterns are allowed
|
||||
# Check for path traversal attempts
|
||||
case "$INPUT_DOCKERFILE" in
|
||||
*/../*|../*|*/..)
|
||||
printf '%s\n' "::error::Dockerfile path contains path traversal: '$INPUT_DOCKERFILE'"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
/*)
|
||||
echo "::error::Dockerfile path cannot be absolute: '$INPUT_DOCKERFILE'"
|
||||
echo "::error::Use relative paths (e.g., 'Dockerfile', './docker/Dockerfile')"
|
||||
printf '%s\n' "::error::Dockerfile path cannot be absolute: '$INPUT_DOCKERFILE'"
|
||||
printf '%s\n' "::error::Use relative paths (e.g., 'Dockerfile', './docker/Dockerfile')"
|
||||
exit 1
|
||||
;;
|
||||
*://*)
|
||||
echo "::error::Dockerfile path cannot be a URL: '$INPUT_DOCKERFILE'"
|
||||
printf '%s\n' "::error::Dockerfile path cannot be a URL: '$INPUT_DOCKERFILE'"
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
printf '%s\n' "::error::Invalid Dockerfile format: '$INPUT_DOCKERFILE'"
|
||||
printf '%s\n' "::error::Must be 'Dockerfile', '*/Dockerfile', '*.dockerfile', or '*/*.dockerfile'"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "Input validation completed successfully"
|
||||
printf '%s\n' "Input validation completed successfully"
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
|
||||
- name: Determine Image Names and Tags
|
||||
id: meta
|
||||
@@ -223,25 +254,25 @@ runs:
|
||||
# Output results
|
||||
printf 'image-name=%s\n' "$base_name" >> "$GITHUB_OUTPUT"
|
||||
{
|
||||
echo 'tags<<EOF'
|
||||
echo "$tags"
|
||||
echo 'EOF'
|
||||
printf '%s\n' 'tags<<EOF'
|
||||
printf '%s\n' "$tags"
|
||||
printf '%s\n' 'EOF'
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
echo "Image name: $base_name"
|
||||
echo "Tags:"
|
||||
echo "$tags"
|
||||
printf 'Image name: %s\n' "$base_name"
|
||||
printf '%s\n' "Tags:"
|
||||
printf '%s\n' "$tags"
|
||||
|
||||
- name: Login to Docker Hub
|
||||
if: inputs.registry == 'dockerhub' || inputs.registry == 'both'
|
||||
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
username: ${{ inputs.dockerhub-username }}
|
||||
password: ${{ inputs.dockerhub-token }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
if: inputs.registry == 'github' || inputs.registry == 'both'
|
||||
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@@ -249,7 +280,7 @@ runs:
|
||||
|
||||
- name: Build and Push Docker Image
|
||||
id: build
|
||||
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ inputs.context }}
|
||||
file: ${{ inputs.dockerfile }}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
# Validation rules for docker-publish action
|
||||
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
|
||||
# Schema version: 1.0
|
||||
# Coverage: 73% (8/11 inputs)
|
||||
# Coverage: 100% (11/11 inputs)
|
||||
#
|
||||
# This file defines validation rules for the docker-publish GitHub Action.
|
||||
# Rules are automatically applied by validate-inputs action when this
|
||||
@@ -27,25 +27,27 @@ optional_inputs:
|
||||
- tags
|
||||
- token
|
||||
conventions:
|
||||
build-args: key_value_list
|
||||
context: file_path
|
||||
dockerfile: file_path
|
||||
dockerhub-token: github_token
|
||||
dockerhub-username: username
|
||||
image-name: docker_image_name
|
||||
platforms: docker_architectures
|
||||
registry: registry
|
||||
push: boolean
|
||||
registry: registry_enum
|
||||
tags: docker_tag
|
||||
token: github_token
|
||||
overrides:
|
||||
platforms: null
|
||||
registry: registry_enum
|
||||
statistics:
|
||||
total_inputs: 11
|
||||
validated_inputs: 8
|
||||
skipped_inputs: 1
|
||||
coverage_percentage: 73
|
||||
validation_coverage: 73
|
||||
validated_inputs: 11
|
||||
skipped_inputs: 0
|
||||
coverage_percentage: 100
|
||||
validation_coverage: 100
|
||||
auto_detected: true
|
||||
manual_review_required: true
|
||||
manual_review_required: false
|
||||
quality_indicators:
|
||||
has_required_inputs: false
|
||||
has_token_validation: true
|
||||
|
||||
@@ -288,9 +288,9 @@ runs:
|
||||
echo "Detected package manager: $package_manager"
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
|
||||
with:
|
||||
node-version: '22'
|
||||
node-version: '24'
|
||||
|
||||
- name: Enable Corepack
|
||||
shell: sh
|
||||
@@ -325,7 +325,7 @@ runs:
|
||||
|
||||
- name: Cache Node Dependencies
|
||||
id: cache
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
path: node_modules
|
||||
key: ${{ runner.os }}-eslint-lint-${{ inputs.mode }}-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}
|
||||
@@ -457,7 +457,7 @@ runs:
|
||||
|
||||
- name: Upload SARIF Report
|
||||
if: inputs.mode == 'check' && inputs.report-format == 'sarif' && always()
|
||||
uses: github/codeql-action/upload-sarif@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
|
||||
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
with:
|
||||
sarif_file: ${{ inputs.working-directory }}/eslint-results.sarif
|
||||
|
||||
@@ -508,7 +508,7 @@ runs:
|
||||
|
||||
- name: Commit and Push Fixes
|
||||
if: inputs.mode == 'fix' && success()
|
||||
uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
|
||||
uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
|
||||
with:
|
||||
commit_message: 'style: autofix ESLint violations'
|
||||
commit_user_name: ${{ inputs.username }}
|
||||
|
||||
@@ -44,7 +44,8 @@ conventions:
|
||||
token: github_token
|
||||
username: username
|
||||
working-directory: file_path
|
||||
overrides: {}
|
||||
overrides:
|
||||
mode: mode_enum
|
||||
statistics:
|
||||
total_inputs: 14
|
||||
validated_inputs: 14
|
||||
|
||||
@@ -46,6 +46,9 @@ const CATEGORIES = {
|
||||
'compress-images': 'Repository',
|
||||
'codeql-analysis': 'Repository',
|
||||
|
||||
// Security
|
||||
'security-scan': 'Security',
|
||||
|
||||
// Validation
|
||||
'validate-inputs': 'Validation',
|
||||
};
|
||||
@@ -120,6 +123,7 @@ const CATEGORY_ICONS = {
|
||||
Build: '🏗️',
|
||||
Publishing: '🚀',
|
||||
Repository: '📦',
|
||||
Security: '🛡️',
|
||||
Validation: '✅',
|
||||
};
|
||||
|
||||
@@ -232,7 +236,7 @@ function generateCategoryTables(actions) {
|
||||
let output = '';
|
||||
|
||||
// Sort categories by priority
|
||||
const categoryOrder = ['Setup', 'Utilities', 'Linting', 'Testing', 'Build', 'Publishing', 'Repository', 'Validation'];
|
||||
const categoryOrder = ['Setup', 'Utilities', 'Linting', 'Testing', 'Build', 'Publishing', 'Repository', 'Security', 'Validation'];
|
||||
|
||||
for (const category of categoryOrder) {
|
||||
if (!categories[category]) continue;
|
||||
|
||||
@@ -159,13 +159,13 @@ runs:
|
||||
echo "Final detected Go version: $detected_version" >&2
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
|
||||
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
|
||||
with:
|
||||
go-version: ${{ steps.detect-go-version.outputs.detected-version }}
|
||||
cache: true
|
||||
|
||||
- name: Download Dependencies
|
||||
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3
|
||||
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3.0.4
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: ${{ inputs.max-retries }}
|
||||
@@ -253,7 +253,7 @@ runs:
|
||||
|
||||
- name: Upload Build Artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: go-build-artifacts
|
||||
path: |
|
||||
|
||||
@@ -37,105 +37,78 @@ class CustomValidator(BaseValidator):
|
||||
|
||||
# Validate working-directory if provided
|
||||
if inputs.get("working-directory"):
|
||||
result = self.file_validator.validate_file_path(
|
||||
inputs["working-directory"], "working-directory"
|
||||
valid &= self.validate_with(
|
||||
self.file_validator,
|
||||
"validate_file_path",
|
||||
inputs["working-directory"],
|
||||
"working-directory",
|
||||
)
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
# Validate golangci-lint-version if provided
|
||||
if inputs.get("golangci-lint-version"):
|
||||
value = inputs["golangci-lint-version"]
|
||||
# Accept 'latest' or version format
|
||||
if value != "latest" and not self.is_github_expression(value):
|
||||
result = self.version_validator.validate_semantic_version(
|
||||
value, "golangci-lint-version"
|
||||
valid &= self.validate_with(
|
||||
self.version_validator,
|
||||
"validate_semantic_version",
|
||||
value,
|
||||
"golangci-lint-version",
|
||||
)
|
||||
for error in self.version_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.version_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
# Validate go-version if provided
|
||||
if inputs.get("go-version"):
|
||||
value = inputs["go-version"]
|
||||
# Accept 'stable', 'oldstable' or version format
|
||||
if value not in ["stable", "oldstable"] and not self.is_github_expression(value):
|
||||
result = self.version_validator.validate_go_version(value, "go-version")
|
||||
for error in self.version_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.version_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.version_validator, "validate_go_version", value, "go-version"
|
||||
)
|
||||
|
||||
# Validate config-file if provided
|
||||
if inputs.get("config-file"):
|
||||
result = self.file_validator.validate_file_path(inputs["config-file"], "config-file")
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.file_validator, "validate_file_path", inputs["config-file"], "config-file"
|
||||
)
|
||||
|
||||
# Validate timeout if provided
if inputs.get("timeout"):
value = inputs["timeout"]
# Validate timeout format (e.g., 5m, 1h, 30s)
if not self.is_github_expression(value):
timeout_pattern = r"^\d+[smh]$"
if not re.match(timeout_pattern, value):
self.add_error(
f"Invalid timeout format: {value}. Expected format like '5m', '1h', '30s'"
)
valid = False
if not self.is_github_expression(value) and not re.match(r"^\d+[smh]$", value):
self.add_error(
f"Invalid timeout format: {value}. Expected format like '5m', '1h', '30s'"
)
valid = False
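
The two nested conditions are merged into a single guard; the accepted timeout format is unchanged. A small illustration of what the `^\d+[smh]$` pattern allows (example values only):

```python
import re

# Illustrative only: the timeout format enforced by the hunk above.
pattern = r"^\d+[smh]$"

for ok in ("30s", "5m", "1h"):
    assert re.match(pattern, ok)
for bad in ("5 m", "90", "2h30m"):  # spaces, missing unit, and compound units are rejected
    assert not re.match(pattern, bad)
```
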
# Validate boolean inputs
|
||||
for field in ["cache", "fail-on-error", "only-new-issues", "disable-all"]:
|
||||
if inputs.get(field):
|
||||
result = self.boolean_validator.validate_boolean(inputs[field], field)
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.boolean_validator, "validate_boolean", inputs[field], field
|
||||
)
|
||||
|
||||
# Validate report-format
|
||||
if inputs.get("report-format"):
|
||||
value = inputs["report-format"]
|
||||
valid_formats = ["json", "sarif", "github-actions", "colored-line-number", "tab"]
|
||||
if value not in valid_formats and not self.is_github_expression(value):
|
||||
self.add_error(
|
||||
f"Invalid report format: {value}. Must be one of: {', '.join(valid_formats)}"
|
||||
)
|
||||
valid = False
|
||||
valid &= self.validate_enum(
|
||||
inputs["report-format"],
|
||||
"report-format",
|
||||
["json", "sarif", "github-actions", "colored-line-number", "tab"],
|
||||
case_sensitive=True,
|
||||
)
|
||||
|
||||
# Validate max-retries
|
||||
if inputs.get("max-retries"):
|
||||
result = self.numeric_validator.validate_numeric_range(
|
||||
inputs["max-retries"], min_val=1, max_val=10, name="max-retries"
|
||||
valid &= self.validate_with(
|
||||
self.numeric_validator,
|
||||
"validate_numeric_range",
|
||||
inputs["max-retries"],
|
||||
min_val=1,
|
||||
max_val=10,
|
||||
name="max-retries",
|
||||
)
|
||||
for error in self.numeric_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.numeric_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
# Validate enable-linters and disable-linters
|
||||
for field in ["enable-linters", "disable-linters"]:
|
||||
if inputs.get(field):
|
||||
value = inputs[field]
|
||||
|
||||
# First check format - must be comma-separated without spaces
|
||||
if not self.is_github_expression(value):
|
||||
if " " in value:
|
||||
self.add_error(f"Invalid {field} format: spaces not allowed in linter list")
|
||||
@@ -145,15 +118,9 @@ class CustomValidator(BaseValidator):
|
||||
f"Invalid {field} format: must be comma-separated list of linters"
|
||||
)
|
||||
valid = False
|
||||
|
||||
# Then check for injection
|
||||
result = self.security_validator.validate_no_injection(value, field)
|
||||
for error in self.security_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.security_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.security_validator, "validate_no_injection", value, field
|
||||
)
|
||||
|
||||
return valid
|
||||
|
||||
|
||||
@@ -205,7 +205,7 @@ runs:
|
||||
validate_linter_list "$DISABLE_LINTERS" "disable-linters"
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
|
||||
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
|
||||
with:
|
||||
go-version: ${{ inputs.go-version }}
|
||||
cache: true
|
||||
@@ -218,7 +218,7 @@ runs:
|
||||
- name: Cache golangci-lint
|
||||
id: cache
|
||||
if: inputs.cache == 'true'
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
path: |
|
||||
~/.cache/golangci-lint
|
||||
@@ -414,7 +414,7 @@ runs:
|
||||
|
||||
- name: Upload Lint Results
|
||||
if: always() && inputs.report-format == 'sarif'
|
||||
uses: github/codeql-action/upload-sarif@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
|
||||
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
with:
|
||||
sarif_file: ${{ inputs.working-directory }}/reports/golangci-lint.sarif
|
||||
category: golangci-lint
|
||||
|
||||
@@ -36,15 +36,17 @@ conventions:
disable-linters: linter_list
enable-linters: linter_list
fail-on-error: boolean
go-version: semantic_version
go-version: go_version
golangci-lint-version: semantic_version
max-retries: numeric_range_1_10
only-new-issues: branch_name
only-new-issues: boolean
report-format: report_format
timeout: numeric_range_1_3600
timeout: timeout_with_unit
token: github_token
working-directory: file_path
overrides:
disable-linters: linter_list
enable-linters: linter_list
go-version: go_version
only-new-issues: boolean
timeout: timeout_with_unit

@@ -2,7 +2,7 @@
|
||||
# Validation rules for language-version-detect action
|
||||
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
|
||||
# Schema version: 1.0
|
||||
# Coverage: 67% (2/3 inputs)
|
||||
# Coverage: 100% (3/3 inputs)
|
||||
#
|
||||
# This file defines validation rules for the language-version-detect GitHub Action.
|
||||
# Rules are automatically applied by validate-inputs action when this
|
||||
@@ -21,16 +21,17 @@ optional_inputs:
|
||||
- token
|
||||
conventions:
|
||||
default-version: semantic_version
|
||||
language: language_enum
|
||||
token: github_token
|
||||
overrides: {}
|
||||
statistics:
|
||||
total_inputs: 3
|
||||
validated_inputs: 2
|
||||
validated_inputs: 3
|
||||
skipped_inputs: 0
|
||||
coverage_percentage: 67
|
||||
validation_coverage: 67
|
||||
coverage_percentage: 100
|
||||
validation_coverage: 100
|
||||
auto_detected: true
|
||||
manual_review_required: true
|
||||
manual_review_required: false
|
||||
quality_indicators:
|
||||
has_required_inputs: true
|
||||
has_token_validation: true
|
||||
|
||||
@@ -42,109 +42,40 @@ class CustomValidator(BaseValidator):
self.add_error("Input 'npm_token' is required")
valid = False
elif inputs["npm_token"]:
token = inputs["npm_token"]
# Check for NPM classic token format first
if token.startswith("npm_"):
# NPM classic token format: npm_ followed by 36+ alphanumeric characters
if not re.match(r"^npm_[a-zA-Z0-9]{36,}$", token):
self.add_error("Invalid NPM token format")
valid = False
# Also check for injection
result = self.security_validator.validate_no_injection(token, "npm_token")
for error in self.security_validator.errors:
if error not in self.errors:
self.add_error(error)
self.security_validator.clear_errors()
if not result:
valid = False
else:
# Otherwise validate as GitHub token
result = self.token_validator.validate_github_token(token, required=True)
for error in self.token_validator.errors:
if error not in self.errors:
self.add_error(error)
self.token_validator.clear_errors()
if not result:
valid = False
valid &= self._validate_npm_token(inputs["npm_token"])
# Validate registry-url
|
||||
if inputs.get("registry-url"):
|
||||
url = inputs["registry-url"]
|
||||
if not self.is_github_expression(url):
|
||||
# Must be http or https URL
|
||||
if not url.startswith(("http://", "https://")):
|
||||
self.add_error("Registry URL must use http or https protocol")
|
||||
valid = False
|
||||
else:
|
||||
# Validate URL format
|
||||
result = self.network_validator.validate_url(url, "registry-url")
|
||||
for error in self.network_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.network_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self._validate_registry_url(inputs["registry-url"])
|
||||
|
||||
# Validate scope
|
||||
if inputs.get("scope"):
|
||||
scope = inputs["scope"]
|
||||
if not self.is_github_expression(scope):
|
||||
# Scope must start with @ and contain only valid characters
|
||||
if not scope.startswith("@"):
|
||||
self.add_error("Scope must start with @ symbol")
|
||||
valid = False
|
||||
elif not re.match(r"^@[a-z0-9][a-z0-9\-_.]*$", scope):
|
||||
self.add_error(
|
||||
"Invalid scope format: must be @org-name with lowercase "
|
||||
"letters, numbers, hyphens, dots, and underscores"
|
||||
)
|
||||
valid = False
|
||||
|
||||
# Check for injection
|
||||
result = self.security_validator.validate_no_injection(scope, "scope")
|
||||
for error in self.security_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.security_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self._validate_scope(inputs["scope"])
|
||||
|
||||
# Validate access
|
||||
if inputs.get("access"):
|
||||
access = inputs["access"]
|
||||
if not self.is_github_expression(access):
|
||||
valid_access = ["public", "restricted", "private"]
|
||||
if access and access not in valid_access:
|
||||
self.add_error(
|
||||
f"Invalid access level: {access}. Must be one of: {', '.join(valid_access)}"
|
||||
)
|
||||
valid = False
|
||||
valid &= self.validate_enum(
|
||||
inputs["access"], "access", ["public", "restricted", "private"]
|
||||
)
|
||||
|
||||
# Validate boolean inputs (only always-auth and include-merged-tags are strict)
|
||||
for field in ["always-auth", "include-merged-tags"]:
|
||||
if inputs.get(field):
|
||||
result = self.boolean_validator.validate_boolean(inputs[field], field)
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.boolean_validator, "validate_boolean", inputs[field], field
|
||||
)
|
||||
|
||||
# provenance and dry-run accept any value (npm handles them)
|
||||
# No validation needed for these
|
||||
|
||||
# Validate package-version
|
||||
if inputs.get("package-version"):
|
||||
result = self.version_validator.validate_semantic_version(
|
||||
inputs["package-version"], "package-version"
|
||||
valid &= self.validate_with(
|
||||
self.version_validator,
|
||||
"validate_semantic_version",
|
||||
inputs["package-version"],
|
||||
"package-version",
|
||||
)
|
||||
for error in self.version_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.version_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
# Validate tag
|
||||
if inputs.get("tag"):
|
||||
@@ -161,16 +92,57 @@ class CustomValidator(BaseValidator):
|
||||
# Validate working-directory and ignore-scripts as file paths
|
||||
for field in ["working-directory", "ignore-scripts"]:
|
||||
if inputs.get(field):
|
||||
result = self.file_validator.validate_path(inputs[field], field)
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.file_validator, "validate_path", inputs[field], field
|
||||
)
|
||||
|
||||
return valid
|
||||
|
||||
def _validate_npm_token(self, token: str) -> bool:
|
||||
"""Validate NPM token format."""
|
||||
# Check for NPM classic token format first
|
||||
if token.startswith("npm_"):
|
||||
# NPM classic token format: npm_ followed by 36+ alphanumeric characters
|
||||
if not re.match(r"^npm_[a-zA-Z0-9]{36,}$", token):
|
||||
self.add_error("Invalid NPM token format")
|
||||
return False
|
||||
# Also check for injection
|
||||
return self.validate_with(
|
||||
self.security_validator, "validate_no_injection", token, "npm_token"
|
||||
)
|
||||
# Otherwise validate as GitHub token
|
||||
return self.validate_with(
|
||||
self.token_validator, "validate_github_token", token, required=True
|
||||
)
|
||||
|
||||
def _validate_registry_url(self, url: str) -> bool:
|
||||
"""Validate registry URL format."""
|
||||
if self.is_github_expression(url):
|
||||
return True
|
||||
# Must be http or https URL
|
||||
if not url.startswith(("http://", "https://")):
|
||||
self.add_error("Registry URL must use http or https protocol")
|
||||
return False
|
||||
# Validate URL format
|
||||
return self.validate_with(self.network_validator, "validate_url", url, "registry-url")
|
||||
|
||||
def _validate_scope(self, scope: str) -> bool:
|
||||
"""Validate NPM scope format."""
|
||||
if self.is_github_expression(scope):
|
||||
return True
|
||||
# Scope must start with @ and contain only valid characters
|
||||
if not scope.startswith("@"):
|
||||
self.add_error("Scope must start with @ symbol")
|
||||
return False
|
||||
if not re.match(r"^@[a-z0-9][a-z0-9\-_.]*$", scope):
|
||||
self.add_error(
|
||||
"Invalid scope format: must be @org-name with lowercase "
|
||||
"letters, numbers, hyphens, dots, and underscores"
|
||||
)
|
||||
return False
|
||||
# Check for injection
|
||||
return self.validate_with(self.security_validator, "validate_no_injection", scope, "scope")
|
||||
|
||||
def get_required_inputs(self) -> list[str]:
|
||||
"""Get list of required inputs."""
|
||||
return ["npm_token"]
|
||||
|
||||
@@ -121,9 +121,9 @@ runs:
|
||||
echo "Detected package manager: $package_manager"
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
|
||||
with:
|
||||
node-version: '22'
|
||||
node-version: '24'
|
||||
|
||||
- name: Enable Corepack
|
||||
shell: sh
|
||||
@@ -158,7 +158,7 @@ runs:
|
||||
|
||||
- name: Cache Node Dependencies
|
||||
id: cache
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
path: node_modules
|
||||
key: ${{ runner.os }}-npm-publish-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}
|
||||
|
||||
@@ -22,7 +22,7 @@ optional_inputs:
|
||||
- token
|
||||
conventions:
|
||||
npm_token: github_token
|
||||
package-version: semantic_version
|
||||
package-version: strict_semantic_version
|
||||
registry-url: url
|
||||
scope: scope
|
||||
token: github_token
|
||||
|
||||
package-lock.json (generated file, 50 lines changed)
@@ -13,7 +13,7 @@
|
||||
"js-yaml": "^4.1.0",
|
||||
"markdown-table": "^3.0.3",
|
||||
"markdown-table-formatter": "^1.6.0",
|
||||
"markdownlint-cli2": "^0.19.0",
|
||||
"markdownlint-cli2": "^0.20.0",
|
||||
"prettier": "^3.3.3",
|
||||
"yaml-lint": "^1.7.0"
|
||||
},
|
||||
@@ -661,6 +661,19 @@
|
||||
"node": "6.* || 8.* || >= 10.*"
|
||||
}
|
||||
},
|
||||
"node_modules/get-east-asian-width": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz",
|
||||
"integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/glob": {
|
||||
"version": "10.5.0",
|
||||
"resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz",
|
||||
@@ -1051,9 +1064,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/markdownlint": {
|
||||
"version": "0.39.0",
|
||||
"resolved": "https://registry.npmjs.org/markdownlint/-/markdownlint-0.39.0.tgz",
|
||||
"integrity": "sha512-Xt/oY7bAiHwukL1iru2np5LIkhwD19Y7frlsiDILK62v3jucXCD6JXlZlwMG12HZOR+roHIVuJZrfCkOhp6k3g==",
|
||||
"version": "0.40.0",
|
||||
"resolved": "https://registry.npmjs.org/markdownlint/-/markdownlint-0.40.0.tgz",
|
||||
"integrity": "sha512-UKybllYNheWac61Ia7T6fzuQNDZimFIpCg2w6hHjgV1Qu0w1TV0LlSgryUGzM0bkKQCBhy2FDhEELB73Kb0kAg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@@ -1064,7 +1077,8 @@
|
||||
"micromark-extension-gfm-footnote": "2.1.0",
|
||||
"micromark-extension-gfm-table": "2.1.1",
|
||||
"micromark-extension-math": "3.1.0",
|
||||
"micromark-util-types": "2.0.2"
|
||||
"micromark-util-types": "2.0.2",
|
||||
"string-width": "8.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
@@ -1074,17 +1088,18 @@
|
||||
}
|
||||
},
|
||||
"node_modules/markdownlint-cli2": {
|
||||
"version": "0.19.0",
|
||||
"resolved": "https://registry.npmjs.org/markdownlint-cli2/-/markdownlint-cli2-0.19.0.tgz",
|
||||
"integrity": "sha512-0+g7Fi/Y3qfvwfhJr77CpC/dEEoc4k7SvumlnL1tb68O+7fjKtIUG7aKzNUQIMXTVi8x63jcfXg4swz/ZYKyCw==",
|
||||
"version": "0.20.0",
|
||||
"resolved": "https://registry.npmjs.org/markdownlint-cli2/-/markdownlint-cli2-0.20.0.tgz",
|
||||
"integrity": "sha512-esPk+8Qvx/f0bzI7YelUeZp+jCtFOk3KjZ7s9iBQZ6HlymSXoTtWGiIRZP05/9Oy2ehIoIjenVwndxGtxOIJYQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"globby": "15.0.0",
|
||||
"js-yaml": "4.1.1",
|
||||
"jsonc-parser": "3.3.1",
|
||||
"markdown-it": "14.1.0",
|
||||
"markdownlint": "0.39.0",
|
||||
"markdownlint": "0.40.0",
|
||||
"markdownlint-cli2-formatter-default": "0.0.6",
|
||||
"micromatch": "4.0.8"
|
||||
},
|
||||
@@ -1111,6 +1126,23 @@
|
||||
"markdownlint-cli2": ">=0.0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/markdownlint/node_modules/string-width": {
|
||||
"version": "8.1.0",
|
||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-8.1.0.tgz",
|
||||
"integrity": "sha512-Kxl3KJGb/gxkaUMOjRsQ8IrXiGW75O4E3RPjFIINOVH8AMl2SQ/yWdTzWwF3FevIX9LcMAjJW+GRwAlAbTSXdg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"get-east-asian-width": "^1.3.0",
|
||||
"strip-ansi": "^7.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/mdurl": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz",
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
"js-yaml": "^4.1.0",
|
||||
"markdown-table": "^3.0.3",
|
||||
"markdown-table-formatter": "^1.6.0",
|
||||
"markdownlint-cli2": "^0.19.0",
|
||||
"markdownlint-cli2": "^0.20.0",
|
||||
"prettier": "^3.3.3",
|
||||
"yaml-lint": "^1.7.0"
|
||||
},
|
||||
|
||||
@@ -33,59 +33,31 @@ class CustomValidator(BaseValidator):
|
||||
# Validate token (optional)
|
||||
if inputs.get("token"):
|
||||
token = inputs["token"]
|
||||
result = self.token_validator.validate_github_token(token)
|
||||
for error in self.token_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.token_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
valid &= self.validate_with(self.token_validator, "validate_github_token", token)
|
||||
# Also check for variable expansion
|
||||
if not self.is_github_expression(token):
|
||||
result = self.security_validator.validate_no_injection(token, "token")
|
||||
for error in self.security_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.security_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.security_validator, "validate_no_injection", token, "token"
|
||||
)
|
||||
|
||||
# Validate email (optional, empty means use default)
|
||||
if "email" in inputs and inputs["email"] and inputs["email"] != "":
|
||||
if inputs.get("email"):
|
||||
email = inputs["email"]
|
||||
result = self.network_validator.validate_email(email, "email")
|
||||
for error in self.network_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.network_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
valid &= self.validate_with(self.network_validator, "validate_email", email, "email")
|
||||
# Also check for shell metacharacters (but allow @ and .)
|
||||
if not self.is_github_expression(email):
|
||||
# Only check for dangerous shell metacharacters, not @ or .
|
||||
dangerous_chars = [";", "&", "|", "`", "$", "(", ")", "<", ">", "\n", "\r"]
|
||||
for char in dangerous_chars:
|
||||
if char in email:
|
||||
self.add_error(f"email: Contains dangerous character '{char}'")
|
||||
valid = False
|
||||
break
|
||||
if any(char in email for char in dangerous_chars):
|
||||
self.add_error("email: Contains dangerous shell metacharacter")
|
||||
valid = False
|
||||
|
||||
# Validate username (optional)
|
||||
if inputs.get("username"):
|
||||
username = inputs["username"]
|
||||
if not self.is_github_expression(username):
|
||||
# Check for injection
|
||||
result = self.security_validator.validate_no_injection(username, "username")
|
||||
for error in self.security_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.security_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
# Check username length (GitHub usernames are max 39 characters)
|
||||
valid &= self.validate_with(
|
||||
self.security_validator, "validate_no_injection", username, "username"
|
||||
)
|
||||
if len(username) > 39:
|
||||
self.add_error("Username is too long (max 39 characters)")
|
||||
valid = False
|
||||
|
||||
@@ -319,7 +319,7 @@ runs:
|
||||
|
||||
- name: Setup PHP
|
||||
id: setup-php
|
||||
uses: shivammathur/setup-php@bf6b4fbd49ca58e4608c9c89fba0b8d90bd2a39f # 2.35.5
|
||||
uses: shivammathur/setup-php@44454db4f0199b8b9685a5d763dc37cbf79108e1 # 2.36.0
|
||||
with:
|
||||
php-version: ${{ steps.detect-php-version.outputs.detected-version }}
|
||||
extensions: ${{ inputs.extensions }}
|
||||
@@ -356,7 +356,7 @@ runs:
|
||||
|
||||
- name: Cache Composer packages
|
||||
id: composer-cache
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
path: |
|
||||
vendor
|
||||
@@ -376,7 +376,7 @@ runs:
|
||||
composer clear-cache
|
||||
|
||||
- name: Install Composer Dependencies
|
||||
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3
|
||||
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3.0.4
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: ${{ inputs.max-retries }}
|
||||
@@ -454,7 +454,7 @@ runs:
|
||||
phpunit_output=$(composer test 2>&1) || phpunit_exit_code=$?
|
||||
elif [ -f "vendor/bin/phpunit" ]; then
|
||||
echo "Running PHPUnit directly..."
|
||||
phpunit_output=$(vendor/bin/phpunit --verbose 2>&1) || phpunit_exit_code=$?
|
||||
phpunit_output=$(vendor/bin/phpunit 2>&1) || phpunit_exit_code=$?
|
||||
else
|
||||
echo "::error::PHPUnit not found. Ensure Composer dependencies are installed."
|
||||
exit 1
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
# Validation rules for php-tests action
|
||||
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
|
||||
# Schema version: 1.0
|
||||
# Coverage: 78% (7/9 inputs)
|
||||
# Coverage: 89% (8/9 inputs)
|
||||
#
|
||||
# This file defines validation rules for the php-tests GitHub Action.
|
||||
# Rules are automatically applied by validate-inputs action when this
|
||||
@@ -27,7 +27,8 @@ optional_inputs:
|
||||
conventions:
|
||||
coverage: coverage_driver
|
||||
email: email
|
||||
framework: boolean
|
||||
extensions: php_extensions
|
||||
framework: framework_mode
|
||||
max-retries: numeric_range_1_10
|
||||
php-version: semantic_version
|
||||
token: github_token
|
||||
@@ -35,12 +36,12 @@ conventions:
|
||||
overrides: {}
|
||||
statistics:
|
||||
total_inputs: 9
|
||||
validated_inputs: 7
|
||||
validated_inputs: 8
|
||||
skipped_inputs: 0
|
||||
coverage_percentage: 78
|
||||
validation_coverage: 78
|
||||
coverage_percentage: 89
|
||||
validation_coverage: 89
|
||||
auto_detected: true
|
||||
manual_review_required: true
|
||||
manual_review_required: false
|
||||
quality_indicators:
|
||||
has_required_inputs: false
|
||||
has_token_validation: true
|
||||
|
||||
@@ -40,7 +40,7 @@ runs:
|
||||
steps:
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
uses: ivuorinen/actions/validate-inputs@8fb52522ab00fe73cf181ef299e56066f0b2c8d8
|
||||
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
|
||||
with:
|
||||
action-type: pr-lint
|
||||
token: ${{ inputs.token }}
|
||||
@@ -54,13 +54,9 @@ runs:
|
||||
uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
|
||||
with:
|
||||
token: ${{ inputs.token || github.token }}
|
||||
ref: ${{ github.event.pull_request.head.ref || github.head_ref || github.ref_name }}
|
||||
ref: ${{ github.event.pull_request.head.sha || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
# If you use VALIDATE_ALL_CODEBASE = true, you can remove this line to
|
||||
# improve performance
|
||||
fetch-depth: 0
|
||||
|
||||
# ╭──────────────────────────────────────────────────────────╮
|
||||
# │ Install packages for linting │
|
||||
# ╰──────────────────────────────────────────────────────────╯
|
||||
@@ -74,6 +70,29 @@ runs:
|
||||
|
||||
if [ -f package.json ]; then
|
||||
printf '%s\n' "found=true" >> "$GITHUB_OUTPUT"
|
||||
|
||||
# Check if packageManager field is set (for corepack)
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
has_package_manager=$(jq -r '.packageManager // empty' package.json 2>/dev/null || printf '')
|
||||
if [ -n "$has_package_manager" ]; then
|
||||
printf '%s\n' "has-package-manager=true" >> "$GITHUB_OUTPUT"
|
||||
printf 'Found packageManager field: %s\n' "$has_package_manager"
|
||||
else
|
||||
printf '%s\n' "has-package-manager=false" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
else
|
||||
# Fallback: check with grep if jq not available
|
||||
# Use robust pattern to verify non-empty value
|
||||
if grep -q '"packageManager"[[:space:]]*:[[:space:]]*"[^"]\+"' package.json 2>/dev/null; then
|
||||
printf '%s\n' "has-package-manager=true" >> "$GITHUB_OUTPUT"
|
||||
printf '%s\n' "Found packageManager field in package.json"
|
||||
else
|
||||
printf '%s\n' "has-package-manager=false" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
# Explicitly set has-package-manager to false when package.json doesn't exist
|
||||
printf '%s\n' "has-package-manager=false" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Detect Package Manager
|
||||
@@ -95,34 +114,39 @@ runs:
|
||||
fi
|
||||
|
||||
printf 'package-manager=%s\n' "$package_manager" >> "$GITHUB_OUTPUT"
|
||||
echo "Detected package manager: $package_manager"
|
||||
printf 'Detected package manager: %s\n' "$package_manager"
|
||||
|
||||
- name: Setup Node.js
|
||||
if: steps.detect-node.outputs.found == 'true'
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
|
||||
with:
|
||||
node-version: '22'
|
||||
node-version: '24'
|
||||
|
||||
- name: Enable Corepack
|
||||
if: steps.detect-node.outputs.found == 'true'
|
||||
if: steps.detect-node.outputs.found == 'true' && steps.detect-node.outputs.has-package-manager == 'true'
|
||||
shell: sh
|
||||
run: |
|
||||
set -eu
|
||||
corepack enable
|
||||
printf '%s\n' "Corepack enabled - package manager will be installed automatically from package.json"
|
||||
|
||||
- name: Install Package Manager
|
||||
if: steps.detect-node.outputs.found == 'true'
|
||||
- name: Install Package Manager (Fallback)
|
||||
if: steps.detect-node.outputs.found == 'true' && steps.detect-node.outputs.has-package-manager == 'false'
|
||||
shell: sh
|
||||
env:
|
||||
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
printf 'No packageManager field found, using detected package manager: %s\n' "$PACKAGE_MANAGER"
|
||||
|
||||
case "$PACKAGE_MANAGER" in
|
||||
pnpm)
|
||||
corepack enable
|
||||
corepack prepare pnpm@latest --activate
|
||||
;;
|
||||
yarn)
|
||||
corepack enable
|
||||
corepack prepare yarn@stable --activate
|
||||
;;
|
||||
bun|npm)
|
||||
@@ -139,7 +163,7 @@ runs:
|
||||
- name: Cache Node Dependencies
|
||||
if: steps.detect-node.outputs.found == 'true'
|
||||
id: node-cache
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
path: node_modules
|
||||
key: ${{ runner.os }}-pr-lint-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}
|
||||
@@ -154,16 +178,21 @@ runs:
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
echo "Installing dependencies using $PACKAGE_MANAGER..."
|
||||
printf 'Installing dependencies using %s...\n' "$PACKAGE_MANAGER"
|
||||
|
||||
case "$PACKAGE_MANAGER" in
|
||||
"pnpm")
|
||||
pnpm install --frozen-lockfile
|
||||
;;
|
||||
"yarn")
|
||||
if [ -f ".yarnrc.yml" ]; then
|
||||
# Detect Yarn version by checking actual version output
|
||||
# Yarn 2+ (Berry) uses --immutable, Yarn 1.x (Classic) uses --frozen-lockfile
|
||||
yarn_version=$(yarn --version 2>/dev/null || printf '1.0.0')
|
||||
if printf '%s' "$yarn_version" | grep -q '^[2-9]'; then
|
||||
# Yarn 2+ (Berry) - use --immutable
|
||||
yarn install --immutable
|
||||
else
|
||||
# Yarn 1.x (Classic) - use --frozen-lockfile
|
||||
yarn install --frozen-lockfile
|
||||
fi
|
||||
;;
|
||||
@@ -175,7 +204,7 @@ runs:
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "✅ Dependencies installed successfully"
|
||||
printf '✅ Dependencies installed successfully\n'
|
||||
|
||||
# PHP tests if composer.json exists
|
||||
- name: Detect composer.json
|
||||
@@ -219,12 +248,12 @@ runs:
|
||||
|
||||
# Parse .tool-versions file
|
||||
if [ -f .tool-versions ]; then
|
||||
echo "Checking .tool-versions for php..." >&2
|
||||
printf 'Checking .tool-versions for php...\n' >&2
|
||||
version=$(awk '/^php[[:space:]]/ {gsub(/#.*/, ""); print $2; exit}' .tool-versions 2>/dev/null || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found PHP version in .tool-versions: $version" >&2
|
||||
printf 'Found PHP version in .tool-versions: %s\n' "$version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
@@ -232,13 +261,13 @@ runs:
|
||||
|
||||
# Parse Dockerfile
|
||||
if [ -z "$detected_version" ] && [ -f Dockerfile ]; then
|
||||
echo "Checking Dockerfile for php..." >&2
|
||||
printf 'Checking Dockerfile for php...\n' >&2
|
||||
version=$(grep -iF "FROM" Dockerfile | grep -F "php:" | head -1 | \
|
||||
sed -n -E "s/.*php:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found PHP version in Dockerfile: $version" >&2
|
||||
printf 'Found PHP version in Dockerfile: %s\n' "$version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
@@ -246,29 +275,29 @@ runs:
|
||||
|
||||
# Parse devcontainer.json
|
||||
if [ -z "$detected_version" ] && [ -f .devcontainer/devcontainer.json ]; then
|
||||
echo "Checking devcontainer.json for php..." >&2
|
||||
printf 'Checking devcontainer.json for php...\n' >&2
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
version=$(jq -r '.image // empty' .devcontainer/devcontainer.json 2>/dev/null | sed -n -E "s/.*php:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found PHP version in devcontainer: $version" >&2
|
||||
printf 'Found PHP version in devcontainer: %s\n' "$version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "jq not found; skipping devcontainer.json parsing" >&2
|
||||
printf 'jq not found; skipping devcontainer.json parsing\n' >&2
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse .php-version file
|
||||
if [ -z "$detected_version" ] && [ -f .php-version ]; then
|
||||
echo "Checking .php-version..." >&2
|
||||
printf 'Checking .php-version...\n' >&2
|
||||
version=$(tr -d '\r' < .php-version | head -1)
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found PHP version in .php-version: $version" >&2
|
||||
printf 'Found PHP version in .php-version: %s\n' "$version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
@@ -276,7 +305,7 @@ runs:
|
||||
|
||||
# Parse composer.json
|
||||
if [ -z "$detected_version" ] && [ -f composer.json ]; then
|
||||
echo "Checking composer.json..." >&2
|
||||
printf 'Checking composer.json...\n' >&2
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
version=$(jq -r '.require.php // empty' composer.json 2>/dev/null | sed -n 's/[^0-9]*\([0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?\).*/\1/p')
|
||||
if [ -z "$version" ]; then
|
||||
@@ -285,34 +314,34 @@ runs:
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found PHP version in composer.json: $version" >&2
|
||||
printf 'Found PHP version in composer.json: %s\n' "$version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "jq not found; skipping composer.json parsing" >&2
|
||||
printf 'jq not found; skipping composer.json parsing\n' >&2
|
||||
fi
|
||||
fi
|
||||
|
||||
# Use default version if nothing detected
|
||||
if [ -z "$detected_version" ]; then
|
||||
detected_version="$DEFAULT_VERSION"
|
||||
echo "Using default PHP version: $detected_version" >&2
|
||||
printf 'Using default PHP version: %s\n' "$detected_version" >&2
|
||||
fi
|
||||
|
||||
# Set output
|
||||
printf 'detected-version=%s\n' "$detected_version" >> "$GITHUB_OUTPUT"
|
||||
echo "Final detected PHP version: $detected_version" >&2
|
||||
printf 'Final detected PHP version: %s\n' "$detected_version" >&2
|
||||
|
||||
- name: Setup PHP
|
||||
if: steps.detect-php.outputs.found == 'true'
|
||||
uses: shivammathur/setup-php@bf6b4fbd49ca58e4608c9c89fba0b8d90bd2a39f # 2.35.5
|
||||
uses: shivammathur/setup-php@44454db4f0199b8b9685a5d763dc37cbf79108e1 # 2.36.0
|
||||
with:
|
||||
php-version: ${{ steps.php-version.outputs.detected-version }}
|
||||
tools: composer
|
||||
coverage: none
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ inputs.token }}
|
||||
GITHUB_TOKEN: ${{ inputs.token || github.token }}
|
||||
|
||||
- name: Setup problem matchers for PHP
|
||||
if: steps.detect-php.outputs.found == 'true'
|
||||
@@ -322,7 +351,8 @@ runs:
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
echo "::add-matcher::$RUNNER_TOOL_CACHE/php.json"
|
||||
matcher_path=$(printf '%s' "$RUNNER_TOOL_CACHE/php.json" | tr -d '\n\r')
|
||||
printf '%s\n' "::add-matcher::$matcher_path"
|
||||
|
||||
- name: Install PHP dependencies
|
||||
if: steps.detect-php.outputs.found == 'true'
|
||||
@@ -348,7 +378,7 @@ runs:
|
||||
id: python-version
|
||||
shell: sh
|
||||
env:
|
||||
DEFAULT_VERSION: '3.11'
|
||||
DEFAULT_VERSION: '3.14'
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
@@ -374,12 +404,12 @@ runs:
|
||||
|
||||
# Parse .tool-versions file
|
||||
if [ -f .tool-versions ]; then
|
||||
echo "Checking .tool-versions for python..." >&2
|
||||
printf 'Checking .tool-versions for python...\n' >&2
|
||||
version=$(awk '/^python[[:space:]]/ {gsub(/#.*/, ""); print $2; exit}' .tool-versions 2>/dev/null || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found Python version in .tool-versions: $version" >&2
|
||||
printf 'Found Python version in .tool-versions: %s\n' "$version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
@@ -387,13 +417,13 @@ runs:
|
||||
|
||||
# Parse Dockerfile
|
||||
if [ -z "$detected_version" ] && [ -f Dockerfile ]; then
|
||||
echo "Checking Dockerfile for python..." >&2
|
||||
printf 'Checking Dockerfile for python...\n' >&2
|
||||
version=$(grep -iF "FROM" Dockerfile | grep -F "python:" | head -1 | \
|
||||
sed -n -E "s/.*python:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found Python version in Dockerfile: $version" >&2
|
||||
printf 'Found Python version in Dockerfile: %s\n' "$version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
@@ -401,29 +431,29 @@ runs:
|
||||
|
||||
# Parse devcontainer.json
|
||||
if [ -z "$detected_version" ] && [ -f .devcontainer/devcontainer.json ]; then
|
||||
echo "Checking devcontainer.json for python..." >&2
|
||||
printf 'Checking devcontainer.json for python...\n' >&2
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
version=$(jq -r '.image // empty' .devcontainer/devcontainer.json 2>/dev/null | sed -n -E "s/.*python:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found Python version in devcontainer: $version" >&2
|
||||
printf 'Found Python version in devcontainer: %s\n' "$version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "jq not found; skipping devcontainer.json parsing" >&2
|
||||
printf 'jq not found; skipping devcontainer.json parsing\n' >&2
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse .python-version file
|
||||
if [ -z "$detected_version" ] && [ -f .python-version ]; then
|
||||
echo "Checking .python-version..." >&2
|
||||
printf 'Checking .python-version...\n' >&2
|
||||
version=$(tr -d '\r' < .python-version | head -1)
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found Python version in .python-version: $version" >&2
|
||||
printf 'Found Python version in .python-version: %s\n' "$version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
@@ -431,13 +461,13 @@ runs:
|
||||
|
||||
# Parse pyproject.toml
|
||||
if [ -z "$detected_version" ] && [ -f pyproject.toml ]; then
|
||||
echo "Checking pyproject.toml..." >&2
|
||||
if grep -q '^\\[project\\]' pyproject.toml; then
|
||||
version=$(grep -A 20 '^\\[project\\]' pyproject.toml | grep -E '^\\s*requires-python[[:space:]]*=' | sed -n 's/[^0-9]*\([0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?\).*/\1/p' | head -1)
|
||||
printf 'Checking pyproject.toml...\n' >&2
|
||||
if grep -q '^\[project\]' pyproject.toml; then
|
||||
version=$(grep -A 20 '^\[project\]' pyproject.toml | grep -E '^\s*requires-python[[:space:]]*=' | sed -n 's/[^0-9]*\([0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?\).*/\1/p' | head -1)
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found Python version in pyproject.toml: $version" >&2
|
||||
printf 'Found Python version in pyproject.toml: %s\n' "$version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
@@ -447,16 +477,16 @@ runs:
|
||||
# Use default version if nothing detected
|
||||
if [ -z "$detected_version" ]; then
|
||||
detected_version="$DEFAULT_VERSION"
|
||||
echo "Using default Python version: $detected_version" >&2
|
||||
printf 'Using default Python version: %s\n' "$detected_version" >&2
|
||||
fi
|
||||
|
||||
# Set output
|
||||
printf 'detected-version=%s\n' "$detected_version" >> "$GITHUB_OUTPUT"
|
||||
echo "Final detected Python version: $detected_version" >&2
|
||||
printf 'Final detected Python version: %s\n' "$detected_version" >&2
|
||||
|
||||
- name: Setup Python
|
||||
if: steps.detect-python.outputs.found == 'true'
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ steps.python-version.outputs.detected-version }}
|
||||
cache: 'pip'
|
||||
@@ -485,7 +515,7 @@ runs:
|
||||
id: go-version
|
||||
shell: sh
|
||||
env:
|
||||
DEFAULT_VERSION: '1.24'
|
||||
DEFAULT_VERSION: '1.25'
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
@@ -511,12 +541,12 @@ runs:
|
||||
|
||||
# Parse .tool-versions file
|
||||
if [ -f .tool-versions ]; then
|
||||
echo "Checking .tool-versions for golang..." >&2
|
||||
printf 'Checking .tool-versions for golang...\n' >&2
|
||||
version=$(awk '/^golang[[:space:]]/ {gsub(/#.*/, ""); print $2; exit}' .tool-versions 2>/dev/null || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found Go version in .tool-versions: $version" >&2
|
||||
printf 'Found Go version in .tool-versions: %s\n' "$version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
@@ -524,13 +554,13 @@ runs:
|
||||
|
||||
# Parse Dockerfile
|
||||
if [ -z "$detected_version" ] && [ -f Dockerfile ]; then
|
||||
echo "Checking Dockerfile for golang..." >&2
|
||||
printf 'Checking Dockerfile for golang...\n' >&2
|
||||
version=$(grep -iF "FROM" Dockerfile | grep -F "golang:" | head -1 | \
|
||||
sed -n -E "s/.*golang:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found Go version in Dockerfile: $version" >&2
|
||||
printf 'Found Go version in Dockerfile: %s\n' "$version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
@@ -538,29 +568,29 @@ runs:
|
||||
|
||||
# Parse devcontainer.json
|
||||
if [ -z "$detected_version" ] && [ -f .devcontainer/devcontainer.json ]; then
|
||||
echo "Checking devcontainer.json for golang..." >&2
|
||||
printf 'Checking devcontainer.json for golang...\n' >&2
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
version=$(jq -r '.image // empty' .devcontainer/devcontainer.json 2>/dev/null | sed -n -E "s/.*golang:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found Go version in devcontainer: $version" >&2
|
||||
printf 'Found Go version in devcontainer: %s\n' "$version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "jq not found; skipping devcontainer.json parsing" >&2
|
||||
printf 'jq not found; skipping devcontainer.json parsing\n' >&2
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse .go-version file
|
||||
if [ -z "$detected_version" ] && [ -f .go-version ]; then
|
||||
echo "Checking .go-version..." >&2
|
||||
printf 'Checking .go-version...\n' >&2
|
||||
version=$(tr -d '\r' < .go-version | head -1)
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found Go version in .go-version: $version" >&2
|
||||
printf 'Found Go version in .go-version: %s\n' "$version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
@@ -568,12 +598,12 @@ runs:
|
||||
|
||||
# Parse go.mod
|
||||
if [ -z "$detected_version" ] && [ -f go.mod ]; then
|
||||
echo "Checking go.mod..." >&2
|
||||
printf 'Checking go.mod...\n' >&2
|
||||
version=$(grep -E '^go[[:space:]]+[0-9]' go.mod | awk '{print $2}' | head -1 || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found Go version in go.mod: $version" >&2
|
||||
printf 'Found Go version in go.mod: %s\n' "$version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
@@ -582,16 +612,16 @@ runs:
|
||||
# Use default version if nothing detected
|
||||
if [ -z "$detected_version" ]; then
|
||||
detected_version="$DEFAULT_VERSION"
|
||||
echo "Using default Go version: $detected_version" >&2
|
||||
printf 'Using default Go version: %s\n' "$detected_version" >&2
|
||||
fi
|
||||
|
||||
# Set output
|
||||
printf 'detected-version=%s\n' "$detected_version" >> "$GITHUB_OUTPUT"
|
||||
echo "Final detected Go version: $detected_version" >&2
|
||||
printf 'Final detected Go version: %s\n' "$detected_version" >&2
|
||||
|
||||
- name: Setup Go
|
||||
if: steps.detect-go.outputs.found == 'true'
|
||||
uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
|
||||
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
|
||||
with:
|
||||
go-version: ${{ steps.go-version.outputs.detected-version }}
|
||||
cache: true
|
||||
@@ -602,7 +632,7 @@ runs:
|
||||
- name: MegaLinter
|
||||
# You can override the MegaLinter flavor used for faster performance
|
||||
# More info at https://megalinter.io/latest/flavors/
|
||||
uses: oxsecurity/megalinter/flavors/cupcake@62c799d895af9bcbca5eacfebca29d527f125a57 # v9.1.0
|
||||
uses: oxsecurity/megalinter/flavors/cupcake@55a59b24a441e0e1943080d4a512d827710d4a9d # v9.2.0
|
||||
id: ml
|
||||
|
||||
# All available variables are described in documentation
|
||||
@@ -620,11 +650,7 @@ runs:
|
||||
# github.event_name == 'push' &&
|
||||
# contains(fromJSON('["refs/heads/main", "refs/heads/master"]'), github.ref)
|
||||
# }}
|
||||
VALIDATE_ALL_CODEBASE: >-
|
||||
${{
|
||||
github.event_name == 'push' &&
|
||||
contains(fromJSON('["refs/heads/main", "refs/heads/master"]'), github.ref)
|
||||
}}
|
||||
VALIDATE_ALL_CODEBASE: false
|
||||
|
||||
GITHUB_TOKEN: ${{ inputs.token || github.token }}
|
||||
|
||||
@@ -632,7 +658,7 @@ runs:
|
||||
#
|
||||
# When active, APPLY_FIXES must also be defined as environment variable
|
||||
# (in .github/workflows/mega-linter.yml or other CI tool)
|
||||
APPLY_FIXES: all
|
||||
APPLY_FIXES: none
|
||||
|
||||
# Decide which event triggers application of fixes in a commit or a PR
|
||||
# (pull_request, push, all)
|
||||
@@ -648,124 +674,13 @@ runs:
|
||||
# Uncomment to disable copy-paste and spell checks
|
||||
DISABLE: COPYPASTE,SPELL
|
||||
|
||||
# Export env vars to make them available for subsequent expressions
|
||||
- name: Export Apply Fixes Variables
|
||||
shell: sh
|
||||
run: |
|
||||
echo "APPLY_FIXES_EVENT=pull_request" >> "$GITHUB_ENV"
|
||||
echo "APPLY_FIXES_MODE=commit" >> "$GITHUB_ENV"
|
||||
|
||||
# Upload MegaLinter artifacts
|
||||
- name: Archive production artifacts
|
||||
if: success() || failure()
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: MegaLinter reports
|
||||
include-hidden-files: 'true'
|
||||
path: |
|
||||
megalinter-reports
|
||||
mega-linter.log
|
||||
|
||||
# Set APPLY_FIXES_IF var for use in future steps
|
||||
- name: Set APPLY_FIXES_IF var
|
||||
shell: sh
|
||||
env:
|
||||
APPLY_FIXES_CONDITION: >-
|
||||
${{
|
||||
steps.ml.outputs.has_updated_sources == 1 &&
|
||||
(env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) &&
|
||||
(github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository)
|
||||
}}
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
# Sanitize by removing newlines to prevent env var injection
|
||||
sanitized_condition="$(echo "$APPLY_FIXES_CONDITION" | tr -d '\n\r')"
|
||||
printf 'APPLY_FIXES_IF=%s\n' "$sanitized_condition" >> "${GITHUB_ENV}"
|
||||
|
||||
# Set APPLY_FIXES_IF_* vars for use in future steps
|
||||
- name: Set APPLY_FIXES_IF_* vars
|
||||
shell: sh
|
||||
env:
|
||||
APPLY_FIXES_IF_PR_CONDITION: ${{ env.APPLY_FIXES_IF == 'true' && env.APPLY_FIXES_MODE == 'pull_request' }}
|
||||
APPLY_FIXES_IF_COMMIT_CONDITION: ${{ env.APPLY_FIXES_IF == 'true' && env.APPLY_FIXES_MODE == 'commit' && (!contains(fromJSON('["refs/heads/main", "refs/heads/master"]'), github.ref)) }}
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
# Sanitize by removing newlines to prevent env var injection
|
||||
sanitized_pr="$(echo "$APPLY_FIXES_IF_PR_CONDITION" | tr -d '\n\r')"
|
||||
sanitized_commit="$(echo "$APPLY_FIXES_IF_COMMIT_CONDITION" | tr -d '\n\r')"
|
||||
|
||||
printf 'APPLY_FIXES_IF_PR=%s\n' "$sanitized_pr" >> "${GITHUB_ENV}"
|
||||
printf 'APPLY_FIXES_IF_COMMIT=%s\n' "$sanitized_commit" >> "${GITHUB_ENV}"
|
||||
|
||||
# Create pull request if applicable
|
||||
# (for now, works only on PRs from the same repository, not from forks)
|
||||
- name: Create Pull Request with applied fixes
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
id: cpr
|
||||
if: env.APPLY_FIXES_IF_PR == 'true'
|
||||
with:
|
||||
token: ${{ inputs.token || github.token }}
|
||||
commit-message: 'style: apply linter fixes'
|
||||
title: 'style: apply linter fixes'
|
||||
labels: bot
|
||||
|
||||
- name: Create PR output
|
||||
if: env.APPLY_FIXES_IF_PR == 'true'
|
||||
shell: sh
|
||||
env:
|
||||
PR_NUMBER: ${{ steps.cpr.outputs.pull-request-number }}
|
||||
PR_URL: ${{ steps.cpr.outputs.pull-request-url }}
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
echo "PR Number - $PR_NUMBER"
|
||||
echo "PR URL - $PR_URL"
|
||||
|
||||
# Push new commit if applicable
|
||||
# (for now, works only on PRs from the same repository, not from forks)
|
||||
- name: Prepare commit
|
||||
if: env.APPLY_FIXES_IF_COMMIT == 'true'
|
||||
shell: sh
|
||||
env:
|
||||
BRANCH_REF: >-
|
||||
${{
|
||||
github.event.pull_request.head.ref ||
|
||||
github.head_ref ||
|
||||
github.ref_name
|
||||
}}
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
# Fix .git directory ownership after MegaLinter container execution
|
||||
sudo chown -Rc "$UID" .git/
|
||||
|
||||
# Ensure we're on the correct branch (not in detached HEAD state)
|
||||
# This is necessary because MegaLinter may leave the repo in a detached HEAD state
|
||||
current_branch=$(git rev-parse --abbrev-ref HEAD)
|
||||
if [ "$current_branch" = "HEAD" ]; then
|
||||
echo "Repository is in detached HEAD state, checking out $BRANCH_REF"
|
||||
# Validate branch reference to prevent command injection
|
||||
if ! git check-ref-format --branch "$BRANCH_REF"; then
|
||||
echo "::error::Invalid branch reference format: $BRANCH_REF"
|
||||
exit 1
|
||||
fi
|
||||
git checkout "$BRANCH_REF"
|
||||
else
|
||||
echo "Repository is on branch: $current_branch"
|
||||
fi
|
||||
|
||||
- name: Commit and push applied linter fixes
|
||||
uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
|
||||
if: env.APPLY_FIXES_IF_COMMIT == 'true'
|
||||
with:
|
||||
branch: >-
|
||||
${{
|
||||
github.event.pull_request.head.ref ||
|
||||
github.head_ref ||
|
||||
github.ref
|
||||
}}
|
||||
commit_message: 'style: apply linter fixes'
|
||||
commit_user_name: ${{ inputs.username }}
|
||||
commit_user_email: ${{ inputs.email }}
|
||||
|
||||
@@ -34,74 +34,45 @@ class CustomValidator(BaseValidator):
|
||||
|
||||
# Validate pre-commit-config if provided
|
||||
if "pre-commit-config" in inputs:
|
||||
result = self.file_validator.validate_file_path(
|
||||
inputs["pre-commit-config"], "pre-commit-config"
|
||||
valid &= self.validate_with(
|
||||
self.file_validator,
|
||||
"validate_file_path",
|
||||
inputs["pre-commit-config"],
|
||||
"pre-commit-config",
|
||||
)
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
# Validate base-branch if provided (just check for injection)
|
||||
if inputs.get("base-branch"):
|
||||
# Check for dangerous characters that could cause shell injection
|
||||
result = self.security_validator.validate_no_injection(
|
||||
inputs["base-branch"], "base-branch"
|
||||
valid &= self.validate_with(
|
||||
self.security_validator,
|
||||
"validate_no_injection",
|
||||
inputs["base-branch"],
|
||||
"base-branch",
|
||||
)
|
||||
for error in self.security_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.security_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
# Validate token if provided
|
||||
if inputs.get("token"):
|
||||
result = self.token_validator.validate_github_token(inputs["token"])
|
||||
for error in self.token_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.token_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.token_validator, "validate_github_token", inputs["token"]
|
||||
)
|
||||
|
||||
# Validate commit_user if provided (allow spaces for Git usernames)
|
||||
# Check both underscore and hyphen versions since inputs can have either
|
||||
commit_user_key = (
|
||||
"commit_user"
|
||||
if "commit_user" in inputs
|
||||
else "commit-user"
|
||||
if "commit-user" in inputs
|
||||
else None
|
||||
)
|
||||
commit_user_key = self.get_key_variant(inputs, "commit_user", "commit-user")
|
||||
if commit_user_key and inputs[commit_user_key]:
|
||||
# Check for dangerous injection patterns
|
||||
value = inputs[commit_user_key]
|
||||
if any(char in value for char in [";", "&", "|", "`", "$", "(", ")", "\n", "\r"]):
|
||||
if any(c in value for c in [";", "&", "|", "`", "$", "(", ")", "\n", "\r"]):
|
||||
self.add_error(f"{commit_user_key}: Contains potentially dangerous characters")
|
||||
valid = False
|
||||
|
||||
# Validate commit_email if provided
|
||||
# Check both underscore and hyphen versions
|
||||
commit_email_key = (
|
||||
"commit_email"
|
||||
if "commit_email" in inputs
|
||||
else "commit-email"
|
||||
if "commit-email" in inputs
|
||||
else None
|
||||
)
|
||||
commit_email_key = self.get_key_variant(inputs, "commit_email", "commit-email")
|
||||
if commit_email_key and inputs[commit_email_key]:
|
||||
result = self.network_validator.validate_email(
|
||||
inputs[commit_email_key], commit_email_key
|
||||
valid &= self.validate_with(
|
||||
self.network_validator,
|
||||
"validate_email",
|
||||
inputs[commit_email_key],
|
||||
commit_email_key,
|
||||
)
|
||||
for error in self.network_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.network_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
return valid
|
||||
|
||||
|
||||
@@ -49,7 +49,7 @@ runs:
|
||||
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
uses: ivuorinen/actions/validate-inputs@8fb52522ab00fe73cf181ef299e56066f0b2c8d8
|
||||
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
|
||||
with:
|
||||
action-type: 'pre-commit'
|
||||
token: ${{ inputs.token }}
|
||||
@@ -83,7 +83,7 @@ runs:
|
||||
- name: Push pre-commit fixes
|
||||
id: push-fixes
|
||||
if: always() # Push changes even when pre-commit fails
|
||||
uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
|
||||
uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
|
||||
with:
|
||||
commit_message: 'style(pre-commit): autofix'
|
||||
commit_user_name: ${{ inputs.commit_user }}
|
||||
|
||||
@@ -274,9 +274,9 @@ runs:
|
||||
echo "Detected package manager: $package_manager"
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
|
||||
with:
|
||||
node-version: '22'
|
||||
node-version: '24'
|
||||
|
||||
- name: Enable Corepack
|
||||
shell: sh
|
||||
@@ -311,7 +311,7 @@ runs:
|
||||
|
||||
- name: Cache Node Dependencies
|
||||
id: cache
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
path: node_modules
|
||||
key: ${{ runner.os }}-prettier-lint-${{ inputs.mode }}-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}
|
||||
@@ -468,7 +468,7 @@ runs:
|
||||
|
||||
- name: Commit and Push Fixes
|
||||
if: inputs.mode == 'fix' && success()
|
||||
uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
|
||||
uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
|
||||
with:
|
||||
commit_message: 'style: autofix Prettier formatting'
|
||||
commit_user_name: ${{ inputs.username }}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
# Validation rules for prettier-lint action
|
||||
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
|
||||
# Schema version: 1.0
|
||||
# Coverage: 86% (12/14 inputs)
|
||||
# Coverage: 100% (14/14 inputs)
|
||||
#
|
||||
# This file defines validation rules for the prettier-lint GitHub Action.
|
||||
# Rules are automatically applied by validate-inputs action when this
|
||||
@@ -34,21 +34,24 @@ conventions:
|
||||
config-file: file_path
|
||||
email: email
|
||||
fail-on-error: boolean
|
||||
file-pattern: path_list
|
||||
ignore-file: file_path
|
||||
max-retries: numeric_range_1_10
|
||||
mode: mode_enum
|
||||
plugins: linter_list
|
||||
prettier-version: semantic_version
|
||||
report-format: report_format
|
||||
token: github_token
|
||||
username: username
|
||||
working-directory: file_path
|
||||
overrides: {}
|
||||
overrides:
|
||||
mode: mode_enum
|
||||
statistics:
|
||||
total_inputs: 14
|
||||
validated_inputs: 12
|
||||
validated_inputs: 14
|
||||
skipped_inputs: 0
|
||||
coverage_percentage: 86
|
||||
validation_coverage: 86
|
||||
coverage_percentage: 100
|
||||
validation_coverage: 100
|
||||
auto_detected: true
|
||||
manual_review_required: false
|
||||
quality_indicators:
|
||||
|
||||
@@ -31,68 +31,42 @@ class CustomValidator(BaseValidator):
|
||||
valid = True
|
||||
|
||||
# Validate python-version if provided
|
||||
if "python-version" in inputs or "python_version" in inputs:
|
||||
key = "python-version" if "python-version" in inputs else "python_version"
|
||||
value = inputs[key]
|
||||
|
||||
# Empty string should fail validation
|
||||
if value == "":
|
||||
version_key = self.get_key_variant(inputs, "python-version", "python_version")
|
||||
if version_key:
|
||||
value = inputs[version_key]
|
||||
if not value:
|
||||
self.add_error("Python version cannot be empty")
|
||||
valid = False
|
||||
elif value:
|
||||
result = self.version_validator.validate_python_version(value, key)
|
||||
|
||||
# Propagate errors from the version validator
|
||||
for error in self.version_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
|
||||
self.version_validator.clear_errors()
|
||||
|
||||
if not result:
|
||||
valid = False
|
||||
else:
|
||||
valid &= self.validate_with(
|
||||
self.version_validator, "validate_python_version", value, version_key
|
||||
)
|
||||
|
||||
# Validate username
|
||||
if "username" in inputs:
|
||||
if inputs.get("username"):
|
||||
username = inputs["username"]
|
||||
if username:
|
||||
# Check username length (GitHub usernames are max 39 characters)
|
||||
if len(username) > 39:
|
||||
self.add_error("Username is too long (max 39 characters)")
|
||||
valid = False
|
||||
# Check for command injection patterns
|
||||
if ";" in username or "`" in username or "$" in username:
|
||||
self.add_error("Username contains potentially dangerous characters")
|
||||
valid = False
|
||||
if len(username) > 39:
|
||||
self.add_error("Username is too long (max 39 characters)")
|
||||
valid = False
|
||||
if ";" in username or "`" in username or "$" in username:
|
||||
self.add_error("Username contains potentially dangerous characters")
|
||||
valid = False
|
||||
|
||||
# Validate email
|
||||
if "email" in inputs:
|
||||
email = inputs["email"]
|
||||
if email:
|
||||
result = self.network_validator.validate_email(email, "email")
|
||||
for error in self.network_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.network_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
if inputs.get("email"):
|
||||
valid &= self.validate_with(
|
||||
self.network_validator, "validate_email", inputs["email"], "email"
|
||||
)
|
||||
|
||||
# Validate token
|
||||
if "token" in inputs:
|
||||
if inputs.get("token"):
|
||||
token = inputs["token"]
|
||||
if token:
|
||||
# Check for variable expansion (but allow GitHub Actions expressions)
|
||||
if "${" in token and not token.startswith("${{ ") and not token.endswith(" }}"):
|
||||
self.add_error("Token contains potentially dangerous variable expansion")
|
||||
valid = False
|
||||
else:
|
||||
result = self.token_validator.validate_github_token(token)
|
||||
for error in self.token_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.token_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
# Check for variable expansion (but allow GitHub Actions expressions)
|
||||
if "${" in token and not token.startswith("${{ ") and not token.endswith(" }}"):
|
||||
self.add_error("Token contains potentially dangerous variable expansion")
|
||||
valid = False
|
||||
else:
|
||||
valid &= self.validate_with(self.token_validator, "validate_github_token", token)
|
||||
|
||||
return valid
|
||||
|
||||
|
||||
@@ -64,7 +64,7 @@ runs:
|
||||
steps:
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
uses: ivuorinen/actions/validate-inputs@8fb52522ab00fe73cf181ef299e56066f0b2c8d8
|
||||
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
|
||||
with:
|
||||
action-type: 'python-lint-fix'
|
||||
token: ${{ inputs.token }}
|
||||
@@ -224,7 +224,7 @@ runs:
|
||||
|
||||
- name: Setup Python (pip)
|
||||
if: steps.package-manager.outputs.package-manager == 'pip'
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ steps.python-version.outputs.detected-version }}
|
||||
cache: 'pip'
|
||||
@@ -237,7 +237,7 @@ runs:
|
||||
|
||||
- name: Setup Python (pipenv)
|
||||
if: steps.package-manager.outputs.package-manager == 'pipenv'
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ steps.python-version.outputs.detected-version }}
|
||||
cache: 'pipenv'
|
||||
@@ -247,7 +247,7 @@ runs:
|
||||
|
||||
- name: Setup Python (poetry)
|
||||
if: steps.package-manager.outputs.package-manager == 'poetry'
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ steps.python-version.outputs.detected-version }}
|
||||
cache: 'poetry'
|
||||
@@ -361,7 +361,7 @@ runs:
|
||||
|
||||
- name: Commit Fixes
|
||||
if: ${{ fromJSON(steps.fix.outputs.fixed_count) > 0 }}
|
||||
uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
|
||||
uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
|
||||
with:
|
||||
commit_message: 'style: apply python lint fixes'
|
||||
commit_user_name: ${{ inputs.username }}
|
||||
@@ -370,7 +370,7 @@ runs:
|
||||
|
||||
- name: Upload SARIF Report
|
||||
if: steps.check-files.outputs.result == 'found'
|
||||
uses: github/codeql-action/upload-sarif@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
|
||||
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
with:
|
||||
sarif_file: ${{ inputs.working-directory }}/reports/flake8.sarif
|
||||
category: 'python-lint'
|
||||
|
||||
82
security-scan/README.md
Normal file
@@ -0,0 +1,82 @@
|
||||
# ivuorinen/actions/security-scan
|
||||
|
||||
## Security Scan
|
||||
|
||||
### Description
|
||||
|
||||
Comprehensive security scanning for GitHub Actions including actionlint,
|
||||
Gitleaks (optional), and Trivy vulnerability scanning. Requires
|
||||
'security-events: write' and 'contents: read' permissions in the workflow.
|
||||
|
||||
### Inputs
|
||||
|
||||
| name | description | required | default |
|----------------------|--------------------------------------------------------------|----------|----------------------|
| `gitleaks-license` | <p>Gitleaks license key (required for Gitleaks scanning)</p> | `false` | `""` |
| `gitleaks-config` | <p>Path to Gitleaks config file</p> | `false` | `.gitleaks.toml` |
| `trivy-severity` | <p>Severity levels to scan for (comma-separated)</p> | `false` | `CRITICAL,HIGH` |
| `trivy-scanners` | <p>Types of scanners to run (comma-separated)</p> | `false` | `vuln,config,secret` |
| `trivy-timeout` | <p>Timeout for Trivy scan</p> | `false` | `10m` |
| `actionlint-enabled` | <p>Enable actionlint scanning</p> | `false` | `true` |
| `token` | <p>GitHub token for authentication</p> | `false` | `""` |
|
||||
|
||||
### Outputs
|
||||
|
||||
| name | description |
|------------------------|-----------------------------------------------------|
| `has_trivy_results` | <p>Whether Trivy scan produced valid results</p> |
| `has_gitleaks_results` | <p>Whether Gitleaks scan produced valid results</p> |
| `total_issues` | <p>Total number of security issues found</p> |
| `critical_issues` | <p>Number of critical security issues found</p> |
|
||||
|
||||
### Runs
|
||||
|
||||
This action is a `composite` action.
|
||||
|
||||
### Usage
|
||||
|
||||
```yaml
|
||||
- uses: ivuorinen/actions/security-scan@main
|
||||
with:
|
||||
gitleaks-license:
|
||||
# Gitleaks license key (required for Gitleaks scanning)
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
|
||||
gitleaks-config:
|
||||
# Path to Gitleaks config file
|
||||
#
|
||||
# Required: false
|
||||
# Default: .gitleaks.toml
|
||||
|
||||
trivy-severity:
|
||||
# Severity levels to scan for (comma-separated)
|
||||
#
|
||||
# Required: false
|
||||
# Default: CRITICAL,HIGH
|
||||
|
||||
trivy-scanners:
|
||||
# Types of scanners to run (comma-separated)
|
||||
#
|
||||
# Required: false
|
||||
# Default: vuln,config,secret
|
||||
|
||||
trivy-timeout:
|
||||
# Timeout for Trivy scan
|
||||
#
|
||||
# Required: false
|
||||
# Default: 10m
|
||||
|
||||
actionlint-enabled:
|
||||
# Enable actionlint scanning
|
||||
#
|
||||
# Required: false
|
||||
# Default: true
|
||||
|
||||
token:
|
||||
# GitHub token for authentication
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
```
|
||||
282
security-scan/action.yml
Normal file
@@ -0,0 +1,282 @@
|
||||
# yaml-language-server: $schema=https://json.schemastore.org/github-action.json
|
||||
#
|
||||
# REQUIRED PERMISSIONS (set these in your workflow file):
|
||||
# permissions:
|
||||
# security-events: write # Required for SARIF uploads
|
||||
# contents: read # Required for repository access
|
||||
#
|
||||
---
|
||||
name: Security Scan
|
||||
description: |
|
||||
Comprehensive security scanning for GitHub Actions including actionlint,
|
||||
Gitleaks (optional), and Trivy vulnerability scanning. Requires
|
||||
'security-events: write' and 'contents: read' permissions in the workflow.
|
||||
author: Ismo Vuorinen
|
||||
branding:
|
||||
icon: shield
|
||||
color: red
|
||||
|
||||
inputs:
|
||||
gitleaks-license:
|
||||
description: 'Gitleaks license key (required for Gitleaks scanning)'
|
||||
required: false
|
||||
default: ''
|
||||
gitleaks-config:
|
||||
description: 'Path to Gitleaks config file'
|
||||
required: false
|
||||
default: '.gitleaks.toml'
|
||||
trivy-severity:
|
||||
description: 'Severity levels to scan for (comma-separated)'
|
||||
required: false
|
||||
default: 'CRITICAL,HIGH'
|
||||
trivy-scanners:
|
||||
description: 'Types of scanners to run (comma-separated)'
|
||||
required: false
|
||||
default: 'vuln,config,secret'
|
||||
trivy-timeout:
|
||||
description: 'Timeout for Trivy scan'
|
||||
required: false
|
||||
default: '10m'
|
||||
actionlint-enabled:
|
||||
description: 'Enable actionlint scanning'
|
||||
required: false
|
||||
default: 'true'
|
||||
token:
|
||||
description: 'GitHub token for authentication'
|
||||
required: false
|
||||
default: ''
|
||||
|
||||
outputs:
|
||||
has_trivy_results:
|
||||
description: 'Whether Trivy scan produced valid results'
|
||||
value: ${{ steps.verify-sarif.outputs.has_trivy }}
|
||||
has_gitleaks_results:
|
||||
description: 'Whether Gitleaks scan produced valid results'
|
||||
value: ${{ steps.verify-sarif.outputs.has_gitleaks }}
|
||||
total_issues:
|
||||
description: 'Total number of security issues found'
|
||||
value: ${{ steps.analyze.outputs.total_issues }}
|
||||
critical_issues:
|
||||
description: 'Number of critical security issues found'
|
||||
value: ${{ steps.analyze.outputs.critical_issues }}
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
|
||||
with:
|
||||
action-type: security-scan
|
||||
gitleaks-license: ${{ inputs.gitleaks-license }}
|
||||
gitleaks-config: ${{ inputs.gitleaks-config }}
|
||||
trivy-severity: ${{ inputs.trivy-severity }}
|
||||
trivy-scanners: ${{ inputs.trivy-scanners }}
|
||||
trivy-timeout: ${{ inputs.trivy-timeout }}
|
||||
actionlint-enabled: ${{ inputs.actionlint-enabled }}
|
||||
token: ${{ inputs.token }}
|
||||
|
||||
- name: Check Required Configurations
|
||||
id: check-configs
|
||||
shell: sh
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
# Initialize all flags as false
|
||||
{
|
||||
printf '%s\n' "run_gitleaks=false"
|
||||
printf '%s\n' "run_trivy=true"
|
||||
printf '%s\n' "run_actionlint=${{ inputs.actionlint-enabled }}"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
# Check Gitleaks configuration and license
|
||||
if [ -f "${{ inputs.gitleaks-config }}" ] && [ -n "${{ inputs.gitleaks-license }}" ]; then
|
||||
printf 'Gitleaks config and license found\n'
|
||||
printf '%s\n' "run_gitleaks=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
printf '::warning::Gitleaks config or license missing - skipping Gitleaks scan\n'
|
||||
fi
|
||||
|
||||
- name: Run actionlint
|
||||
if: steps.check-configs.outputs.run_actionlint == 'true'
|
||||
uses: raven-actions/actionlint@963d4779ef039e217e5d0e6fd73ce9ab7764e493 # v2.1.0
|
||||
with:
|
||||
cache: true
|
||||
fail-on-error: true
|
||||
shellcheck: false
|
||||
|
||||
- name: Run Gitleaks
|
||||
if: steps.check-configs.outputs.run_gitleaks == 'true'
|
||||
uses: gitleaks/gitleaks-action@ff98106e4c7b2bc287b24eaf42907196329070c7 # v2.3.9
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ inputs.token || github.token }}
|
||||
GITLEAKS_LICENSE: ${{ inputs.gitleaks-license }}
|
||||
with:
|
||||
config-path: ${{ inputs.gitleaks-config }}
|
||||
report-format: sarif
|
||||
report-path: gitleaks-report.sarif
|
||||
|
||||
- name: Run Trivy vulnerability scanner
|
||||
if: steps.check-configs.outputs.run_trivy == 'true'
|
||||
uses: aquasecurity/trivy-action@a11da62073708815958ea6d84f5650c78a3ef85b # master
|
||||
with:
|
||||
scan-type: 'fs'
|
||||
scanners: ${{ inputs.trivy-scanners }}
|
||||
format: 'sarif'
|
||||
output: 'trivy-results.sarif'
|
||||
severity: ${{ inputs.trivy-severity }}
|
||||
timeout: ${{ inputs.trivy-timeout }}
|
||||
|
||||
- name: Verify SARIF files
|
||||
id: verify-sarif
|
||||
shell: sh
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
# Initialize outputs
|
||||
{
|
||||
printf '%s\n' "has_trivy=false"
|
||||
printf '%s\n' "has_gitleaks=false"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
# Check Trivy results
|
||||
if [ -f "trivy-results.sarif" ]; then
|
||||
if jq -e . <"trivy-results.sarif" >/dev/null 2>&1; then
|
||||
printf '%s\n' "has_trivy=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
printf '::warning::Trivy SARIF file exists but is not valid JSON\n'
|
||||
fi
|
||||
fi
|
||||
|
||||
# Check Gitleaks results if it ran
|
||||
if [ "${{ steps.check-configs.outputs.run_gitleaks }}" = "true" ]; then
|
||||
if [ -f "gitleaks-report.sarif" ]; then
|
||||
if jq -e . <"gitleaks-report.sarif" >/dev/null 2>&1; then
|
||||
printf '%s\n' "has_gitleaks=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
printf '::warning::Gitleaks SARIF file exists but is not valid JSON\n'
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
- name: Upload Trivy results
|
||||
if: steps.verify-sarif.outputs.has_trivy == 'true'
|
||||
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
with:
|
||||
sarif_file: 'trivy-results.sarif'
|
||||
category: 'trivy'
|
||||
|
||||
- name: Upload Gitleaks results
|
||||
if: steps.verify-sarif.outputs.has_gitleaks == 'true'
|
||||
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
with:
|
||||
sarif_file: 'gitleaks-report.sarif'
|
||||
category: 'gitleaks'
|
||||
|
||||
- name: Archive security reports
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: security-reports-${{ github.run_id }}
|
||||
path: |
|
||||
${{ steps.verify-sarif.outputs.has_trivy == 'true' && 'trivy-results.sarif' || '' }}
|
||||
${{ steps.verify-sarif.outputs.has_gitleaks == 'true' && 'gitleaks-report.sarif' || '' }}
|
||||
retention-days: 30
|
||||
|
||||
- name: Analyze Results
|
||||
id: analyze
|
||||
if: always()
|
||||
shell: node {0}
|
||||
run: |
|
||||
const fs = require('fs');
|
||||
|
||||
try {
|
||||
let totalIssues = 0;
|
||||
let criticalIssues = 0;
|
||||
|
||||
const analyzeSarif = (file, tool) => {
|
||||
if (!fs.existsSync(file)) {
|
||||
console.log(`No results file found for ${tool}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const sarif = JSON.parse(fs.readFileSync(file, 'utf8'));
|
||||
return sarif.runs.reduce((acc, run) => {
|
||||
if (!run.results) return acc;
|
||||
|
||||
const critical = run.results.filter(r =>
|
||||
r.level === 'error' ||
|
||||
r.level === 'critical' ||
|
||||
(r.ruleId || '').toLowerCase().includes('critical')
|
||||
).length;
|
||||
|
||||
return {
|
||||
total: acc.total + run.results.length,
|
||||
critical: acc.critical + critical
|
||||
};
|
||||
}, { total: 0, critical: 0 });
|
||||
} catch (error) {
|
||||
console.log(`Error analyzing ${tool} results: ${error.message}`);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
// Only analyze results from tools that ran successfully
|
||||
const results = {
|
||||
trivy: '${{ steps.verify-sarif.outputs.has_trivy }}' === 'true' ?
|
||||
analyzeSarif('trivy-results.sarif', 'trivy') : null,
|
||||
gitleaks: '${{ steps.verify-sarif.outputs.has_gitleaks }}' === 'true' ?
|
||||
analyzeSarif('gitleaks-report.sarif', 'gitleaks') : null
|
||||
};
|
||||
|
||||
// Aggregate results
|
||||
Object.entries(results).forEach(([tool, result]) => {
|
||||
if (result) {
|
||||
totalIssues += result.total;
|
||||
criticalIssues += result.critical;
|
||||
console.log(`${tool}: ${result.total} total, ${result.critical} critical issues`);
|
||||
}
|
||||
});
|
||||
|
||||
// Create summary
|
||||
const summary = `## Security Scan Summary
|
||||
|
||||
- Total Issues Found: ${totalIssues}
|
||||
- Critical Issues: ${criticalIssues}
|
||||
|
||||
### Tool Breakdown
|
||||
${Object.entries(results)
|
||||
.filter(([_, r]) => r)
|
||||
.map(([tool, r]) =>
|
||||
`- ${tool}: ${r.total} total, ${r.critical} critical`
|
||||
).join('\n')}
|
||||
|
||||
### Tools Run Status
|
||||
- Actionlint: ${{ steps.check-configs.outputs.run_actionlint }}
|
||||
- Trivy: ${{ steps.verify-sarif.outputs.has_trivy }}
|
||||
- Gitleaks: ${{ steps.check-configs.outputs.run_gitleaks }}
|
||||
`;
|
||||
|
||||
// Set outputs using GITHUB_OUTPUT
|
||||
const outputFile = process.env.GITHUB_OUTPUT;
|
||||
if (outputFile) {
|
||||
fs.appendFileSync(outputFile, `total_issues=${totalIssues}\n`);
|
||||
fs.appendFileSync(outputFile, `critical_issues=${criticalIssues}\n`);
|
||||
}
|
||||
|
||||
// Add job summary using GITHUB_STEP_SUMMARY
|
||||
const summaryFile = process.env.GITHUB_STEP_SUMMARY;
|
||||
if (summaryFile) {
|
||||
fs.appendFileSync(summaryFile, summary + '\n');
|
||||
}
|
||||
|
||||
// Fail if critical issues found
|
||||
if (criticalIssues > 0) {
|
||||
console.error(`Found ${criticalIssues} critical security issues`);
|
||||
process.exit(1);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Analysis failed: ${error.message}`);
|
||||
process.exit(1);
|
||||
}
|
||||
55
security-scan/rules.yml
Normal file
@@ -0,0 +1,55 @@
|
||||
---
|
||||
# Validation rules for security-scan action
|
||||
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
|
||||
# Schema version: 1.0
|
||||
# Coverage: 86% (6/7 inputs)
|
||||
#
|
||||
# This file defines validation rules for the security-scan GitHub Action.
|
||||
# Rules are automatically applied by validate-inputs action when this
|
||||
# action is used.
|
||||
#
|
||||
|
||||
schema_version: '1.0'
|
||||
action: security-scan
|
||||
description: |
|
||||
Comprehensive security scanning for GitHub Actions including actionlint,
|
||||
Gitleaks (optional), and Trivy vulnerability scanning. Requires
|
||||
'security-events: write' and 'contents: read' permissions in the workflow.
|
||||
generator_version: 1.0.0
|
||||
required_inputs: []
|
||||
optional_inputs:
|
||||
- actionlint-enabled
|
||||
- gitleaks-config
|
||||
- gitleaks-license
|
||||
- token
|
||||
- trivy-scanners
|
||||
- trivy-severity
|
||||
- trivy-timeout
|
||||
conventions:
|
||||
actionlint-enabled: boolean
|
||||
gitleaks-config: file_path
|
||||
token: github_token
|
||||
trivy-scanners: scanner_list
|
||||
trivy-severity: severity_enum
|
||||
trivy-timeout: timeout_with_unit
|
||||
overrides:
|
||||
actionlint-enabled: boolean
|
||||
gitleaks-config: file_path
|
||||
token: github_token
|
||||
trivy-scanners: scanner_list
|
||||
trivy-severity: severity_enum
|
||||
trivy-timeout: timeout_with_unit
|
||||
statistics:
|
||||
total_inputs: 7
|
||||
validated_inputs: 6
|
||||
skipped_inputs: 0
|
||||
coverage_percentage: 86
|
||||
validation_coverage: 86
|
||||
auto_detected: true
|
||||
manual_review_required: false
|
||||
quality_indicators:
|
||||
has_required_inputs: false
|
||||
has_token_validation: true
|
||||
has_version_validation: false
|
||||
has_file_validation: true
|
||||
has_security_validation: true
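
The coverage figure in the statistics block follows directly from the input counts: 6 of the 7 inputs have a validation convention (only `gitleaks-license` is left without one), and the generator rounds the ratio to a whole percentage. A quick check of that arithmetic:

```python
# Coverage arithmetic as used by the rules generator (update-validators.py):
total_inputs = 7          # all security-scan inputs
validated_inputs = 6      # every input except gitleaks-license has a convention
coverage = round(validated_inputs / total_inputs * 100)
print(coverage)  # 86, matching coverage_percentage above
```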
|
||||
@@ -43,7 +43,7 @@ runs:
|
||||
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
uses: ivuorinen/actions/validate-inputs@8fb52522ab00fe73cf181ef299e56066f0b2c8d8
|
||||
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
|
||||
with:
|
||||
action-type: 'stale'
|
||||
token: ${{ inputs.token || github.token }}
|
||||
@@ -52,7 +52,7 @@ runs:
|
||||
|
||||
- name: 🚀 Run stale
|
||||
id: stale
|
||||
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
|
||||
uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
|
||||
with:
|
||||
repo-token: ${{ inputs.token || github.token }}
|
||||
days-before-stale: ${{ inputs.days-before-stale }}
|
||||
|
||||
@@ -78,16 +78,9 @@ class CustomValidator(BaseValidator):
|
||||
|
||||
# Validate token if provided
|
||||
if "token" in inputs:
|
||||
token_valid = self.token_validator.validate_github_token(
|
||||
inputs["token"],
|
||||
required=False, # Token is optional, defaults to ${{ github.token }}
|
||||
valid &= self.validate_with(
|
||||
self.token_validator, "validate_github_token", inputs["token"], required=False
|
||||
)
|
||||
# Copy any errors from token validator
|
||||
for error in self.token_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.token_validator.clear_errors()
|
||||
valid &= token_valid
|
||||
|
||||
return valid
|
||||
|
||||
@@ -100,27 +93,15 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(path):
|
||||
return True
|
||||
|
||||
# First check basic file path security
|
||||
result = self.file_validator.validate_file_path(path, "labels")
|
||||
# Copy any errors from file validator
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
|
||||
result = self.validate_with(self.file_validator, "validate_file_path", path, "labels")
|
||||
if not result:
|
||||
return False
|
||||
|
||||
# Check file extension
|
||||
if not (path.endswith(".yml") or path.endswith(".yaml")):
|
||||
self.add_error(f'Invalid labels file: "{path}". Must be a .yml or .yaml file')
|
||||
return False
|
||||
|
||||
# Additional custom validation could go here
|
||||
# For example, checking if the file exists, validating YAML structure, etc.
|
||||
|
||||
return True
|
||||
|
||||
@@ -30,54 +30,32 @@ class CustomValidator(BaseValidator):
|
||||
"""Validate terraform-lint-fix action inputs."""
|
||||
valid = True
|
||||
|
||||
# Validate terraform-version if provided
|
||||
if "terraform-version" in inputs:
|
||||
value = inputs["terraform-version"]
|
||||
# Validate terraform-version if provided (empty is OK - uses default)
|
||||
if inputs.get("terraform-version"):
|
||||
valid &= self.validate_with(
|
||||
self.version_validator,
|
||||
"validate_terraform_version",
|
||||
inputs["terraform-version"],
|
||||
"terraform-version",
|
||||
)
|
||||
|
||||
# Empty string is OK - uses default
|
||||
if value == "":
|
||||
pass # Allow empty, will use default
|
||||
elif value:
|
||||
result = self.version_validator.validate_terraform_version(
|
||||
value, "terraform-version"
|
||||
)
|
||||
|
||||
# Propagate errors from the version validator
|
||||
for error in self.version_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
|
||||
self.version_validator.clear_errors()
|
||||
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
# Validate token if provided
|
||||
if "token" in inputs:
|
||||
value = inputs["token"]
|
||||
if value == "":
|
||||
# Empty token is OK - uses default
|
||||
pass
|
||||
elif value:
|
||||
result = self.token_validator.validate_github_token(value, required=False)
|
||||
for error in self.token_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.token_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
# Validate token if provided (empty is OK - uses default)
|
||||
if inputs.get("token"):
|
||||
valid &= self.validate_with(
|
||||
self.token_validator,
|
||||
"validate_github_token",
|
||||
inputs["token"],
|
||||
required=False,
|
||||
)
|
||||
|
||||
# Validate working-directory if provided
|
||||
if "working-directory" in inputs:
|
||||
value = inputs["working-directory"]
|
||||
if value:
|
||||
result = self.file_validator.validate_file_path(value, "working-directory")
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
if inputs.get("working-directory"):
|
||||
valid &= self.validate_with(
|
||||
self.file_validator,
|
||||
"validate_file_path",
|
||||
inputs["working-directory"],
|
||||
"working-directory",
|
||||
)
|
||||
|
||||
return valid
|
||||
|
||||
|
||||
@@ -78,7 +78,7 @@ runs:
|
||||
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
uses: ivuorinen/actions/validate-inputs@8fb52522ab00fe73cf181ef299e56066f0b2c8d8
|
||||
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
|
||||
with:
|
||||
action-type: 'terraform-lint-fix'
|
||||
token: ${{ inputs.token || github.token }}
|
||||
@@ -247,7 +247,7 @@ runs:
|
||||
|
||||
- name: Commit Fixes
|
||||
if: steps.check-files.outputs.found == 'true' && inputs.auto-fix == 'true' && fromJSON(steps.fix.outputs.fixed_count) > 0
|
||||
uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
|
||||
uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
|
||||
with:
|
||||
commit_message: 'style: apply terraform formatting fixes'
|
||||
commit_user_name: ${{ inputs.username }}
|
||||
@@ -256,7 +256,7 @@ runs:
|
||||
|
||||
- name: Upload SARIF Report
|
||||
if: steps.check-files.outputs.found == 'true' && inputs.format == 'sarif'
|
||||
uses: github/codeql-action/upload-sarif@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
|
||||
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
with:
|
||||
sarif_file: ${{ env.VALIDATED_WORKING_DIR }}/reports/tflint.sarif
|
||||
category: terraform-lint
|
||||
|
||||
@@ -27,57 +27,45 @@ class CustomValidator(BaseValidator):
|
||||
self.boolean_validator = BooleanValidator()
|
||||
self.file_validator = FileValidator()
|
||||
|
||||
def validate_inputs(self, inputs: dict[str, str]) -> bool: # pylint: disable=too-many-branches
|
||||
def validate_inputs(self, inputs: dict[str, str]) -> bool:
|
||||
"""Validate validate-inputs action inputs."""
|
||||
valid = True
|
||||
|
||||
# Validate action/action-type input
|
||||
if "action" in inputs or "action-type" in inputs:
|
||||
action_input = inputs.get("action") or inputs.get("action-type", "")
|
||||
# Check for empty action
|
||||
action_key = self.get_key_variant(inputs, "action", "action-type")
|
||||
if action_key:
|
||||
action_input = inputs[action_key]
|
||||
if action_input == "":
|
||||
self.add_error("Action name cannot be empty")
|
||||
valid = False
|
||||
# Allow GitHub expressions
|
||||
elif action_input.startswith("${{") and action_input.endswith("}}"):
|
||||
pass # GitHub expressions are valid
|
||||
# Check for dangerous characters
|
||||
elif any(
|
||||
char in action_input
|
||||
for char in [";", "`", "$", "&", "|", ">", "<", "\n", "\r", "/"]
|
||||
):
|
||||
self.add_error(f"Invalid characters in action name: {action_input}")
|
||||
valid = False
|
||||
# Validate action name format (should be lowercase with hyphens or underscores)
|
||||
elif action_input and not re.match(r"^[a-z][a-z0-9_-]*[a-z0-9]$", action_input):
|
||||
self.add_error(f"Invalid action name format: {action_input}")
|
||||
valid = False
|
||||
elif not self.is_github_expression(action_input):
|
||||
# Only validate non-GitHub expressions
|
||||
if any(
|
||||
char in action_input
|
||||
for char in [";", "`", "$", "&", "|", ">", "<", "\n", "\r", "/"]
|
||||
):
|
||||
self.add_error(f"Invalid characters in action name: {action_input}")
|
||||
valid = False
|
||||
elif action_input and not re.match(r"^[a-z][a-z0-9_-]*[a-z0-9]$", action_input):
|
||||
self.add_error(f"Invalid action name format: {action_input}")
|
||||
valid = False
|
||||
|
||||
# Validate rules-file if provided
|
||||
if inputs.get("rules-file"):
|
||||
result = self.file_validator.validate_file_path(inputs["rules-file"], "rules-file")
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.file_validator, "validate_file_path", inputs["rules-file"], "rules-file"
|
||||
)
|
||||
|
||||
# Validate fail-on-error boolean
|
||||
if "fail-on-error" in inputs:
|
||||
value = inputs["fail-on-error"]
|
||||
# Reject empty string
|
||||
if value == "":
|
||||
self.add_error("fail-on-error cannot be empty")
|
||||
valid = False
|
||||
elif value:
|
||||
result = self.boolean_validator.validate_boolean(value, "fail-on-error")
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.boolean_validator, "validate_boolean", value, "fail-on-error"
|
||||
)
|
||||
|
||||
return valid
|
||||
|
||||
|
||||
@@ -8,56 +8,62 @@ Centralized Python-based input validation for GitHub Actions with PCRE regex sup
|
||||
|
||||
### Inputs
|
||||
|
||||
| name | description | required | default |
|
||||
|---------------------|-------------------------------------------------------------------------------------|----------|---------|
|
||||
| `action` | <p>Action name to validate (alias for action-type)</p> | `false` | `""` |
|
||||
| `action-type` | <p>Type of action to validate (e.g., csharp-publish, docker-build, eslint-lint)</p> | `false` | `""` |
|
||||
| `rules-file` | <p>Path to validation rules file</p> | `false` | `""` |
|
||||
| `fail-on-error` | <p>Whether to fail on validation errors</p> | `false` | `true` |
|
||||
| `token` | <p>GitHub token for authentication</p> | `false` | `""` |
|
||||
| `namespace` | <p>Namespace/username for validation</p> | `false` | `""` |
|
||||
| `email` | <p>Email address for validation</p> | `false` | `""` |
|
||||
| `username` | <p>Username for validation</p> | `false` | `""` |
|
||||
| `dotnet-version` | <p>.NET version string</p> | `false` | `""` |
|
||||
| `terraform-version` | <p>Terraform version string</p> | `false` | `""` |
|
||||
| `tflint-version` | <p>TFLint version string</p> | `false` | `""` |
|
||||
| `node-version` | <p>Node.js version string</p> | `false` | `""` |
|
||||
| `force-version` | <p>Force version override</p> | `false` | `""` |
|
||||
| `default-version` | <p>Default version fallback</p> | `false` | `""` |
|
||||
| `image-name` | <p>Docker image name</p> | `false` | `""` |
|
||||
| `tag` | <p>Docker image tag</p> | `false` | `""` |
|
||||
| `architectures` | <p>Target architectures</p> | `false` | `""` |
|
||||
| `dockerfile` | <p>Dockerfile path</p> | `false` | `""` |
|
||||
| `context` | <p>Docker build context</p> | `false` | `""` |
|
||||
| `build-args` | <p>Docker build arguments</p> | `false` | `""` |
|
||||
| `buildx-version` | <p>Docker Buildx version</p> | `false` | `""` |
|
||||
| `max-retries` | <p>Maximum retry attempts</p> | `false` | `""` |
|
||||
| `image-quality` | <p>Image quality percentage</p> | `false` | `""` |
|
||||
| `png-quality` | <p>PNG quality percentage</p> | `false` | `""` |
|
||||
| `parallel-builds` | <p>Number of parallel builds</p> | `false` | `""` |
|
||||
| `days-before-stale` | <p>Number of days before marking as stale</p> | `false` | `""` |
|
||||
| `days-before-close` | <p>Number of days before closing stale items</p> | `false` | `""` |
|
||||
| `pre-commit-config` | <p>Pre-commit configuration file path</p> | `false` | `""` |
|
||||
| `base-branch` | <p>Base branch name</p> | `false` | `""` |
|
||||
| `dry-run` | <p>Dry run mode</p> | `false` | `""` |
|
||||
| `is_fiximus` | <p>Use Fiximus bot</p> | `false` | `""` |
|
||||
| `prefix` | <p>Release tag prefix</p> | `false` | `""` |
|
||||
| `language` | <p>Language to analyze (for CodeQL)</p> | `false` | `""` |
|
||||
| `queries` | <p>CodeQL queries to run</p> | `false` | `""` |
|
||||
| `packs` | <p>CodeQL query packs</p> | `false` | `""` |
|
||||
| `config-file` | <p>CodeQL configuration file path</p> | `false` | `""` |
|
||||
| `config` | <p>CodeQL configuration YAML string</p> | `false` | `""` |
|
||||
| `build-mode` | <p>Build mode for compiled languages</p> | `false` | `""` |
|
||||
| `source-root` | <p>Source code root directory</p> | `false` | `""` |
|
||||
| `category` | <p>Analysis category</p> | `false` | `""` |
|
||||
| `checkout-ref` | <p>Git reference to checkout</p> | `false` | `""` |
|
||||
| `working-directory` | <p>Working directory for analysis</p> | `false` | `""` |
|
||||
| `upload-results` | <p>Upload results to GitHub Security</p> | `false` | `""` |
|
||||
| `ram` | <p>Memory in MB for CodeQL</p> | `false` | `""` |
|
||||
| `threads` | <p>Number of threads for CodeQL</p> | `false` | `""` |
|
||||
| `output` | <p>Output path for SARIF results</p> | `false` | `""` |
|
||||
| `skip-queries` | <p>Skip running queries</p> | `false` | `""` |
|
||||
| `add-snippets` | <p>Add code snippets to SARIF</p> | `false` | `""` |
|
||||
| name | description | required | default |
|
||||
|----------------------|-------------------------------------------------------------------------------------|----------|---------|
|
||||
| `action` | <p>Action name to validate (alias for action-type)</p> | `false` | `""` |
|
||||
| `action-type` | <p>Type of action to validate (e.g., csharp-publish, docker-build, eslint-lint)</p> | `false` | `""` |
|
||||
| `rules-file` | <p>Path to validation rules file</p> | `false` | `""` |
|
||||
| `fail-on-error` | <p>Whether to fail on validation errors</p> | `false` | `true` |
|
||||
| `token` | <p>GitHub token for authentication</p> | `false` | `""` |
|
||||
| `namespace` | <p>Namespace/username for validation</p> | `false` | `""` |
|
||||
| `email` | <p>Email address for validation</p> | `false` | `""` |
|
||||
| `username` | <p>Username for validation</p> | `false` | `""` |
|
||||
| `dotnet-version` | <p>.NET version string</p> | `false` | `""` |
|
||||
| `terraform-version` | <p>Terraform version string</p> | `false` | `""` |
|
||||
| `tflint-version` | <p>TFLint version string</p> | `false` | `""` |
|
||||
| `node-version` | <p>Node.js version string</p> | `false` | `""` |
|
||||
| `force-version` | <p>Force version override</p> | `false` | `""` |
|
||||
| `default-version` | <p>Default version fallback</p> | `false` | `""` |
|
||||
| `image-name` | <p>Docker image name</p> | `false` | `""` |
|
||||
| `tag` | <p>Docker image tag</p> | `false` | `""` |
|
||||
| `architectures` | <p>Target architectures</p> | `false` | `""` |
|
||||
| `dockerfile` | <p>Dockerfile path</p> | `false` | `""` |
|
||||
| `context` | <p>Docker build context</p> | `false` | `""` |
|
||||
| `build-args` | <p>Docker build arguments</p> | `false` | `""` |
|
||||
| `buildx-version` | <p>Docker Buildx version</p> | `false` | `""` |
|
||||
| `max-retries` | <p>Maximum retry attempts</p> | `false` | `""` |
|
||||
| `image-quality` | <p>Image quality percentage</p> | `false` | `""` |
|
||||
| `png-quality` | <p>PNG quality percentage</p> | `false` | `""` |
|
||||
| `parallel-builds` | <p>Number of parallel builds</p> | `false` | `""` |
|
||||
| `days-before-stale` | <p>Number of days before marking as stale</p> | `false` | `""` |
|
||||
| `days-before-close` | <p>Number of days before closing stale items</p> | `false` | `""` |
|
||||
| `pre-commit-config` | <p>Pre-commit configuration file path</p> | `false` | `""` |
|
||||
| `base-branch` | <p>Base branch name</p> | `false` | `""` |
|
||||
| `dry-run` | <p>Dry run mode</p> | `false` | `""` |
|
||||
| `is_fiximus` | <p>Use Fiximus bot</p> | `false` | `""` |
|
||||
| `prefix` | <p>Release tag prefix</p> | `false` | `""` |
|
||||
| `language` | <p>Language to analyze (for CodeQL)</p> | `false` | `""` |
|
||||
| `queries` | <p>CodeQL queries to run</p> | `false` | `""` |
|
||||
| `packs` | <p>CodeQL query packs</p> | `false` | `""` |
|
||||
| `config-file` | <p>CodeQL configuration file path</p> | `false` | `""` |
|
||||
| `config` | <p>CodeQL configuration YAML string</p> | `false` | `""` |
|
||||
| `build-mode` | <p>Build mode for compiled languages</p> | `false` | `""` |
|
||||
| `source-root` | <p>Source code root directory</p> | `false` | `""` |
|
||||
| `category` | <p>Analysis category</p> | `false` | `""` |
|
||||
| `checkout-ref` | <p>Git reference to checkout</p> | `false` | `""` |
|
||||
| `working-directory` | <p>Working directory for analysis</p> | `false` | `""` |
|
||||
| `upload-results` | <p>Upload results to GitHub Security</p> | `false` | `""` |
|
||||
| `ram` | <p>Memory in MB for CodeQL</p> | `false` | `""` |
|
||||
| `threads` | <p>Number of threads for CodeQL</p> | `false` | `""` |
|
||||
| `output` | <p>Output path for SARIF results</p> | `false` | `""` |
|
||||
| `skip-queries` | <p>Skip running queries</p> | `false` | `""` |
|
||||
| `add-snippets` | <p>Add code snippets to SARIF</p> | `false` | `""` |
|
||||
| `gitleaks-license` | <p>Gitleaks license key</p> | `false` | `""` |
|
||||
| `gitleaks-config` | <p>Gitleaks configuration file path</p> | `false` | `""` |
|
||||
| `trivy-severity` | <p>Trivy severity levels to scan</p> | `false` | `""` |
|
||||
| `trivy-scanners` | <p>Trivy scanner types to run</p> | `false` | `""` |
|
||||
| `trivy-timeout` | <p>Trivy scan timeout</p> | `false` | `""` |
|
||||
| `actionlint-enabled` | <p>Enable actionlint scanning</p> | `false` | `""` |
|
||||
|
||||
### Outputs
|
||||
|
||||
@@ -365,4 +371,40 @@ This action is a `composite` action.
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
|
||||
gitleaks-license:
|
||||
# Gitleaks license key
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
|
||||
gitleaks-config:
|
||||
# Gitleaks configuration file path
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
|
||||
trivy-severity:
|
||||
# Trivy severity levels to scan
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
|
||||
trivy-scanners:
|
||||
# Trivy scanner types to run
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
|
||||
trivy-timeout:
|
||||
# Trivy scan timeout
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
|
||||
actionlint-enabled:
|
||||
# Enable actionlint scanning
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
```
|
||||
|
||||
@@ -173,6 +173,26 @@ inputs:
|
||||
description: 'Add code snippets to SARIF'
|
||||
required: false
|
||||
|
||||
# Security-scan specific inputs
|
||||
gitleaks-license:
|
||||
description: 'Gitleaks license key'
|
||||
required: false
|
||||
gitleaks-config:
|
||||
description: 'Gitleaks configuration file path'
|
||||
required: false
|
||||
trivy-severity:
|
||||
description: 'Trivy severity levels to scan'
|
||||
required: false
|
||||
trivy-scanners:
|
||||
description: 'Trivy scanner types to run'
|
||||
required: false
|
||||
trivy-timeout:
|
||||
description: 'Trivy scan timeout'
|
||||
required: false
|
||||
actionlint-enabled:
|
||||
description: 'Enable actionlint scanning'
|
||||
required: false
|
||||
|
||||
outputs:
|
||||
validation-status:
|
||||
description: 'Overall validation status (success/failure)'
|
||||
@@ -193,6 +213,10 @@ outputs:
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Install Python dependencies
|
||||
shell: bash
|
||||
run: pip install pyyaml==6.0.3
|
||||
|
||||
- name: Validate Action Inputs with Python
|
||||
id: validate
|
||||
shell: bash
|
||||
|
||||
@@ -114,7 +114,7 @@ class ValidationRuleGenerator:
|
||||
"prefix": re.compile(r"\b(prefix|tag[_-]?prefix)\b", re.IGNORECASE),
|
||||
# Boolean patterns (broad, should be lower priority)
|
||||
"boolean": re.compile(
|
||||
r"\b(dry-?run|verbose|enable|disable|auto|skip|force|cache|provenance|sbom|scan|sign|fail[_-]?on[_-]?error|nightly)\b",
|
||||
r"\b(dry-?run|verbose|enable|disable|auto|skip|force|cache|provenance|sbom|scan|sign|push|fail[_-]?on[_-]?error|nightly)\b",
|
||||
re.IGNORECASE,
|
||||
),
|
||||
# File extensions pattern
|
||||
@@ -160,36 +160,36 @@ class ValidationRuleGenerator:
|
||||
"npm_token": "github_token",
|
||||
"password": "github_token",
|
||||
# Complex fields that should skip validation
|
||||
"build-args": None, # Can be empty
|
||||
"context": None, # Default handled
|
||||
"cache-from": None, # Complex cache syntax
|
||||
"cache-export": None, # Complex cache syntax
|
||||
"cache-import": None, # Complex cache syntax
|
||||
"build-contexts": None, # Complex syntax
|
||||
"secrets": None, # Complex syntax
|
||||
"platform-build-args": None, # JSON format
|
||||
"extensions": None, # PHP extensions list
|
||||
"tools": None, # PHP tools list
|
||||
"build-args": "key_value_list", # Docker build arguments (KEY=VALUE format)
|
||||
"context": "file_path", # Build context path
|
||||
"cache-from": "cache_config", # Docker cache configuration
|
||||
"cache-export": "cache_config", # Docker cache configuration
|
||||
"cache-import": "cache_config", # Docker cache configuration
|
||||
"build-contexts": "key_value_list", # Docker build contexts (KEY=VALUE format)
|
||||
"secrets": "key_value_list", # Docker secrets (KEY=VALUE format)
|
||||
"platform-build-args": "json_format", # JSON format for platform-specific args
|
||||
"extensions": "php_extensions", # PHP extensions list
|
||||
"tools": "linter_list", # PHP tools list - same pattern as linters
|
||||
"framework": "framework_mode", # PHP framework mode (auto, laravel, generic)
|
||||
"args": None, # Composer args
|
||||
"stability": None, # Composer stability
|
||||
"registry-url": "url", # URL format
|
||||
"scope": "scope", # NPM scope
|
||||
"plugins": None, # Prettier plugins
|
||||
"plugins": "linter_list", # Prettier plugins - same pattern as linters
|
||||
"file-extensions": "file_extensions", # File extension list
|
||||
"file-pattern": None, # Glob pattern
|
||||
"enable-linters": None, # Linter list
|
||||
"disable-linters": None, # Linter list
|
||||
"success-codes": None, # Exit code list
|
||||
"retry-codes": None, # Exit code list
|
||||
"ignore-paths": None, # Path patterns
|
||||
"key-files": None, # Cache key files
|
||||
"restore-keys": None, # Cache restore keys
|
||||
"env-vars": None, # Environment variables
|
||||
"file-pattern": "path_list", # Glob pattern for file paths
|
||||
"enable-linters": "linter_list", # Linter list
|
||||
"disable-linters": "linter_list", # Linter list
|
||||
"success-codes": "exit_code_list", # Exit code list
|
||||
"retry-codes": "exit_code_list", # Exit code list
|
||||
"ignore-paths": "path_list", # Path patterns to ignore
|
||||
"key-files": "path_list", # Cache key files (paths)
|
||||
"restore-keys": "path_list", # Cache restore keys (paths)
|
||||
"env-vars": "key_value_list", # Environment variables (KEY=VALUE format)
|
||||
# Action-specific fields that need special handling
|
||||
"type": None, # Cache type enum (npm, composer, go, etc.) - complex enum,
|
||||
# skip validation
|
||||
"paths": None, # File paths for caching (comma-separated) - complex format,
|
||||
# skip validation
|
||||
"paths": "path_list", # File paths for caching (comma-separated)
|
||||
"command": None, # Shell command - complex format, skip validation for safety
|
||||
"backoff-strategy": None, # Retry strategy enum - complex enum, skip validation
|
||||
"shell": None, # Shell type enum - simple enum, skip validation
|
||||
@@ -199,10 +199,13 @@ class ValidationRuleGenerator:
|
||||
"retry-delay": "numeric_range_1_300", # Retry delay should support higher values
|
||||
"max-warnings": "numeric_range_0_10000",
|
||||
# version-file-parser specific fields
|
||||
"language": None, # Simple enum (node, php, python, go, dotnet)
|
||||
"tool-versions-key": None, # Simple string (nodejs, python, php, golang, dotnet)
|
||||
"dockerfile-image": None, # Simple string (node, python, php, golang, dotnet)
|
||||
"validation-regex": "regex_pattern", # Regex pattern - validate for ReDoS
|
||||
# Docker network mode
|
||||
"network": "network_mode", # Docker network mode (host, none, default)
|
||||
# Language enum for version detection
|
||||
"language": "language_enum", # Language type (php, python, go, dotnet)
|
||||
}
|
||||
|
||||
def get_action_directories(self) -> list[str]:
|
||||
@@ -314,7 +317,6 @@ class ValidationRuleGenerator:
|
||||
"docker-publish": {
|
||||
"registry": "registry_enum",
|
||||
"cache-mode": "cache_mode",
|
||||
"platforms": None, # Skip validation - complex platform format
|
||||
},
|
||||
"docker-publish-hub": {
|
||||
"password": "docker_password",
|
||||
@@ -354,26 +356,28 @@ class ValidationRuleGenerator:
|
||||
"prettier-lint": {
|
||||
"mode": "mode_enum",
|
||||
},
|
||||
"security-scan": {
|
||||
"gitleaks-config": "file_path",
|
||||
"trivy-severity": "severity_enum",
|
||||
"trivy-scanners": "scanner_list",
|
||||
"trivy-timeout": "timeout_with_unit",
|
||||
"actionlint-enabled": "boolean",
|
||||
"token": "github_token",
|
||||
},
|
||||
}
|
||||
|
||||
if action_name in action_overrides:
|
||||
# Apply overrides for existing conventions
|
||||
overrides.update(
|
||||
{
|
||||
input_name: override_value
|
||||
for input_name, override_value in action_overrides[action_name].items()
|
||||
if input_name in conventions
|
||||
},
|
||||
)
|
||||
# Add missing inputs from overrides to conventions
|
||||
for input_name, override_value in action_overrides[action_name].items():
|
||||
if input_name not in conventions and input_name in action_data["inputs"]:
|
||||
if input_name in action_data["inputs"]:
|
||||
overrides[input_name] = override_value
|
||||
# Update conventions to match override (or set to None if skipped)
|
||||
conventions[input_name] = override_value
|
||||
|
||||
# Calculate statistics
|
||||
total_inputs = len(action_data["inputs"])
|
||||
validated_inputs = len(conventions)
|
||||
skipped_inputs = sum(1 for v in overrides.values() if v is None)
|
||||
validated_inputs = sum(1 for v in conventions.values() if v is not None)
|
||||
skipped_inputs = sum(1 for v in conventions.values() if v is None)
|
||||
coverage = round((validated_inputs / total_inputs) * 100) if total_inputs > 0 else 0
|
||||
|
||||
# Generate rules object with enhanced metadata
|
||||
@@ -432,8 +436,20 @@ class ValidationRuleGenerator:
|
||||
|
||||
# Use a custom yaml dumper to ensure proper indentation
|
||||
class CustomYamlDumper(yaml.SafeDumper):
|
||||
def increase_indent(self, flow: bool = False, *, indentless: bool = False) -> None: # noqa: FBT001, FBT002
|
||||
return super().increase_indent(flow, indentless=indentless)
|
||||
def increase_indent(self, flow: bool = False, *, indentless: bool = False) -> None: # noqa: FBT001, FBT002, ARG002 # type: ignore[override]
|
||||
return super().increase_indent(flow, False)
|
||||
|
||||
def choose_scalar_style(self):
|
||||
"""Choose appropriate quote style based on string content."""
|
||||
if hasattr(self, "event") and hasattr(self.event, "value") and self.event.value: # type: ignore[attr-defined]
|
||||
value = self.event.value # type: ignore[attr-defined]
|
||||
# Use literal block style for multiline strings
|
||||
if "\n" in value:
|
||||
return "|"
|
||||
# Use double quotes for strings with single quotes
|
||||
if "'" in value:
|
||||
return '"'
|
||||
return super().choose_scalar_style()
|
||||
|
||||
yaml_content = yaml.dump(
|
||||
rules,
|
||||
|
||||
File diff suppressed because it is too large
@@ -274,6 +274,71 @@ class TestDockerValidator:
|
||||
result = self.validator.validate_inputs(inputs)
|
||||
assert isinstance(result, bool)
|
||||
|
||||
def test_validate_registry_valid(self):
|
||||
"""Test registry enum validation with valid values."""
|
||||
valid_registries = [
|
||||
"dockerhub",
|
||||
"github",
|
||||
"both",
|
||||
]
|
||||
|
||||
for registry in valid_registries:
|
||||
self.validator.errors = []
|
||||
result = self.validator.validate_registry(registry)
|
||||
assert result is True, f"Should accept registry: {registry}"
|
||||
|
||||
def test_validate_registry_invalid(self):
|
||||
"""Test registry enum validation with invalid values."""
|
||||
invalid_registries = [
|
||||
"", # Empty
|
||||
" ", # Whitespace only
|
||||
"docker", # Wrong value (should be dockerhub)
|
||||
"hub", # Wrong value
|
||||
"ghcr", # Wrong value
|
||||
"gcr", # Wrong value
|
||||
"both,github", # Comma-separated not allowed
|
||||
"DOCKERHUB", # Uppercase
|
||||
"DockerHub", # Mixed case
|
||||
"docker hub", # Space
|
||||
"github.com", # Full URL not allowed
|
||||
]
|
||||
|
||||
for registry in invalid_registries:
|
||||
self.validator.errors = []
|
||||
result = self.validator.validate_registry(registry)
|
||||
assert result is False, f"Should reject registry: {registry}"
|
||||
|
||||
def test_validate_sbom_format_valid(self):
|
||||
"""Test SBOM format validation with valid values."""
|
||||
valid_formats = [
|
||||
"spdx-json",
|
||||
"cyclonedx-json",
|
||||
"", # Empty is optional
|
||||
]
|
||||
|
||||
for sbom_format in valid_formats:
|
||||
self.validator.errors = []
|
||||
result = self.validator.validate_sbom_format(sbom_format)
|
||||
assert result is True, f"Should accept SBOM format: {sbom_format}"
|
||||
|
||||
def test_validate_sbom_format_invalid(self):
|
||||
"""Test SBOM format validation with invalid values."""
|
||||
invalid_formats = [
|
||||
"spdx", # Missing -json suffix
|
||||
"cyclonedx", # Missing -json suffix
|
||||
"json", # Just json
|
||||
"spdx-xml", # Wrong format
|
||||
"cyclonedx-xml", # Wrong format
|
||||
"SPDX-JSON", # Uppercase
|
||||
"spdx json", # Space
|
||||
"invalid", # Invalid value
|
||||
]
|
||||
|
||||
for sbom_format in invalid_formats:
|
||||
self.validator.errors = []
|
||||
result = self.validator.validate_sbom_format(sbom_format)
|
||||
assert result is False, f"Should reject SBOM format: {sbom_format}"
|
||||
|
||||
def test_empty_values_handling(self):
|
||||
"""Test that empty values are handled appropriately."""
|
||||
# Some Docker fields might be required, others optional
|
||||
@@ -281,3 +346,5 @@ class TestDockerValidator:
|
||||
assert isinstance(self.validator.validate_docker_tag(""), bool)
|
||||
assert isinstance(self.validator.validate_architectures(""), bool)
|
||||
assert isinstance(self.validator.validate_prefix(""), bool)
|
||||
# Registry should reject empty values
|
||||
assert self.validator.validate_registry("") is False
|
||||
|
||||
@@ -151,7 +151,7 @@ class TestValidationRuleGenerator:
|
||||
generator = ValidationRuleGenerator()
|
||||
|
||||
# Test special cases from the mapping
|
||||
assert generator.detect_validation_type("build-args", {}) is None
|
||||
assert generator.detect_validation_type("build-args", {}) == "key_value_list"
|
||||
assert generator.detect_validation_type("version", {}) == "flexible_version"
|
||||
assert (
|
||||
generator.detect_validation_type("dotnet-version", {}) == "dotnet_version"
|
||||
|
||||
@@ -227,3 +227,82 @@ class BaseValidator(ABC):
|
||||
or ("${{" in value and "}}" in value)
|
||||
or (value.strip().startswith("${{") and value.strip().endswith("}}"))
|
||||
)
|
||||
|
||||
def propagate_errors(self, validator: BaseValidator, result: bool) -> bool:
|
||||
"""Copy errors from another validator and return result.
|
||||
|
||||
Args:
|
||||
validator: The validator to copy errors from
|
||||
result: The validation result to return
|
||||
|
||||
Returns:
|
||||
The result parameter unchanged
|
||||
"""
|
||||
for error in validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
validator.clear_errors()
|
||||
return result
|
||||
|
||||
def validate_with(
|
||||
self, validator: BaseValidator, method: str, *args: Any, **kwargs: Any
|
||||
) -> bool:
|
||||
"""Call validator method and propagate errors.
|
||||
|
||||
Args:
|
||||
validator: The validator instance to use
|
||||
method: The method name to call on the validator
|
||||
*args: Positional arguments to pass to the method
|
||||
**kwargs: Keyword arguments to pass to the method
|
||||
|
||||
Returns:
|
||||
The validation result
|
||||
"""
|
||||
result = getattr(validator, method)(*args, **kwargs)
|
||||
return self.propagate_errors(validator, result)
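
Together, `propagate_errors` and `validate_with` let action-specific validators delegate to the shared helper validators without hand-copying error lists, which is the refactor the earlier hunks apply. A minimal sketch of the intended call pattern (the module paths and the `TokenValidator` constructor are assumptions, not taken from the repository layout):

```python
# Sketch only: import paths and constructor details are assumed, not from the repo.
from validators.base import BaseValidator
from validators.token import TokenValidator


class ExampleValidator(BaseValidator):
    """Hypothetical action validator using the shared delegation helpers."""

    def __init__(self) -> None:
        super().__init__()
        self.token_validator = TokenValidator()

    def validate_inputs(self, inputs: dict[str, str]) -> bool:
        valid = True
        # Empty token is fine; the action falls back to github.token at runtime.
        if inputs.get("token"):
            valid &= self.validate_with(
                self.token_validator,
                "validate_github_token",
                inputs["token"],
                required=False,
            )
        return valid
```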
|
||||
|
||||
def validate_enum(
|
||||
self,
|
||||
value: str,
|
||||
name: str,
|
||||
valid_values: list[str],
|
||||
*,
|
||||
case_sensitive: bool = False,
|
||||
) -> bool:
|
||||
"""Validate value is one of allowed options.
|
||||
|
||||
Args:
|
||||
value: The value to validate
|
||||
name: The name of the input for error messages
|
||||
valid_values: List of allowed values
|
||||
case_sensitive: Whether comparison should be case sensitive
|
||||
|
||||
Returns:
|
||||
True if value is valid or empty/GitHub expression, False otherwise
|
||||
"""
|
||||
if not value or self.is_github_expression(value):
|
||||
return True
|
||||
check = value if case_sensitive else value.lower()
|
||||
allowed = valid_values if case_sensitive else [v.lower() for v in valid_values]
|
||||
if check not in allowed:
|
||||
self.add_error(f"Invalid {name}: {value}. Must be one of: {', '.join(valid_values)}")
|
||||
return False
|
||||
return True
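
Because empty values and GitHub Actions expressions short-circuit to `True`, `validate_enum` is safe to call on optional inputs. An illustrative check of that behaviour, reusing the hypothetical `ExampleValidator` sketch above and the registry values exercised by the Docker tests (`dockerhub`, `github`, `both`):

```python
# Illustrative only; mirrors the semantics documented in validate_enum.
demo = ExampleValidator()
registries = ["dockerhub", "github", "both"]

assert demo.validate_enum("dockerhub", "registry", registries)
assert demo.validate_enum("", "registry", registries)                        # empty -> treated as optional
assert demo.validate_enum("${{ inputs.registry }}", "registry", registries)  # expressions pass through
assert not demo.validate_enum("gcr", "registry", registries)                 # invalid value, error recorded
```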
|
||||
|
||||
@staticmethod
|
||||
def get_key_variant(inputs: dict[str, str], *variants: str) -> str | None:
|
||||
"""Get first matching key variant from inputs.
|
||||
|
||||
Useful for inputs that may use underscore or hyphen variants.
|
||||
|
||||
Args:
|
||||
inputs: Dictionary of inputs to check
|
||||
*variants: Key variants to search for in order
|
||||
|
||||
Returns:
|
||||
The first matching key, or None if no match
|
||||
"""
|
||||
for key in variants:
|
||||
if key in inputs:
|
||||
return key
|
||||
return None
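
This is the helper the reworked validate-inputs validator uses above (`self.get_key_variant(inputs, "action", "action-type")`). A small usage sketch with illustrative input data:

```python
# Illustrative inputs; get_key_variant is a @staticmethod, so it can be called on the class.
inputs = {"action-type": "docker-build", "token": ""}

assert BaseValidator.get_key_variant(inputs, "action", "action-type") == "action-type"
assert BaseValidator.get_key_variant(inputs, "namespace", "username") is None
```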
|
||||
|
||||
@@ -5,6 +5,7 @@ This validator automatically applies validation based on input naming convention
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
@@ -424,7 +425,10 @@ class ConventionBasedValidator(BaseValidator):
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
# Clear the module's errors after copying
|
||||
validator_module.errors = []
|
||||
if hasattr(validator_module, "clear_errors"):
|
||||
validator_module.clear_errors()
|
||||
else:
|
||||
validator_module.errors = []
|
||||
|
||||
return result
|
||||
# Method not found, skip validation
|
||||
@@ -556,13 +560,33 @@ class ConventionBasedValidator(BaseValidator):
|
||||
self._validator_modules["codeql"] = codeql.CodeQLValidator()
|
||||
return self._validator_modules["codeql"], f"validate_{validator_type}"
|
||||
|
||||
# PHP-specific validators
|
||||
if validator_type in ["php_extensions", "coverage_driver", "mode_enum"]:
|
||||
# Return self for PHP-specific validation methods
|
||||
# Convention-based validators
|
||||
if validator_type in [
|
||||
"php_extensions",
|
||||
"coverage_driver",
|
||||
"mode_enum",
|
||||
"binary_enum",
|
||||
"multi_value_enum",
|
||||
"report_format",
|
||||
"format_enum",
|
||||
"linter_list",
|
||||
"timeout_with_unit",
|
||||
"severity_enum",
|
||||
"scanner_list",
|
||||
"exit_code_list",
|
||||
"key_value_list",
|
||||
"path_list",
|
||||
"network_mode",
|
||||
"language_enum",
|
||||
"framework_mode",
|
||||
"json_format",
|
||||
"cache_config",
|
||||
]:
|
||||
# Return self for validation methods implemented in this class
|
||||
return self, f"_validate_{validator_type}"
|
||||
|
||||
# Package manager and report format validators
|
||||
if validator_type in ["package_manager_enum", "report_format"]:
|
||||
# Package manager validators
|
||||
if validator_type in ["package_manager_enum"]:
|
||||
# These could be in a separate module, but for now we'll put them in file validator
|
||||
if "file" not in self._validator_modules:
|
||||
from . import file
|
||||
@@ -592,9 +616,103 @@ class ConventionBasedValidator(BaseValidator):
|
||||
# Default range
|
||||
return 0, 100
|
||||
|
||||
def _validate_comma_separated_list(
|
||||
self,
|
||||
value: str,
|
||||
input_name: str,
|
||||
item_pattern: str | None = None,
|
||||
valid_items: list | None = None,
|
||||
check_injection: bool = False,
|
||||
item_name: str = "item",
|
||||
) -> bool:
|
||||
"""Validate comma-separated list of items (generic validator).
|
||||
|
||||
This is a generic validator that can be used for any comma-separated list
|
||||
with either pattern-based or enum-based validation.
|
||||
|
||||
Args:
|
||||
value: The comma-separated list value
|
||||
input_name: The input name for error messages
|
||||
item_pattern: Regex pattern each item must match
|
||||
(default: alphanumeric+hyphens+underscores)
|
||||
valid_items: Optional list of valid items for enum-style validation
|
||||
check_injection: Whether to check for shell injection patterns
|
||||
item_name: Descriptive name for items in error messages (e.g., "linter", "extension")
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
|
||||
Examples:
|
||||
>>> # Pattern-based validation
|
||||
>>> validator._validate_comma_separated_list(
|
||||
... "gosec,govet", "enable-linters",
|
||||
... item_pattern=r'^[a-zA-Z0-9_-]+$',
|
||||
... item_name="linter"
|
||||
... )
|
||||
True
|
||||
|
||||
>>> # Enum-based validation
|
||||
>>> validator._validate_comma_separated_list(
|
||||
... "vuln,config", "scanners",
|
||||
... valid_items=["vuln", "config", "secret", "license"],
|
||||
... item_name="scanner"
|
||||
... )
|
||||
True
|
||||
"""
|
||||
if not value or value.strip() == "":
|
||||
return True # Optional
|
||||
|
||||
# Security check for injection patterns
|
||||
if check_injection and re.search(r"[;&|`$()]", value):
|
||||
self.add_error(
|
||||
f"Potential injection detected in {input_name}: {value}. "
|
||||
f"Avoid using shell metacharacters (;, &, |, `, $, parentheses)"
|
||||
)
|
||||
return False
|
||||
|
||||
# Split by comma and validate each item
|
||||
items = [item.strip() for item in value.split(",")]
|
||||
|
||||
for item in items:
|
||||
if not item: # Empty after strip
|
||||
self.add_error(f"Invalid {input_name}: {value}. Contains empty {item_name}")
|
||||
return False
|
||||
|
||||
# Enum-based validation (if valid_items provided)
|
||||
if valid_items is not None:
|
||||
if item not in valid_items:
|
||||
self.add_error(
|
||||
f"Invalid {item_name} '{item}' in {input_name}. "
|
||||
f"Must be one of: {', '.join(valid_items)}"
|
||||
)
|
||||
return False
|
||||
|
||||
# Pattern-based validation (if no valid_items and pattern provided)
|
||||
elif item_pattern is not None:
|
||||
if not re.match(item_pattern, item):
|
||||
self.add_error(
|
||||
f"Invalid {item_name} '{item}' in {input_name}. "
|
||||
f"Must match pattern: alphanumeric with hyphens/underscores"
|
||||
)
|
||||
return False
|
||||
|
||||
# Default pattern if neither valid_items nor item_pattern provided
|
||||
elif not re.match(r"^[a-zA-Z0-9_-]+$", item):
|
||||
self.add_error(
|
||||
f"Invalid {item_name} '{item}' in {input_name}. "
|
||||
f"Must be alphanumeric with hyphens/underscores"
|
||||
)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def _validate_php_extensions(self, value: str, input_name: str) -> bool:
|
||||
"""Validate PHP extensions format.
|
||||
|
||||
Wrapper for comma-separated list validator with PHP extension-specific rules.
|
||||
Allows alphanumeric characters, underscores, and spaces.
|
||||
Checks for shell injection patterns.
|
||||
|
||||
Args:
|
||||
value: The extensions value (comma-separated list)
|
||||
input_name: The input name for error messages
|
||||
@@ -602,59 +720,727 @@ class ConventionBasedValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
import re
|
||||
return self._validate_comma_separated_list(
|
||||
value,
|
||||
input_name,
|
||||
item_pattern=r"^[a-zA-Z0-9_\s]+$",
|
||||
check_injection=True,
|
||||
item_name="extension",
|
||||
)
|
||||
|
||||
if not value:
|
||||
return True
|
||||
def _validate_binary_enum(
    self,
    value: str,
    input_name: str,
    valid_values: list | None = None,
    case_sensitive: bool = True,
) -> bool:
    """Validate binary enum (two-value choice) (generic validator).

    This is a generic validator for two-value enums (e.g., check/fix, enabled/disabled).

    Args:
        value: The enum value
        input_name: The input name for error messages
        valid_values: List of exactly 2 valid values (default: ["check", "fix"])
        case_sensitive: Whether validation is case-sensitive (default: True)

    Returns:
        True if valid, False otherwise

    Examples:
        >>> # Default check/fix mode
        >>> validator._validate_binary_enum("check", "mode")
        True

        >>> # Custom binary enum
        >>> validator._validate_binary_enum(
        ...     "enabled", "status",
        ...     valid_values=["enabled", "disabled"]
        ... )
        True
    """
    if valid_values is None:
        valid_values = ["check", "fix"]

    if len(valid_values) != 2:
        raise ValueError(
            f"Binary enum requires exactly 2 valid values, got {len(valid_values)}"
        )

    if not value or value.strip() == "":
        return True  # Optional

    # Case-insensitive comparison if needed
    if not case_sensitive:
        value_lower = value.lower()
        valid_values_lower = [v.lower() for v in valid_values]
        if value_lower not in valid_values_lower:
            self.add_error(
                f"Invalid {input_name}: {value}. Must be one of: {', '.join(valid_values)}"
            )
            return False
    else:
        if value not in valid_values:
            self.add_error(
                f"Invalid {input_name}: {value}. Must be one of: {', '.join(valid_values)}"
            )
            return False

    return True
def _validate_format_enum(
    self,
    value: str,
    input_name: str,
    valid_formats: list | None = None,
    allow_custom: bool = False,
) -> bool:
    """Validate output format enum (generic validator).

    Generic validator for tool output formats (SARIF, JSON, XML, etc.).
    Supports common formats across linting/analysis tools.

    Args:
        value: The format value
        input_name: The input name for error messages
        valid_formats: List of valid formats (default: comprehensive list)
        allow_custom: Whether to allow formats not in the predefined list (default: False)

    Returns:
        True if valid, False otherwise

    Examples:
        >>> # Default comprehensive format list
        >>> validator._validate_format_enum("json", "format")
        True

        >>> # Tool-specific format list
        >>> validator._validate_format_enum(
        ...     "sarif", "output-format",
        ...     valid_formats=["json", "sarif", "text"]
        ... )
        True
    """
    if valid_formats is None:
        # Comprehensive list of common formats across all tools
        valid_formats = [
            "checkstyle",
            "colored-line-number",
            "compact",
            "github-actions",
            "html",
            "json",
            "junit",
            "junit-xml",
            "line-number",
            "sarif",
            "stylish",
            "tab",
            "teamcity",
            "xml",
        ]

    if not value or value.strip() == "":
        return True  # Optional

    # Check if format is valid
    if value not in valid_formats and not allow_custom:
        self.add_error(
            f"Invalid {input_name}: {value}. Must be one of: {', '.join(valid_formats)}"
        )
        return False

    # Sanity check on characters: alphanumeric, underscores, hyphens, commas, spaces only
    # (hyphens must be allowed so built-in formats such as "github-actions" pass)
    if not re.match(r"^[a-zA-Z0-9_,\s-]+$", value):
        self.add_error(f"Invalid format for {input_name}: {value}")
        return False

    return True
def _validate_multi_value_enum(
    self,
    value: str,
    input_name: str,
    valid_values: list | None = None,
    case_sensitive: bool = True,
    min_values: int = 2,
    max_values: int = 10,
) -> bool:
    """Validate multi-value enum (2-10 value choice) (generic validator).

    Generic validator for enums with 2-10 predefined values.
    For exactly 2 values, use _validate_binary_enum instead.

    Args:
        value: The enum value
        input_name: The input name for error messages
        valid_values: List of valid values (2-10 items required)
        case_sensitive: Whether validation is case-sensitive (default: True)
        min_values: Minimum number of valid values (default: 2)
        max_values: Maximum number of valid values (default: 10)

    Returns:
        True if valid, False otherwise

    Examples:
        >>> # Framework selection (3 values)
        >>> validator._validate_multi_value_enum(
        ...     "laravel", "framework",
        ...     valid_values=["auto", "laravel", "generic"]
        ... )
        True

        >>> # Language selection (4 values)
        >>> validator._validate_multi_value_enum(
        ...     "python", "language",
        ...     valid_values=["php", "python", "go", "dotnet"]
        ... )
        True
    """
    if valid_values is None:
        raise ValueError("valid_values is required for multi_value_enum validator")

    # Validate valid_values count
    if len(valid_values) < min_values:
        msg = f"Multi-value enum needs >= {min_values} values, got {len(valid_values)}"
        raise ValueError(msg)

    if len(valid_values) > max_values:
        msg = f"Multi-value enum allows <= {max_values} values, got {len(valid_values)}"
        raise ValueError(msg)

    if not value or value.strip() == "":
        return True  # Optional

    # Case-insensitive comparison if needed
    if not case_sensitive:
        value_lower = value.lower()
        valid_values_lower = [v.lower() for v in valid_values]
        if value_lower not in valid_values_lower:
            self.add_error(
                f"Invalid {input_name}: {value}. Must be one of: {', '.join(valid_values)}"
            )
            return False
    else:
        if value not in valid_values:
            self.add_error(
                f"Invalid {input_name}: {value}. Must be one of: {', '.join(valid_values)}"
            )
            return False

    return True
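Both enum validators share the same case-handling rule, which can be reduced to a few lines. The function below is an illustrative re-statement of that rule, not part of the validator class.

def enum_ok(value, valid_values, case_sensitive=True):
    """Illustrative sketch: empty values pass, otherwise membership is checked."""
    if not value or not value.strip():
        return True
    if case_sensitive:
        return value in valid_values
    return value.lower() in [v.lower() for v in valid_values]

# enum_ok("laravel", ["auto", "laravel", "generic"]) -> True
# enum_ok("LARAVEL", ["auto", "laravel", "generic"]) -> False (case-sensitive by default)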
def _validate_coverage_driver(self, value: str, input_name: str) -> bool:
    """Validate coverage driver enum.

    Wrapper for multi_value_enum validator with PHP coverage driver options.

    Args:
        value: The coverage driver value
        input_name: The input name for error messages

    Returns:
        True if valid, False otherwise

    Examples:
        Valid: "xdebug", "pcov", "xdebug3", "none", ""
        Invalid: "xdebug2", "XDEBUG", "coverage"
    """
    return self._validate_multi_value_enum(
        value,
        input_name,
        valid_values=["none", "xdebug", "pcov", "xdebug3"],
        case_sensitive=True,
    )
def _validate_mode_enum(self, value: str, input_name: str) -> bool:
    """Validate mode enum for linting actions.

    Wrapper for binary_enum validator with check/fix modes.

    Args:
        value: The mode value
        input_name: The input name for error messages

    Returns:
        True if valid, False otherwise

    Examples:
        Valid: "check", "fix", ""
        Invalid: "invalid", "CHECK", "Fix"
    """
    return self._validate_binary_enum(
        value,
        input_name,
        valid_values=["check", "fix"],
        case_sensitive=True,
    )
def _validate_report_format(self, value: str, input_name: str) -> bool:
    """Validate report format for linting/analysis actions.

    Wrapper for format_enum validator with comprehensive format list.
    Supports multiple report formats used across different tools.

    Args:
        value: The report format value
        input_name: The input name for error messages

    Returns:
        True if valid, False otherwise

    Examples:
        Valid: "json", "sarif", "checkstyle", "github-actions", ""
        Invalid: "invalid", "txt", "pdf"
    """
    return self._validate_format_enum(value, input_name)

def _validate_linter_list(self, value: str, input_name: str) -> bool:
    """Validate comma-separated list of linter names.

    Wrapper for comma-separated list validator with linter-specific rules.
    Allows alphanumeric characters, hyphens, and underscores.

    Args:
        value: The linter list value
        input_name: The input name for error messages

    Returns:
        True if valid, False otherwise

    Examples:
        Valid: "gosec,govet,staticcheck", "errcheck"
        Invalid: "gosec,,govet", "invalid linter", "linter@123"
    """
    return self._validate_comma_separated_list(
        value,
        input_name,
        item_pattern=r"^[a-zA-Z0-9_-]+$",
        item_name="linter",
    )
def _validate_timeout_with_unit(self, value: str, input_name: str) -> bool:
    """Validate timeout duration with unit (Go duration format).

    Args:
        value: The timeout value
        input_name: The input name for error messages

    Returns:
        True if valid, False otherwise
    """
    if not value or value.strip() == "":
        return True  # Optional

    # Go duration format: number + unit (ns, us/µs, ms, s, m, h)
    pattern = r"^[0-9]+(ns|us|µs|ms|s|m|h)$"

    if not re.match(pattern, value):
        self.add_error(
            f"Invalid {input_name}: {value}. Expected format: number with unit "
            "(e.g., 5m, 30s, 1h, 500ms)"
        )
        return False

    return True
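The duration pattern above accepts a single integer followed by one unit. A quick standalone check of what the regex does and does not accept (sketch only, names are illustrative):

import re

GO_DURATION = re.compile(r"^[0-9]+(ns|us|µs|ms|s|m|h)$")

for sample in ("5m", "30s", "1h", "500ms", "5 m", "1.5h", "90"):
    print(sample, bool(GO_DURATION.match(sample)))
# 5m, 30s, 1h and 500ms match; "5 m", "1.5h" and a bare "90" are rejected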
def _validate_severity_enum(self, value: str, input_name: str) -> bool:
    """Validate severity levels enum (generalized).

    Generic validator for security tool severity levels.
    Supports common severity formats used by various security tools.

    Default levels: UNKNOWN, LOW, MEDIUM, HIGH, CRITICAL (Trivy/CVSSv3 style)
    Case-sensitive by default.

    Args:
        value: The severity value (comma-separated for multiple levels)
        input_name: The input name for error messages

    Returns:
        True if valid, False otherwise
    """
    if not value or value.strip() == "":
        return True  # Optional

    # Standard severity levels (Trivy/CVSSv3/OWASP compatible)
    # Can be extended for specific tools by creating tool-specific validators
    valid_severities = ["UNKNOWN", "LOW", "MEDIUM", "HIGH", "CRITICAL"]

    # Split by comma and validate each severity
    severities = [s.strip() for s in value.split(",")]

    for severity in severities:
        if not severity:  # Empty after strip
            self.add_error(f"Invalid {input_name}: {value}. Contains empty severity level")
            return False

        # Case-sensitive validation
        if severity not in valid_severities:
            self.add_error(
                f"Invalid {input_name}: {value}. Severity '{severity}' is not valid. "
                f"Must be one of: {', '.join(valid_severities)}"
            )
            return False

    return True
def _validate_scanner_list(self, value: str, input_name: str) -> bool:
    """Validate comma-separated list of scanner types (for Trivy).

    Wrapper for comma-separated list validator with Trivy scanner enum validation.
    Supports: vuln, config, secret, license

    Args:
        value: The scanner list value (comma-separated)
        input_name: The input name for error messages

    Returns:
        True if valid, False otherwise

    Examples:
        Valid: "vuln,config,secret", "vuln", "config,license"
        Invalid: "invalid", "vuln,invalid,config", "vuln,,config"
    """
    return self._validate_comma_separated_list(
        value,
        input_name,
        valid_items=["vuln", "config", "secret", "license"],
        item_name="scanner",
    )
def _validate_exit_code_list(self, value: str, input_name: str) -> bool:
    """Validate comma-separated list of exit codes.

    Validates Unix/Linux exit codes (0-255) in comma-separated format.
    Used for retry logic, success codes, and error handling.

    Args:
        value: The exit code list value (comma-separated integers)
        input_name: The input name for error messages

    Returns:
        True if valid, False otherwise

    Examples:
        Valid: "0", "0,1,2", "5,10,15", "0,130", ""
        Invalid: "256", "0,256", "-1", "0,abc", "0,,1"
    """
    if not value or value.strip() == "":
        return True  # Optional

    # Split by comma and validate each exit code
    codes = [code.strip() for code in value.split(",")]

    for code in codes:
        if not code:  # Empty after strip
            self.add_error(f"Invalid {input_name}: {value}. Contains empty exit code")
            return False

        # Check if code is numeric
        if not re.match(r"^[0-9]+$", code):
            self.add_error(
                f"Invalid exit code '{code}' in {input_name}. "
                f"Exit codes must be integers (0-255)"
            )
            return False

        # Validate range (0-255 for Unix/Linux exit codes)
        code_int = int(code)
        if code_int < 0 or code_int > 255:
            self.add_error(
                f"Invalid exit code '{code}' in {input_name}. Exit codes must be in range 0-255"
            )
            return False

    return True
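The exit-code rule reduces to "each comma-separated entry is an integer in 0-255". A standalone sketch of that check (the function name is illustrative, not part of the class):

def exit_codes_ok(value):
    """Illustrative sketch of the 0-255 range check."""
    if not value or not value.strip():
        return True
    for code in (c.strip() for c in value.split(",")):
        if not code.isdigit():  # rejects "", "-1", "abc"
            return False
        if int(code) > 255:
            return False
    return True

# exit_codes_ok("0,130") -> True; exit_codes_ok("0,256") -> False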
def _validate_key_value_list(
    self,
    value: str,
    input_name: str,
    key_pattern: str | None = None,
    check_injection: bool = True,
) -> bool:
    """Validate comma-separated list of key-value pairs (generic validator).

    Validates KEY=VALUE,KEY2=VALUE2 format commonly used for Docker build-args,
    environment variables, and other configuration parameters.

    Args:
        value: The key-value list value (comma-separated KEY=VALUE pairs)
        input_name: The input name for error messages
        key_pattern: Regex pattern for key validation
            (default: alphanumeric, underscores, hyphens)
        check_injection: Whether to check for shell injection patterns
            in values (default: True)

    Returns:
        True if valid, False otherwise

    Examples:
        Valid: "KEY=value", "KEY1=value1,KEY2=value2", "BUILD_ARG=hello", "KEY=", ""
        Invalid: "KEY", "=value", "KEY=value,", "KEY=val;whoami"
    """
    if not value or value.strip() == "":
        return True  # Optional

    if key_pattern is None:
        # Default: alphanumeric, underscores, hyphens (common for env vars and build args)
        key_pattern = r"^[a-zA-Z0-9_-]+$"

    # Security check for injection patterns in the entire value
    if check_injection and re.search(r"[;&|`$()]", value):
        self.add_error(
            f"Potential injection detected in {input_name}: {value}. "
            f"Avoid using shell metacharacters (;, &, |, `, $, parentheses)"
        )
        return False

    # Split by comma and validate each key-value pair
    pairs = [pair.strip() for pair in value.split(",")]

    for pair in pairs:
        if not pair:  # Empty after strip
            self.add_error(f"Invalid {input_name}: {value}. Contains empty key-value pair")
            return False

        # Check for KEY=VALUE format
        if "=" not in pair:
            self.add_error(
                f"Invalid key-value pair '{pair}' in {input_name}. Expected format: KEY=VALUE"
            )
            return False

        # Split by first = only (value may contain =)
        parts = pair.split("=", 1)
        key = parts[0].strip()

        # Validate key is not empty
        if not key:
            self.add_error(
                f"Invalid key-value pair '{pair}' in {input_name}. Key cannot be empty"
            )
            return False

        # Validate key pattern
        if not re.match(key_pattern, key):
            self.add_error(
                f"Invalid key '{key}' in {input_name}. "
                f"Keys must be alphanumeric with underscores/hyphens"
            )
            return False

        # Note: Value can be empty (KEY=) - this is valid for some use cases
        # Value validation is optional and handled by the check_injection flag above

    return True
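The KEY=VALUE parsing splits on the first '=' only, so values may themselves contain '='. A compact standalone sketch of that behavior (helper name is illustrative):

import re

def parse_pairs(value, key_pattern=r"^[a-zA-Z0-9_-]+$"):
    """Illustrative sketch: returns a dict, or None when a pair is malformed."""
    result = {}
    if not value or not value.strip():
        return result
    for pair in (p.strip() for p in value.split(",")):
        if not pair or "=" not in pair:
            return None
        key, val = pair.split("=", 1)
        key = key.strip()
        if not key or not re.match(key_pattern, key):
            return None
        result[key] = val  # empty values (KEY=) are accepted
    return result

# parse_pairs("BASE=alpine,VERSION=3.20") -> {'BASE': 'alpine', 'VERSION': '3.20'}
# parse_pairs("=value") -> None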
def _validate_path_list(
    self,
    value: str,
    input_name: str,
    allow_glob: bool = True,
    check_injection: bool = True,
) -> bool:
    """Validate comma-separated list of file paths or glob patterns (generic validator).

    Validates file paths and glob patterns commonly used for ignore-paths,
    restore-keys, file-pattern, and other path-based inputs.

    Args:
        value: The path list to validate
        input_name: Name of the input being validated
        allow_glob: Whether to allow glob patterns (*, **, ?, [])
        check_injection: Whether to check for shell injection patterns

    Examples:
        Valid: "*.js", "src/**/*.ts", "dist/,build/", ".github/workflows/*", ""
        Invalid: "../etc/passwd", "file;rm -rf /", "path|whoami"

    Returns:
        bool: True if valid, False otherwise
    """
    if not value or value.strip() == "":
        return True  # Optional

    # Security check for injection patterns
    if check_injection and re.search(r"[;&|`$()]", value):
        self.add_error(
            f"Potential injection detected in {input_name}: {value}. "
            f"Avoid using shell metacharacters (;, &, |, `, $, parentheses)"
        )
        return False

    # Split by comma and validate each path
    paths = [path.strip() for path in value.split(",")]

    for path in paths:
        if not path:  # Empty after strip
            self.add_error(f"Invalid {input_name}: {value}. Contains empty path")
            return False

        # Check for path traversal attempts
        if "../" in path or "/.." in path or path.startswith(".."):
            self.add_error(
                f"Path traversal detected in {input_name}: {path}. Avoid using '..' in paths"
            )
            return False

        # Validate glob patterns if allowed
        if allow_glob:
            # Glob patterns are valid: *, **, ?, [], {}
            # Check for valid glob characters
            glob_pattern = r"^[a-zA-Z0-9_\-./\*\?\[\]\{\},@~+]+$"
            if not re.match(glob_pattern, path):
                self.add_error(
                    f"Invalid path '{path}' in {input_name}. "
                    f"Paths may contain alphanumeric characters, hyphens, underscores, "
                    f"slashes, and glob patterns (*, **, ?, [], {{}})"
                )
                return False
        else:
            # No glob patterns allowed - only alphanumeric, hyphens, underscores, slashes
            path_pattern = r"^[a-zA-Z0-9_\-./,@~+]+$"
            if not re.match(path_pattern, path):
                self.add_error(
                    f"Invalid path '{path}' in {input_name}. "
                    f"Paths may only contain alphanumeric characters, hyphens, "
                    f"underscores, and slashes"
                )
                return False

    return True
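The per-path checks combine a traversal guard with a whitelist of glob characters. Sketched standalone (the helper name and constant are illustrative):

import re

GLOB_PATH = re.compile(r"^[a-zA-Z0-9_\-./\*\?\[\]\{\},@~+]+$")

def path_ok(path):
    """Illustrative sketch: reject '..' segments, then whitelist glob characters."""
    if "../" in path or "/.." in path or path.startswith(".."):
        return False
    return bool(GLOB_PATH.match(path))

# path_ok("src/**/*.ts") -> True; path_ok("../etc/passwd") -> False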
def _validate_network_mode(self, value: str, input_name: str) -> bool:
    """Validate Docker network mode enum.

    Wrapper for multi_value_enum validator with Docker network mode options.

    Examples:
        Valid: "host", "none", "default", ""
        Invalid: "bridge", "NONE", "custom"

    Returns:
        bool: True if valid, False otherwise
    """
    return self._validate_multi_value_enum(
        value,
        input_name,
        valid_values=["host", "none", "default"],
        case_sensitive=True,
    )
def _validate_language_enum(self, value: str, input_name: str) -> bool:
    """Validate language enum for version detection.

    Wrapper for multi_value_enum validator with supported language options.

    Examples:
        Valid: "php", "python", "go", "dotnet", ""
        Invalid: "node", "ruby", "PHP"

    Returns:
        bool: True if valid, False otherwise
    """
    return self._validate_multi_value_enum(
        value,
        input_name,
        valid_values=["php", "python", "go", "dotnet"],
        case_sensitive=True,
    )
def _validate_framework_mode(self, value: str, input_name: str) -> bool:
    """Validate PHP framework detection mode.

    Wrapper for multi_value_enum validator with framework mode options.

    Examples:
        Valid: "auto", "laravel", "generic", ""
        Invalid: "symfony", "Auto", "LARAVEL"

    Returns:
        bool: True if valid, False otherwise
    """
    return self._validate_multi_value_enum(
        value,
        input_name,
        valid_values=["auto", "laravel", "generic"],
        case_sensitive=True,
    )
def _validate_json_format(self, value: str, input_name: str) -> bool:
    """Validate JSON format string.

    Validates that input is valid JSON. Used for structured configuration
    data like platform-specific build arguments.

    Examples:
        Valid: '{"key":"value"}', '[]', '{"platforms":["linux/amd64"]}', ""
        Invalid: '{invalid}', 'not json', '{key:value}'

    Returns:
        bool: True if valid, False otherwise
    """
    import json

    if not value or value.strip() == "":
        return True  # Optional

    try:
        json.loads(value)
        return True
    except json.JSONDecodeError as e:
        self.add_error(f"Invalid JSON format in {input_name}: {value}. Error: {str(e)}")
        return False
    except Exception as e:
        self.add_error(f"Failed to validate JSON in {input_name}: {str(e)}")
        return False
def _validate_cache_config(self, value: str, input_name: str) -> bool:
    """Validate Docker BuildKit cache configuration.

    Validates Docker cache export/import configuration format.
    Common formats: type=registry,ref=..., type=local,dest=..., type=gha

    Examples:
        Valid: "type=registry,ref=user/repo:cache", "type=local,dest=/tmp/cache",
               "type=gha", "type=inline", ""
        Invalid: "invalid", "type=", "registry", "type=unknown"

    Returns:
        bool: True if valid, False otherwise
    """
    if not value or value.strip() == "":
        return True  # Optional

    # Check basic format: type=value[,key=value,...]
    if not re.match(r"^type=[a-z0-9-]+", value):
        self.add_error(
            f"Invalid cache config in {input_name}: {value}. "
            f"Must start with 'type=<cache-type>'"
        )
        return False

    # Valid cache types
    valid_types = ["registry", "local", "gha", "inline", "s3", "azblob", "oci"]

    # Extract type
    type_match = re.match(r"^type=([a-z0-9-]+)", value)
    if type_match:
        cache_type = type_match.group(1)
        if cache_type not in valid_types:
            self.add_error(
                f"Invalid cache type '{cache_type}' in {input_name}. "
                f"Valid types: {', '.join(valid_types)}"
            )
            return False

    # Validate key=value pairs format
    parts = value.split(",")
    for part in parts:
        if "=" not in part:
            self.add_error(
                f"Invalid cache config format in {input_name}: {value}. "
                f"Each part must be in 'key=value' format"
            )
            return False

    return True
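The cache-config rule amounts to "starts with type=<known type>, and every comma-separated part is key=value". A standalone sketch under those assumptions (names are illustrative):

import re

VALID_CACHE_TYPES = {"registry", "local", "gha", "inline", "s3", "azblob", "oci"}

def cache_config_ok(value):
    """Illustrative sketch of the BuildKit cache string check."""
    if not value or not value.strip():
        return True
    match = re.match(r"^type=([a-z0-9-]+)", value)
    if not match or match.group(1) not in VALID_CACHE_TYPES:
        return False
    return all("=" in part for part in value.split(","))

# cache_config_ok("type=registry,ref=user/repo:cache") -> True
# cache_config_ok("type=unknown") -> False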
@@ -12,7 +12,8 @@ class TokenValidator(BaseValidator):
"""Validator for various authentication tokens."""

# Token patterns for different token types (based on official GitHub documentation)
# See: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/
# about-authentication-to-github#githubs-token-formats
# Note: The lengths include the prefix
TOKEN_PATTERNS: ClassVar[dict[str, str]] = {
    # Personal access token (classic):