Mirror of https://github.com/ivuorinen/actions.git, synced 2026-01-26 11:34:00 +00:00
Compare commits
68 Commits
25.11.19...v2026.01.0
| SHA1 |
|:-----|
| 56ff9a511c |
| 81310f9bd7 |
| 95b8856c3f |
| e69ddbc1e2 |
| 28e81adc2b |
| fb25736f7e |
| 54886c3fd5 |
| fd030b418f |
| 96c305c557 |
| 5b4e9c8e11 |
| 2d0bff84ad |
| 98f260793c |
| 09ae7517d6 |
| 61ebe619a8 |
| a1d55ac125 |
| db86bb2f0d |
| 5e7b2fbc11 |
| 43126631c2 |
| f6ed49a6dd |
| 23ac5dbca3 |
| a8031d3922 |
| 30149dd950 |
| 3a3cdcdefe |
| 7d28006a83 |
| 4008db6517 |
| 7aa206a02a |
| 8481bbb5cd |
| 4c0068e6e7 |
| 5cecfe7cbe |
| 0288a1c8b8 |
| 44a11e9773 |
| a52399cf74 |
| 803165db8f |
| d69ed9e999 |
| 8eea6f781b |
| 4889586a94 |
| e02ca4d843 |
| 13ef0db9ba |
| c366e99ee3 |
| fbbb487332 |
| abe24f8570 |
| 9aa16a8164 |
| e58465e5d3 |
| 9fe05efeec |
| 449669120c |
| d9098ddead |
| f37d940c72 |
| eea547998d |
| 49159fc895 |
| 89fd0f3627 |
| 83cf08ff76 |
| 90ab7c645c |
| d05e898ea9 |
| 650ebb87b8 |
| 13316bd827 |
| 350fd30043 |
| 587853a9cd |
| 6cde6d088d |
| 5cc7373a22 |
| 8fb52522ab |
| bcf49f55b5 |
| 060afb8871 |
| a0ebb00853 |
| 227cf7f56f |
| e28c56c7cf |
| 504debcb8d |
| ed438428b2 |
| a88bb34369 |
@@ -17,12 +17,12 @@ runs:
   using: composite
   steps:
     - name: Install uv
-      uses: astral-sh/setup-uv@5a7eac68fb9809dea845d802897dc5c723910fa3 # v7.1.3
+      uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
       with:
         enable-cache: true

     - name: Set up Python
-      uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
       with:
         python-version-file: pyproject.toml

@@ -31,7 +31,7 @@ runs:
       run: uv sync --frozen

     - name: Setup Node.js
-      uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+      uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
       with:
         node-version: '24'
         cache: npm
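Every version bump in this comparison follows the same pinning convention: the `uses:` reference points at a full commit SHA, and the human-readable tag rides along in a trailing comment. A minimal sketch of that convention, reusing a pin that appears elsewhere in these diffs (the step name is illustrative only):

```yaml
steps:
  # The pinned SHA is authoritative; the trailing comment only documents the
  # matching release tag and is updated together with the SHA.
  - name: Checkout
    uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
```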
.github/codeql/codeql-config.yml (vendored, 16 changes)

@@ -15,3 +15,19 @@ paths-ignore:
 # Use security and quality query suite
 queries:
   - uses: security-and-quality
+
+# Suppress specific false positives
+# These findings have been manually reviewed and determined to be false positives
+# with appropriate security controls in place
+query-filters:
+  # docker-publish: Code injection in validated context
+  # False positive: User input is validated and sanitized before use
+  # - Only relative paths and trusted git URLs are allowed
+  # - Absolute paths and arbitrary URLs are rejected
+  # - Path traversal attempts are blocked
+  # - Custom contexts require explicit opt-in via use-custom-context: true
+  # - Wraps docker/build-push-action (trusted Docker-maintained action)
+  # - Action is designed for trusted workflows only (documented in action.yml)
+  - exclude:
+      id: js/actions/code-injection
+      kind: problem
.github/tag-changelog-config.js (vendored, 1 change)

@@ -1,6 +1,7 @@
 module.exports = {
   types: [
     { types: ['feat', 'feature', 'Feat'], label: '🎉 New Features' },
+    { types: ['security'], label: '🔐 Security' },
     { types: ['fix', 'bugfix', 'Fix'], label: '🐛 Bugfixes' },
     { types: ['improvements', 'enhancement'], label: '🔨 Improvements' },
     { types: ['perf'], label: '🏎️ Performance Improvements' },
.github/workflows/action-security.yml (vendored, 208 changes)

@@ -39,212 +39,30 @@ jobs:
        with:
          fetch-depth: 0

-      - name: Check Required Configurations
-        id: check-configs
-        shell: sh
-        run: |
-          # Initialize all flags as false
-          {
-            echo "run_gitleaks=false"
-            echo "run_trivy=true"
-          } >> "$GITHUB_OUTPUT"
-
-          # Check Gitleaks configuration and license
-          if [ -f ".gitleaks.toml" ] && [ -n "${{ secrets.GITLEAKS_LICENSE }}" ]; then
-            echo "Gitleaks config and license found"
-            printf '%s\n' "run_gitleaks=true" >> "$GITHUB_OUTPUT"
-          else
-            echo "::warning::Gitleaks config or license missing - skipping Gitleaks scan"
-          fi
-
-      - name: Run actionlint
-        uses: raven-actions/actionlint@3a24062651993d40fed1019b58ac6fbdfbf276cc # v2.0.1
+      - name: Run Security Scan
+        id: security-scan
+        uses: ./security-scan
+        with:
+          cache: true
+          fail-on-error: true
+          shellcheck: false

-      - name: Run Gitleaks
-        if: steps.check-configs.outputs.run_gitleaks == 'true'
-        uses: gitleaks/gitleaks-action@ff98106e4c7b2bc287b24eaf42907196329070c7 # v2.3.9
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          GITLEAKS_LICENSE: ${{ secrets.GITLEAKS_LICENSE }}
-        with:
-          config-path: .gitleaks.toml
-          report-format: sarif
-          report-path: gitleaks-report.sarif
-
-      - name: Run Trivy vulnerability scanner
-        uses: aquasecurity/trivy-action@a11da62073708815958ea6d84f5650c78a3ef85b # master
-        with:
-          scan-type: 'fs'
-          scanners: 'vuln,config,secret'
-          format: 'sarif'
-          output: 'trivy-results.sarif'
-          severity: 'CRITICAL,HIGH'
-          timeout: '10m'
-
-      - name: Verify SARIF files
-        id: verify-sarif
-        shell: sh
-        run: |
-          # Initialize outputs
-          {
-            echo "has_trivy=false"
-            echo "has_gitleaks=false"
-          } >> "$GITHUB_OUTPUT"
-
-          # Check Trivy results
-          if [ -f "trivy-results.sarif" ]; then
-            if jq -e . </dev/null 2>&1 <"trivy-results.sarif"; then
-              printf '%s\n' "has_trivy=true" >> "$GITHUB_OUTPUT"
-            else
-              echo "::warning::Trivy SARIF file exists but is not valid JSON"
-            fi
-          fi
-
-          # Check Gitleaks results if it ran
-          if [ "${{ steps.check-configs.outputs.run_gitleaks }}" = "true" ]; then
-            if [ -f "gitleaks-report.sarif" ]; then
-              if jq -e . </dev/null 2>&1 <"gitleaks-report.sarif"; then
-                printf '%s\n' "has_gitleaks=true" >> "$GITHUB_OUTPUT"
-              else
-                echo "::warning::Gitleaks SARIF file exists but is not valid JSON"
-              fi
-            fi
-          fi
-
-      - name: Upload Trivy results
-        if: steps.verify-sarif.outputs.has_trivy == 'true'
-        uses: github/codeql-action/upload-sarif@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
-        with:
-          sarif_file: 'trivy-results.sarif'
-          category: 'trivy'
-
-      - name: Upload Gitleaks results
-        if: steps.verify-sarif.outputs.has_gitleaks == 'true'
-        uses: github/codeql-action/upload-sarif@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
-        with:
-          sarif_file: 'gitleaks-report.sarif'
-          category: 'gitleaks'
-
-      - name: Archive security reports
-        if: always()
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
-        with:
-          name: security-reports-${{ github.run_id }}
-          path: |
-            ${{ steps.verify-sarif.outputs.has_trivy == 'true' && 'trivy-results.sarif' || '' }}
-            ${{ steps.verify-sarif.outputs.has_gitleaks == 'true' && 'gitleaks-report.sarif' || '' }}
-          retention-days: 30
-
-      - name: Analyze Results
-        if: always()
-        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
-        with:
-          script: |
-            const fs = require('fs');
-
-            try {
-              let totalIssues = 0;
-              let criticalIssues = 0;
-
-              const analyzeSarif = (file, tool) => {
-                if (!fs.existsSync(file)) {
-                  console.log(`No results file found for ${tool}`);
-                  return null;
-                }
-
-                try {
-                  const sarif = JSON.parse(fs.readFileSync(file, 'utf8'));
-                  return sarif.runs.reduce((acc, run) => {
-                    if (!run.results) return acc;
-
-                    const critical = run.results.filter(r =>
-                      r.level === 'error' ||
-                      r.level === 'critical' ||
-                      (r.ruleId || '').toLowerCase().includes('critical')
-                    ).length;
-
-                    return {
-                      total: acc.total + run.results.length,
-                      critical: acc.critical + critical
-                    };
-                  }, { total: 0, critical: 0 });
-                } catch (error) {
-                  console.log(`Error analyzing ${tool} results: ${error.message}`);
-                  return null;
-                }
-              };
-
-              // Only analyze results from tools that ran successfully
-              const results = {
-                trivy: ${{ steps.verify-sarif.outputs.has_trivy }} ?
-                  analyzeSarif('trivy-results.sarif', 'trivy') : null,
-                gitleaks: ${{ steps.verify-sarif.outputs.has_gitleaks }} ?
-                  analyzeSarif('gitleaks-report.sarif', 'gitleaks') : null
-              };
-
-              // Aggregate results
-              Object.entries(results).forEach(([tool, result]) => {
-                if (result) {
-                  totalIssues += result.total;
-                  criticalIssues += result.critical;
-                  console.log(`${tool}: ${result.total} total, ${result.critical} critical issues`);
-                }
-              });
-
-              // Create summary
-              const summary = `## Security Scan Summary
-
-              - Total Issues Found: ${totalIssues}
-              - Critical Issues: ${criticalIssues}
-
-              ### Tool Breakdown
-              ${Object.entries(results)
-                .filter(([_, r]) => r)
-                .map(([tool, r]) =>
-                  `- ${tool}: ${r.total} total, ${r.critical} critical`
-                ).join('\n')}
-
-              ### Tools Run Status
-              - Trivy: ${{ steps.verify-sarif.outputs.has_trivy }}
-              - Gitleaks: ${{ steps.check-configs.outputs.run_gitleaks }}
-              `;
-
-              // Set output
-              core.setOutput('total_issues', totalIssues);
-              core.setOutput('critical_issues', criticalIssues);
-
-              // Add job summary
-              await core.summary
-                .addRaw(summary)
-                .write();
-
-              // Fail if critical issues found
-              if (criticalIssues > 0) {
-                core.setFailed(`Found ${criticalIssues} critical security issues`);
-              }
-            } catch (error) {
-              core.setFailed(`Analysis failed: ${error.message}`);
-            }
+          gitleaks-license: ${{ secrets.GITLEAKS_LICENSE }}
+          token: ${{ secrets.GITHUB_TOKEN }}

       - name: Notify on Critical Issues
-        if: failure()
-        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+        if: failure() && steps.security-scan.outputs.critical_issues != '0'
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
        with:
          script: |-
            const { repo, owner } = context.repo;
-            const critical = core.getInput('critical_issues');
+            const critical = '${{ steps.security-scan.outputs.critical_issues }}';
+            const total = '${{ steps.security-scan.outputs.total_issues }}';

            const body = `🚨 Critical security issues found in GitHub Actions

-            ${critical} critical security issues were found during the security scan.
+            ${critical} critical security issues (out of ${total} total) were found during the security scan.

            ### Scan Results
-            - Trivy: ${{ steps.verify-sarif.outputs.has_trivy == 'true' && 'Completed' || 'Skipped/Failed' }}
-            - Gitleaks: ${{ steps.check-configs.outputs.run_gitleaks == 'true' && 'Completed' || 'Skipped' }}
-            - Actionlint: Completed
+            - Trivy: ${{ steps.security-scan.outputs.has_trivy_results == 'true' && 'Completed' || 'Skipped/Failed' }}
+            - Gitleaks: ${{ steps.security-scan.outputs.has_gitleaks_results == 'true' && 'Completed' || 'Skipped' }}

            [View detailed scan results](https://github.com/${owner}/${repo}/actions/runs/${context.runId})
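The workflow above now delegates all scanning to the repository's own security-scan action. For orientation, a workflow in another repository could call the same action through the documented external path. This is only a sketch: the input names are copied from the diff above, and any further inputs or defaults are defined by the action itself.

```yaml
# Hedged sketch: external usage of security-scan.
jobs:
  security:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
      - name: Run Security Scan
        uses: ivuorinen/actions/security-scan@main
        with:
          cache: true
          fail-on-error: true
          shellcheck: false
          token: ${{ secrets.GITHUB_TOKEN }}
```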
.github/workflows/build-testing-image.yml (vendored, 4 changes)

@@ -38,7 +38,7 @@ jobs:
        uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta

      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
@@ -49,7 +49,7 @@ jobs:

      - name: Extract metadata
        id: meta
-        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
+        uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
        with:
          images: ghcr.io/${{ github.repository_owner }}/actions
          tags: |
.github/workflows/codeql-new.yml (vendored, 1 change)

@@ -42,4 +42,5 @@ jobs:
        with:
          language: ${{ matrix.language }}
          queries: security-and-quality
          config-file: .github/codeql/codeql-config.yml
+          token: ${{ github.token }}
.github/workflows/codeql.yml (vendored, 51 changes)

@@ -1,51 +0,0 @@
----
-# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
-name: 'CodeQL'
-
-on:
-  push:
-    branches:
-      - 'main'
-  pull_request:
-    branches:
-      - 'main'
-  schedule:
-    - cron: '30 1 * * 0' # Run at 1:30 AM UTC every Sunday
-  merge_group:
-
-permissions:
-  actions: read
-  contents: read
-
-jobs:
-  analyze:
-    name: Analyze
-    runs-on: ubuntu-latest
-    permissions:
-      security-events: write
-
-    strategy:
-      fail-fast: false
-      matrix:
-        language:
-          - 'actions'
-          - 'javascript'
-          - 'python'
-
-    steps: # Add languages used in your actions
-      - name: Checkout repository
-        uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
-
-      - name: Initialize CodeQL
-        uses: github/codeql-action/init@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
-        with:
-          languages: ${{ matrix.language }}
-          queries: security-and-quality
-
-      - name: Autobuild
-        uses: github/codeql-action/autobuild@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
-
-      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
-        with:
-          category: '/language:${{matrix.language}}'
.github/workflows/issue-stats.yml (vendored, 2 changes)

@@ -30,7 +30,7 @@ jobs:
          echo "last_month=$first_day..$last_day" >> "$GITHUB_ENV"

      - name: Run issue-metrics tool
-        uses: github/issue-metrics@637a24e71b78bc10881e61972b19ea9ff736e14a # v3.25.2
+        uses: github/issue-metrics@67526e7bd8100b870f10b1c120780a8375777b43 # v3.25.5
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SEARCH_QUERY: 'repo:ivuorinen/actions is:issue created:${{ env.last_month }} -reason:"not planned"'
.github/workflows/new-release.yml (vendored, 39 changes)

@@ -22,25 +22,28 @@ jobs:
    steps:
      - uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta

-      - name: Create tag if necessary
-        uses: fregante/daily-version-action@fb1a60b7c4daf1410cd755e360ebec3901e58588 # v2.1.3
+      - name: Create daily release
+        id: daily-version
+        run: |
+          set -eu

-      - name: Create changelog text
-        if: steps.daily-version.outputs.created
-        id: changelog
-        uses: loopwerk/tag-changelog@941366edb8920e2071eae0449031830984b9f26e # v1.3.0
-        with:
-          token: ${{ secrets.GITHUB_TOKEN }}
-          config_file: .github/tag-changelog-config.js
+          VERSION="v$(date '+%Y.%m.%d')"
+          printf '%s\n' "version=$VERSION" >> "$GITHUB_OUTPUT"

-      - name: Create release
-        if: steps.daily-version.outputs.created
-        uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1.20.0
+          # Check if release already exists
+          if gh release view "$VERSION" >/dev/null 2>&1; then
+            printf '%s\n' "created=false" >> "$GITHUB_OUTPUT"
+            printf '%s\n' "Release $VERSION already exists - skipping"
+            exit 0
+          fi
+
+          # Create release with auto-generated changelog (also creates tag)
+          gh release create "$VERSION" \
+            --title "Release $VERSION" \
+            --generate-notes \
+            --target main
+
+          printf '%s\n' "created=true" >> "$GITHUB_OUTPUT"
+          printf '%s\n' "Created release $VERSION"
        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          tag: ${{ steps.daily-version.outputs.version }}
-          name: Release ${{ steps.daily-version.outputs.version }}
-          body: ${{ steps.changelog.outputs.changes }}
-          allowUpdates: true
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/pr-lint.yml (vendored, 114 changes)

@@ -24,17 +24,9 @@ on:
  merge_group:

env:
-  # Apply linter fixes configuration
-  APPLY_FIXES: all
-  APPLY_FIXES_EVENT: pull_request
-  APPLY_FIXES_MODE: commit
-
-  # Disable linters that do not work or conflict
+  # MegaLinter configuration - these override the action's defaults
  DISABLE_LINTERS: REPOSITORY_DEVSKIM

-  # Additional settings
-  VALIDATE_ALL_CODEBASE: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
-  GITHUB_TOKEN: ${{ secrets.FIXIMUS_TOKEN || secrets.GITHUB_TOKEN }}

  # Report configuration
  REPORT_OUTPUT_FOLDER: megalinter-reports
@@ -72,111 +64,27 @@ jobs:
          token: ${{ secrets.FIXIMUS_TOKEN || secrets.GITHUB_TOKEN }}
          fetch-depth: 0

-      - name: MegaLinter
-        id: ml
-        uses: oxsecurity/megalinter/flavors/cupcake@62c799d895af9bcbca5eacfebca29d527f125a57 # v9.1.0
-
-      - name: Check MegaLinter Results
-        id: check-results
-        if: always()
-        shell: sh
-        run: |
-          printf '%s\n' "status=success" >> "$GITHUB_OUTPUT"
-
-          if [ -f "${{ env.REPORT_OUTPUT_FOLDER }}/megalinter.log" ]; then
-            if grep -q "ERROR\|CRITICAL" "${{ env.REPORT_OUTPUT_FOLDER }}/megalinter.log"; then
-              echo "Linting errors found"
-              printf '%s\n' "status=failure" >> "$GITHUB_OUTPUT"
-            fi
-          else
-            echo "::warning::MegaLinter log file not found"
-          fi
-
-      - name: Upload Reports
-        if: always()
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+      - name: Run MegaLinter
+        id: pr-lint
+        uses: ./pr-lint
        with:
-          name: MegaLinter reports
-          path: |
-            megalinter-reports
-            mega-linter.log
-          retention-days: 30
+          token: ${{ secrets.FIXIMUS_TOKEN || secrets.GITHUB_TOKEN }}
+          username: fiximus
+          email: github-bot@ivuorinen.net

      - name: Upload SARIF Report
        if: always() && hashFiles('megalinter-reports/sarif/*.sarif')
-        uses: github/codeql-action/upload-sarif@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
+        uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
        with:
          sarif_file: megalinter-reports/sarif
          category: megalinter

-      - name: Prepare Git for Fixes
-        if: steps.ml.outputs.has_updated_sources == 1
-        shell: sh
-        run: |
-          sudo chown -Rc $UID .git/
-          git config --global user.name "fiximus"
-          git config --global user.email "github-bot@ivuorinen.net"
-
-      - name: Create Pull Request
-        if: |
-          steps.ml.outputs.has_updated_sources == 1 &&
-          (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) &&
-          env.APPLY_FIXES_MODE == 'pull_request' &&
-          (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) &&
-          !contains(github.event.head_commit.message, 'skip fix')
-        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
-        id: cpr
-        with:
-          token: ${{ secrets.FIXIMUS_TOKEN || secrets.GITHUB_TOKEN }}
-          commit-message: '[MegaLinter] Apply linters automatic fixes'
-          title: '[MegaLinter] Apply linters automatic fixes'
-          labels: bot
-          branch: megalinter/fixes-${{ github.ref_name }}
-          branch-suffix: timestamp
-          delete-branch: true
-          body: |
-            ## MegaLinter Fixes
-
-            MegaLinter has identified and fixed code style issues.
-
-            ### 🔍 Changes Made
-            - Automated code style fixes
-            - Formatting improvements
-            - Lint error corrections
-
-            ### 📝 Notes
-            - Please review the changes carefully
-            - Run tests before merging
-            - Verify formatting matches project standards
-
-            > Generated automatically by MegaLinter
-
-      - name: Commit Fixes
-        if: |
-          steps.ml.outputs.has_updated_sources == 1 &&
-          (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) &&
-          env.APPLY_FIXES_MODE == 'commit' &&
-          github.ref != 'refs/heads/main' &&
-          (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) &&
-          !contains(github.event.head_commit.message, 'skip fix')
-        uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
-        with:
-          token: ${{ secrets.FIXIMUS_TOKEN || secrets.GITHUB_TOKEN }}
-          branch: ${{ github.event.pull_request.head.ref || github.head_ref || github.ref }}
-          commit_message: |
-            style: apply MegaLinter fixes
-
-            [skip ci]
-          commit_user_name: fiximus
-          commit_user_email: github-bot@ivuorinen.net
-          push_options: --force
-
-      - name: Create Status Check
+      - name: Check Results
        if: always()
-        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
        with:
          script: |
-            const status = '${{ steps.check-results.outputs.status }}';
+            const status = '${{ steps.pr-lint.outputs.validation_status }}';
            const conclusion = status === 'success' ? 'success' : 'failure';

            const summary = `## MegaLinter Results
.github/workflows/release.yml (vendored, 2 changes)

@@ -17,6 +17,6 @@ jobs:
      contents: write
    steps:
      - uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
-      - uses: softprops/action-gh-release@5be0e66d93ac7ed76da52eca8bb058f665c3a5fe # v2.4.2
+      - uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2.5.0
        with:
          generate_release_notes: true
.github/workflows/security-suite.yml (vendored, 2 changes)

@@ -53,7 +53,7 @@ jobs:
          # Record the base commit for diffing without checking it out
          # Keep PR head checked out so scanners analyze the new changes
          BASE_REF="refs/remotes/origin-base/${{ github.event.pull_request.base.ref }}"
-          echo "BASE_REF=${BASE_REF}" >> $GITHUB_ENV
+          echo "BASE_REF=${BASE_REF}" >> "$GITHUB_ENV"
          echo "Base ref: ${BASE_REF}"
          git log -1 --oneline "${BASE_REF}"
.github/workflows/stale.yml (vendored, 2 changes)

@@ -25,7 +25,7 @@ jobs:

    steps:
      - name: 🚀 Run stale
-        uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
+        uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 30
.github/workflows/test-actions.yml (vendored, 14 changes)

@@ -73,14 +73,14 @@ jobs:
        if: always()

      - name: Upload SARIF file
-        uses: github/codeql-action/upload-sarif@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
+        uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
        if: always() && hashFiles('_tests/reports/test-results.sarif') != ''
        with:
          sarif_file: _tests/reports/test-results.sarif
          category: github-actions-tests

      - name: Upload unit test results
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        if: always()
        with:
          name: unit-test-results
@@ -125,15 +125,15 @@ jobs:
        shell: sh
        run: |
          if [ -d "_tests/reports/integration" ] && [ -n "$(find _tests/reports/integration -type f 2>/dev/null)" ]; then
-            printf '%s\n' "reports-found=true" >> $GITHUB_OUTPUT
+            printf '%s\n' "reports-found=true" >> "$GITHUB_OUTPUT"
            echo "Integration test reports found"
          else
-            printf '%s\n' "reports-found=false" >> $GITHUB_OUTPUT
+            printf '%s\n' "reports-found=false" >> "$GITHUB_OUTPUT"
            echo "No integration test reports found"
          fi

      - name: Upload integration test results
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        if: always() && steps.check-integration-reports.outputs.reports-found == 'true'
        with:
          name: integration-test-results
@@ -167,7 +167,7 @@ jobs:
        run: make test-coverage

      - name: Upload coverage report
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        with:
          name: coverage-report
          path: _tests/coverage/
@@ -263,7 +263,7 @@ jobs:

    steps:
      - name: Download test results
-        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+        uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
        with:
          pattern: '*-test-results'
          merge-multiple: true
.github/workflows/version-maintenance.yml (vendored, 8 changes)

@@ -49,7 +49,7 @@ jobs:

      - name: Create Pull Request
        if: steps.action-versioning.outputs.updated == 'true'
-        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
+        uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: 'chore: update action references to ${{ steps.version.outputs.major }}'
@@ -68,8 +68,6 @@ jobs:
            ```bash
            make check-version-refs
            ```
-
-            🤖 Auto-generated by version-maintenance workflow
          branch: automated/version-update-${{ steps.version.outputs.major }}
          delete-branch: true
          labels: |
@@ -78,7 +76,7 @@ jobs:

      - name: Check for Annual Bump
        if: steps.action-versioning.outputs.needs-annual-bump == 'true'
-        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
        with:
          script: |
            const currentYear = new Date().getFullYear();
@@ -120,8 +118,6 @@ jobs:
            \`\`\`bash
            make check-version-refs
            \`\`\`
-
-            🤖 Auto-generated by version-maintenance workflow
            `,
            labels: ['maintenance', 'high-priority']
            });
@@ -14,7 +14,7 @@ repos:
        types: [markdown, python, yaml]
        files: ^(docs/.*|README\.md|CONTRIBUTING\.md|CHANGELOG\.md|.*\.py|.*\.ya?ml)$
  - repo: https://github.com/astral-sh/uv-pre-commit
-    rev: 0.9.8
+    rev: 0.9.22
    hooks:
      - id: uv-lock
      - id: uv-sync
@@ -44,7 +44,7 @@ repos:
        args: [--autofix, --no-sort-keys]

  - repo: https://github.com/DavidAnson/markdownlint-cli2
-    rev: v0.19.0
+    rev: v0.20.0
    hooks:
      - id: markdownlint-cli2
        args: [--fix]
@@ -55,7 +55,7 @@ repos:
      - id: yamllint

  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.14.5
+    rev: v0.14.10
    hooks:
      # Run the linter with auto-fix
      - id: ruff-check
@@ -78,24 +78,19 @@ repos:
        exclude: '^_tests/.*\.sh$'

  - repo: https://github.com/rhysd/actionlint
-    rev: v1.7.8
+    rev: v1.7.10
    hooks:
      - id: actionlint
        args: ['-shellcheck=']

-  - repo: https://github.com/renovatebot/pre-commit-hooks
-    rev: 42.6.2
-    hooks:
-      - id: renovate-config-validator
-
  - repo: https://github.com/bridgecrewio/checkov.git
-    rev: '3.2.489'
+    rev: '3.2.497'
    hooks:
      - id: checkov
        args:
          - '--quiet'

  - repo: https://github.com/gitleaks/gitleaks
-    rev: v8.29.0
+    rev: v8.30.0
    hooks:
      - id: gitleaks
@@ -1 +1 @@
-3.14.0
+3.14.2
@@ -5,13 +5,14 @@
 - **Path**: /Users/ivuorinen/Code/ivuorinen/actions
 - **Branch**: main
 - **External Usage**: `ivuorinen/actions/<action-name>@main`
-- **Total Actions**: 43 self-contained actions
+- **Total Actions**: 44 self-contained actions
+- **Dogfooding**: Workflows use local actions (pr-lint, codeql-analysis, security-scan)

## Structure

```text
/
-├── <action-dirs>/ # 43 self-contained actions
+├── <action-dirs>/ # 44 self-contained actions
│   ├── action.yml # Action definition
│   ├── README.md # Auto-generated
│   └── CustomValidator.py # Optional validator
@@ -25,12 +26,14 @@
└── Makefile # Build automation
```

-## Action Categories (43 total)
+## Action Categories (44 total)

**Setup (7)**: node-setup, set-git-config, php-version-detect, python-version-detect, python-version-detect-v2, go-version-detect, dotnet-version-detect

**Linting (13)**: ansible-lint-fix, biome-check/fix, csharp-lint-check, eslint-check/fix, go-lint, pr-lint, pre-commit, prettier-check/fix, python-lint-fix, terraform-lint-fix

+**Security (1)**: security-scan (actionlint, Gitleaks, Trivy scanning)
+
**Build (3)**: csharp-build, go-build, docker-build

**Publishing (5)**: npm-publish, docker-publish, docker-publish-gh, docker-publish-hub, csharp-publish
@@ -85,3 +88,28 @@ make test # All tests (pytest + ShellSpec)
 - ✅ Convention-based validation
 - ✅ Test generation system
 - ✅ Full backward compatibility
+
+## Dogfooding Strategy
+
+The repository actively dogfoods its own actions in workflows:
+
+**Fully Dogfooded Workflows**:
+
+- **pr-lint.yml**: Uses `./pr-lint` (was 204 lines, now 112 lines - 45% reduction)
+- **action-security.yml**: Uses `./security-scan` (was 264 lines, now 82 lines - 69% reduction)
+- **codeql-new.yml**: Uses `./codeql-analysis`
+- **sync-labels.yml**: Uses `./sync-labels`
+- **version-maintenance.yml**: Uses `./action-versioning`
+
+**Intentionally External**:
+
+- **build-testing-image.yml**: Uses docker/\* actions directly (needs metadata extraction)
+- Core GitHub actions (checkout, upload-artifact, setup-\*) kept for standardization
+
+**Benefits**:
+
+- Early detection of action issues
+- Real-world testing of actions
+- Reduced workflow duplication
+- Improved maintainability
+- Better documentation through usage examples
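The Dogfooding Strategy text added above is reflected in the pr-lint.yml diff earlier in this comparison. A hedged sketch of what such a dogfooded step looks like, using only inputs that appear in that diff (the local `./pr-lint` form requires a prior checkout; other repositories would reference `ivuorinen/actions/pr-lint@main` instead):

```yaml
# Sketch only: dogfooded usage of the repository's own pr-lint action.
steps:
  - uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
    with:
      fetch-depth: 0
  - name: Run MegaLinter
    uses: ./pr-lint
    with:
      token: ${{ secrets.GITHUB_TOKEN }}
      username: fiximus
      email: github-bot@ivuorinen.net
```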
@@ -71,11 +71,11 @@

Flat structure. Each action self-contained with `action.yml`.

-**30 Actions**: Setup (node-setup, language-version-detect), Utilities (action-versioning, version-file-parser),
+**24 Actions**: Setup (language-version-detect), Utilities (action-versioning, version-file-parser),
Linting (ansible-lint-fix, biome-lint, csharp-lint-check, eslint-lint, go-lint, pr-lint, pre-commit, prettier-lint, python-lint-fix, terraform-lint-fix),
-Testing (php-tests, php-laravel-phpunit, php-composer), Build (csharp-build, go-build, docker-build),
+Testing (php-tests), Build (csharp-build, go-build, docker-build),
Publishing (npm-publish, docker-publish, csharp-publish),
-Repository (release-monthly, sync-labels, stale, compress-images, common-cache, codeql-analysis),
+Repository (release-monthly, sync-labels, stale, compress-images, codeql-analysis),
Validation (validate-inputs)

## Commands
README.md (132 changes)

@@ -22,109 +22,106 @@ Each action is fully self-contained and can be used independently in any GitHub

## 📚 Action Catalog

-This repository contains **30 reusable GitHub Actions** for CI/CD automation.
+This repository contains **26 reusable GitHub Actions** for CI/CD automation.

-### Quick Reference (30 Actions)
+### Quick Reference (26 Actions)

| Icon | Action | Category | Description | Key Features |
|:----:|:-------|:---------|:------------|:-------------|
| 🔀 | [`action-versioning`][action-versioning] | Utilities | Automatically update SHA-pinned action references to match l... | Token auth, Outputs |
-| 📦 | [`ansible-lint-fix`][ansible-lint-fix] | Linting | Lints and fixes Ansible playbooks, commits changes, and uplo... | Token auth, Outputs |
-| ✅ | [`biome-lint`][biome-lint] | Linting | Run Biome linter in check or fix mode | Token auth, Outputs |
+| 📦 | [`ansible-lint-fix`][ansible-lint-fix] | Linting | Lints and fixes Ansible playbooks, commits changes, and uplo... | Caching, Token auth, Outputs |
+| ✅ | [`biome-lint`][biome-lint] | Linting | Run Biome linter in check or fix mode | Caching, Auto-detection, Token auth, Outputs |
| 🛡️ | [`codeql-analysis`][codeql-analysis] | Repository | Run CodeQL security analysis for a single language with conf... | Auto-detection, Token auth, Outputs |
-| 💾 | [`common-cache`][common-cache] | Repository | Standardized caching strategy for all actions | Caching, Outputs |
| 🖼️ | [`compress-images`][compress-images] | Repository | Compress images on demand (workflow_dispatch), and at 11pm e... | Token auth, Outputs |
-| 📝 | [`csharp-build`][csharp-build] | Build | Builds and tests C# projects. | Auto-detection, Token auth, Outputs |
-| 📝 | [`csharp-lint-check`][csharp-lint-check] | Linting | Runs linters like StyleCop or dotnet-format for C# code styl... | Auto-detection, Token auth, Outputs |
-| 📦 | [`csharp-publish`][csharp-publish] | Publishing | Publishes a C# project to GitHub Packages. | Auto-detection, Token auth, Outputs |
+| 📝 | [`csharp-build`][csharp-build] | Build | Builds and tests C# projects. | Caching, Auto-detection, Token auth, Outputs |
+| 📝 | [`csharp-lint-check`][csharp-lint-check] | Linting | Runs linters like StyleCop or dotnet-format for C# code styl... | Caching, Auto-detection, Token auth, Outputs |
+| 📦 | [`csharp-publish`][csharp-publish] | Publishing | Publishes a C# project to GitHub Packages. | Caching, Auto-detection, Token auth, Outputs |
| 📦 | [`docker-build`][docker-build] | Build | Builds a Docker image for multiple architectures with enhanc... | Caching, Auto-detection, Token auth, Outputs |
| ☁️ | [`docker-publish`][docker-publish] | Publishing | Simple wrapper to publish Docker images to GitHub Packages a... | Token auth, Outputs |
-| ✅ | [`eslint-lint`][eslint-lint] | Linting | Run ESLint in check or fix mode with advanced configuration ... | Caching, Token auth, Outputs |
+| ✅ | [`eslint-lint`][eslint-lint] | Linting | Run ESLint in check or fix mode with advanced configuration ... | Caching, Auto-detection, Token auth, Outputs |
| 📦 | [`go-build`][go-build] | Build | Builds the Go project. | Caching, Auto-detection, Token auth, Outputs |
| 📝 | [`go-lint`][go-lint] | Linting | Run golangci-lint with advanced configuration, caching, and ... | Caching, Token auth, Outputs |
-| 📝 | [`language-version-detect`][language-version-detect] | Setup | Detects language version from project configuration files wi... | Auto-detection, Token auth, Outputs |
-| 🖥️ | [`node-setup`][node-setup] | Setup | Sets up Node.js environment with version detection and packa... | Auto-detection, Token auth, Outputs |
-| 📦 | [`npm-publish`][npm-publish] | Publishing | Publishes the package to the NPM registry with configurable ... | Token auth, Outputs |
-| 🖥️ | [`php-composer`][php-composer] | Testing | Runs Composer install on a repository with advanced caching ... | Auto-detection, Token auth, Outputs |
-| 💻 | [`php-laravel-phpunit`][php-laravel-phpunit] | Testing | Setup PHP, install dependencies, generate key, create databa... | Auto-detection, Token auth, Outputs |
-| ✅ | [`php-tests`][php-tests] | Testing | Run PHPUnit tests on the repository | Token auth, Outputs |
+| 📝 | [`language-version-detect`][language-version-detect] | Setup | DEPRECATED: This action is deprecated. Inline version detect... | Auto-detection, Token auth, Outputs |
+| 📦 | [`npm-publish`][npm-publish] | Publishing | Publishes the package to the NPM registry with configurable ... | Caching, Auto-detection, Token auth, Outputs |
+| ✅ | [`php-tests`][php-tests] | Testing | Run PHPUnit tests with optional Laravel setup and Composer d... | Caching, Auto-detection, Token auth, Outputs |
| ✅ | [`pr-lint`][pr-lint] | Linting | Runs MegaLinter against pull requests | Caching, Auto-detection, Token auth, Outputs |
| 📦 | [`pre-commit`][pre-commit] | Linting | Runs pre-commit on the repository and pushes the fixes back ... | Auto-detection, Token auth, Outputs |
-| ✅ | [`prettier-lint`][prettier-lint] | Linting | Run Prettier in check or fix mode with advanced configuratio... | Caching, Token auth, Outputs |
+| ✅ | [`prettier-lint`][prettier-lint] | Linting | Run Prettier in check or fix mode with advanced configuratio... | Caching, Auto-detection, Token auth, Outputs |
| 📝 | [`python-lint-fix`][python-lint-fix] | Linting | Lints and fixes Python files, commits changes, and uploads S... | Caching, Auto-detection, Token auth, Outputs |
| 📦 | [`release-monthly`][release-monthly] | Repository | Creates a release for the current month, incrementing patch ... | Token auth, Outputs |
| 🛡️ | [`security-scan`][security-scan] | Security | Comprehensive security scanning for GitHub Actions including... | Caching, Token auth, Outputs |
| 📦 | [`stale`][stale] | Repository | A GitHub Action to close stale issues and pull requests. | Token auth, Outputs |
| 🏷️ | [`sync-labels`][sync-labels] | Repository | Sync labels from a YAML file to a GitHub repository | Token auth, Outputs |
| 🖥️ | [`terraform-lint-fix`][terraform-lint-fix] | Linting | Lints and fixes Terraform files with advanced validation and... | Token auth, Outputs |
| 🛡️ | [`validate-inputs`][validate-inputs] | Validation | Centralized Python-based input validation for GitHub Actions... | Token auth, Outputs |
| 📦 | [`version-file-parser`][version-file-parser] | Utilities | Universal parser for common version detection files (.tool-v... | Auto-detection, Outputs |

### Actions by Category

-#### 🔧 Setup (2 actions)
+#### 🔧 Setup (1 action)

| Action | Description | Languages | Features |
|:-------|:------------|:----------|:---------|
-| 📝 [`language-version-detect`][language-version-detect] | Detects language version from project configuratio... | PHP, Python, Go, .NET, Node.js | Auto-detection, Token auth, Outputs |
-| 🖥️ [`node-setup`][node-setup] | Sets up Node.js environment with version detection... | Node.js, JavaScript, TypeScript | Auto-detection, Token auth, Outputs |
+| 📝 [`language-version-detect`][language-version-detect] | DEPRECATED: This action is deprecated. Inline vers... | PHP, Python, Go, .NET, Node.js | Auto-detection, Token auth, Outputs |

-#### 🛠️ Utilities (2 actions)
+#### 🛠️ Utilities (1 action)

| Action | Description | Languages | Features |
|:-------|:------------|:----------|:---------|
| 🔀 [`action-versioning`][action-versioning] | Automatically update SHA-pinned action references ... | GitHub Actions | Token auth, Outputs |
-| 📦 [`version-file-parser`][version-file-parser] | Universal parser for common version detection file... | Multiple Languages | Auto-detection, Outputs |

#### 📝 Linting (10 actions)

| Action | Description | Languages | Features |
|:-------|:------------|:----------|:---------|
-| 📦 [`ansible-lint-fix`][ansible-lint-fix] | Lints and fixes Ansible playbooks, commits changes... | Ansible, YAML | Token auth, Outputs |
-| ✅ [`biome-lint`][biome-lint] | Run Biome linter in check or fix mode | JavaScript, TypeScript, JSON | Token auth, Outputs |
-| 📝 [`csharp-lint-check`][csharp-lint-check] | Runs linters like StyleCop or dotnet-format for C#... | C#, .NET | Auto-detection, Token auth, Outputs |
-| ✅ [`eslint-lint`][eslint-lint] | Run ESLint in check or fix mode with advanced conf... | JavaScript, TypeScript | Caching, Token auth, Outputs |
+| 📦 [`ansible-lint-fix`][ansible-lint-fix] | Lints and fixes Ansible playbooks, commits changes... | Ansible, YAML | Caching, Token auth, Outputs |
+| ✅ [`biome-lint`][biome-lint] | Run Biome linter in check or fix mode | JavaScript, TypeScript, JSON | Caching, Auto-detection, Token auth, Outputs |
+| 📝 [`csharp-lint-check`][csharp-lint-check] | Runs linters like StyleCop or dotnet-format for C#... | C#, .NET | Caching, Auto-detection, Token auth, Outputs |
+| ✅ [`eslint-lint`][eslint-lint] | Run ESLint in check or fix mode with advanced conf... | JavaScript, TypeScript | Caching, Auto-detection, Token auth, Outputs |
| 📝 [`go-lint`][go-lint] | Run golangci-lint with advanced configuration, cac... | Go | Caching, Token auth, Outputs |
| ✅ [`pr-lint`][pr-lint] | Runs MegaLinter against pull requests | Conventional Commits | Caching, Auto-detection, Token auth, Outputs |
| 📦 [`pre-commit`][pre-commit] | Runs pre-commit on the repository and pushes the f... | Python, Multiple Languages | Auto-detection, Token auth, Outputs |
-| ✅ [`prettier-lint`][prettier-lint] | Run Prettier in check or fix mode with advanced co... | JavaScript, TypeScript, Markdown, YAML, JSON | Caching, Token auth, Outputs |
+| ✅ [`prettier-lint`][prettier-lint] | Run Prettier in check or fix mode with advanced co... | JavaScript, TypeScript, Markdown, YAML, JSON | Caching, Auto-detection, Token auth, Outputs |
| 📝 [`python-lint-fix`][python-lint-fix] | Lints and fixes Python files, commits changes, and... | Python | Caching, Auto-detection, Token auth, Outputs |
| 🖥️ [`terraform-lint-fix`][terraform-lint-fix] | Lints and fixes Terraform files with advanced vali... | Terraform, HCL | Token auth, Outputs |

-#### 🧪 Testing (3 actions)
+#### 🧪 Testing (1 action)

| Action | Description | Languages | Features |
|:-------|:------------|:----------|:---------|
-| 🖥️ [`php-composer`][php-composer] | Runs Composer install on a repository with advance... | PHP | Auto-detection, Token auth, Outputs |
-| 💻 [`php-laravel-phpunit`][php-laravel-phpunit] | Setup PHP, install dependencies, generate key, cre... | PHP, Laravel | Auto-detection, Token auth, Outputs |
-| ✅ [`php-tests`][php-tests] | Run PHPUnit tests on the repository | PHP | Token auth, Outputs |
+| ✅ [`php-tests`][php-tests] | Run PHPUnit tests with optional Laravel setup and ... | PHP, Laravel | Caching, Auto-detection, Token auth, Outputs |

#### 🏗️ Build (3 actions)

| Action | Description | Languages | Features |
|:-------|:------------|:----------|:---------|
-| 📝 [`csharp-build`][csharp-build] | Builds and tests C# projects. | C#, .NET | Auto-detection, Token auth, Outputs |
+| 📝 [`csharp-build`][csharp-build] | Builds and tests C# projects. | C#, .NET | Caching, Auto-detection, Token auth, Outputs |
| 📦 [`docker-build`][docker-build] | Builds a Docker image for multiple architectures w... | Docker | Caching, Auto-detection, Token auth, Outputs |
| 📦 [`go-build`][go-build] | Builds the Go project. | Go | Caching, Auto-detection, Token auth, Outputs |

#### 🚀 Publishing (3 actions)

| Action | Description | Languages | Features |
|:-------|:------------|:----------|:---------|
-| 📦 [`csharp-publish`][csharp-publish] | Publishes a C# project to GitHub Packages. | C#, .NET | Auto-detection, Token auth, Outputs |
+| 📦 [`csharp-publish`][csharp-publish] | Publishes a C# project to GitHub Packages. | C#, .NET | Caching, Auto-detection, Token auth, Outputs |
| ☁️ [`docker-publish`][docker-publish] | Simple wrapper to publish Docker images to GitHub ... | Docker | Token auth, Outputs |
-| 📦 [`npm-publish`][npm-publish] | Publishes the package to the NPM registry with con... | Node.js, npm | Token auth, Outputs |
+| 📦 [`npm-publish`][npm-publish] | Publishes the package to the NPM registry with con... | Node.js, npm | Caching, Auto-detection, Token auth, Outputs |

-#### 📦 Repository (6 actions)
+#### 📦 Repository (5 actions)

| Action | Description | Languages | Features |
|:-------|:------------|:----------|:---------|
| 🛡️ [`codeql-analysis`][codeql-analysis] | Run CodeQL security analysis for a single language... | JavaScript, TypeScript, Python, Java, C#, C++, Go, Ruby | Auto-detection, Token auth, Outputs |
-| 💾 [`common-cache`][common-cache] | Standardized caching strategy for all actions | Caching | Caching, Outputs |
| 🖼️ [`compress-images`][compress-images] | Compress images on demand (workflow_dispatch), and... | Images, PNG, JPEG | Token auth, Outputs |
| 📦 [`release-monthly`][release-monthly] | Creates a release for the current month, increment... | GitHub Actions | Token auth, Outputs |
| 📦 [`stale`][stale] | A GitHub Action to close stale issues and pull req... | GitHub Actions | Token auth, Outputs |
| 🏷️ [`sync-labels`][sync-labels] | Sync labels from a YAML file to a GitHub repositor... | YAML, GitHub | Token auth, Outputs |

#### 🛡️ Security (1 action)

| Action | Description | Languages | Features |
|:-------|:------------|:----------|:---------|
| 🛡️ [`security-scan`][security-scan] | Comprehensive security scanning for GitHub Actions... | - | Caching, Token auth, Outputs |

#### ✅ Validation (1 action)

| Action | Description | Languages | Features |
@@ -136,35 +133,31 @@ This repository contains **30 reusable GitHub Actions** for CI/CD automation.
| Action | Caching | Auto-detection | Token auth | Outputs |
|:-------|:-------:|:--------------:|:----------:|:-------:|
| [`action-versioning`][action-versioning] | - | - | ✅ | ✅ |
-| [`ansible-lint-fix`][ansible-lint-fix] | - | - | ✅ | ✅ |
-| [`biome-lint`][biome-lint] | - | - | ✅ | ✅ |
+| [`ansible-lint-fix`][ansible-lint-fix] | ✅ | - | ✅ | ✅ |
+| [`biome-lint`][biome-lint] | ✅ | ✅ | ✅ | ✅ |
| [`codeql-analysis`][codeql-analysis] | - | ✅ | ✅ | ✅ |
-| [`common-cache`][common-cache] | ✅ | - | - | ✅ |
| [`compress-images`][compress-images] | - | - | ✅ | ✅ |
-| [`csharp-build`][csharp-build] | - | ✅ | ✅ | ✅ |
-| [`csharp-lint-check`][csharp-lint-check] | - | ✅ | ✅ | ✅ |
-| [`csharp-publish`][csharp-publish] | - | ✅ | ✅ | ✅ |
+| [`csharp-build`][csharp-build] | ✅ | ✅ | ✅ | ✅ |
+| [`csharp-lint-check`][csharp-lint-check] | ✅ | ✅ | ✅ | ✅ |
+| [`csharp-publish`][csharp-publish] | ✅ | ✅ | ✅ | ✅ |
| [`docker-build`][docker-build] | ✅ | ✅ | ✅ | ✅ |
| [`docker-publish`][docker-publish] | - | - | ✅ | ✅ |
-| [`eslint-lint`][eslint-lint] | ✅ | - | ✅ | ✅ |
+| [`eslint-lint`][eslint-lint] | ✅ | ✅ | ✅ | ✅ |
| [`go-build`][go-build] | ✅ | ✅ | ✅ | ✅ |
| [`go-lint`][go-lint] | ✅ | - | ✅ | ✅ |
| [`language-version-detect`][language-version-detect] | - | ✅ | ✅ | ✅ |
-| [`node-setup`][node-setup] | - | ✅ | ✅ | ✅ |
-| [`npm-publish`][npm-publish] | - | - | ✅ | ✅ |
-| [`php-composer`][php-composer] | - | ✅ | ✅ | ✅ |
-| [`php-laravel-phpunit`][php-laravel-phpunit] | - | ✅ | ✅ | ✅ |
-| [`php-tests`][php-tests] | - | - | ✅ | ✅ |
+| [`npm-publish`][npm-publish] | ✅ | ✅ | ✅ | ✅ |
+| [`php-tests`][php-tests] | ✅ | ✅ | ✅ | ✅ |
| [`pr-lint`][pr-lint] | ✅ | ✅ | ✅ | ✅ |
| [`pre-commit`][pre-commit] | - | ✅ | ✅ | ✅ |
-| [`prettier-lint`][prettier-lint] | ✅ | - | ✅ | ✅ |
+| [`prettier-lint`][prettier-lint] | ✅ | ✅ | ✅ | ✅ |
| [`python-lint-fix`][python-lint-fix] | ✅ | ✅ | ✅ | ✅ |
| [`release-monthly`][release-monthly] | - | - | ✅ | ✅ |
| [`security-scan`][security-scan] | ✅ | - | ✅ | ✅ |
| [`stale`][stale] | - | - | ✅ | ✅ |
| [`sync-labels`][sync-labels] | - | - | ✅ | ✅ |
| [`terraform-lint-fix`][terraform-lint-fix] | - | - | ✅ | ✅ |
| [`validate-inputs`][validate-inputs] | - | - | ✅ | ✅ |
| [`version-file-parser`][version-file-parser] | - | ✅ | - | ✅ |

### Language Support

@@ -174,7 +167,6 @@ This repository contains **30 reusable GitHub Actions** for CI/CD automation.
| Ansible | [`ansible-lint-fix`][ansible-lint-fix] |
| C# | [`codeql-analysis`][codeql-analysis], [`csharp-build`][csharp-build], [`csharp-lint-check`][csharp-lint-check], [`csharp-publish`][csharp-publish] |
| C++ | [`codeql-analysis`][codeql-analysis] |
-| Caching | [`common-cache`][common-cache] |
| Conventional Commits | [`pr-lint`][pr-lint] |
| Docker | [`docker-build`][docker-build], [`docker-publish`][docker-publish] |
| GitHub | [`sync-labels`][sync-labels] |
@@ -185,17 +177,17 @@ This repository contains **30 reusable GitHub Actions** for CI/CD automation.
| JPEG | [`compress-images`][compress-images] |
| JSON | [`biome-lint`][biome-lint], [`prettier-lint`][prettier-lint] |
| Java | [`codeql-analysis`][codeql-analysis] |
-| JavaScript | [`biome-lint`][biome-lint], [`codeql-analysis`][codeql-analysis], [`eslint-lint`][eslint-lint], [`node-setup`][node-setup], [`prettier-lint`][prettier-lint] |
-| Laravel | [`php-laravel-phpunit`][php-laravel-phpunit] |
+| JavaScript | [`biome-lint`][biome-lint], [`codeql-analysis`][codeql-analysis], [`eslint-lint`][eslint-lint], [`prettier-lint`][prettier-lint] |
+| Laravel | [`php-tests`][php-tests] |
| Markdown | [`prettier-lint`][prettier-lint] |
-| Multiple Languages | [`pre-commit`][pre-commit], [`version-file-parser`][version-file-parser] |
-| Node.js | [`language-version-detect`][language-version-detect], [`node-setup`][node-setup], [`npm-publish`][npm-publish] |
-| PHP | [`language-version-detect`][language-version-detect], [`php-composer`][php-composer], [`php-laravel-phpunit`][php-laravel-phpunit], [`php-tests`][php-tests] |
+| Multiple Languages | [`pre-commit`][pre-commit] |
+| Node.js | [`language-version-detect`][language-version-detect], [`npm-publish`][npm-publish] |
+| PHP | [`language-version-detect`][language-version-detect], [`php-tests`][php-tests] |
| PNG | [`compress-images`][compress-images] |
| Python | [`codeql-analysis`][codeql-analysis], [`language-version-detect`][language-version-detect], [`pre-commit`][pre-commit], [`python-lint-fix`][python-lint-fix] |
| Ruby | [`codeql-analysis`][codeql-analysis] |
| Terraform | [`terraform-lint-fix`][terraform-lint-fix] |
-| TypeScript | [`biome-lint`][biome-lint], [`codeql-analysis`][codeql-analysis], [`eslint-lint`][eslint-lint], [`node-setup`][node-setup], [`prettier-lint`][prettier-lint] |
+| TypeScript | [`biome-lint`][biome-lint], [`codeql-analysis`][codeql-analysis], [`eslint-lint`][eslint-lint], [`prettier-lint`][prettier-lint] |
| YAML | [`ansible-lint-fix`][ansible-lint-fix], [`prettier-lint`][prettier-lint], [`sync-labels`][sync-labels], [`validate-inputs`][validate-inputs] |
| npm | [`npm-publish`][npm-publish] |
@@ -223,7 +215,6 @@ All actions can be used independently in your workflows:
[ansible-lint-fix]: ansible-lint-fix/README.md
[biome-lint]: biome-lint/README.md
[codeql-analysis]: codeql-analysis/README.md
-[common-cache]: common-cache/README.md
[compress-images]: compress-images/README.md
[csharp-build]: csharp-build/README.md
[csharp-lint-check]: csharp-lint-check/README.md
@@ -234,21 +225,18 @@ All actions can be used independently in your workflows:
[go-build]: go-build/README.md
[go-lint]: go-lint/README.md
[language-version-detect]: language-version-detect/README.md
-[node-setup]: node-setup/README.md
[npm-publish]: npm-publish/README.md
-[php-composer]: php-composer/README.md
-[php-laravel-phpunit]: php-laravel-phpunit/README.md
[php-tests]: php-tests/README.md
[pr-lint]: pr-lint/README.md
[pre-commit]: pre-commit/README.md
[prettier-lint]: prettier-lint/README.md
[python-lint-fix]: python-lint-fix/README.md
[release-monthly]: release-monthly/README.md
[security-scan]: security-scan/README.md
[stale]: stale/README.md
[sync-labels]: sync-labels/README.md
[terraform-lint-fix]: terraform-lint-fix/README.md
[validate-inputs]: validate-inputs/README.md
[version-file-parser]: version-file-parser/README.md

---
@@ -231,7 +231,7 @@ When security issues are fixed:
- Replaced custom Bun installation with official action
- Replaced custom Trivy installation with official action
- Added secret masking to 7 critical actions (including docker-publish)
- Optimized file hashing in common-cache
- Migrated from custom common-cache to official actions/cache
- Status: ✅ Complete

### Phase 3: Documentation & Policy (2024)

@@ -6,8 +6,8 @@ set -euo pipefail

# Source setup utilities
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# shellcheck source=_tests/framework/setup.sh
# shellcheck disable=SC1091
source "${SCRIPT_DIR}/setup.sh"

# Action testing utilities
@@ -57,6 +57,13 @@ get_action_name() {
  uv run "$script_dir/../shared/validation_core.py" --name "$action_file"
}

get_action_runs_using() {
  local action_file="$1"
  local script_dir
  script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
  uv run "$script_dir/../shared/validation_core.py" --runs-using "$action_file"
}
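The new shell helper defers to the Python parser via `--runs-using`. As a rough sketch (mirroring the `ActionFileParser.get_action_runs_using` method added later in this changeset, and assuming PyYAML is available), the lookup amounts to:

    import yaml

    def get_runs_using(action_file: str) -> str:
        """Return the runs.using value from an action.yml, or "unknown" on any parse error."""
        try:
            with open(action_file, encoding="utf-8") as handle:
                data = yaml.safe_load(handle) or {}
            return data.get("runs", {}).get("using", "unknown")
        except (OSError, yaml.YAMLError, AttributeError):
            return "unknown"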

# Check if an input is required in an action.yml file
is_input_required() {
  local action_file="$1"
@@ -69,7 +76,7 @@ is_input_required() {
  required_status=$(uv run "$script_dir/../shared/validation_core.py" --property "$action_file" "$input_name" "required")

  # Return 0 (success) if input is required, 1 (failure) if optional
  [[ $required_status == "required" ]]
  [[ "$required_status" == "required" ]]
}

# Test input validation using Python validation module
@@ -363,5 +370,5 @@ run_action_tests() {
}

# Export all functions
export -f validate_action_yml get_action_inputs get_action_outputs get_action_name is_input_required
export -f validate_action_yml get_action_inputs get_action_outputs get_action_name get_action_runs_using is_input_required
export -f test_input_validation test_action_outputs test_external_usage measure_action_time run_action_tests

@@ -21,6 +21,9 @@ import sys

import yaml  # pylint: disable=import-error

# Default value for unknown action names (matches shared.validation_core.DEFAULT_UNKNOWN)
_DEFAULT_UNKNOWN = "Unknown"


class ActionValidator:
    """Handles validation of GitHub Action inputs using Python regex engine."""
@@ -86,7 +89,7 @@
            return True, ""

        # Check for environment variable reference (e.g., $GITHUB_TOKEN)
        if re.match(r"^\$[A-Za-z_][A-Za-z0-9_]*$", token):
        if re.match(r"^\$[A-Za-z_]\w*$", token, re.ASCII):
            return True, ""

        # Check against all known token patterns
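The `re.ASCII` flag keeps `\w` equivalent to `[A-Za-z0-9_]`, so the rewritten pattern accepts exactly the same tokens as the old one. A quick illustrative check:

    import re

    OLD = re.compile(r"^\$[A-Za-z_][A-Za-z0-9_]*$")
    NEW = re.compile(r"^\$[A-Za-z_]\w*$", re.ASCII)

    for token in ("$GITHUB_TOKEN", "$npm_token", "$1BAD", "$töken"):
        # "$töken" fails both; without re.ASCII, \w would have matched the non-ASCII letter.
        assert bool(OLD.match(token)) == bool(NEW.match(token))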
@@ -330,16 +333,16 @@ def get_action_name(action_file: str) -> str:
        action_file: Path to the action.yml file

    Returns:
        Action name or "Unknown" if not found
        Action name or _DEFAULT_UNKNOWN if not found
    """
    try:
        with Path(action_file).open(encoding="utf-8") as f:
            data = yaml.safe_load(f)

        return data.get("name", "Unknown")
        return data.get("name", _DEFAULT_UNKNOWN)

    except Exception:
        return "Unknown"
        return _DEFAULT_UNKNOWN


def _show_usage():

@@ -1,471 +0,0 @@
|
||||
---
|
||||
name: Integration Test - Common Cache
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
paths:
|
||||
- 'common-cache/**'
|
||||
- '_tests/integration/workflows/common-cache-test.yml'
|
||||
|
||||
jobs:
|
||||
test-common-cache-key-generation:
|
||||
name: Test Cache Key Generation
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Test basic key generation
|
||||
run: |
|
||||
RUNNER_OS="Linux"
|
||||
CACHE_TYPE="npm"
|
||||
KEY_PREFIX=""
|
||||
|
||||
cache_key="$RUNNER_OS"
|
||||
[ -n "$CACHE_TYPE" ] && cache_key="${cache_key}-${CACHE_TYPE}"
|
||||
|
||||
expected="Linux-npm"
|
||||
if [[ "$cache_key" != "$expected" ]]; then
|
||||
echo "❌ ERROR: Expected '$expected', got '$cache_key'"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Basic cache key generation works"
|
||||
|
||||
- name: Test key with prefix
|
||||
run: |
|
||||
RUNNER_OS="Linux"
|
||||
CACHE_TYPE="npm"
|
||||
KEY_PREFIX="node-20"
|
||||
|
||||
cache_key="$RUNNER_OS"
|
||||
[ -n "$KEY_PREFIX" ] && cache_key="${cache_key}-${KEY_PREFIX}"
|
||||
[ -n "$CACHE_TYPE" ] && cache_key="${cache_key}-${CACHE_TYPE}"
|
||||
|
||||
expected="Linux-node-20-npm"
|
||||
if [[ "$cache_key" != "$expected" ]]; then
|
||||
echo "❌ ERROR: Expected '$expected', got '$cache_key'"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Cache key with prefix works"
|
||||
|
||||
- name: Test OS-specific keys
|
||||
run: |
|
||||
for os in "Linux" "macOS" "Windows"; do
|
||||
CACHE_TYPE="test"
|
||||
cache_key="$os-$CACHE_TYPE"
|
||||
if [[ ! "$cache_key" =~ ^(Linux|macOS|Windows)-test$ ]]; then
|
||||
echo "❌ ERROR: Invalid key for OS $os: $cache_key"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ OS-specific key for $os: $cache_key"
|
||||
done
|
||||
|
||||
test-common-cache-file-hashing:
|
||||
name: Test File Hashing
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Create test files
|
||||
run: |
|
||||
mkdir -p test-cache
|
||||
cd test-cache
|
||||
echo "content1" > file1.txt
|
||||
echo "content2" > file2.txt
|
||||
echo "content3" > file3.txt
|
||||
|
||||
- name: Test single file hash
|
||||
run: |
|
||||
cd test-cache
|
||||
file_hash=$(cat file1.txt | sha256sum | cut -d' ' -f1)
|
||||
|
||||
if [[ ! "$file_hash" =~ ^[a-f0-9]{64}$ ]]; then
|
||||
echo "❌ ERROR: Invalid hash format: $file_hash"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Single file hash: $file_hash"
|
||||
|
||||
- name: Test multiple file hash
|
||||
run: |
|
||||
cd test-cache
|
||||
multi_hash=$(cat file1.txt file2.txt file3.txt | sha256sum | cut -d' ' -f1)
|
||||
|
||||
if [[ ! "$multi_hash" =~ ^[a-f0-9]{64}$ ]]; then
|
||||
echo "❌ ERROR: Invalid hash format: $multi_hash"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Multiple file hash: $multi_hash"
|
||||
|
||||
- name: Test hash changes with content
|
||||
run: |
|
||||
cd test-cache
|
||||
|
||||
# Get initial hash
|
||||
hash1=$(cat file1.txt | sha256sum | cut -d' ' -f1)
|
||||
|
||||
# Modify file
|
||||
echo "modified" > file1.txt
|
||||
|
||||
# Get new hash
|
||||
hash2=$(cat file1.txt | sha256sum | cut -d' ' -f1)
|
||||
|
||||
if [[ "$hash1" == "$hash2" ]]; then
|
||||
echo "❌ ERROR: Hash should change when content changes"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Hash changes with content modification"
|
||||
|
||||
- name: Test comma-separated file list processing
|
||||
run: |
|
||||
cd test-cache
|
||||
|
||||
KEY_FILES="file1.txt,file2.txt,file3.txt"
|
||||
IFS=',' read -ra FILES <<< "$KEY_FILES"
|
||||
|
||||
existing_files=()
|
||||
for file in "${FILES[@]}"; do
|
||||
file=$(echo "$file" | xargs)
|
||||
if [ -f "$file" ]; then
|
||||
existing_files+=("$file")
|
||||
fi
|
||||
done
|
||||
|
||||
if [ ${#existing_files[@]} -ne 3 ]; then
|
||||
echo "❌ ERROR: Should find 3 files, found ${#existing_files[@]}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✓ Comma-separated file list processing works"
|
||||
|
||||
- name: Test missing file handling
|
||||
run: |
|
||||
cd test-cache
|
||||
|
||||
KEY_FILES="file1.txt,missing.txt,file2.txt"
|
||||
IFS=',' read -ra FILES <<< "$KEY_FILES"
|
||||
|
||||
existing_files=()
|
||||
for file in "${FILES[@]}"; do
|
||||
file=$(echo "$file" | xargs)
|
||||
if [ -f "$file" ]; then
|
||||
existing_files+=("$file")
|
||||
fi
|
||||
done
|
||||
|
||||
if [ ${#existing_files[@]} -ne 2 ]; then
|
||||
echo "❌ ERROR: Should find 2 files, found ${#existing_files[@]}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✓ Missing files correctly skipped"
|
||||
|
||||
test-common-cache-env-vars:
|
||||
name: Test Environment Variables
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Test single env var inclusion
|
||||
run: |
|
||||
export NODE_VERSION="20.9.0"
|
||||
ENV_VARS="NODE_VERSION"
|
||||
|
||||
IFS=',' read -ra VARS <<< "$ENV_VARS"
|
||||
env_hash=""
|
||||
for var in "${VARS[@]}"; do
|
||||
if [ -n "${!var}" ]; then
|
||||
env_hash="${env_hash}-${var}-${!var}"
|
||||
fi
|
||||
done
|
||||
|
||||
expected="-NODE_VERSION-20.9.0"
|
||||
if [[ "$env_hash" != "$expected" ]]; then
|
||||
echo "❌ ERROR: Expected '$expected', got '$env_hash'"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Single env var inclusion works"
|
||||
|
||||
- name: Test multiple env vars
|
||||
run: |
|
||||
export NODE_VERSION="20.9.0"
|
||||
export PACKAGE_MANAGER="npm"
|
||||
ENV_VARS="NODE_VERSION,PACKAGE_MANAGER"
|
||||
|
||||
IFS=',' read -ra VARS <<< "$ENV_VARS"
|
||||
env_hash=""
|
||||
for var in "${VARS[@]}"; do
|
||||
if [ -n "${!var}" ]; then
|
||||
env_hash="${env_hash}-${var}-${!var}"
|
||||
fi
|
||||
done
|
||||
|
||||
expected="-NODE_VERSION-20.9.0-PACKAGE_MANAGER-npm"
|
||||
if [[ "$env_hash" != "$expected" ]]; then
|
||||
echo "❌ ERROR: Expected '$expected', got '$env_hash'"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Multiple env vars inclusion works"
|
||||
|
||||
- name: Test undefined env var skipping
|
||||
run: |
|
||||
export NODE_VERSION="20.9.0"
|
||||
ENV_VARS="NODE_VERSION,UNDEFINED_VAR"
|
||||
|
||||
IFS=',' read -ra VARS <<< "$ENV_VARS"
|
||||
env_hash=""
|
||||
for var in "${VARS[@]}"; do
|
||||
if [ -n "${!var}" ]; then
|
||||
env_hash="${env_hash}-${var}-${!var}"
|
||||
fi
|
||||
done
|
||||
|
||||
# Should only include NODE_VERSION
|
||||
expected="-NODE_VERSION-20.9.0"
|
||||
if [[ "$env_hash" != "$expected" ]]; then
|
||||
echo "❌ ERROR: Expected '$expected', got '$env_hash'"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Undefined env vars correctly skipped"
|
||||
|
||||
test-common-cache-path-processing:
|
||||
name: Test Path Processing
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Test single path
|
||||
run: |
|
||||
CACHE_PATHS="~/.npm"
|
||||
IFS=',' read -ra PATHS <<< "$CACHE_PATHS"
|
||||
|
||||
if [ ${#PATHS[@]} -ne 1 ]; then
|
||||
echo "❌ ERROR: Should have 1 path, got ${#PATHS[@]}"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Single path processing works"
|
||||
|
||||
- name: Test multiple paths
|
||||
run: |
|
||||
CACHE_PATHS="~/.npm,~/.yarn/cache,node_modules"
|
||||
IFS=',' read -ra PATHS <<< "$CACHE_PATHS"
|
||||
|
||||
if [ ${#PATHS[@]} -ne 3 ]; then
|
||||
echo "❌ ERROR: Should have 3 paths, got ${#PATHS[@]}"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Multiple paths processing works"
|
||||
|
||||
- name: Test path with spaces (trimming)
|
||||
run: |
|
||||
CACHE_PATHS=" ~/.npm , ~/.yarn/cache , node_modules "
|
||||
IFS=',' read -ra PATHS <<< "$CACHE_PATHS"
|
||||
|
||||
trimmed_paths=()
|
||||
for path in "${PATHS[@]}"; do
|
||||
trimmed=$(echo "$path" | xargs)
|
||||
trimmed_paths+=("$trimmed")
|
||||
done
|
||||
|
||||
# Check first path is trimmed
|
||||
if [[ "${trimmed_paths[0]}" != "~/.npm" ]]; then
|
||||
echo "❌ ERROR: Path not trimmed: '${trimmed_paths[0]}'"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Path trimming works"
|
||||
|
||||
test-common-cache-complete-key-generation:
|
||||
name: Test Complete Key Generation
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Create test files
|
||||
run: |
|
||||
mkdir -p test-complete
|
||||
cd test-complete
|
||||
echo "package-lock content" > package-lock.json
|
||||
|
||||
- name: Test complete cache key with all components
|
||||
run: |
|
||||
cd test-complete
|
||||
|
||||
RUNNER_OS="Linux"
|
||||
CACHE_TYPE="npm"
|
||||
KEY_PREFIX="node-20"
|
||||
|
||||
# Generate file hash
|
||||
files_hash=$(cat package-lock.json | sha256sum | cut -d' ' -f1)
|
||||
|
||||
# Generate env hash
|
||||
export NODE_VERSION="20.9.0"
|
||||
env_hash="-NODE_VERSION-20.9.0"
|
||||
|
||||
# Generate final key
|
||||
cache_key="$RUNNER_OS"
|
||||
[ -n "$KEY_PREFIX" ] && cache_key="${cache_key}-${KEY_PREFIX}"
|
||||
[ -n "$CACHE_TYPE" ] && cache_key="${cache_key}-${CACHE_TYPE}"
|
||||
[ -n "$files_hash" ] && cache_key="${cache_key}-${files_hash}"
|
||||
[ -n "$env_hash" ] && cache_key="${cache_key}${env_hash}"
|
||||
|
||||
echo "Generated cache key: $cache_key"
|
||||
|
||||
# Verify structure
|
||||
if [[ ! "$cache_key" =~ ^Linux-node-20-npm-[a-f0-9]{64}-NODE_VERSION-20\.9\.0$ ]]; then
|
||||
echo "❌ ERROR: Invalid cache key structure: $cache_key"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Complete cache key generation works"
|
||||
|
||||
test-common-cache-restore-keys:
|
||||
name: Test Restore Keys
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Test single restore key
|
||||
run: |
|
||||
RESTORE_KEYS="Linux-npm-"
|
||||
|
||||
if [[ -z "$RESTORE_KEYS" ]]; then
|
||||
echo "❌ ERROR: Restore keys should not be empty"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Single restore key: $RESTORE_KEYS"
|
||||
|
||||
- name: Test multiple restore keys
|
||||
run: |
|
||||
RESTORE_KEYS="Linux-node-20-npm-,Linux-node-npm-,Linux-npm-"
|
||||
|
||||
IFS=',' read -ra KEYS <<< "$RESTORE_KEYS"
|
||||
if [ ${#KEYS[@]} -ne 3 ]; then
|
||||
echo "❌ ERROR: Should have 3 restore keys, got ${#KEYS[@]}"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Multiple restore keys work"
|
||||
|
||||
test-common-cache-type-specific-scenarios:
|
||||
name: Test Type-Specific Scenarios
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Test NPM cache key
|
||||
run: |
|
||||
TYPE="npm"
|
||||
FILES="package-lock.json"
|
||||
PATHS="~/.npm,node_modules"
|
||||
|
||||
echo "✓ NPM cache configuration valid"
|
||||
echo " Type: $TYPE"
|
||||
echo " Key files: $FILES"
|
||||
echo " Paths: $PATHS"
|
||||
|
||||
- name: Test Composer cache key
|
||||
run: |
|
||||
TYPE="composer"
|
||||
FILES="composer.lock"
|
||||
PATHS="~/.composer/cache,vendor"
|
||||
|
||||
echo "✓ Composer cache configuration valid"
|
||||
echo " Type: $TYPE"
|
||||
echo " Key files: $FILES"
|
||||
echo " Paths: $PATHS"
|
||||
|
||||
- name: Test Go cache key
|
||||
run: |
|
||||
TYPE="go"
|
||||
FILES="go.sum"
|
||||
PATHS="~/go/pkg/mod,~/.cache/go-build"
|
||||
|
||||
echo "✓ Go cache configuration valid"
|
||||
echo " Type: $TYPE"
|
||||
echo " Key files: $FILES"
|
||||
echo " Paths: $PATHS"
|
||||
|
||||
- name: Test Pip cache key
|
||||
run: |
|
||||
TYPE="pip"
|
||||
FILES="requirements.txt"
|
||||
PATHS="~/.cache/pip"
|
||||
|
||||
echo "✓ Pip cache configuration valid"
|
||||
echo " Type: $TYPE"
|
||||
echo " Key files: $FILES"
|
||||
echo " Paths: $PATHS"
|
||||
|
||||
test-common-cache-edge-cases:
|
||||
name: Test Edge Cases
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Test empty prefix
|
||||
run: |
|
||||
KEY_PREFIX=""
|
||||
cache_key="Linux"
|
||||
[ -n "$KEY_PREFIX" ] && cache_key="${cache_key}-${KEY_PREFIX}"
|
||||
|
||||
if [[ "$cache_key" != "Linux" ]]; then
|
||||
echo "❌ ERROR: Empty prefix should not modify key"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Empty prefix handling works"
|
||||
|
||||
- name: Test no key files
|
||||
run: |
|
||||
KEY_FILES=""
|
||||
files_hash=""
|
||||
|
||||
if [ -n "$KEY_FILES" ]; then
|
||||
echo "❌ ERROR: Should detect empty key files"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ No key files handling works"
|
||||
|
||||
- name: Test no env vars
|
||||
run: |
|
||||
ENV_VARS=""
|
||||
env_hash=""
|
||||
|
||||
if [ -n "$ENV_VARS" ]; then
|
||||
echo "❌ ERROR: Should detect empty env vars"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ No env vars handling works"
|
||||
|
||||
integration-test-summary:
|
||||
name: Integration Test Summary
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- test-common-cache-key-generation
|
||||
- test-common-cache-file-hashing
|
||||
- test-common-cache-env-vars
|
||||
- test-common-cache-path-processing
|
||||
- test-common-cache-complete-key-generation
|
||||
- test-common-cache-restore-keys
|
||||
- test-common-cache-type-specific-scenarios
|
||||
- test-common-cache-edge-cases
|
||||
steps:
|
||||
- name: Summary
|
||||
run: |
|
||||
echo "=========================================="
|
||||
echo "Common Cache Integration Tests - PASSED"
|
||||
echo "=========================================="
|
||||
echo ""
|
||||
echo "✓ Cache key generation tests"
|
||||
echo "✓ File hashing tests"
|
||||
echo "✓ Environment variable tests"
|
||||
echo "✓ Path processing tests"
|
||||
echo "✓ Complete key generation tests"
|
||||
echo "✓ Restore keys tests"
|
||||
echo "✓ Type-specific scenario tests"
|
||||
echo "✓ Edge case tests"
|
||||
echo ""
|
||||
echo "All common-cache integration tests completed successfully!"
|
||||
@@ -7,7 +7,6 @@ on:
|
||||
- 'eslint-lint/**'
|
||||
- 'prettier-lint/**'
|
||||
- 'node-setup/**'
|
||||
- 'common-cache/**'
|
||||
- '_tests/integration/workflows/lint-fix-chain-test.yml'
|
||||
|
||||
jobs:
|
||||
|
||||
@@ -1,513 +0,0 @@
|
||||
---
|
||||
name: Integration Test - Node Setup
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
paths:
|
||||
- 'node-setup/**'
|
||||
- 'version-file-parser/**'
|
||||
- 'common-cache/**'
|
||||
- 'common-retry/**'
|
||||
- '_tests/integration/workflows/node-setup-test.yml'
|
||||
|
||||
jobs:
|
||||
test-node-setup-version-validation:
|
||||
name: Test Version Validation
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Test invalid default version format (alphabetic)
|
||||
run: |
|
||||
VERSION="abc"
|
||||
if [[ "$VERSION" =~ ^[0-9]+(\.[0-9]+(\.[0-9]+)?)?$ ]]; then
|
||||
echo "❌ ERROR: Should reject alphabetic version"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Alphabetic version correctly rejected"
|
||||
|
||||
- name: Test invalid default version (too low)
|
||||
run: |
|
||||
VERSION="10"
|
||||
major=$(echo "$VERSION" | cut -d'.' -f1)
|
||||
if [ "$major" -lt 14 ] || [ "$major" -gt 30 ]; then
|
||||
echo "✓ Version $VERSION correctly rejected (major < 14)"
|
||||
else
|
||||
echo "❌ ERROR: Should reject Node.js $VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Test invalid default version (too high)
|
||||
run: |
|
||||
VERSION="50"
|
||||
major=$(echo "$VERSION" | cut -d'.' -f1)
|
||||
if [ "$major" -lt 14 ] || [ "$major" -gt 30 ]; then
|
||||
echo "✓ Version $VERSION correctly rejected (major > 30)"
|
||||
else
|
||||
echo "❌ ERROR: Should reject Node.js $VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Test valid version formats
|
||||
run: |
|
||||
for version in "20" "20.9" "20.9.0" "18" "22.1.0"; do
|
||||
if [[ "$version" =~ ^[0-9]+(\.[0-9]+(\.[0-9]+)?)?$ ]]; then
|
||||
major=$(echo "$version" | cut -d'.' -f1)
|
||||
if [ "$major" -ge 14 ] && [ "$major" -le 30 ]; then
|
||||
echo "✓ Version $version accepted"
|
||||
else
|
||||
echo "❌ ERROR: Version $version should be accepted"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "❌ ERROR: Version $version format validation failed"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
test-node-setup-package-manager-validation:
|
||||
name: Test Package Manager Validation
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Test valid package managers
|
||||
run: |
|
||||
for pm in "npm" "yarn" "pnpm" "bun" "auto"; do
|
||||
case "$pm" in
|
||||
"npm"|"yarn"|"pnpm"|"bun"|"auto")
|
||||
echo "✓ Package manager $pm accepted"
|
||||
;;
|
||||
*)
|
||||
echo "❌ ERROR: Valid package manager $pm rejected"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
- name: Test invalid package manager
|
||||
run: |
|
||||
PM="invalid-pm"
|
||||
case "$PM" in
|
||||
"npm"|"yarn"|"pnpm"|"bun"|"auto")
|
||||
echo "❌ ERROR: Invalid package manager should be rejected"
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
echo "✓ Invalid package manager correctly rejected"
|
||||
;;
|
||||
esac
|
||||
|
||||
test-node-setup-url-validation:
|
||||
name: Test URL Validation
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Test valid registry URLs
|
||||
run: |
|
||||
for url in "https://registry.npmjs.org" "http://localhost:4873" "https://npm.custom.com/"; do
|
||||
if [[ "$url" == "https://"* ]] || [[ "$url" == "http://"* ]]; then
|
||||
echo "✓ Registry URL $url accepted"
|
||||
else
|
||||
echo "❌ ERROR: Valid URL $url rejected"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Test invalid registry URLs
|
||||
run: |
|
||||
for url in "ftp://registry.com" "not-a-url" "registry.com"; do
|
||||
if [[ "$url" == "https://"* ]] || [[ "$url" == "http://"* ]]; then
|
||||
echo "❌ ERROR: Invalid URL $url should be rejected"
|
||||
exit 1
|
||||
else
|
||||
echo "✓ Invalid URL $url correctly rejected"
|
||||
fi
|
||||
done
|
||||
|
||||
test-node-setup-retries-validation:
|
||||
name: Test Retries Validation
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Test valid retry counts
|
||||
run: |
|
||||
for retries in "1" "3" "5" "10"; do
|
||||
if [[ "$retries" =~ ^[0-9]+$ ]] && [ "$retries" -gt 0 ] && [ "$retries" -le 10 ]; then
|
||||
echo "✓ Max retries $retries accepted"
|
||||
else
|
||||
echo "❌ ERROR: Valid retry count $retries rejected"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Test invalid retry counts
|
||||
run: |
|
||||
for retries in "0" "11" "abc" "-1"; do
|
||||
if [[ "$retries" =~ ^[0-9]+$ ]] && [ "$retries" -gt 0 ] && [ "$retries" -le 10 ]; then
|
||||
echo "❌ ERROR: Invalid retry count $retries should be rejected"
|
||||
exit 1
|
||||
else
|
||||
echo "✓ Invalid retry count $retries correctly rejected"
|
||||
fi
|
||||
done
|
||||
|
||||
test-node-setup-boolean-validation:
|
||||
name: Test Boolean Validation
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Test valid boolean values
|
||||
run: |
|
||||
for value in "true" "false"; do
|
||||
if [[ "$value" == "true" ]] || [[ "$value" == "false" ]]; then
|
||||
echo "✓ Boolean value $value accepted"
|
||||
else
|
||||
echo "❌ ERROR: Valid boolean $value rejected"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Test invalid boolean values
|
||||
run: |
|
||||
for value in "yes" "no" "1" "0" "True" "FALSE" ""; do
|
||||
if [[ "$value" != "true" ]] && [[ "$value" != "false" ]]; then
|
||||
echo "✓ Invalid boolean value '$value' correctly rejected"
|
||||
else
|
||||
echo "❌ ERROR: Invalid boolean $value should be rejected"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
test-node-setup-token-validation:
|
||||
name: Test Auth Token Validation
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Test injection pattern detection
|
||||
run: |
|
||||
for token in "token;malicious" "token&&command" "token|pipe"; do
|
||||
if [[ "$token" == *";"* ]] || [[ "$token" == *"&&"* ]] || [[ "$token" == *"|"* ]]; then
|
||||
echo "✓ Injection pattern in token correctly detected"
|
||||
else
|
||||
echo "❌ ERROR: Should detect injection pattern in: $token"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Test valid tokens
|
||||
run: |
|
||||
for token in "npm_AbCdEf1234567890" "github_pat_12345abcdef" "simple-token"; do
|
||||
if [[ "$token" == *";"* ]] || [[ "$token" == *"&&"* ]] || [[ "$token" == *"|"* ]]; then
|
||||
echo "❌ ERROR: Valid token should not be rejected: $token"
|
||||
exit 1
|
||||
else
|
||||
echo "✓ Valid token accepted"
|
||||
fi
|
||||
done
|
||||
|
||||
test-node-setup-package-manager-resolution:
|
||||
name: Test Package Manager Resolution
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Test auto detection with detected PM
|
||||
run: |
|
||||
INPUT_PM="auto"
|
||||
DETECTED_PM="pnpm"
|
||||
|
||||
if [ "$INPUT_PM" = "auto" ]; then
|
||||
if [ -n "$DETECTED_PM" ]; then
|
||||
FINAL_PM="$DETECTED_PM"
|
||||
else
|
||||
FINAL_PM="npm"
|
||||
fi
|
||||
else
|
||||
FINAL_PM="$INPUT_PM"
|
||||
fi
|
||||
|
||||
if [[ "$FINAL_PM" != "pnpm" ]]; then
|
||||
echo "❌ ERROR: Should use detected PM (pnpm)"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Auto-detected package manager correctly resolved"
|
||||
|
||||
- name: Test auto detection without detected PM
|
||||
run: |
|
||||
INPUT_PM="auto"
|
||||
DETECTED_PM=""
|
||||
|
||||
if [ "$INPUT_PM" = "auto" ]; then
|
||||
if [ -n "$DETECTED_PM" ]; then
|
||||
FINAL_PM="$DETECTED_PM"
|
||||
else
|
||||
FINAL_PM="npm"
|
||||
fi
|
||||
else
|
||||
FINAL_PM="$INPUT_PM"
|
||||
fi
|
||||
|
||||
if [[ "$FINAL_PM" != "npm" ]]; then
|
||||
echo "❌ ERROR: Should default to npm"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Defaults to npm when no detection"
|
||||
|
||||
- name: Test explicit package manager
|
||||
run: |
|
||||
INPUT_PM="yarn"
|
||||
DETECTED_PM="pnpm"
|
||||
|
||||
if [ "$INPUT_PM" = "auto" ]; then
|
||||
if [ -n "$DETECTED_PM" ]; then
|
||||
FINAL_PM="$DETECTED_PM"
|
||||
else
|
||||
FINAL_PM="npm"
|
||||
fi
|
||||
else
|
||||
FINAL_PM="$INPUT_PM"
|
||||
fi
|
||||
|
||||
if [[ "$FINAL_PM" != "yarn" ]]; then
|
||||
echo "❌ ERROR: Should use explicit PM (yarn)"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Explicit package manager correctly used"
|
||||
|
||||
test-node-setup-feature-detection:
|
||||
name: Test Feature Detection
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Create test package.json with ESM
|
||||
run: |
|
||||
mkdir -p test-esm
|
||||
cd test-esm
|
||||
cat > package.json <<'EOF'
|
||||
{
|
||||
"name": "test-esm",
|
||||
"version": "1.0.0",
|
||||
"type": "module"
|
||||
}
|
||||
EOF
|
||||
|
||||
- name: Test ESM detection
|
||||
run: |
|
||||
cd test-esm
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
pkg_type=$(jq -r '.type // "commonjs"' package.json 2>/dev/null)
|
||||
if [[ "$pkg_type" == "module" ]]; then
|
||||
echo "✓ ESM support correctly detected"
|
||||
else
|
||||
echo "❌ ERROR: Should detect ESM support"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "⚠️ jq not available, skipping ESM detection test"
|
||||
echo "✓ ESM detection logic verified (jq would be required in actual action)"
|
||||
fi
|
||||
|
||||
- name: Create test with TypeScript
|
||||
run: |
|
||||
mkdir -p test-ts
|
||||
cd test-ts
|
||||
touch tsconfig.json
|
||||
cat > package.json <<'EOF'
|
||||
{
|
||||
"name": "test-ts",
|
||||
"devDependencies": {
|
||||
"typescript": "^5.0.0"
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
- name: Test TypeScript detection
|
||||
run: |
|
||||
cd test-ts
|
||||
typescript_support="false"
|
||||
if [ -f tsconfig.json ]; then
|
||||
typescript_support="true"
|
||||
fi
|
||||
if [[ "$typescript_support" != "true" ]]; then
|
||||
echo "❌ ERROR: Should detect TypeScript"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ TypeScript support correctly detected"
|
||||
|
||||
- name: Create test with frameworks
|
||||
run: |
|
||||
mkdir -p test-frameworks
|
||||
cd test-frameworks
|
||||
cat > package.json <<'EOF'
|
||||
{
|
||||
"name": "test-frameworks",
|
||||
"dependencies": {
|
||||
"react": "^18.0.0",
|
||||
"next": "^14.0.0"
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
- name: Test framework detection
|
||||
run: |
|
||||
cd test-frameworks
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
has_next=$(jq -e '.dependencies.next or .devDependencies.next' package.json >/dev/null 2>&1 && echo "yes" || echo "no")
|
||||
has_react=$(jq -e '.dependencies.react or .devDependencies.react' package.json >/dev/null 2>&1 && echo "yes" || echo "no")
|
||||
|
||||
if [[ "$has_next" == "yes" ]] && [[ "$has_react" == "yes" ]]; then
|
||||
echo "✓ Frameworks (Next.js, React) correctly detected"
|
||||
else
|
||||
echo "❌ ERROR: Should detect Next.js and React"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "⚠️ jq not available, skipping framework detection test"
|
||||
echo "✓ Framework detection logic verified (jq would be required in actual action)"
|
||||
fi
|
||||
|
||||
test-node-setup-security:
|
||||
name: Test Security Measures
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Test token sanitization
|
||||
run: |
|
||||
TOKEN="test-token
|
||||
with-newline"
|
||||
|
||||
# Should remove newlines
|
||||
sanitized=$(echo "$TOKEN" | tr -d '\n\r')
|
||||
|
||||
if [[ "$sanitized" == *$'\n'* ]] || [[ "$sanitized" == *$'\r'* ]]; then
|
||||
echo "❌ ERROR: Newlines not removed"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Token sanitization works correctly"
|
||||
|
||||
- name: Test package manager sanitization
|
||||
run: |
|
||||
PM="npm
|
||||
with-newline"
|
||||
|
||||
# Should remove newlines
|
||||
sanitized=$(echo "$PM" | tr -d '\n\r')
|
||||
|
||||
if [[ "$sanitized" == *$'\n'* ]] || [[ "$sanitized" == *$'\r'* ]]; then
|
||||
echo "❌ ERROR: Newlines not removed from PM"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Package manager sanitization works correctly"
|
||||
|
||||
test-node-setup-integration-workflow:
|
||||
name: Test Integration Workflow
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Simulate complete workflow
|
||||
run: |
|
||||
echo "=== Simulating Node Setup Workflow ==="
|
||||
|
||||
# 1. Validation
|
||||
echo "Step 1: Validate inputs"
|
||||
DEFAULT_VERSION="20"
|
||||
PACKAGE_MANAGER="npm"
|
||||
REGISTRY_URL="https://registry.npmjs.org"
|
||||
CACHE="true"
|
||||
INSTALL="true"
|
||||
MAX_RETRIES="3"
|
||||
echo "✓ Inputs validated"
|
||||
|
||||
# 2. Version parsing
|
||||
echo "Step 2: Parse Node.js version"
|
||||
NODE_VERSION="20.9.0"
|
||||
echo "✓ Version parsed: $NODE_VERSION"
|
||||
|
||||
# 3. Package manager resolution
|
||||
echo "Step 3: Resolve package manager"
|
||||
if [ "$PACKAGE_MANAGER" = "auto" ]; then
|
||||
FINAL_PM="npm"
|
||||
else
|
||||
FINAL_PM="$PACKAGE_MANAGER"
|
||||
fi
|
||||
echo "✓ Package manager resolved: $FINAL_PM"
|
||||
|
||||
# 4. Setup Node.js
|
||||
echo "Step 4: Setup Node.js $NODE_VERSION"
|
||||
if command -v node >/dev/null 2>&1; then
|
||||
echo "✓ Node.js available: $(node --version)"
|
||||
fi
|
||||
|
||||
# 5. Enable Corepack
|
||||
echo "Step 5: Enable Corepack"
|
||||
if command -v corepack >/dev/null 2>&1; then
|
||||
echo "✓ Corepack available"
|
||||
else
|
||||
echo "⚠️ Corepack not available in test environment"
|
||||
fi
|
||||
|
||||
# 6. Cache dependencies
|
||||
if [[ "$CACHE" == "true" ]]; then
|
||||
echo "Step 6: Cache dependencies"
|
||||
echo "✓ Would use common-cache action"
|
||||
fi
|
||||
|
||||
# 7. Install dependencies
|
||||
if [[ "$INSTALL" == "true" ]]; then
|
||||
echo "Step 7: Install dependencies"
|
||||
echo "✓ Would run: $FINAL_PM install"
|
||||
fi
|
||||
|
||||
echo "=== Workflow simulation completed ==="
|
||||
|
||||
integration-test-summary:
|
||||
name: Integration Test Summary
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- test-node-setup-version-validation
|
||||
- test-node-setup-package-manager-validation
|
||||
- test-node-setup-url-validation
|
||||
- test-node-setup-retries-validation
|
||||
- test-node-setup-boolean-validation
|
||||
- test-node-setup-token-validation
|
||||
- test-node-setup-package-manager-resolution
|
||||
- test-node-setup-feature-detection
|
||||
- test-node-setup-security
|
||||
- test-node-setup-integration-workflow
|
||||
steps:
|
||||
- name: Summary
|
||||
run: |
|
||||
echo "=========================================="
|
||||
echo "Node Setup Integration Tests - PASSED"
|
||||
echo "=========================================="
|
||||
echo ""
|
||||
echo "✓ Version validation tests"
|
||||
echo "✓ Package manager validation tests"
|
||||
echo "✓ URL validation tests"
|
||||
echo "✓ Retries validation tests"
|
||||
echo "✓ Boolean validation tests"
|
||||
echo "✓ Token validation tests"
|
||||
echo "✓ Package manager resolution tests"
|
||||
echo "✓ Feature detection tests"
|
||||
echo "✓ Security measure tests"
|
||||
echo "✓ Integration workflow tests"
|
||||
echo ""
|
||||
echo "All node-setup integration tests completed successfully!"
|
||||
@@ -1,241 +0,0 @@
|
||||
---
|
||||
name: Test version-file-parser Integration
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
paths:
|
||||
- 'version-file-parser/**'
|
||||
- '_tests/integration/workflows/version-file-parser-test.yml'
|
||||
|
||||
jobs:
|
||||
test-version-file-parser:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
test-case:
|
||||
- name: 'Node.js project'
|
||||
language: 'node'
|
||||
tool-versions-key: 'nodejs'
|
||||
dockerfile-image: 'node'
|
||||
expected-version: '18.0.0'
|
||||
setup-files: |
|
||||
echo "18.17.0" > .nvmrc
|
||||
cat > package.json <<EOF
|
||||
{
|
||||
"name": "test-project",
|
||||
"engines": { "node": ">=18.0.0" }
|
||||
}
|
||||
EOF
|
||||
touch package-lock.json
|
||||
|
||||
- name: 'PHP project'
|
||||
language: 'php'
|
||||
tool-versions-key: 'php'
|
||||
dockerfile-image: 'php'
|
||||
expected-version: '8.1'
|
||||
setup-files: |
|
||||
cat > composer.json <<EOF
|
||||
{
|
||||
"require": { "php": "^8.1" }
|
||||
}
|
||||
EOF
|
||||
|
||||
- name: 'Python project'
|
||||
language: 'python'
|
||||
tool-versions-key: 'python'
|
||||
dockerfile-image: 'python'
|
||||
expected-version: '3.9'
|
||||
setup-files: |
|
||||
echo "3.9.0" > .python-version
|
||||
cat > pyproject.toml <<EOF
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.9"
|
||||
EOF
|
||||
|
||||
- name: 'Go project'
|
||||
language: 'go'
|
||||
tool-versions-key: 'golang'
|
||||
dockerfile-image: 'golang'
|
||||
expected-version: '1.21'
|
||||
setup-files: |
|
||||
cat > go.mod <<EOF
|
||||
module test-project
|
||||
go 1.21
|
||||
EOF
|
||||
|
||||
- name: '.tool-versions file'
|
||||
language: 'node'
|
||||
tool-versions-key: 'nodejs'
|
||||
dockerfile-image: 'node'
|
||||
expected-version: '18.16.0'
|
||||
setup-files: |
|
||||
echo "nodejs 18.16.0" > .tool-versions
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Clean up test files from previous runs
|
||||
run: |
|
||||
rm -f .nvmrc package.json package-lock.json composer.json .python-version pyproject.toml go.mod .tool-versions
|
||||
|
||||
- name: Setup test files
|
||||
run: ${{ matrix.test-case.setup-files }}
|
||||
|
||||
- name: Test version-file-parser
|
||||
id: test-action
|
||||
uses: ./version-file-parser
|
||||
with:
|
||||
language: ${{ matrix.test-case.language }}
|
||||
tool-versions-key: ${{ matrix.test-case.tool-versions-key }}
|
||||
dockerfile-image: ${{ matrix.test-case.dockerfile-image }}
|
||||
default-version: '1.0.0'
|
||||
|
||||
- name: Validate outputs
|
||||
run: |
|
||||
echo "Test case: ${{ matrix.test-case.name }}"
|
||||
echo "Expected version: ${{ matrix.test-case.expected-version }}"
|
||||
echo "Detected version: ${{ steps.test-action.outputs.detected-version }}"
|
||||
echo "Package manager: ${{ steps.test-action.outputs.package-manager }}"
|
||||
|
||||
# Validate that we got some version
|
||||
if [[ -z "${{ steps.test-action.outputs.detected-version }}" ]]; then
|
||||
echo "❌ ERROR: No version detected"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate version format (basic semver check)
|
||||
if ! echo "${{ steps.test-action.outputs.detected-version }}" | grep -E '^[0-9]+\.[0-9]+(\.[0-9]+)?'; then
|
||||
echo "❌ ERROR: Invalid version format: ${{ steps.test-action.outputs.detected-version }}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate detected version matches expected version (not the fallback)
|
||||
if [[ "${{ steps.test-action.outputs.detected-version }}" != "${{ matrix.test-case.expected-version }}" ]]; then
|
||||
echo "❌ ERROR: Version mismatch"
|
||||
echo "Expected: ${{ matrix.test-case.expected-version }}"
|
||||
echo "Got: ${{ steps.test-action.outputs.detected-version }}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ Version validation passed"
|
||||
|
||||
# Skip external reference test in local/CI environment to avoid auth issues
|
||||
- name: Test external reference (info only)
|
||||
run: |
|
||||
echo "External reference test would use: ivuorinen/actions/version-file-parser@main"
|
||||
echo "Skipping to avoid authentication issues in local testing"
|
||||
|
||||
test-edge-cases:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Clean up test files from previous runs
|
||||
run: |
|
||||
rm -f .nvmrc package.json package-lock.json composer.json .python-version pyproject.toml go.mod .tool-versions
|
||||
|
||||
- name: Setup test files (package.json engines)
|
||||
shell: bash
|
||||
run: |
|
||||
set -Eeuo pipefail
|
||||
cat > package.json <<'EOF'
|
||||
{
|
||||
"name": "edge-case",
|
||||
"engines": { "node": ">=18.0.0" }
|
||||
}
|
||||
EOF
|
||||
echo "18.17.0" > .nvmrc
|
||||
|
||||
- name: Test version detection from existing files
|
||||
id: existing-version
|
||||
uses: ./version-file-parser
|
||||
with:
|
||||
language: 'node'
|
||||
tool-versions-key: 'nodejs'
|
||||
dockerfile-image: 'node'
|
||||
default-version: '20.0.0'
|
||||
|
||||
- name: Validate existing version detection
|
||||
run: |
|
||||
# The action detects Node.js version from package.json engines field
|
||||
# package.json >=18.0.0 is parsed as 18.0.0
|
||||
# Note: .nvmrc exists but package.json takes precedence in this implementation
|
||||
expected_version="18.0.0"
|
||||
detected_version="${{ steps.existing-version.outputs.detected-version }}"
|
||||
|
||||
if [[ "$detected_version" != "$expected_version" ]]; then
|
||||
echo "❌ ERROR: Version mismatch"
|
||||
echo "Expected: $expected_version"
|
||||
echo "Got: $detected_version"
|
||||
exit 1
|
||||
fi
|
||||
echo "✅ Existing version detection works correctly"
|
||||
|
||||
- name: Clean up before invalid regex test
|
||||
run: |
|
||||
rm -f .nvmrc package.json package-lock.json
|
||||
|
||||
- name: Test with invalid regex
|
||||
id: invalid-regex
|
||||
uses: ./version-file-parser
|
||||
with:
|
||||
language: 'node'
|
||||
tool-versions-key: 'nodejs'
|
||||
dockerfile-image: 'node'
|
||||
validation-regex: 'invalid[regex'
|
||||
default-version: '18.0.0'
|
||||
continue-on-error: true
|
||||
|
||||
- name: Validate regex error handling
|
||||
run: |
|
||||
echo "Testing regex error handling completed"
|
||||
# Action should handle invalid regex gracefully
|
||||
if [ "${{ steps.invalid-regex.outcome }}" != "failure" ]; then
|
||||
echo "::error::Expected invalid-regex step to fail, but it was: ${{ steps.invalid-regex.outcome }}"
|
||||
exit 1
|
||||
fi
|
||||
echo "✅ Invalid regex properly failed as expected"
|
||||
|
||||
test-dockerfile-parsing:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Clean up test files from previous runs
|
||||
run: |
|
||||
rm -f .nvmrc package.json package-lock.json composer.json .python-version pyproject.toml go.mod .tool-versions Dockerfile
|
||||
|
||||
- name: Create Dockerfile with Node.js
|
||||
run: |
|
||||
cat > Dockerfile <<EOF
|
||||
FROM node:18.17.0-alpine
|
||||
WORKDIR /app
|
||||
COPY . .
|
||||
EOF
|
||||
|
||||
- name: Test Dockerfile parsing
|
||||
id: dockerfile-test
|
||||
uses: ./version-file-parser
|
||||
with:
|
||||
language: 'node'
|
||||
tool-versions-key: 'nodejs'
|
||||
dockerfile-image: 'node'
|
||||
|
||||
- name: Validate Dockerfile parsing
|
||||
run: |
|
||||
expected_version="18.17.0"
|
||||
detected_version="${{ steps.dockerfile-test.outputs.dockerfile-version }}"
|
||||
|
||||
echo "Expected version: $expected_version"
|
||||
echo "Detected version: $detected_version"
|
||||
|
||||
if [[ "$detected_version" != "$expected_version" ]]; then
|
||||
echo "❌ ERROR: Version mismatch"
|
||||
echo "Expected: $expected_version"
|
||||
echo "Got: $detected_version"
|
||||
exit 1
|
||||
fi
|
||||
echo "✅ Dockerfile parsing successful"
|
||||
@@ -25,6 +25,9 @@ from typing import Any
|
||||
|
||||
import yaml # pylint: disable=import-error
|
||||
|
||||
# Default value for unknown items (used by ActionFileParser)
|
||||
DEFAULT_UNKNOWN = "Unknown"
|
||||
|
||||
|
||||
class ValidationCore:
|
||||
"""Core validation functionality with standardized patterns and functions."""
|
||||
@@ -497,9 +500,9 @@ class ActionFileParser:
|
||||
"""Get the action name from an action.yml file."""
|
||||
try:
|
||||
data = ActionFileParser.load_action_file(action_file)
|
||||
return data.get("name", "Unknown")
|
||||
return data.get("name", DEFAULT_UNKNOWN)
|
||||
except (OSError, ValueError, yaml.YAMLError, AttributeError):
|
||||
return "Unknown"
|
||||
return DEFAULT_UNKNOWN
|
||||
|
||||
@staticmethod
|
||||
def get_action_inputs(action_file: str) -> list[str]:
|
||||
@@ -521,6 +524,16 @@ class ActionFileParser:
|
||||
except (OSError, ValueError, yaml.YAMLError, AttributeError):
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
def get_action_runs_using(action_file: str) -> str:
|
||||
"""Get the runs.using value from an action.yml file."""
|
||||
try:
|
||||
data = ActionFileParser.load_action_file(action_file)
|
||||
runs = data.get("runs", {})
|
||||
return runs.get("using", "unknown")
|
||||
except (OSError, ValueError, yaml.YAMLError, AttributeError):
|
||||
return "unknown"
|
||||
|
||||
@staticmethod
|
||||
def _get_required_property(input_data: dict, property_name: str) -> str:
|
||||
"""Get the required/optional property."""
|
||||
@@ -787,6 +800,11 @@ Examples:
|
||||
mode_group.add_argument("--inputs", metavar="ACTION_FILE", help="List action inputs")
|
||||
mode_group.add_argument("--outputs", metavar="ACTION_FILE", help="List action outputs")
|
||||
mode_group.add_argument("--name", metavar="ACTION_FILE", help="Get action name")
|
||||
mode_group.add_argument(
|
||||
"--runs-using",
|
||||
metavar="ACTION_FILE",
|
||||
help="Get action runs.using value",
|
||||
)
|
||||
mode_group.add_argument(
|
||||
"--validate-yaml",
|
||||
metavar="YAML_FILE",
|
||||
@@ -834,6 +852,12 @@ def _handle_name_command(args):
|
||||
print(name)
|
||||
|
||||
|
||||
def _handle_runs_using_command(args):
|
||||
"""Handle the runs-using command."""
|
||||
runs_using = ActionFileParser.get_action_runs_using(args.runs_using)
|
||||
print(runs_using)
|
||||
|
||||
|
||||
def _handle_validate_yaml_command(args):
|
||||
"""Handle the validate-yaml command."""
|
||||
try:
|
||||
@@ -853,6 +877,7 @@ def _execute_command(args):
|
||||
"inputs": _handle_inputs_command,
|
||||
"outputs": _handle_outputs_command,
|
||||
"name": _handle_name_command,
|
||||
"runs_using": _handle_runs_using_command,
|
||||
"validate_yaml": _handle_validate_yaml_command,
|
||||
}
|
||||
|
||||
|
||||
@@ -1,168 +0,0 @@
|
||||
#!/usr/bin/env shellspec
|
||||
# Unit tests for common-cache action validation and logic
|
||||
|
||||
# Framework is automatically loaded via spec_helper.sh
|
||||
|
||||
Describe "common-cache action"
|
||||
ACTION_DIR="common-cache"
|
||||
ACTION_FILE="$ACTION_DIR/action.yml"
|
||||
|
||||
Context "when validating cache type input"
|
||||
It "accepts npm cache type"
|
||||
When call validate_input_python "common-cache" "type" "npm"
|
||||
The status should be success
|
||||
End
|
||||
It "accepts composer cache type"
|
||||
When call validate_input_python "common-cache" "type" "composer"
|
||||
The status should be success
|
||||
End
|
||||
It "accepts go cache type"
|
||||
When call validate_input_python "common-cache" "type" "go"
|
||||
The status should be success
|
||||
End
|
||||
It "accepts pip cache type"
|
||||
When call validate_input_python "common-cache" "type" "pip"
|
||||
The status should be success
|
||||
End
|
||||
It "accepts maven cache type"
|
||||
When call validate_input_python "common-cache" "type" "maven"
|
||||
The status should be success
|
||||
End
|
||||
It "accepts gradle cache type"
|
||||
When call validate_input_python "common-cache" "type" "gradle"
|
||||
The status should be success
|
||||
End
|
||||
It "rejects empty cache type"
|
||||
When call validate_input_python "common-cache" "type" ""
|
||||
The status should be failure
|
||||
End
|
||||
It "rejects invalid cache type"
|
||||
Pending "TODO: Implement enum validation for cache type"
|
||||
When call validate_input_python "common-cache" "type" "invalid-type"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating paths input"
|
||||
It "accepts single path"
|
||||
When call validate_input_python "common-cache" "paths" "node_modules"
|
||||
The status should be success
|
||||
End
|
||||
It "accepts multiple paths"
|
||||
When call validate_input_python "common-cache" "paths" "node_modules,dist,build"
|
||||
The status should be success
|
||||
End
|
||||
It "rejects empty paths"
|
||||
When call validate_input_python "common-cache" "paths" ""
|
||||
The status should be failure
|
||||
End
|
||||
It "rejects path traversal"
|
||||
When call validate_input_python "common-cache" "paths" "../../../etc/passwd"
|
||||
The status should be failure
|
||||
End
|
||||
It "rejects command injection in paths"
|
||||
When call validate_input_python "common-cache" "paths" "node_modules;rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating key-prefix input"
|
||||
It "accepts valid key prefix"
|
||||
When call validate_input_python "common-cache" "key-prefix" "v2-build"
|
||||
The status should be success
|
||||
End
|
||||
It "rejects command injection in key-prefix"
|
||||
When call validate_input_python "common-cache" "key-prefix" "v2&&malicious"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating key-files input"
|
||||
It "accepts single key file"
|
||||
When call validate_input_python "common-cache" "key-files" "package.json"
|
||||
The status should be success
|
||||
End
|
||||
It "accepts multiple key files"
|
||||
When call validate_input_python "common-cache" "key-files" "package.json,package-lock.json,yarn.lock"
|
||||
The status should be success
|
||||
End
|
||||
It "rejects path traversal in key-files"
|
||||
When call validate_input_python "common-cache" "key-files" "../../../sensitive.json"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating restore-keys input"
|
||||
It "accepts valid restore keys format"
|
||||
When call validate_input_python "common-cache" "restore-keys" "Linux-npm-,Linux-"
|
||||
The status should be success
|
||||
End
|
||||
It "rejects malicious restore keys"
|
||||
When call validate_input_python "common-cache" "restore-keys" "Linux-npm-;rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when checking action.yml structure"
|
||||
It "has valid YAML syntax"
|
||||
When call validate_action_yml_quiet "$ACTION_FILE"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "has correct action name"
|
||||
name=$(get_action_name "$ACTION_FILE")
|
||||
When call echo "$name"
|
||||
The output should equal "Common Cache"
|
||||
End
|
||||
|
||||
It "defines required inputs"
|
||||
inputs=$(get_action_inputs "$ACTION_FILE")
|
||||
When call echo "$inputs"
|
||||
The output should include "type"
|
||||
The output should include "paths"
|
||||
End
|
||||
|
||||
It "defines optional inputs"
|
||||
inputs=$(get_action_inputs "$ACTION_FILE")
|
||||
When call echo "$inputs"
|
||||
The output should include "key-prefix"
|
||||
The output should include "key-files"
|
||||
The output should include "restore-keys"
|
||||
The output should include "env-vars"
|
||||
End
|
||||
|
||||
It "defines expected outputs"
|
||||
outputs=$(get_action_outputs "$ACTION_FILE")
|
||||
When call echo "$outputs"
|
||||
The output should include "cache-hit"
|
||||
The output should include "cache-key"
|
||||
The output should include "cache-paths"
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating security"
|
||||
It "rejects injection in all input types"
|
||||
When call validate_input_python "common-cache" "type" "npm;malicious"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "validates environment variable names safely"
|
||||
When call validate_input_python "common-cache" "env-vars" "NODE_ENV,CI"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects injection in environment variables"
|
||||
When call validate_input_python "common-cache" "env-vars" "NODE_ENV;rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when testing outputs"
|
||||
It "produces all expected outputs consistently"
|
||||
When call test_action_outputs "$ACTION_DIR" "type" "npm" "paths" "node_modules"
|
||||
The status should be success
|
||||
The stderr should include "Testing action outputs for: common-cache"
|
||||
The stderr should include "Output test passed for: common-cache"
|
||||
End
|
||||
End
|
||||
End
|
||||
@@ -1,242 +0,0 @@
|
||||
#!/usr/bin/env shellspec
|
||||
# Unit tests for node-setup action
|
||||
|
||||
# Framework is automatically loaded via spec_helper.sh
|
||||
|
||||
Describe "node-setup action"
|
||||
ACTION_DIR="node-setup"
|
||||
ACTION_FILE="$ACTION_DIR/action.yml"
|
||||
|
||||
# Framework is automatically initialized via spec_helper.sh
|
||||
|
||||
Context "when validating inputs"
|
||||
It "accepts valid Node.js version"
|
||||
When call validate_input_python "node-setup" "default-version" "18.17.0"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts valid package manager"
|
||||
When call validate_input_python "node-setup" "package-manager" "npm"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts yarn as package manager"
|
||||
When call validate_input_python "node-setup" "package-manager" "yarn"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts pnpm as package manager"
|
||||
When call validate_input_python "node-setup" "package-manager" "pnpm"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts bun as package manager"
|
||||
When call validate_input_python "node-setup" "package-manager" "bun"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects invalid package manager"
|
||||
When call validate_input_python "node-setup" "package-manager" "invalid-manager"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects malformed Node.js version"
|
||||
When call validate_input_python "node-setup" "default-version" "not-a-version"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects command injection in inputs"
|
||||
When call validate_input_python "node-setup" "default-version" "18.0.0; rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when checking action structure"
|
||||
It "has valid YAML syntax"
|
||||
When call validate_action_yml_quiet "$ACTION_FILE"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "has correct action name"
|
||||
When call get_action_name "$ACTION_FILE"
|
||||
The output should equal "Node Setup"
|
||||
End
|
||||
|
||||
It "defines expected inputs"
|
||||
When call get_action_inputs "$ACTION_FILE"
|
||||
The output should include "default-version"
|
||||
The output should include "package-manager"
|
||||
End
|
||||
|
||||
It "defines expected outputs"
|
||||
When call get_action_outputs "$ACTION_FILE"
|
||||
The output should include "node-version"
|
||||
The output should include "package-manager"
|
||||
The output should include "cache-hit"
|
||||
End
|
||||
End
|
||||
|
||||
Context "when testing Node.js version detection"
|
||||
BeforeEach "shellspec_setup_test_env 'node-version-detection'"
|
||||
AfterEach "shellspec_cleanup_test_env 'node-version-detection'"
|
||||
|
||||
It "detects version from package.json engines field"
|
||||
create_mock_node_repo
|
||||
|
||||
# Mock action output based on package.json
|
||||
echo "node-version=18.0.0" >>"$GITHUB_OUTPUT"
|
||||
|
||||
When call shellspec_validate_action_output "node-version" "18.0.0"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "detects version from .nvmrc file"
|
||||
create_mock_node_repo
|
||||
echo "18.17.1" >.nvmrc
|
||||
|
||||
# Mock action output
|
||||
echo "node-version=18.17.1" >>"$GITHUB_OUTPUT"
|
||||
|
||||
When call shellspec_validate_action_output "node-version" "18.17.1"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "uses default version when none specified"
|
||||
create_mock_node_repo
|
||||
# Remove engines field simulation
|
||||
|
||||
# Mock default version output
|
||||
echo "node-version=20.0.0" >>"$GITHUB_OUTPUT"
|
||||
|
||||
When call shellspec_validate_action_output "node-version" "20.0.0"
|
||||
The status should be success
|
||||
End
|
||||
End
|
||||
|
||||
Context "when testing package manager detection"
|
||||
BeforeEach "shellspec_setup_test_env 'package-manager-detection'"
|
||||
AfterEach "shellspec_cleanup_test_env 'package-manager-detection'"
|
||||
|
||||
It "detects bun from bun.lockb"
|
||||
create_mock_node_repo
|
||||
touch bun.lockb
|
||||
|
||||
echo "package-manager=bun" >>"$GITHUB_OUTPUT"
|
||||
|
||||
When call shellspec_validate_action_output "package-manager" "bun"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "detects pnpm from pnpm-lock.yaml"
|
||||
create_mock_node_repo
|
||||
touch pnpm-lock.yaml
|
||||
|
||||
echo "package-manager=pnpm" >>"$GITHUB_OUTPUT"
|
||||
|
||||
When call shellspec_validate_action_output "package-manager" "pnpm"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "detects yarn from yarn.lock"
|
||||
create_mock_node_repo
|
||||
touch yarn.lock
|
||||
|
||||
echo "package-manager=yarn" >>"$GITHUB_OUTPUT"
|
||||
|
||||
When call shellspec_validate_action_output "package-manager" "yarn"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "detects npm from package-lock.json"
|
||||
create_mock_node_repo
|
||||
touch package-lock.json
|
||||
|
||||
echo "package-manager=npm" >>"$GITHUB_OUTPUT"
|
||||
|
||||
When call shellspec_validate_action_output "package-manager" "npm"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "detects packageManager field from package.json"
|
||||
create_mock_node_repo
|
||||
|
||||
# Add packageManager field to package.json
|
||||
cat >package.json <<EOF
|
||||
{
|
||||
"name": "test-project",
|
||||
"version": "1.0.0",
|
||||
"packageManager": "pnpm@8.0.0",
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
echo "package-manager=pnpm" >>"$GITHUB_OUTPUT"
|
||||
|
||||
When call shellspec_validate_action_output "package-manager" "pnpm"
|
||||
The status should be success
|
||||
End
|
||||
End
|
||||
|
||||
Context "when testing Corepack integration"
|
||||
BeforeEach "shellspec_setup_test_env 'corepack-test'"
|
||||
AfterEach "shellspec_cleanup_test_env 'corepack-test'"
|
||||
|
||||
It "enables Corepack when packageManager is specified"
|
||||
create_mock_node_repo
|
||||
|
||||
# Simulate packageManager field
|
||||
cat >package.json <<EOF
|
||||
{
|
||||
"name": "test-project",
|
||||
"version": "1.0.0",
|
||||
"packageManager": "yarn@3.6.0"
|
||||
}
|
||||
EOF
|
||||
|
||||
# Mock Corepack enabled output
|
||||
echo "corepack-enabled=true" >>"$GITHUB_OUTPUT"
|
||||
|
||||
When call shellspec_validate_action_output "corepack-enabled" "true"
|
||||
The status should be success
|
||||
End
|
||||
End
|
||||
|
||||
Context "when testing cache functionality"
|
||||
BeforeEach "shellspec_setup_test_env 'cache-test'"
|
||||
AfterEach "shellspec_cleanup_test_env 'cache-test'"
|
||||
|
||||
It "reports cache hit when dependencies are cached"
|
||||
create_mock_node_repo
|
||||
touch package-lock.json
|
||||
mkdir -p node_modules
|
||||
|
||||
# Mock cache hit
|
||||
echo "cache-hit=true" >>"$GITHUB_OUTPUT"
|
||||
|
||||
When call shellspec_validate_action_output "cache-hit" "true"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "reports cache miss when no cache exists"
|
||||
create_mock_node_repo
|
||||
touch package-lock.json
|
||||
|
||||
# Mock cache miss
|
||||
echo "cache-hit=false" >>"$GITHUB_OUTPUT"
|
||||
|
||||
When call shellspec_validate_action_output "cache-hit" "false"
|
||||
The status should be success
|
||||
End
|
||||
End
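The cache-hit value in these specs is mocked; in the action it comes from the cache step itself. For illustration only, a local approximation of what the mock stands in for:

# illustrative stand-in for the mocked cache lookup, not the action's real logic
if [ -d node_modules ] && [ -f package-lock.json ]; then
  echo "cache-hit=true" >> "$GITHUB_OUTPUT"
else
  echo "cache-hit=false" >> "$GITHUB_OUTPUT"
fi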
|
||||
|
||||
Context "when testing output consistency"
|
||||
It "produces all expected outputs"
|
||||
When call test_action_outputs "$ACTION_DIR" "node-version" "18.0.0" "package-manager" "npm"
|
||||
The status should be success
|
||||
The stderr should include "Testing action outputs for: node-setup"
|
||||
The stderr should include "Output test passed for: node-setup"
|
||||
End
|
||||
End
|
||||
End
|
||||
@@ -1,407 +0,0 @@
|
||||
#!/usr/bin/env shellspec
|
||||
# Unit tests for php-composer action validation and logic
|
||||
|
||||
# Framework is automatically loaded via spec_helper.sh
|
||||
|
||||
Describe "php-composer action"
|
||||
ACTION_DIR="php-composer"
|
||||
ACTION_FILE="$ACTION_DIR/action.yml"
|
||||
|
||||
Context "when validating php input"
|
||||
It "accepts valid PHP version"
|
||||
When call validate_input_python "php-composer" "php" "8.4"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts PHP version with patch"
|
||||
When call validate_input_python "php-composer" "php" "8.4.1"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts PHP 7.4"
|
||||
When call validate_input_python "php-composer" "php" "7.4"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts PHP 8.0"
|
||||
When call validate_input_python "php-composer" "php" "8.0"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts PHP 8.1"
|
||||
When call validate_input_python "php-composer" "php" "8.1"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects PHP version too old"
|
||||
When call validate_input_python "php-composer" "php" "5.5"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects invalid version format"
|
||||
When call validate_input_python "php-composer" "php" "php8.4"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects version with command injection"
|
||||
When call validate_input_python "php-composer" "php" "8.4; rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects empty version"
|
||||
When call validate_input_python "php-composer" "php" ""
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating extensions input"
|
||||
It "accepts valid PHP extensions"
|
||||
When call validate_input_python "php-composer" "extensions" "mbstring, xml, zip"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts single extension"
|
||||
When call validate_input_python "php-composer" "extensions" "mbstring"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts extensions without spaces"
|
||||
When call validate_input_python "php-composer" "extensions" "mbstring,xml,zip"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts extensions with underscores"
|
||||
When call validate_input_python "php-composer" "extensions" "pdo_mysql, gd_jpeg"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects extensions with special characters"
|
||||
When call validate_input_python "php-composer" "extensions" "mbstring@xml"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects extensions with command injection"
|
||||
When call validate_input_python "php-composer" "extensions" "mbstring; rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects empty extensions"
|
||||
When call validate_input_python "php-composer" "extensions" ""
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating tools input"
|
||||
It "accepts valid Composer tools"
|
||||
When call validate_input_python "php-composer" "tools" "composer:v2"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts multiple tools"
|
||||
When call validate_input_python "php-composer" "tools" "composer:v2, phpunit:^9.0"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts tools with version constraints"
|
||||
When call validate_input_python "php-composer" "tools" "phpcs, phpstan:1.10"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts tools with stability flags (@ allowed)"
|
||||
When call validate_input_python "php-composer" "tools" "dev-master@dev"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts tools with version and stability flag"
|
||||
When call validate_input_python "php-composer" "tools" "monolog/monolog@dev"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects tools with backticks"
|
||||
When call validate_input_python "php-composer" "tools" "composer\`whoami\`"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects tools with command injection"
|
||||
When call validate_input_python "php-composer" "tools" "composer; rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects empty tools"
|
||||
When call validate_input_python "php-composer" "tools" ""
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating composer-version input"
|
||||
It "accepts composer version 1"
|
||||
When call validate_input_python "php-composer" "composer-version" "1"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts composer version 2"
|
||||
When call validate_input_python "php-composer" "composer-version" "2"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects invalid composer version"
|
||||
When call validate_input_python "php-composer" "composer-version" "3"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects non-numeric composer version"
|
||||
When call validate_input_python "php-composer" "composer-version" "latest"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects empty composer version"
|
||||
When call validate_input_python "php-composer" "composer-version" ""
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating stability input"
|
||||
It "accepts stable"
|
||||
When call validate_input_python "php-composer" "stability" "stable"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts RC"
|
||||
When call validate_input_python "php-composer" "stability" "RC"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts beta"
|
||||
When call validate_input_python "php-composer" "stability" "beta"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts alpha"
|
||||
When call validate_input_python "php-composer" "stability" "alpha"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts dev"
|
||||
When call validate_input_python "php-composer" "stability" "dev"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects invalid stability"
|
||||
When call validate_input_python "php-composer" "stability" "unstable"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects stability with injection"
|
||||
When call validate_input_python "php-composer" "stability" "stable; rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating cache-directories input"
|
||||
It "accepts valid cache directory"
|
||||
When call validate_input_python "php-composer" "cache-directories" "vendor/cache"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts multiple cache directories"
|
||||
When call validate_input_python "php-composer" "cache-directories" "vendor/cache, .cache"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts directories with underscores and hyphens"
|
||||
When call validate_input_python "php-composer" "cache-directories" "cache_dir, cache-dir"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects path traversal"
|
||||
When call validate_input_python "php-composer" "cache-directories" "../malicious"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects absolute paths"
|
||||
When call validate_input_python "php-composer" "cache-directories" "/etc/passwd"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects directories with command injection"
|
||||
When call validate_input_python "php-composer" "cache-directories" "cache; rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects empty cache directories"
|
||||
When call validate_input_python "php-composer" "cache-directories" ""
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating token input"
|
||||
It "accepts GitHub token expression"
|
||||
When call validate_input_python "php-composer" "token" "\${{ github.token }}"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts GitHub fine-grained token"
|
||||
When call validate_input_python "php-composer" "token" "ghp_abcdefghijklmnopqrstuvwxyz1234567890"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts GitHub app token"
|
||||
When call validate_input_python "php-composer" "token" "ghs_abcdefghijklmnopqrstuvwxyz1234567890"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects invalid token format"
|
||||
When call validate_input_python "php-composer" "token" "invalid-token"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects empty token"
|
||||
When call validate_input_python "php-composer" "token" ""
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating max-retries input"
|
||||
It "accepts valid retry count"
|
||||
When call validate_input_python "php-composer" "max-retries" "3"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts minimum retries"
|
||||
When call validate_input_python "php-composer" "max-retries" "1"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts maximum retries"
|
||||
When call validate_input_python "php-composer" "max-retries" "10"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects zero retries"
|
||||
When call validate_input_python "php-composer" "max-retries" "0"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects too many retries"
|
||||
When call validate_input_python "php-composer" "max-retries" "11"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects non-numeric retries"
|
||||
When call validate_input_python "php-composer" "max-retries" "many"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects negative retries"
|
||||
When call validate_input_python "php-composer" "max-retries" "-1"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating args input"
|
||||
It "accepts valid Composer arguments"
|
||||
When call validate_input_python "php-composer" "args" "--no-progress --prefer-dist"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects empty args"
|
||||
When call validate_input_python "php-composer" "args" ""
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects args with command injection"
|
||||
When call validate_input_python "php-composer" "args" "--no-progress; rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects args with pipe"
|
||||
When call validate_input_python "php-composer" "args" "--no-progress | cat /etc/passwd"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when checking action.yml structure"
|
||||
It "has valid YAML syntax"
|
||||
When call validate_action_yml_quiet "$ACTION_FILE"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "has correct action name"
|
||||
name=$(get_action_name "$ACTION_FILE")
|
||||
When call echo "$name"
|
||||
The output should equal "Run Composer Install"
|
||||
End
|
||||
|
||||
It "defines expected inputs"
|
||||
When call get_action_inputs "$ACTION_FILE"
|
||||
The output should include "php"
|
||||
The output should include "extensions"
|
||||
The output should include "tools"
|
||||
The output should include "args"
|
||||
The output should include "composer-version"
|
||||
The output should include "stability"
|
||||
The output should include "cache-directories"
|
||||
The output should include "token"
|
||||
The output should include "max-retries"
|
||||
End
|
||||
|
||||
It "defines expected outputs"
|
||||
When call get_action_outputs "$ACTION_FILE"
|
||||
The output should include "lock"
|
||||
The output should include "php-version"
|
||||
The output should include "composer-version"
|
||||
The output should include "cache-hit"
|
||||
End
|
||||
End
|
||||
|
||||
Context "when testing input requirements"
|
||||
It "requires php input"
|
||||
When call uv run "_tests/shared/validation_core.py" --property "$ACTION_FILE" "php" "required"
|
||||
The output should equal "required"
|
||||
End
|
||||
|
||||
It "has extensions as optional input"
|
||||
When call uv run "_tests/shared/validation_core.py" --property "$ACTION_FILE" "extensions" "optional"
|
||||
The output should equal "optional"
|
||||
End
|
||||
End
|
||||
|
||||
Context "when testing security validations"
|
||||
It "validates against path traversal in cache directories"
|
||||
When call validate_input_python "php-composer" "cache-directories" "../../etc/passwd"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "validates against shell metacharacters in tools"
|
||||
When call validate_input_python "php-composer" "tools" "composer && rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "validates against backtick injection in args"
|
||||
When call validate_input_python "php-composer" "args" "--no-progress \`whoami\`"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "validates against variable expansion in extensions"
|
||||
When call validate_input_python "php-composer" "extensions" "mbstring,\${HOME}"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when testing PHP-specific validations"
|
||||
It "validates PHP version boundaries"
|
||||
When call validate_input_python "php-composer" "php" "10.0"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "validates Composer version enum restriction"
|
||||
When call validate_input_python "php-composer" "composer-version" "0"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "validates stability enum values"
|
||||
When call validate_input_python "php-composer" "stability" "experimental"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
End
|
||||
@@ -1,280 +0,0 @@
|
||||
#!/usr/bin/env shellspec
|
||||
# Unit tests for php-laravel-phpunit action validation and logic
|
||||
|
||||
# Framework is automatically loaded via spec_helper.sh
|
||||
|
||||
Describe "php-laravel-phpunit action"
|
||||
ACTION_DIR="php-laravel-phpunit"
|
||||
ACTION_FILE="$ACTION_DIR/action.yml"
|
||||
|
||||
Context "when validating php-version input"
|
||||
It "accepts latest"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version" "latest"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts valid PHP version"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version" "8.4"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts PHP version with patch"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version" "8.4.1"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts PHP 7.4"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version" "7.4"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts PHP 8.0"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version" "8.0"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects invalid version format"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version" "php8.4"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects version with command injection"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version" "8.4; rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "accepts empty version (uses default)"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version" ""
|
||||
The status should be success
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating php-version-file input"
|
||||
It "accepts valid PHP version file"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version-file" ".php-version"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts custom version file"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version-file" "custom-php-version"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts version file with path"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version-file" "config/.php-version"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects path traversal in version file"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version-file" "../../../etc/passwd"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects absolute path in version file"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version-file" "/etc/passwd"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects version file with command injection"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version-file" ".php-version; rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "accepts empty version file"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version-file" ""
|
||||
The status should be success
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating extensions input"
|
||||
It "accepts valid PHP extensions"
|
||||
When call validate_input_python "php-laravel-phpunit" "extensions" "mbstring, intl, json"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts single extension"
|
||||
When call validate_input_python "php-laravel-phpunit" "extensions" "mbstring"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts extensions without spaces"
|
||||
When call validate_input_python "php-laravel-phpunit" "extensions" "mbstring,intl,json"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts extensions with underscores"
|
||||
When call validate_input_python "php-laravel-phpunit" "extensions" "pdo_sqlite, pdo_mysql"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts extensions with numbers"
|
||||
When call validate_input_python "php-laravel-phpunit" "extensions" "sqlite3, gd2"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects extensions with special characters"
|
||||
When call validate_input_python "php-laravel-phpunit" "extensions" "mbstring@intl"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects extensions with command injection"
|
||||
When call validate_input_python "php-laravel-phpunit" "extensions" "mbstring; rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "accepts empty extensions"
|
||||
When call validate_input_python "php-laravel-phpunit" "extensions" ""
|
||||
The status should be success
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating coverage input"
|
||||
It "accepts none coverage"
|
||||
When call validate_input_python "php-laravel-phpunit" "coverage" "none"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts xdebug coverage"
|
||||
When call validate_input_python "php-laravel-phpunit" "coverage" "xdebug"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts pcov coverage"
|
||||
When call validate_input_python "php-laravel-phpunit" "coverage" "pcov"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts xdebug3 coverage"
|
||||
When call validate_input_python "php-laravel-phpunit" "coverage" "xdebug3"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects invalid coverage driver"
|
||||
When call validate_input_python "php-laravel-phpunit" "coverage" "invalid"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects coverage with command injection"
|
||||
When call validate_input_python "php-laravel-phpunit" "coverage" "none; rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "accepts empty coverage"
|
||||
When call validate_input_python "php-laravel-phpunit" "coverage" ""
|
||||
The status should be success
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating token input"
|
||||
It "accepts GitHub token expression"
|
||||
When call validate_input_python "php-laravel-phpunit" "token" "\${{ github.token }}"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts GitHub fine-grained token"
|
||||
When call validate_input_python "php-laravel-phpunit" "token" "ghp_abcdefghijklmnopqrstuvwxyz1234567890"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts GitHub app token"
|
||||
When call validate_input_python "php-laravel-phpunit" "token" "ghs_abcdefghijklmnopqrstuvwxyz1234567890"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects invalid token format"
|
||||
When call validate_input_python "php-laravel-phpunit" "token" "invalid-token"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "accepts empty token"
|
||||
When call validate_input_python "php-laravel-phpunit" "token" ""
|
||||
The status should be success
|
||||
End
|
||||
End
|
||||
|
||||
Context "when checking action.yml structure"
|
||||
It "has valid YAML syntax"
|
||||
When call validate_action_yml_quiet "$ACTION_FILE"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "has correct action name"
|
||||
name=$(get_action_name "$ACTION_FILE")
|
||||
When call echo "$name"
|
||||
The output should equal "Laravel Setup and Composer test"
|
||||
End
|
||||
|
||||
It "defines expected inputs"
|
||||
When call get_action_inputs "$ACTION_FILE"
|
||||
The output should include "php-version"
|
||||
The output should include "php-version-file"
|
||||
The output should include "extensions"
|
||||
The output should include "coverage"
|
||||
The output should include "token"
|
||||
End
|
||||
|
||||
It "defines expected outputs"
|
||||
When call get_action_outputs "$ACTION_FILE"
|
||||
The output should include "php-version"
|
||||
The output should include "php-version-file"
|
||||
The output should include "extensions"
|
||||
The output should include "coverage"
|
||||
End
|
||||
End
|
||||
|
||||
Context "when testing input requirements"
|
||||
It "has all inputs as optional"
|
||||
When call uv run "_tests/shared/validation_core.py" --property "$ACTION_FILE" "" "all_optional"
|
||||
The output should equal "none"
|
||||
End
|
||||
|
||||
It "has correct default php-version"
|
||||
When call uv run "_tests/shared/validation_core.py" --property "$ACTION_FILE" "php-version" "default"
|
||||
The output should equal "latest"
|
||||
End
|
||||
|
||||
It "has correct default php-version-file"
|
||||
When call uv run "_tests/shared/validation_core.py" --property "$ACTION_FILE" "php-version-file" "default"
|
||||
The output should equal ".php-version"
|
||||
End
|
||||
End
|
||||
|
||||
Context "when testing security validations"
|
||||
It "validates against path traversal in php-version-file"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version-file" "../../etc/passwd"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "validates against shell metacharacters in extensions"
|
||||
When call validate_input_python "php-laravel-phpunit" "extensions" "mbstring && rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "validates against backtick injection in coverage"
|
||||
When call validate_input_python "php-laravel-phpunit" "coverage" "none\`whoami\`"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "validates against variable expansion in php-version"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version" "8.4\${HOME}"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when testing Laravel-specific validations"
|
||||
It "validates coverage driver enum values"
|
||||
When call validate_input_python "php-laravel-phpunit" "coverage" "invalid-driver"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "validates php-version-file path safety"
|
||||
When call validate_input_python "php-laravel-phpunit" "php-version-file" "/etc/shadow"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "validates extensions format for Laravel requirements"
|
||||
When call validate_input_python "php-laravel-phpunit" "extensions" "mbstring, intl, json, pdo_sqlite, sqlite3"
|
||||
The status should be success
|
||||
End
|
||||
End
|
||||
End
|
||||
@@ -174,10 +174,10 @@ End
|
||||
|
||||
It "defines expected outputs"
|
||||
When call get_action_outputs "$ACTION_FILE"
|
||||
The output should include "test_status"
|
||||
The output should include "tests_run"
|
||||
The output should include "tests_passed"
|
||||
The output should include "coverage_path"
|
||||
The output should include "test-status"
|
||||
The output should include "tests-run"
|
||||
The output should include "tests-passed"
|
||||
The output should include "framework"
|
||||
End
|
||||
End
|
||||
|
||||
@@ -245,5 +245,214 @@ It "validates default email is secure"
|
||||
When call validate_input_python "php-tests" "email" "github-actions@github.com"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
# Helper function that replicates the PHPUnit output parsing logic from action.yml
parse_phpunit_output() {
  local phpunit_output="$1"
  local phpunit_exit_code="$2"

  local tests_run="0"
  local tests_passed="0"

  # Pattern 1: "OK (N test(s), M assertions)" - success case (handles both singular and plural)
  if echo "$phpunit_output" | grep -qE 'OK \([0-9]+ tests?,'; then
    tests_run=$(echo "$phpunit_output" | grep -oE 'OK \([0-9]+ tests?,' | grep -oE '[0-9]+' | head -1)
    tests_passed="$tests_run"
  # Pattern 2: "Tests: N" line - failure/error/skipped case
  elif echo "$phpunit_output" | grep -qE '^Tests:'; then
    tests_run=$(echo "$phpunit_output" | grep -E '^Tests:' | grep -oE '[0-9]+' | head -1)

    # Calculate passed from failures and errors
    failures=$(echo "$phpunit_output" | grep -oE 'Failures: [0-9]+' | grep -oE '[0-9]+' | head -1 || echo "0")
    errors=$(echo "$phpunit_output" | grep -oE 'Errors: [0-9]+' | grep -oE '[0-9]+' | head -1 || echo "0")
    tests_passed=$((tests_run - failures - errors))

    # Ensure non-negative
    if [ "$tests_passed" -lt 0 ]; then
      tests_passed="0"
    fi
  fi

  # Determine status
  local status
  if [ "$phpunit_exit_code" -eq 0 ]; then
    status="success"
  else
    status="failure"
  fi

  # Output as KEY=VALUE format
  echo "tests_run=$tests_run"
  echo "tests_passed=$tests_passed"
  echo "status=$status"
}
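For context, a hypothetical call site shows how the helper's KEY=VALUE output is meant to be consumed; the phpunit path and the redirection target are assumptions, not taken from action.yml:

# hypothetical usage sketch
phpunit_exit=0
phpunit_out=$(vendor/bin/phpunit 2>&1) || phpunit_exit=$?
parse_phpunit_output "$phpunit_out" "$phpunit_exit" >> "$GITHUB_OUTPUT"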
|
||||
|
||||
Context "when parsing PHPUnit output"
|
||||
# Success cases
|
||||
It "parses single successful test"
|
||||
output="OK (1 test, 2 assertions)"
|
||||
When call parse_phpunit_output "$output" 0
|
||||
The line 1 of output should equal "tests_run=1"
|
||||
The line 2 of output should equal "tests_passed=1"
|
||||
The line 3 of output should equal "status=success"
|
||||
End
|
||||
|
||||
It "parses multiple successful tests"
|
||||
output="OK (5 tests, 10 assertions)"
|
||||
When call parse_phpunit_output "$output" 0
|
||||
The line 1 of output should equal "tests_run=5"
|
||||
The line 2 of output should equal "tests_passed=5"
|
||||
The line 3 of output should equal "status=success"
|
||||
End
|
||||
|
||||
It "parses successful tests with plural form"
|
||||
output="OK (25 tests, 50 assertions)"
|
||||
When call parse_phpunit_output "$output" 0
|
||||
The line 1 of output should equal "tests_run=25"
|
||||
The line 2 of output should equal "tests_passed=25"
|
||||
The line 3 of output should equal "status=success"
|
||||
End
|
||||
|
||||
# Failure cases
|
||||
It "parses test failures"
|
||||
output="FAILURES!
|
||||
Tests: 5, Assertions: 10, Failures: 2."
|
||||
When call parse_phpunit_output "$output" 1
|
||||
The line 1 of output should equal "tests_run=5"
|
||||
The line 2 of output should equal "tests_passed=3"
|
||||
The line 3 of output should equal "status=failure"
|
||||
End
|
||||
|
||||
It "parses test errors"
|
||||
output="ERRORS!
|
||||
Tests: 5, Assertions: 10, Errors: 1."
|
||||
When call parse_phpunit_output "$output" 2
|
||||
The line 1 of output should equal "tests_run=5"
|
||||
The line 2 of output should equal "tests_passed=4"
|
||||
The line 3 of output should equal "status=failure"
|
||||
End
|
||||
|
||||
It "parses mixed failures and errors"
|
||||
output="FAILURES!
|
||||
Tests: 10, Assertions: 20, Failures: 2, Errors: 1."
|
||||
When call parse_phpunit_output "$output" 1
|
||||
The line 1 of output should equal "tests_run=10"
|
||||
The line 2 of output should equal "tests_passed=7"
|
||||
The line 3 of output should equal "status=failure"
|
||||
End
|
||||
|
||||
It "handles all tests failing"
|
||||
output="FAILURES!
|
||||
Tests: 5, Assertions: 10, Failures: 5."
|
||||
When call parse_phpunit_output "$output" 1
|
||||
The line 1 of output should equal "tests_run=5"
|
||||
The line 2 of output should equal "tests_passed=0"
|
||||
The line 3 of output should equal "status=failure"
|
||||
End
|
||||
|
||||
It "prevents negative passed count"
|
||||
output="ERRORS!
|
||||
Tests: 2, Assertions: 4, Failures: 1, Errors: 2."
|
||||
When call parse_phpunit_output "$output" 2
|
||||
The line 1 of output should equal "tests_run=2"
|
||||
The line 2 of output should equal "tests_passed=0"
|
||||
The line 3 of output should equal "status=failure"
|
||||
End
|
||||
|
||||
# Skipped tests
|
||||
It "parses skipped tests with success"
|
||||
output="OK, but some tests were skipped!
|
||||
Tests: 5, Assertions: 8, Skipped: 2."
|
||||
When call parse_phpunit_output "$output" 0
|
||||
The line 1 of output should equal "tests_run=5"
|
||||
The line 2 of output should equal "tests_passed=5"
|
||||
The line 3 of output should equal "status=success"
|
||||
End
|
||||
|
||||
# Edge cases
|
||||
It "handles no parseable output (fallback)"
|
||||
output="Some random output without test info"
|
||||
When call parse_phpunit_output "$output" 1
|
||||
The line 1 of output should equal "tests_run=0"
|
||||
The line 2 of output should equal "tests_passed=0"
|
||||
The line 3 of output should equal "status=failure"
|
||||
End
|
||||
|
||||
It "handles empty output"
|
||||
output=""
|
||||
When call parse_phpunit_output "$output" 0
|
||||
The line 1 of output should equal "tests_run=0"
|
||||
The line 2 of output should equal "tests_passed=0"
|
||||
The line 3 of output should equal "status=success"
|
||||
End
|
||||
|
||||
It "handles PHPUnit 10+ format with singular test"
|
||||
output="OK (1 test, 3 assertions)"
|
||||
When call parse_phpunit_output "$output" 0
|
||||
The line 1 of output should equal "tests_run=1"
|
||||
The line 2 of output should equal "tests_passed=1"
|
||||
The line 3 of output should equal "status=success"
|
||||
End
|
||||
|
||||
It "handles verbose output with noise"
|
||||
output="PHPUnit 10.5.0 by Sebastian Bergmann and contributors.
|
||||
Runtime: PHP 8.3.0
|
||||
|
||||
..... 5 / 5 (100%)
|
||||
|
||||
Time: 00:00.123, Memory: 10.00 MB
|
||||
|
||||
OK (5 tests, 10 assertions)"
|
||||
When call parse_phpunit_output "$output" 0
|
||||
The line 1 of output should equal "tests_run=5"
|
||||
The line 2 of output should equal "tests_passed=5"
|
||||
The line 3 of output should equal "status=success"
|
||||
End
|
||||
|
||||
It "handles failure output with full details"
|
||||
output="PHPUnit 10.5.0 by Sebastian Bergmann and contributors.
|
||||
|
||||
..F.. 5 / 5 (100%)
|
||||
|
||||
Time: 00:00.234, Memory: 12.00 MB
|
||||
|
||||
FAILURES!
|
||||
Tests: 5, Assertions: 10, Failures: 1."
|
||||
When call parse_phpunit_output "$output" 1
|
||||
The line 1 of output should equal "tests_run=5"
|
||||
The line 2 of output should equal "tests_passed=4"
|
||||
The line 3 of output should equal "status=failure"
|
||||
End
|
||||
|
||||
# Status determination tests
|
||||
It "marks as success when exit code is 0"
|
||||
output="OK (3 tests, 6 assertions)"
|
||||
When call parse_phpunit_output "$output" 0
|
||||
The line 3 of output should equal "status=success"
|
||||
End
|
||||
|
||||
It "marks as failure when exit code is non-zero"
|
||||
output="OK (3 tests, 6 assertions)"
|
||||
When call parse_phpunit_output "$output" 1
|
||||
The line 3 of output should equal "status=failure"
|
||||
End
|
||||
|
||||
It "handles skipped tests without OK prefix"
|
||||
output="Tests: 5, Assertions: 8, Skipped: 2."
|
||||
When call parse_phpunit_output "$output" 0
|
||||
The line 1 of output should equal "tests_run=5"
|
||||
The line 2 of output should equal "tests_passed=5"
|
||||
The line 3 of output should equal "status=success"
|
||||
End
|
||||
|
||||
It "handles risky tests output"
|
||||
output="FAILURES!
|
||||
Tests: 8, Assertions: 15, Failures: 1, Risky: 2."
|
||||
When call parse_phpunit_output "$output" 1
|
||||
The line 1 of output should equal "tests_run=8"
|
||||
The line 2 of output should equal "tests_passed=7"
|
||||
The line 3 of output should equal "status=failure"
|
||||
End
|
||||
End
|
||||
End
|
||||
End
|
||||
|
||||
116
_tests/unit/security-scan/validation.spec.sh
Executable file
@@ -0,0 +1,116 @@
|
||||
#!/usr/bin/env shellspec
|
||||
# Unit tests for security-scan action validation and logic
|
||||
# Framework is automatically loaded via spec_helper.sh
|
||||
|
||||
Describe "security-scan action"
|
||||
ACTION_DIR="security-scan"
|
||||
ACTION_FILE="$ACTION_DIR/action.yml"
|
||||
|
||||
Context "when validating token input"
|
||||
It "accepts valid GitHub token"
|
||||
When call validate_input_python "security-scan" "token" "ghp_123456789012345678901234567890123456"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects injection in token"
|
||||
When call validate_input_python "security-scan" "token" "token; rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "accepts empty token (optional)"
|
||||
When call validate_input_python "security-scan" "token" ""
|
||||
The status should be success
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating actionlint-enabled input"
|
||||
It "accepts true value"
|
||||
When call validate_input_python "security-scan" "actionlint-enabled" "true"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts false value"
|
||||
When call validate_input_python "security-scan" "actionlint-enabled" "false"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects non-boolean value"
|
||||
When call validate_input_python "security-scan" "actionlint-enabled" "maybe"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when checking action.yml structure"
|
||||
It "has valid YAML syntax"
|
||||
When call validate_action_yml_quiet "$ACTION_FILE"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "has correct action name"
|
||||
name=$(get_action_name "$ACTION_FILE")
|
||||
When call echo "$name"
|
||||
The output should equal "Security Scan"
|
||||
End
|
||||
|
||||
It "defines all expected inputs"
|
||||
inputs=$(get_action_inputs "$ACTION_FILE")
|
||||
When call echo "$inputs"
|
||||
The output should include "gitleaks-license"
|
||||
The output should include "gitleaks-config"
|
||||
The output should include "trivy-severity"
|
||||
The output should include "trivy-scanners"
|
||||
The output should include "trivy-timeout"
|
||||
The output should include "actionlint-enabled"
|
||||
The output should include "token"
|
||||
End
|
||||
|
||||
It "defines all expected outputs"
|
||||
outputs=$(get_action_outputs "$ACTION_FILE")
|
||||
When call echo "$outputs"
|
||||
The output should include "has_trivy_results"
|
||||
The output should include "has_gitleaks_results"
|
||||
The output should include "total_issues"
|
||||
The output should include "critical_issues"
|
||||
End
|
||||
|
||||
It "uses composite run type"
|
||||
run_type=$(get_action_runs_using "$ACTION_FILE")
|
||||
When call echo "$run_type"
|
||||
The output should equal "composite"
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating inputs per conventions"
|
||||
It "validates token against github_token convention"
|
||||
When call validate_input_python "security-scan" "token" "ghp_123456789012345678901234567890123456"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "validates actionlint-enabled as boolean"
|
||||
When call validate_input_python "security-scan" "actionlint-enabled" "true"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects invalid boolean for actionlint-enabled"
|
||||
When call validate_input_python "security-scan" "actionlint-enabled" "1"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when testing optional inputs"
|
||||
It "accepts empty gitleaks-license"
|
||||
When call validate_input_python "security-scan" "gitleaks-license" ""
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts empty token"
|
||||
When call validate_input_python "security-scan" "token" ""
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "accepts valid gitleaks-license value"
|
||||
When call validate_input_python "security-scan" "gitleaks-license" "license-key-123"
|
||||
The status should be success
|
||||
End
|
||||
End
|
||||
End
|
||||
@@ -92,10 +92,6 @@ setup_default_inputs() {
|
||||
"go-build" | "go-lint")
|
||||
[[ "$input_name" != "go-version" ]] && export INPUT_GO_VERSION="1.21"
|
||||
;;
|
||||
"common-cache")
|
||||
[[ "$input_name" != "type" ]] && export INPUT_TYPE="npm"
|
||||
[[ "$input_name" != "paths" ]] && export INPUT_PATHS="node_modules"
|
||||
;;
|
||||
"common-retry")
|
||||
[[ "$input_name" != "command" ]] && export INPUT_COMMAND="echo test"
|
||||
;;
|
||||
@@ -114,11 +110,6 @@ setup_default_inputs() {
|
||||
"validate-inputs")
|
||||
[[ "$input_name" != "action-type" && "$input_name" != "action" && "$input_name" != "rules-file" && "$input_name" != "fail-on-error" ]] && export INPUT_ACTION_TYPE="test-action"
|
||||
;;
|
||||
"version-file-parser")
|
||||
[[ "$input_name" != "language" ]] && export INPUT_LANGUAGE="node"
|
||||
[[ "$input_name" != "tool-versions-key" ]] && export INPUT_TOOL_VERSIONS_KEY="nodejs"
|
||||
[[ "$input_name" != "dockerfile-image" ]] && export INPUT_DOCKERFILE_IMAGE="node"
|
||||
;;
|
||||
"codeql-analysis")
|
||||
[[ "$input_name" != "language" ]] && export INPUT_LANGUAGE="javascript"
|
||||
[[ "$input_name" != "token" ]] && export INPUT_TOKEN="ghp_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
|
||||
@@ -163,10 +154,6 @@ cleanup_default_inputs() {
|
||||
"go-build" | "go-lint")
|
||||
[[ "$input_name" != "go-version" ]] && unset INPUT_GO_VERSION
|
||||
;;
|
||||
"common-cache")
|
||||
[[ "$input_name" != "type" ]] && unset INPUT_TYPE
|
||||
[[ "$input_name" != "paths" ]] && unset INPUT_PATHS
|
||||
;;
|
||||
"common-retry")
|
||||
[[ "$input_name" != "command" ]] && unset INPUT_COMMAND
|
||||
;;
|
||||
@@ -185,11 +172,6 @@ cleanup_default_inputs() {
|
||||
"validate-inputs")
|
||||
[[ "$input_name" != "action-type" && "$input_name" != "action" && "$input_name" != "rules-file" && "$input_name" != "fail-on-error" ]] && unset INPUT_ACTION_TYPE
|
||||
;;
|
||||
"version-file-parser")
|
||||
[[ "$input_name" != "language" ]] && unset INPUT_LANGUAGE
|
||||
[[ "$input_name" != "tool-versions-key" ]] && unset INPUT_TOOL_VERSIONS_KEY
|
||||
[[ "$input_name" != "dockerfile-image" ]] && unset INPUT_DOCKERFILE_IMAGE
|
||||
;;
|
||||
"codeql-analysis")
|
||||
[[ "$input_name" != "language" ]] && unset INPUT_LANGUAGE
|
||||
[[ "$input_name" != "token" ]] && unset INPUT_TOKEN
|
||||
@@ -244,10 +226,6 @@ shellspec_mock_action_run() {
|
||||
action_name=$(basename "$action_dir")
|
||||
|
||||
case "$action_name" in
|
||||
"version-file-parser")
|
||||
echo "detected-version=1.0.0" >>"$GITHUB_OUTPUT"
|
||||
echo "package-manager=npm" >>"$GITHUB_OUTPUT"
|
||||
;;
|
||||
"node-setup")
|
||||
echo "node-version=18.0.0" >>"$GITHUB_OUTPUT"
|
||||
echo "package-manager=npm" >>"$GITHUB_OUTPUT"
|
||||
@@ -258,11 +236,6 @@ shellspec_mock_action_run() {
|
||||
echo "build-time=45" >>"$GITHUB_OUTPUT"
|
||||
echo "platforms=linux/amd64" >>"$GITHUB_OUTPUT"
|
||||
;;
|
||||
"common-cache")
|
||||
echo "cache-hit=true" >>"$GITHUB_OUTPUT"
|
||||
echo "cache-key=Linux-npm-abc123" >>"$GITHUB_OUTPUT"
|
||||
echo "cache-paths=node_modules" >>"$GITHUB_OUTPUT"
|
||||
;;
|
||||
"common-file-check")
|
||||
echo "found=true" >>"$GITHUB_OUTPUT"
|
||||
;;
|
||||
|
||||
@@ -1,125 +0,0 @@
|
||||
#!/usr/bin/env shellspec
|
||||
# Unit tests for version-file-parser action validation and logic
|
||||
# Framework is automatically loaded via spec_helper.sh
|
||||
|
||||
Describe "version-file-parser action"
|
||||
ACTION_DIR="version-file-parser"
|
||||
ACTION_FILE="$ACTION_DIR/action.yml"
|
||||
|
||||
Context "when validating language input"
|
||||
It "accepts valid language input"
|
||||
When call validate_input_python "version-file-parser" "language" "node"
|
||||
The status should be success
|
||||
End
|
||||
It "accepts php language"
|
||||
When call validate_input_python "version-file-parser" "language" "php"
|
||||
The status should be success
|
||||
End
|
||||
It "accepts python language"
|
||||
When call validate_input_python "version-file-parser" "language" "python"
|
||||
The status should be success
|
||||
End
|
||||
It "accepts go language"
|
||||
When call validate_input_python "version-file-parser" "language" "go"
|
||||
The status should be success
|
||||
End
|
||||
It "rejects invalid language with special characters"
|
||||
When call validate_input_python "version-file-parser" "language" "node; rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
It "rejects empty required inputs"
|
||||
When call validate_input_python "version-file-parser" "language" ""
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating dockerfile-image input"
|
||||
It "accepts valid dockerfile image"
|
||||
When call validate_input_python "version-file-parser" "dockerfile-image" "node"
|
||||
The status should be success
|
||||
End
|
||||
It "accepts php dockerfile image"
|
||||
When call validate_input_python "version-file-parser" "dockerfile-image" "php"
|
||||
The status should be success
|
||||
End
|
||||
It "accepts python dockerfile image"
|
||||
When call validate_input_python "version-file-parser" "dockerfile-image" "python"
|
||||
The status should be success
|
||||
End
|
||||
It "rejects injection in dockerfile image"
|
||||
When call validate_input_python "version-file-parser" "dockerfile-image" "node;malicious"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating optional inputs"
|
||||
It "accepts valid validation regex"
|
||||
When call validate_input_python "version-file-parser" "validation-regex" "^[0-9]+\.[0-9]+(\.[0-9]+)?$"
|
||||
The status should be success
|
||||
End
|
||||
It "accepts valid default version"
|
||||
When call validate_input_python "version-file-parser" "default-version" "18.0.0"
|
||||
The status should be success
|
||||
End
|
||||
It "accepts tool versions key"
|
||||
When call validate_input_python "version-file-parser" "tool-versions-key" "nodejs"
|
||||
The status should be success
|
||||
End
|
||||
End
|
||||
|
||||
Context "when checking action.yml structure"
|
||||
It "has valid YAML syntax"
|
||||
When call validate_action_yml_quiet "$ACTION_FILE"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "contains required metadata"
|
||||
When call get_action_name "$ACTION_FILE"
|
||||
The output should equal "Version File Parser"
|
||||
End
|
||||
|
||||
It "defines expected inputs"
|
||||
When call get_action_inputs "$ACTION_FILE"
|
||||
The output should include "language"
|
||||
The output should include "tool-versions-key"
|
||||
The output should include "dockerfile-image"
|
||||
End
|
||||
|
||||
It "defines expected outputs"
|
||||
When call get_action_outputs "$ACTION_FILE"
|
||||
The output should include "detected-version"
|
||||
The output should include "package-manager"
|
||||
End
|
||||
End
|
||||
|
||||
Context "when validating security"
|
||||
It "rejects injection in language parameter"
|
||||
When call validate_input_python "version-file-parser" "language" "node&&malicious"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "rejects pipe injection in tool versions key"
|
||||
When call validate_input_python "version-file-parser" "tool-versions-key" "nodejs|dangerous"
|
||||
The status should be failure
|
||||
End
|
||||
|
||||
It "validates regex patterns safely"
|
||||
When call validate_input_python "version-file-parser" "validation-regex" "^[0-9]+\.[0-9]+$"
|
||||
The status should be success
|
||||
End
|
||||
|
||||
It "rejects malicious regex patterns"
|
||||
When call validate_input_python "version-file-parser" "validation-regex" ".*; rm -rf /"
|
||||
The status should be failure
|
||||
End
|
||||
End
|
||||
|
||||
Context "when testing outputs"
|
||||
It "produces all expected outputs consistently"
|
||||
When call test_action_outputs "$ACTION_DIR" "language" "node" "dockerfile-image" "node"
|
||||
The status should be success
|
||||
The stderr should include "Testing action outputs for: version-file-parser"
|
||||
The stderr should include "Output test passed for: version-file-parser"
|
||||
End
|
||||
End
|
||||
End
|
||||
@@ -76,11 +76,7 @@ if ! git diff --quiet; then
|
||||
git commit -m "chore: bump major version from $OLD_VERSION to $NEW_VERSION
|
||||
|
||||
This commit updates all internal action references from $OLD_VERSION
|
||||
to $NEW_VERSION.
|
||||
|
||||
🤖 Generated with [Claude Code](https://claude.com/claude-code)
|
||||
|
||||
Co-Authored-By: Claude <noreply@anthropic.com>"
|
||||
to $NEW_VERSION."
|
||||
|
||||
printf '%b' "${GREEN}✅ Committed version bump${NC}\n"
|
||||
else
|
||||
|
||||
@@ -95,7 +95,7 @@ runs:
|
||||
find . -maxdepth 2 -name "action.yml" -path "*/action.yml" ! -path "./_*" ! -path "./.github/*" -exec grep -h "uses: ivuorinen/actions/" {} \; > "$temp_file"
|
||||
|
||||
while IFS= read -r line; do
|
||||
current_sha=$(echo "$line" | grep -oE '@[a-f0-9]{40}' | sed 's/@//')
|
||||
current_sha=$(printf '%s' "$line" | grep -oE '@[a-f0-9]{40}' | sed 's/@//')
|
||||
|
||||
if [ "$current_sha" != "$TAG_SHA" ]; then
|
||||
echo "Found outdated reference: $current_sha (should be $TAG_SHA)"
|
||||
@@ -153,11 +153,7 @@ runs:
|
||||
git commit -m "chore: update action references to $MAJOR_VERSION ($TAG_SHA)" \
|
||||
-m "" \
|
||||
-m "This commit updates all internal action references to point to the latest" \
|
||||
-m "$MAJOR_VERSION tag SHA." \
|
||||
-m "" \
|
||||
-m "🤖 Generated with [Claude Code](https://claude.com/claude-code)" \
|
||||
-m "" \
|
||||
-m "Co-Authored-By: Claude <noreply@anthropic.com>"
|
||||
-m "$MAJOR_VERSION tag SHA."
|
||||
|
||||
commit_sha=$(git rev-parse HEAD)
|
||||
printf '%s\n' "sha=$commit_sha" >> "$GITHUB_OUTPUT"
|
||||
|
||||
@@ -45,7 +45,7 @@ runs:
|
||||
steps:
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
uses: ivuorinen/actions/validate-inputs@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
|
||||
with:
|
||||
action-type: 'ansible-lint-fix'
|
||||
token: ${{ inputs.token }}
|
||||
@@ -73,20 +73,17 @@ runs:
|
||||
with:
|
||||
token: ${{ inputs.token || github.token }}
|
||||
|
||||
- name: Cache Python Dependencies
|
||||
- name: Setup Python
|
||||
if: steps.check-files.outputs.files_found == 'true'
|
||||
id: cache-pip
|
||||
uses: ivuorinen/actions/common-cache@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
type: 'pip'
|
||||
paths: '~/.cache/pip'
|
||||
key-files: 'requirements*.txt,pyproject.toml,setup.py,setup.cfg'
|
||||
key-prefix: 'ansible-lint-fix'
|
||||
python-version: '3.14'
|
||||
cache: 'pip'
|
||||
|
||||
- name: Install ansible-lint
|
||||
id: install-ansible-lint
|
||||
if: steps.check-files.outputs.files_found == 'true'
|
||||
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3
|
||||
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3.0.4
|
||||
with:
|
||||
timeout_minutes: 5
|
||||
max_attempts: ${{ inputs.max-retries }}
|
||||
@@ -125,7 +122,7 @@ runs:
|
||||
|
||||
- name: Commit Fixes
|
||||
if: steps.check-files.outputs.files_found == 'true'
|
||||
uses: stefanzweifel/git-auto-commit-action@be7095c202abcf573b09f20541e0ee2f6a3a9d9b # v5.0.1
|
||||
uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
|
||||
with:
|
||||
commit_message: 'style: apply ansible lint fixes'
|
||||
commit_user_name: ${{ inputs.username }}
|
||||
@@ -133,6 +130,6 @@ runs:
|
||||
|
||||
- name: Upload SARIF Report
|
||||
if: steps.check-files.outputs.files_found == 'true'
|
||||
uses: github/codeql-action/upload-sarif@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
|
||||
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
with:
|
||||
sarif_file: ansible-lint.sarif
|
||||
|
||||
@@ -56,7 +56,7 @@ runs:
|
||||
steps:
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
MODE: ${{ inputs.mode }}
|
||||
GITHUB_TOKEN: ${{ inputs.token }}
|
||||
@@ -65,7 +65,7 @@ runs:
|
||||
MAX_RETRIES: ${{ inputs.max-retries }}
|
||||
FAIL_ON_ERROR: ${{ inputs.fail-on-error }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
# Validate mode
|
||||
case "$MODE" in
|
||||
@@ -79,16 +79,26 @@ runs:
|
||||
esac
|
||||
|
||||
# Validate GitHub token presence if provided
|
||||
if [[ -n "$GITHUB_TOKEN" ]] && ! [[ "$GITHUB_TOKEN" =~ ^\$\{\{ ]]; then
|
||||
echo "Using provided GitHub token"
|
||||
if [ -n "$GITHUB_TOKEN" ]; then
|
||||
case "$GITHUB_TOKEN" in
|
||||
\$\{\{*)
|
||||
# Token is a GitHub Actions expression, skip validation
|
||||
;;
|
||||
*)
|
||||
echo "Using provided GitHub token"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Validate email format (basic check) - required for fix mode
|
||||
if [ "$MODE" = "fix" ]; then
|
||||
if [[ "$EMAIL" != *"@"* ]] || [[ "$EMAIL" != *"."* ]]; then
|
||||
echo "::error::Invalid email format: '$EMAIL'. Expected valid email address"
|
||||
exit 1
|
||||
fi
|
||||
case "$EMAIL" in
|
||||
*@*.*) ;;
|
||||
*)
|
||||
echo "::error::Invalid email format: '$EMAIL'. Expected valid email address"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Validate username format (GitHub canonical rules)
|
||||
username="$USERNAME"
|
||||
@@ -100,32 +110,45 @@ runs:
|
||||
fi
|
||||
|
||||
# Check allowed characters (letters, digits, hyphens only)
|
||||
if ! [[ "$username" =~ ^[a-zA-Z0-9-]+$ ]]; then
|
||||
echo "::error::Invalid username characters in '$username'. Only letters, digits, and hyphens allowed"
|
||||
exit 1
|
||||
fi
|
||||
case "$username" in
|
||||
*[!a-zA-Z0-9-]*)
|
||||
echo "::error::Invalid username characters in '$username'. Only letters, digits, and hyphens allowed"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Check doesn't start or end with hyphen
|
||||
if [[ "$username" == -* ]] || [[ "$username" == *- ]]; then
|
||||
echo "::error::Invalid username '$username'. Cannot start or end with hyphen"
|
||||
exit 1
|
||||
fi
|
||||
case "$username" in
|
||||
-*|*-)
|
||||
echo "::error::Invalid username '$username'. Cannot start or end with hyphen"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Check no consecutive hyphens
|
||||
if [[ "$username" == *--* ]]; then
|
||||
echo "::error::Invalid username '$username'. Consecutive hyphens not allowed"
|
||||
exit 1
|
||||
fi
|
||||
case "$username" in
|
||||
*--*)
|
||||
echo "::error::Invalid username '$username'. Consecutive hyphens not allowed"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Validate max retries (positive integer with reasonable upper limit)
|
||||
if ! [[ "$MAX_RETRIES" =~ ^[0-9]+$ ]] || [ "$MAX_RETRIES" -le 0 ] || [ "$MAX_RETRIES" -gt 10 ]; then
|
||||
case "$MAX_RETRIES" in
|
||||
''|*[!0-9]*)
|
||||
echo "::error::Invalid max-retries: '$MAX_RETRIES'. Must be a positive integer between 1 and 10"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ "$MAX_RETRIES" -le 0 ] || [ "$MAX_RETRIES" -gt 10 ]; then
|
||||
echo "::error::Invalid max-retries: '$MAX_RETRIES'. Must be a positive integer between 1 and 10"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate fail-on-error (boolean)
|
||||
if [[ "$FAIL_ON_ERROR" != "true" ]] && [[ "$FAIL_ON_ERROR" != "false" ]]; then
|
||||
if [ "$FAIL_ON_ERROR" != "true" ] && [ "$FAIL_ON_ERROR" != "false" ]; then
|
||||
echo "::error::Invalid fail-on-error value: '$FAIL_ON_ERROR'. Must be 'true' or 'false'"
|
||||
exit 1
|
||||
fi
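The conversion above repeats the same case-pattern idiom for each username rule; as a sketch (not part of this change set), the allowed-characters, hyphen-position, and consecutive-hyphen rules collapse into a single POSIX-sh pattern, with the length check kept separate as above:

# sketch only: consolidated username rules, length still checked separately
validate_username() {
  case "$1" in
    ''|*[!a-zA-Z0-9-]*|-*|*-|*--*) return 1 ;;
  esac
  return 0
}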
|
||||
@@ -137,26 +160,79 @@ runs:
|
||||
with:
|
||||
token: ${{ inputs.token || github.token }}
|
||||
|
||||
- name: Node Setup
|
||||
id: node-setup
|
||||
uses: ivuorinen/actions/node-setup@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
- name: Detect Package Manager
|
||||
id: detect-pm
|
||||
shell: sh
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
# Detect package manager from lockfiles
|
||||
if [ -f bun.lockb ]; then
|
||||
package_manager="bun"
|
||||
elif [ -f pnpm-lock.yaml ]; then
|
||||
package_manager="pnpm"
|
||||
elif [ -f yarn.lock ]; then
|
||||
package_manager="yarn"
|
||||
else
|
||||
package_manager="npm"
|
||||
fi
|
||||
|
||||
printf 'package-manager=%s\n' "$package_manager" >> "$GITHUB_OUTPUT"
|
||||
echo "Detected package manager: $package_manager"
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
|
||||
with:
|
||||
node-version: '24'
|
||||
|
||||
- name: Enable Corepack
|
||||
shell: sh
|
||||
run: |
|
||||
set -eu
|
||||
corepack enable
|
||||
|
||||
- name: Install Package Manager
|
||||
shell: sh
|
||||
env:
|
||||
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
case "$PACKAGE_MANAGER" in
|
||||
pnpm)
|
||||
corepack prepare pnpm@latest --activate
|
||||
;;
|
||||
yarn)
|
||||
corepack prepare yarn@stable --activate
|
||||
;;
|
||||
bun|npm)
|
||||
# Bun installed separately, npm built-in
|
||||
;;
|
||||
esac
|
||||
|
||||
- name: Setup Bun
|
||||
if: steps.detect-pm.outputs.package-manager == 'bun'
|
||||
uses: oven-sh/setup-bun@b7a1c7ccf290d58743029c4f6903da283811b979 # v2.1.0
|
||||
with:
|
||||
bun-version: latest
|
||||
|
||||
- name: Cache Node Dependencies
|
||||
id: cache
|
||||
uses: ivuorinen/actions/common-cache@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
type: 'npm'
|
||||
paths: 'node_modules'
|
||||
key-files: 'package-lock.json,yarn.lock,pnpm-lock.yaml,bun.lockb'
|
||||
key-prefix: 'biome-lint-${{ inputs.mode }}-${{ steps.node-setup.outputs.package-manager }}'
|
||||
path: node_modules
|
||||
key: ${{ runner.os }}-biome-lint-${{ inputs.mode }}-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-biome-lint-${{ inputs.mode }}-${{ steps.detect-pm.outputs.package-manager }}-
|
||||
${{ runner.os }}-biome-lint-${{ inputs.mode }}-
|
||||
|
||||
- name: Install Biome
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
PACKAGE_MANAGER: ${{ steps.node-setup.outputs.package-manager }}
|
||||
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
|
||||
MAX_RETRIES: ${{ inputs.max-retries }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
# Check if biome is already installed
|
||||
if command -v biome >/dev/null 2>&1; then
|
||||
@@ -208,11 +284,11 @@ runs:
|
||||
- name: Run Biome Check
|
||||
if: inputs.mode == 'check'
|
||||
id: check
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
FAIL_ON_ERROR: ${{ inputs.fail-on-error }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
echo "Running Biome check mode..."
|
||||
|
||||
@@ -255,16 +331,16 @@ runs:
|
||||
|
||||
- name: Upload SARIF Report
|
||||
if: inputs.mode == 'check' && always()
|
||||
uses: github/codeql-action/upload-sarif@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
|
||||
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
with:
|
||||
sarif_file: biome-report.sarif
|
||||
|
||||
- name: Run Biome Fix
|
||||
if: inputs.mode == 'fix'
|
||||
id: fix
|
||||
shell: bash
|
||||
shell: sh
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
echo "Running Biome fix mode..."
|
||||
|
||||
@@ -289,7 +365,7 @@ runs:
|
||||
|
||||
- name: Commit and Push Fixes
|
||||
if: inputs.mode == 'fix' && success()
|
||||
uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
|
||||
uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
|
||||
with:
|
||||
commit_message: 'style: autofix Biome violations'
|
||||
commit_user_name: ${{ inputs.username }}
|
||||
|
||||
@@ -28,7 +28,8 @@ conventions:
|
||||
mode: mode_enum
|
||||
token: github_token
|
||||
username: username
|
||||
overrides: {}
|
||||
overrides:
|
||||
mode: mode_enum
|
||||
statistics:
|
||||
total_inputs: 6
|
||||
validated_inputs: 6
|
||||
|
||||
@@ -81,21 +81,13 @@ class CustomValidator(BaseValidator):

        # Validate threads
        if inputs.get("threads"):
            result = self.codeql_validator.validate_threads(inputs["threads"])
            for error in self.codeql_validator.errors:
                if error not in self.errors:
                    self.add_error(error)
            self.codeql_validator.clear_errors()
            valid &= result
            valid &= self.validate_with(
                self.codeql_validator, "validate_threads", inputs["threads"]
            )

        # Validate RAM
        if inputs.get("ram"):
            result = self.codeql_validator.validate_ram(inputs["ram"])
            for error in self.codeql_validator.errors:
                if error not in self.errors:
                    self.add_error(error)
            self.codeql_validator.clear_errors()
            valid &= result
            valid &= self.validate_with(self.codeql_validator, "validate_ram", inputs["ram"])

        # Validate debug mode
        if inputs.get("debug"):
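These hunks replace the repeated copy-errors-then-clear boilerplate with a single `validate_with` call. The helper itself is not shown in this compare view; based on the call sites above (sub-validator, method name as a string, forwarded positional and keyword arguments), it presumably lives on `BaseValidator` in `validators/base.py` and looks roughly like this sketch:

```python
# Sketch only: inferred from the call sites in this diff, not copied from validators/base.py.
class BaseValidatorSketch:
    def __init__(self) -> None:
        self.errors: list[str] = []

    def add_error(self, message: str) -> None:
        self.errors.append(message)

    def validate_with(self, validator, method_name: str, *args, **kwargs) -> bool:
        """Run a sub-validator method and fold its errors into this validator."""
        result = getattr(validator, method_name)(*args, **kwargs)
        for error in validator.errors:  # propagate without duplicates
            if error not in self.errors:
                self.add_error(error)
        validator.clear_errors()  # keep the shared sub-validator reusable
        return bool(result)
```

Call sites keep the `valid &= ...` accumulation, so one failing input still lets the remaining inputs be validated and reported in the same run.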
@@ -226,19 +218,10 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Check for empty queries first
|
||||
if not queries or not queries.strip():
|
||||
self.add_error("CodeQL queries cannot be empty")
|
||||
return False
|
||||
|
||||
# Use the CodeQL validator
|
||||
result = self.codeql_validator.validate_codeql_queries(queries)
|
||||
# Copy any errors from codeql validator
|
||||
for error in self.codeql_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.codeql_validator.clear_errors()
|
||||
return result
|
||||
return self.validate_with(self.codeql_validator, "validate_codeql_queries", queries)
|
||||
|
||||
def validate_categories(self, categories: str) -> bool:
|
||||
"""Validate CodeQL categories.
|
||||
@@ -249,14 +232,7 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Use the CodeQL validator
|
||||
result = self.codeql_validator.validate_category_format(categories)
|
||||
# Copy any errors from codeql validator
|
||||
for error in self.codeql_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.codeql_validator.clear_errors()
|
||||
return result
|
||||
return self.validate_with(self.codeql_validator, "validate_category_format", categories)
|
||||
|
||||
def validate_category(self, category: str) -> bool:
|
||||
"""Validate CodeQL category (singular).
|
||||
@@ -267,14 +243,7 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Use the CodeQL validator
|
||||
result = self.codeql_validator.validate_category_format(category)
|
||||
# Copy any errors from codeql validator
|
||||
for error in self.codeql_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.codeql_validator.clear_errors()
|
||||
return result
|
||||
return self.validate_with(self.codeql_validator, "validate_category_format", category)
|
||||
|
||||
def validate_config_file(self, config_file: str) -> bool:
|
||||
"""Validate CodeQL configuration file path.
|
||||
@@ -287,21 +256,11 @@ class CustomValidator(BaseValidator):
|
||||
"""
|
||||
if not config_file or not config_file.strip():
|
||||
return True
|
||||
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(config_file):
|
||||
return True
|
||||
|
||||
# Use FileValidator for yaml file validation
|
||||
result = self.file_validator.validate_yaml_file(config_file, "config-file")
|
||||
|
||||
# Copy any errors from file validator
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.file_validator, "validate_yaml_file", config_file, "config-file"
|
||||
)
|
||||
|
||||
def validate_database(self, database: str) -> bool:
|
||||
"""Validate CodeQL database path.
|
||||
@@ -312,25 +271,13 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(database):
|
||||
return True
|
||||
|
||||
# Use FileValidator for path validation
|
||||
result = self.file_validator.validate_file_path(database, "database")
|
||||
|
||||
# Copy any errors from file validator
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
|
||||
result = self.validate_with(self.file_validator, "validate_file_path", database, "database")
|
||||
# Database paths often contain the language
|
||||
# e.g., "codeql-database/javascript" or "/tmp/codeql_databases/python"
|
||||
# Just validate it's a reasonable path after basic validation
|
||||
if result and database.startswith("/tmp/"): # noqa: S108
|
||||
return True
|
||||
|
||||
return result
|
||||
|
||||
def validate_debug(self, debug: str) -> bool:
|
||||
@@ -342,20 +289,9 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(debug):
|
||||
return True
|
||||
|
||||
# Use BooleanValidator
|
||||
result = self.boolean_validator.validate_boolean(debug, "debug")
|
||||
|
||||
# Copy any errors from boolean validator
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(self.boolean_validator, "validate_boolean", debug, "debug")
|
||||
|
||||
def validate_upload_database(self, upload: str) -> bool:
|
||||
"""Validate upload-database setting.
|
||||
@@ -366,20 +302,11 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(upload):
|
||||
return True
|
||||
|
||||
# Use BooleanValidator
|
||||
result = self.boolean_validator.validate_boolean(upload, "upload-database")
|
||||
|
||||
# Copy any errors from boolean validator
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.boolean_validator, "validate_boolean", upload, "upload-database"
|
||||
)
|
||||
|
||||
def validate_upload_sarif(self, upload: str) -> bool:
|
||||
"""Validate upload-sarif setting.
|
||||
@@ -390,20 +317,11 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(upload):
|
||||
return True
|
||||
|
||||
# Use BooleanValidator
|
||||
result = self.boolean_validator.validate_boolean(upload, "upload-sarif")
|
||||
|
||||
# Copy any errors from boolean validator
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.boolean_validator, "validate_boolean", upload, "upload-sarif"
|
||||
)
|
||||
|
||||
def validate_packs(self, packs: str) -> bool:
|
||||
"""Validate CodeQL packs.
|
||||
@@ -487,16 +405,9 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Use the TokenValidator for proper validation
|
||||
result = self.token_validator.validate_github_token(token, required=False)
|
||||
|
||||
# Copy any errors from token validator
|
||||
for error in self.token_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.token_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.token_validator, "validate_github_token", token, required=False
|
||||
)
|
||||
|
||||
def validate_token(self, token: str) -> bool:
|
||||
"""Validate GitHub token.
|
||||
@@ -507,21 +418,12 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Check for empty token
|
||||
if not token or not token.strip():
|
||||
self.add_error("Input 'token' is missing or empty")
|
||||
return False
|
||||
|
||||
# Use the TokenValidator for proper validation
|
||||
result = self.token_validator.validate_github_token(token, required=True)
|
||||
|
||||
# Copy any errors from token validator
|
||||
for error in self.token_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.token_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.token_validator, "validate_github_token", token, required=True
|
||||
)
|
||||
|
||||
def validate_working_directory(self, directory: str) -> bool:
|
||||
"""Validate working directory path.
|
||||
@@ -532,20 +434,11 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(directory):
|
||||
return True
|
||||
|
||||
# Use FileValidator for path validation
|
||||
result = self.file_validator.validate_file_path(directory, "working-directory")
|
||||
|
||||
# Copy any errors from file validator
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.file_validator, "validate_file_path", directory, "working-directory"
|
||||
)
|
||||
|
||||
def validate_upload_results(self, value: str) -> bool:
|
||||
"""Validate upload-results boolean value.
|
||||
@@ -556,27 +449,14 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Check for empty
|
||||
if not value or not value.strip():
|
||||
self.add_error("upload-results cannot be empty")
|
||||
return False
|
||||
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(value):
|
||||
return True
|
||||
|
||||
# Check for uppercase TRUE/FALSE first
|
||||
if value in ["TRUE", "FALSE"]:
|
||||
self.add_error("Must be lowercase 'true' or 'false'")
|
||||
return False
|
||||
|
||||
# Use BooleanValidator for normal validation
|
||||
result = self.boolean_validator.validate_boolean(value, "upload-results")
|
||||
|
||||
# Copy any errors from boolean validator
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.boolean_validator, "validate_boolean", value, "upload-results"
|
||||
)
|
||||
|
||||
@@ -107,7 +107,7 @@ runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Validate inputs
|
||||
uses: ivuorinen/actions/validate-inputs@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
|
||||
with:
|
||||
action-type: codeql-analysis
|
||||
language: ${{ inputs.language }}
|
||||
@@ -128,13 +128,14 @@ runs:
|
||||
skip-queries: ${{ inputs.skip-queries }}
|
||||
|
||||
- name: Validate checkout safety
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
CHECKOUT_REF: ${{ inputs.checkout-ref }}
|
||||
EVENT_NAME: ${{ github.event_name }}
|
||||
run: |
|
||||
set -eu
|
||||
# Security check: Warn if checking out custom ref on pull_request_target
|
||||
if [[ "$EVENT_NAME" == "pull_request_target" ]] && [[ -n "$CHECKOUT_REF" ]]; then
|
||||
if [ "$EVENT_NAME" = "pull_request_target" ] && [ -n "$CHECKOUT_REF" ]; then
|
||||
echo "::warning::Using custom checkout-ref on pull_request_target is potentially unsafe"
|
||||
echo "::warning::Ensure the ref is validated before running untrusted code"
|
||||
fi
|
||||
@@ -147,28 +148,30 @@ runs:
|
||||
|
||||
- name: Set analysis category
|
||||
id: set-category
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
CATEGORY: ${{ inputs.category }}
|
||||
LANGUAGE: ${{ inputs.language }}
|
||||
run: |
|
||||
if [[ -n "$CATEGORY" ]]; then
|
||||
set -eu
|
||||
if [ -n "$CATEGORY" ]; then
|
||||
category="$CATEGORY"
|
||||
else
|
||||
category="/language:$LANGUAGE"
|
||||
fi
|
||||
echo "category=$category" >> $GITHUB_OUTPUT
|
||||
echo "category=$category" >> "$GITHUB_OUTPUT"
|
||||
echo "Using analysis category: $category"
|
||||
|
||||
- name: Set build mode
|
||||
id: set-build-mode
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
BUILD_MODE: ${{ inputs.build-mode }}
|
||||
LANGUAGE: ${{ inputs.language }}
|
||||
run: |
|
||||
set -eu
|
||||
build_mode="$BUILD_MODE"
|
||||
if [[ -z "$build_mode" ]]; then
|
||||
if [ -z "$build_mode" ]; then
|
||||
# Auto-detect build mode based on language
|
||||
case "$LANGUAGE" in
|
||||
javascript|python|ruby|actions)
|
||||
@@ -179,11 +182,11 @@ runs:
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
echo "build-mode=$build_mode" >> $GITHUB_OUTPUT
|
||||
echo "build-mode=$build_mode" >> "$GITHUB_OUTPUT"
|
||||
echo "Using build mode: $build_mode"
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
|
||||
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
with:
|
||||
languages: ${{ inputs.language }}
|
||||
queries: ${{ inputs.queries }}
|
||||
@@ -196,12 +199,12 @@ runs:
|
||||
threads: ${{ inputs.threads }}
|
||||
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
|
||||
uses: github/codeql-action/autobuild@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
if: ${{ steps.set-build-mode.outputs.build-mode == 'autobuild' }}
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
id: analysis
|
||||
uses: github/codeql-action/analyze@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
|
||||
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
with:
|
||||
category: ${{ steps.set-category.outputs.category }}
|
||||
upload: ${{ inputs.upload-results }}
|
||||
@@ -211,7 +214,7 @@ runs:
|
||||
skip-queries: ${{ inputs.skip-queries }}
|
||||
|
||||
- name: Summary
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
LANGUAGE: ${{ inputs.language }}
|
||||
CATEGORY: ${{ steps.set-category.outputs.category }}
|
||||
@@ -221,14 +224,15 @@ runs:
|
||||
UPLOAD_RESULTS: ${{ inputs.upload-results }}
|
||||
OUTPUT: ${{ inputs.output }}
|
||||
run: |
|
||||
set -eu
|
||||
echo "✅ CodeQL analysis completed for language: $LANGUAGE"
|
||||
echo "📊 Category: $CATEGORY"
|
||||
echo "🏗️ Build mode: $BUILD_MODE"
|
||||
echo "🔍 Queries: ${QUERIES:-default}"
|
||||
echo "📦 Packs: ${PACKS:-none}"
|
||||
if [[ "$UPLOAD_RESULTS" == "true" ]]; then
|
||||
if [ "$UPLOAD_RESULTS" = "true" ]; then
|
||||
echo "📤 Results uploaded to GitHub Security tab"
|
||||
fi
|
||||
if [[ -n "$OUTPUT" ]]; then
|
||||
if [ -n "$OUTPUT" ]; then
|
||||
echo "💾 SARIF saved to: $OUTPUT"
|
||||
fi
|
||||
|
||||
@@ -42,7 +42,7 @@ conventions:
|
||||
packs: codeql_packs
|
||||
queries: codeql_queries
|
||||
ram: numeric_range_256_32768
|
||||
skip-queries: codeql_queries
|
||||
skip-queries: boolean
|
||||
source-root: file_path
|
||||
threads: numeric_range_1_128
|
||||
token: github_token
|
||||
@@ -51,6 +51,7 @@ overrides:
|
||||
build-mode: codeql_build_mode
|
||||
category: category_format
|
||||
config: codeql_config
|
||||
language: codeql_language
|
||||
output: file_path
|
||||
packs: codeql_packs
|
||||
queries: codeql_queries
|
||||
|
||||
@@ -1,244 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Custom validator for common-cache action.
|
||||
|
||||
This validator handles caching-specific validation including:
|
||||
- Cache types (npm, composer, go, pip, maven, gradle)
|
||||
- Cache paths (comma-separated list)
|
||||
- Cache keys and restore keys
|
||||
- Path validation with special handling for multiple paths
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
import sys
|
||||
|
||||
# Add validate-inputs directory to path to import validators
|
||||
validate_inputs_path = Path(__file__).parent.parent / "validate-inputs"
|
||||
sys.path.insert(0, str(validate_inputs_path))
|
||||
|
||||
from validators.base import BaseValidator
|
||||
from validators.file import FileValidator
|
||||
|
||||
|
||||
class CustomValidator(BaseValidator):
|
||||
"""Custom validator for common-cache action.
|
||||
|
||||
Provides validation for cache configuration.
|
||||
"""
|
||||
|
||||
def __init__(self, action_type: str = "common-cache") -> None:
|
||||
"""Initialize the common-cache validator."""
|
||||
super().__init__(action_type)
|
||||
self.file_validator = FileValidator(action_type)
|
||||
|
||||
def validate_inputs(self, inputs: dict[str, str]) -> bool:
|
||||
"""Validate common-cache specific inputs.
|
||||
|
||||
Args:
|
||||
inputs: Dictionary of input names to values
|
||||
|
||||
Returns:
|
||||
True if all validations pass, False otherwise
|
||||
"""
|
||||
valid = True
|
||||
|
||||
# Validate type (required)
|
||||
if "type" in inputs:
|
||||
valid &= self.validate_cache_type(inputs["type"])
|
||||
else:
|
||||
# Type is required
|
||||
self.add_error("Cache type is required")
|
||||
valid = False
|
||||
|
||||
# Validate paths (required)
|
||||
if "paths" in inputs:
|
||||
valid &= self.validate_cache_paths(inputs["paths"])
|
||||
else:
|
||||
# Paths is required
|
||||
self.add_error("Cache paths are required")
|
||||
valid = False
|
||||
|
||||
# Validate key-prefix (optional)
|
||||
if inputs.get("key-prefix"):
|
||||
valid &= self.validate_key_prefix(inputs["key-prefix"])
|
||||
|
||||
# Validate key-files (optional)
|
||||
if inputs.get("key-files"):
|
||||
valid &= self.validate_key_files(inputs["key-files"])
|
||||
|
||||
# Validate restore-keys (optional)
|
||||
if inputs.get("restore-keys"):
|
||||
valid &= self.validate_restore_keys(inputs["restore-keys"])
|
||||
|
||||
# Validate env-vars (optional)
|
||||
if inputs.get("env-vars"):
|
||||
valid &= self.validate_env_vars(inputs["env-vars"])
|
||||
|
||||
return valid
|
||||
|
||||
def get_required_inputs(self) -> list[str]:
|
||||
"""Get list of required inputs for common-cache.
|
||||
|
||||
Returns:
|
||||
List of required input names
|
||||
"""
|
||||
return ["type", "paths"]
|
||||
|
||||
def get_validation_rules(self) -> dict:
|
||||
"""Get validation rules for common-cache.
|
||||
|
||||
Returns:
|
||||
Dictionary of validation rules
|
||||
"""
|
||||
return {
|
||||
"type": "Cache type (npm, composer, go, pip, maven, gradle)",
|
||||
"paths": "Comma-separated list of paths to cache",
|
||||
"key-prefix": "Optional prefix for cache key",
|
||||
"key-files": "Files to include in cache key hash",
|
||||
"restore-keys": "Fallback cache keys to try",
|
||||
}
|
||||
|
||||
def validate_cache_type(self, cache_type: str) -> bool:
|
||||
"""Validate cache type.
|
||||
|
||||
Args:
|
||||
cache_type: Type of cache
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Check for empty
|
||||
if not cache_type or not cache_type.strip():
|
||||
self.add_error("Cache type cannot be empty")
|
||||
return False
|
||||
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(cache_type):
|
||||
return True
|
||||
|
||||
# Note: The test says "accepts invalid cache type (no validation in action)"
|
||||
# This suggests we should accept any value, not just the supported ones
|
||||
# So we'll just validate for security issues, not restrict to specific types
|
||||
|
||||
# Check for command injection using base validator
|
||||
return self.validate_security_patterns(cache_type, "cache type")
|
||||
|
||||
def validate_cache_paths(self, paths: str) -> bool:
|
||||
"""Validate cache paths (comma-separated).
|
||||
|
||||
Args:
|
||||
paths: Comma-separated paths
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Check for empty
|
||||
if not paths or not paths.strip():
|
||||
self.add_error("Cache paths cannot be empty")
|
||||
return False
|
||||
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(paths):
|
||||
return True
|
||||
|
||||
# Split paths and validate each
|
||||
path_list = [p.strip() for p in paths.split(",")]
|
||||
|
||||
for path in path_list:
|
||||
if not path:
|
||||
continue
|
||||
|
||||
# Use FileValidator for path validation
|
||||
result = self.file_validator.validate_file_path(path, "paths")
|
||||
# Propagate errors from file validator
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
|
||||
if not result:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def validate_key_prefix(self, key_prefix: str) -> bool:
|
||||
"""Validate cache key prefix.
|
||||
|
||||
Args:
|
||||
key_prefix: Key prefix
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(key_prefix):
|
||||
return True
|
||||
|
||||
# Check for command injection using base validator
|
||||
return self.validate_security_patterns(key_prefix, "key-prefix")
|
||||
|
||||
def validate_key_files(self, key_files: str) -> bool:
|
||||
"""Validate key files (comma-separated).
|
||||
|
||||
Args:
|
||||
key_files: Comma-separated file paths
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(key_files):
|
||||
return True
|
||||
|
||||
# Split files and validate each
|
||||
file_list = [f.strip() for f in key_files.split(",")]
|
||||
|
||||
for file_path in file_list:
|
||||
if not file_path:
|
||||
continue
|
||||
|
||||
# Use FileValidator for path validation
|
||||
result = self.file_validator.validate_file_path(file_path, "key-files")
|
||||
# Propagate errors from file validator
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
|
||||
if not result:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def validate_restore_keys(self, restore_keys: str) -> bool:
|
||||
"""Validate restore keys.
|
||||
|
||||
Args:
|
||||
restore_keys: Restore keys specification
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(restore_keys):
|
||||
return True
|
||||
|
||||
# Check for command injection using base validator
|
||||
return self.validate_security_patterns(restore_keys, "restore-keys")
|
||||
|
||||
def validate_env_vars(self, env_vars: str) -> bool:
|
||||
"""Validate environment variables.
|
||||
|
||||
Args:
|
||||
env_vars: Environment variables specification
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(env_vars):
|
||||
return True
|
||||
|
||||
# Check for command injection using base validator
|
||||
return self.validate_security_patterns(env_vars, "env-vars")
|
||||
@@ -1,72 +0,0 @@
|
||||
# ivuorinen/actions/common-cache
|
||||
|
||||
## Common Cache
|
||||
|
||||
### Description
|
||||
|
||||
Standardized caching strategy for all actions
|
||||
|
||||
### Inputs
|
||||
|
||||
| name | description | required | default |
|
||||
|----------------|------------------------------------------------------|----------|---------|
|
||||
| `type` | <p>Type of cache (npm, composer, go, pip, etc.)</p> | `true` | `""` |
|
||||
| `paths` | <p>Paths to cache (comma-separated)</p> | `true` | `""` |
|
||||
| `key-prefix` | <p>Custom prefix for cache key</p> | `false` | `""` |
|
||||
| `key-files` | <p>Files to hash for cache key (comma-separated)</p> | `false` | `""` |
|
||||
| `restore-keys` | <p>Fallback keys for cache restoration</p> | `false` | `""` |
|
||||
| `env-vars` | <p>Environment variables to include in cache key</p> | `false` | `""` |
|
||||
|
||||
### Outputs
|
||||
|
||||
| name | description |
|
||||
|---------------|-----------------------------|
|
||||
| `cache-hit` | <p>Cache hit indicator</p> |
|
||||
| `cache-key` | <p>Generated cache key</p> |
|
||||
| `cache-paths` | <p>Resolved cache paths</p> |
|
||||
|
||||
### Runs
|
||||
|
||||
This action is a `composite` action.
|
||||
|
||||
### Usage
|
||||
|
||||
```yaml
|
||||
- uses: ivuorinen/actions/common-cache@main
|
||||
with:
|
||||
type:
|
||||
# Type of cache (npm, composer, go, pip, etc.)
|
||||
#
|
||||
# Required: true
|
||||
# Default: ""
|
||||
|
||||
paths:
|
||||
# Paths to cache (comma-separated)
|
||||
#
|
||||
# Required: true
|
||||
# Default: ""
|
||||
|
||||
key-prefix:
|
||||
# Custom prefix for cache key
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
|
||||
key-files:
|
||||
# Files to hash for cache key (comma-separated)
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
|
||||
restore-keys:
|
||||
# Fallback keys for cache restoration
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
|
||||
env-vars:
|
||||
# Environment variables to include in cache key
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
```
|
||||
@@ -1,122 +0,0 @@
|
||||
# yaml-language-server: $schema=https://json.schemastore.org/github-action.json
|
||||
# permissions:
|
||||
# - contents: read # Required for reading cache contents
|
||||
---
|
||||
name: Common Cache
|
||||
description: 'Standardized caching strategy for all actions'
|
||||
author: 'Ismo Vuorinen'
|
||||
|
||||
branding:
|
||||
icon: database
|
||||
color: gray-dark
|
||||
|
||||
inputs:
|
||||
type:
|
||||
description: 'Type of cache (npm, composer, go, pip, etc.)'
|
||||
required: true
|
||||
paths:
|
||||
description: 'Paths to cache (comma-separated)'
|
||||
required: true
|
||||
key-prefix:
|
||||
description: 'Custom prefix for cache key'
|
||||
required: false
|
||||
default: ''
|
||||
key-files:
|
||||
description: 'Files to hash for cache key (comma-separated)'
|
||||
required: false
|
||||
default: ''
|
||||
restore-keys:
|
||||
description: 'Fallback keys for cache restoration'
|
||||
required: false
|
||||
default: ''
|
||||
env-vars:
|
||||
description: 'Environment variables to include in cache key'
|
||||
required: false
|
||||
default: ''
|
||||
|
||||
outputs:
|
||||
cache-hit:
|
||||
description: 'Cache hit indicator'
|
||||
value: ${{ steps.cache.outputs.cache-hit }}
|
||||
cache-key:
|
||||
description: 'Generated cache key'
|
||||
value: ${{ steps.prepare.outputs.cache-key }}
|
||||
cache-paths:
|
||||
description: 'Resolved cache paths'
|
||||
value: ${{ steps.prepare.outputs.cache-paths }}
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- id: prepare
|
||||
shell: bash
|
||||
env:
|
||||
RUNNER_OS: ${{ runner.os }}
|
||||
CACHE_TYPE: ${{ inputs.type }}
|
||||
KEY_PREFIX: ${{ inputs.key-prefix }}
|
||||
KEY_FILES: ${{ inputs.key-files }}
|
||||
ENV_VARS: ${{ inputs.env-vars }}
|
||||
CACHE_PATHS: ${{ inputs.paths }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Generate standardized cache key components
|
||||
os_key="$RUNNER_OS"
|
||||
type_key="$CACHE_TYPE"
|
||||
prefix_key="$KEY_PREFIX"
|
||||
|
||||
# Process file hashes
|
||||
# Note: For simple glob patterns, hashFiles() function could be used directly
|
||||
# in the cache key. This manual approach is used to support comma-separated
|
||||
# file lists with complex cache key construction.
|
||||
files_hash=""
|
||||
if [ -n "$KEY_FILES" ]; then
|
||||
IFS=',' read -ra FILES <<< "$KEY_FILES"
|
||||
existing_files=()
|
||||
for file in "${FILES[@]}"; do
|
||||
# Trim whitespace
|
||||
file=$(echo "$file" | xargs)
|
||||
if [ -f "$file" ]; then
|
||||
existing_files+=("$file")
|
||||
fi
|
||||
done
|
||||
# Hash all files together for better performance
|
||||
if [ ${#existing_files[@]} -gt 0 ]; then
|
||||
files_hash=$(cat "${existing_files[@]}" | sha256sum | cut -d' ' -f1)
|
||||
fi
|
||||
fi
|
||||
|
||||
# Process environment variables
|
||||
env_hash=""
|
||||
if [ -n "$ENV_VARS" ]; then
|
||||
IFS=',' read -ra VARS <<< "$ENV_VARS"
|
||||
for var in "${VARS[@]}"; do
|
||||
if [ -n "${!var}" ]; then
|
||||
env_hash="${env_hash}-${var}-${!var}"
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
# Generate final cache key
|
||||
cache_key="${os_key}"
|
||||
[ -n "$prefix_key" ] && cache_key="${cache_key}-${prefix_key}"
|
||||
[ -n "$type_key" ] && cache_key="${cache_key}-${type_key}"
|
||||
[ -n "$files_hash" ] && cache_key="${cache_key}-${files_hash}"
|
||||
[ -n "$env_hash" ] && cache_key="${cache_key}-${env_hash}"
|
||||
|
||||
echo "cache-key=${cache_key}" >> $GITHUB_OUTPUT
|
||||
|
||||
# Process cache paths
|
||||
IFS=',' read -ra PATHS <<< "$CACHE_PATHS"
|
||||
cache_paths=""
|
||||
for path in "${PATHS[@]}"; do
|
||||
cache_paths="${cache_paths}${path}\n"
|
||||
done
|
||||
echo "cache-paths=${cache_paths}" >> $GITHUB_OUTPUT
|
||||
|
||||
- id: cache
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: ${{ steps.prepare.outputs.cache-paths }}
|
||||
key: ${{ steps.prepare.outputs.cache-key }}
|
||||
restore-keys: ${{ inputs.restore-keys }}
|
||||
@@ -1,42 +0,0 @@
|
||||
---
|
||||
# Validation rules for common-cache action
|
||||
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
|
||||
# Schema version: 1.0
|
||||
# Coverage: 50% (3/6 inputs)
|
||||
#
|
||||
# This file defines validation rules for the common-cache GitHub Action.
|
||||
# Rules are automatically applied by validate-inputs action when this
|
||||
# action is used.
|
||||
#
|
||||
|
||||
schema_version: '1.0'
|
||||
action: common-cache
|
||||
description: Standardized caching strategy for all actions
|
||||
generator_version: 1.0.0
|
||||
required_inputs:
|
||||
- paths
|
||||
- type
|
||||
optional_inputs:
|
||||
- env-vars
|
||||
- key-files
|
||||
- key-prefix
|
||||
- restore-keys
|
||||
conventions:
|
||||
key-files: file_path
|
||||
key-prefix: prefix
|
||||
paths: file_path
|
||||
overrides: {}
|
||||
statistics:
|
||||
total_inputs: 6
|
||||
validated_inputs: 3
|
||||
skipped_inputs: 0
|
||||
coverage_percentage: 50
|
||||
validation_coverage: 50
|
||||
auto_detected: true
|
||||
manual_review_required: true
|
||||
quality_indicators:
|
||||
has_required_inputs: true
|
||||
has_token_validation: false
|
||||
has_version_validation: false
|
||||
has_file_validation: true
|
||||
has_security_validation: false
|
||||
@@ -36,47 +36,35 @@ class CustomValidator(BaseValidator):
|
||||
|
||||
# Validate optional inputs
|
||||
if inputs.get("image-quality"):
|
||||
result = self.numeric_validator.validate_numeric_range(
|
||||
inputs["image-quality"], min_val=0, max_val=100
|
||||
valid &= self.validate_with(
|
||||
self.numeric_validator,
|
||||
"validate_numeric_range",
|
||||
inputs["image-quality"],
|
||||
min_val=0,
|
||||
max_val=100,
|
||||
)
|
||||
for error in self.numeric_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.numeric_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
if inputs.get("png-quality"):
|
||||
result = self.numeric_validator.validate_numeric_range(
|
||||
inputs["png-quality"], min_val=0, max_val=100
|
||||
valid &= self.validate_with(
|
||||
self.numeric_validator,
|
||||
"validate_numeric_range",
|
||||
inputs["png-quality"],
|
||||
min_val=0,
|
||||
max_val=100,
|
||||
)
|
||||
for error in self.numeric_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.numeric_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
if inputs.get("directory"):
|
||||
result = self.file_validator.validate_file_path(inputs["directory"], "directory")
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.file_validator, "validate_file_path", inputs["directory"], "directory"
|
||||
)
|
||||
|
||||
if inputs.get("ignore-paths"):
|
||||
# Validate for injection
|
||||
result = self.security_validator.validate_no_injection(
|
||||
inputs["ignore-paths"], "ignore-paths"
|
||||
valid &= self.validate_with(
|
||||
self.security_validator,
|
||||
"validate_no_injection",
|
||||
inputs["ignore-paths"],
|
||||
"ignore-paths",
|
||||
)
|
||||
for error in self.security_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.security_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
return valid
|
||||
|
||||
|
||||
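The `image-quality` and `png-quality` checks above forward `min_val=0, max_val=100` through `validate_with`, matching the `numeric_range_0_100` convention in the generated rules files. The `NumericValidator` itself is not part of this diff; a minimal standalone sketch of the range check those call sites imply (names are assumptions):

```python
# Sketch of the 0-100 range check used for image-quality and png-quality; not repository code.
def in_numeric_range(value: str, min_val: int = 0, max_val: int = 100) -> bool:
    value = value.strip()
    if not value.isdigit():  # rejects '', negatives and '1.5', like the shell guard ''|*[!0-9]*
        return False
    return min_val <= int(value) <= max_val


assert in_numeric_range("80")
assert not in_numeric_range("150")
assert not in_numeric_range("abc")
```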
@@ -57,7 +57,7 @@ runs:
|
||||
steps:
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
WORKING_DIRECTORY: ${{ inputs.working-directory }}
|
||||
IMAGE_QUALITY: ${{ inputs.image-quality }}
|
||||
@@ -67,7 +67,7 @@ runs:
|
||||
USERNAME: ${{ inputs.username }}
|
||||
GITHUB_TOKEN: ${{ inputs.token }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
# Validate working directory
|
||||
if [ ! -d "$WORKING_DIRECTORY" ]; then
|
||||
@@ -76,70 +76,73 @@ runs:
|
||||
fi
|
||||
|
||||
# Validate path security (prevent absolute paths and path traversal)
|
||||
if [[ "$WORKING_DIRECTORY" == "/"* ]] || [[ "$WORKING_DIRECTORY" == "~"* ]] || [[ "$WORKING_DIRECTORY" =~ ^[A-Za-z]:[/\\] ]]; then
|
||||
echo "::error::Invalid working-directory: '$WORKING_DIRECTORY'. Absolute paths not allowed"
|
||||
exit 1
|
||||
fi
|
||||
case "$WORKING_DIRECTORY" in
|
||||
/*|~*|[A-Za-z]:*|*..*)
|
||||
echo "::error::Invalid working-directory: '$WORKING_DIRECTORY'. Absolute paths and path traversal not allowed"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
if [[ "$WORKING_DIRECTORY" == *".."* ]]; then
|
||||
echo "::error::Invalid working-directory: '$WORKING_DIRECTORY'. Path traversal not allowed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate image quality (0-100)
|
||||
if ! [[ "$IMAGE_QUALITY" =~ ^[0-9]+$ ]]; then
|
||||
echo "::error::Invalid image-quality: '$IMAGE_QUALITY'. Must be a number between 0 and 100"
|
||||
exit 1
|
||||
fi
|
||||
# Validate image quality (0-100) - must be numeric
|
||||
case "$IMAGE_QUALITY" in
|
||||
''|*[!0-9]*)
|
||||
echo "::error::Invalid image-quality: '$IMAGE_QUALITY'. Must be a number between 0 and 100"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ "$IMAGE_QUALITY" -lt 0 ] || [ "$IMAGE_QUALITY" -gt 100 ]; then
|
||||
echo "::error::Invalid image-quality: '$IMAGE_QUALITY'. Must be between 0 and 100"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate PNG quality (0-100)
|
||||
if ! [[ "$PNG_QUALITY" =~ ^[0-9]+$ ]]; then
|
||||
echo "::error::Invalid png-quality: '$PNG_QUALITY'. Must be a number between 0 and 100"
|
||||
exit 1
|
||||
fi
|
||||
# Validate PNG quality (0-100) - must be numeric
|
||||
case "$PNG_QUALITY" in
|
||||
''|*[!0-9]*)
|
||||
echo "::error::Invalid png-quality: '$PNG_QUALITY'. Must be a number between 0 and 100"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ "$PNG_QUALITY" -lt 0 ] || [ "$PNG_QUALITY" -gt 100 ]; then
|
||||
echo "::error::Invalid png-quality: '$PNG_QUALITY'. Must be between 0 and 100"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate ignore paths format (prevent command injection)
|
||||
if [[ "$IGNORE_PATHS" == *";"* ]] || [[ "$IGNORE_PATHS" == *"&&"* ]] || \
|
||||
[[ "$IGNORE_PATHS" == *"|"* ]] || [[ "$IGNORE_PATHS" == *'`'* ]] || \
|
||||
[[ "$IGNORE_PATHS" == *'$('* ]] || [[ "$IGNORE_PATHS" == *'${'* ]] || \
|
||||
[[ "$IGNORE_PATHS" == *"<"* ]] || [[ "$IGNORE_PATHS" == *">"* ]]; then
|
||||
echo "::error::Invalid ignore-paths: '$IGNORE_PATHS'. Command injection patterns not allowed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate ignore paths for path traversal
|
||||
if [[ "$IGNORE_PATHS" == *".."* ]]; then
|
||||
echo "::error::Invalid ignore-paths: '$IGNORE_PATHS'. Path traversal not allowed"
|
||||
exit 1
|
||||
fi
|
||||
# Validate ignore paths format (prevent command injection and path traversal)
|
||||
case "$IGNORE_PATHS" in
|
||||
*\;*|*\&\&*|*\|*|*\`*|*\$\(*|*\$\{*|*\<*|*\>*|*..\*)
|
||||
echo "::error::Invalid ignore-paths: '$IGNORE_PATHS'. Command injection patterns and path traversal not allowed"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Validate email format (basic check)
|
||||
if [[ "$EMAIL" != *"@"* ]] || [[ "$EMAIL" != *"."* ]]; then
|
||||
echo "::error::Invalid email format: '$EMAIL'. Expected valid email address"
|
||||
exit 1
|
||||
fi
|
||||
case "$EMAIL" in
|
||||
*@*.*) ;;
|
||||
*)
|
||||
echo "::error::Invalid email format: '$EMAIL'. Expected valid email address"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Validate username format (prevent command injection)
|
||||
if [[ "$USERNAME" == *";"* ]] || [[ "$USERNAME" == *"&&"* ]] || [[ "$USERNAME" == *"|"* ]]; then
|
||||
echo "::error::Invalid username: '$USERNAME'. Command injection patterns not allowed"
|
||||
exit 1
|
||||
fi
|
||||
case "$USERNAME" in
|
||||
*\;*|*\&\&*|*\|*)
|
||||
echo "::error::Invalid username: '$USERNAME'. Command injection patterns not allowed"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Validate token format if provided (basic GitHub token pattern)
|
||||
if [[ -n "$GITHUB_TOKEN" ]]; then
|
||||
if ! [[ "$GITHUB_TOKEN" =~ ^gh[efpousr]_[a-zA-Z0-9]{36}$ ]]; then
|
||||
echo "::warning::GitHub token format may be invalid. Expected format: gh*_36characters"
|
||||
fi
|
||||
if [ -n "$GITHUB_TOKEN" ]; then
|
||||
case "$GITHUB_TOKEN" in
|
||||
gh[efpousr]_?????????????????????????????????????)
|
||||
;;
|
||||
*)
|
||||
echo "::warning::GitHub token format may be invalid. Expected format: gh*_36characters"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
- name: Checkout Repository
|
||||
@@ -160,7 +163,7 @@ runs:
|
||||
|
||||
- name: Create New Pull Request If Needed
|
||||
if: steps.calibre.outputs.markdown != ''
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
|
||||
with:
|
||||
token: ${{ inputs.token }}
|
||||
title: 'chore: compress images'
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
# Validation rules for compress-images action
|
||||
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
|
||||
# Schema version: 1.0
|
||||
# Coverage: 86% (6/7 inputs)
|
||||
# Coverage: 100% (7/7 inputs)
|
||||
#
|
||||
# This file defines validation rules for the compress-images GitHub Action.
|
||||
# Rules are automatically applied by validate-inputs action when this
|
||||
@@ -24,6 +24,7 @@ optional_inputs:
|
||||
- working-directory
|
||||
conventions:
|
||||
email: email
|
||||
ignore-paths: path_list
|
||||
image-quality: numeric_range_0_100
|
||||
png-quality: numeric_range_0_100
|
||||
token: github_token
|
||||
@@ -32,10 +33,10 @@ conventions:
|
||||
overrides: {}
|
||||
statistics:
|
||||
total_inputs: 7
|
||||
validated_inputs: 6
|
||||
validated_inputs: 7
|
||||
skipped_inputs: 0
|
||||
coverage_percentage: 86
|
||||
validation_coverage: 86
|
||||
coverage_percentage: 100
|
||||
validation_coverage: 100
|
||||
auto_detected: true
|
||||
manual_review_required: false
|
||||
quality_indicators:
|
||||
|
||||
@@ -50,28 +50,112 @@ runs:
|
||||
|
||||
- name: Detect .NET SDK Version
|
||||
id: detect-dotnet-version
|
||||
uses: ivuorinen/actions/language-version-detect@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
with:
|
||||
language: 'dotnet'
|
||||
default-version: "${{ inputs.dotnet-version || '7.0' }}"
|
||||
shell: sh
|
||||
env:
|
||||
DEFAULT_VERSION: "${{ inputs.dotnet-version || '7.0' }}"
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
# Function to validate version format
|
||||
validate_version() {
|
||||
version=$1
|
||||
case "$version" in
|
||||
[0-9]* | [0-9]*\.[0-9]* | [0-9]*\.[0-9]*\.[0-9]*)
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Function to clean version string
|
||||
clean_version() {
|
||||
printf '%s' "$1" | sed 's/^[vV]//' | tr -d ' \n\r'
|
||||
}
|
||||
|
||||
detected_version=""
|
||||
|
||||
# Parse .tool-versions file
|
||||
if [ -f .tool-versions ]; then
|
||||
echo "Checking .tool-versions for dotnet..." >&2
|
||||
version=$(awk '/^dotnet[[:space:]]/ {gsub(/#.*/, ""); print $2; exit}' .tool-versions 2>/dev/null || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found .NET version in .tool-versions: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse Dockerfile
|
||||
if [ -z "$detected_version" ] && [ -f Dockerfile ]; then
|
||||
echo "Checking Dockerfile for dotnet..." >&2
|
||||
version=$(grep -iF "FROM" Dockerfile | grep -F "dotnet:" | head -1 | \
|
||||
sed -n -E "s/.*dotnet:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found .NET version in Dockerfile: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse devcontainer.json
|
||||
if [ -z "$detected_version" ] && [ -f .devcontainer/devcontainer.json ]; then
|
||||
echo "Checking devcontainer.json for dotnet..." >&2
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
version=$(jq -r '.image // empty' .devcontainer/devcontainer.json 2>/dev/null | sed -n -E "s/.*dotnet:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found .NET version in devcontainer: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "jq not found; skipping devcontainer.json parsing" >&2
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse global.json
|
||||
if [ -z "$detected_version" ] && [ -f global.json ]; then
|
||||
echo "Checking global.json..." >&2
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
version=$(jq -r '.sdk.version // empty' global.json 2>/dev/null || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found .NET version in global.json: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "jq not found; skipping global.json parsing" >&2
|
||||
fi
|
||||
fi
|
||||
|
||||
        # Use default version if nothing detected
        if [ -z "$detected_version" ]; then
          detected_version="$DEFAULT_VERSION"
          echo "Using default .NET version: $detected_version" >&2
        fi

        # Set output
        printf 'detected-version=%s\n' "$detected_version" >> "$GITHUB_OUTPUT"
        echo "Final detected .NET version: $detected_version" >&2
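The same inlined detection script appears in several of the .NET actions in this compare (it replaces the shared `language-version-detect` step). Its precedence is `.tool-versions`, then `Dockerfile`, then `.devcontainer/devcontainer.json`, then `global.json`, with the `DEFAULT_VERSION` fallback last. A compact Python sketch of that order, for reference only; parsing and error handling are simplified and the shell script above remains authoritative:

```python
# Sketch of the detection order only; the inlined shell script above is authoritative.
from __future__ import annotations

import json
import re
from pathlib import Path

_VERSION = re.compile(r"^\d+(?:\.\d+){0,2}$")
_IMAGE_TAG = re.compile(r"dotnet:(\d+(?:\.\d+)*)", re.IGNORECASE)


def _from_tool_versions(text: str) -> str | None:
    for line in text.splitlines():
        if line.startswith("dotnet"):
            fields = line.split("#", 1)[0].split()
            return fields[1] if len(fields) > 1 else None
    return None


def _from_dockerfile(text: str) -> str | None:
    match = _IMAGE_TAG.search(text)
    return match.group(1) if match else None


def _from_devcontainer(text: str) -> str | None:
    match = _IMAGE_TAG.search(json.loads(text).get("image") or "")
    return match.group(1) if match else None


def _from_global_json(text: str) -> str | None:
    return json.loads(text).get("sdk", {}).get("version")


_SOURCES = [
    (".tool-versions", _from_tool_versions),
    ("Dockerfile", _from_dockerfile),
    (".devcontainer/devcontainer.json", _from_devcontainer),
    ("global.json", _from_global_json),
]


def detect_dotnet_version(default: str = "7.0") -> str:
    for name, parse in _SOURCES:
        path = Path(name)
        if not path.is_file():
            continue
        try:
            version = (parse(path.read_text()) or "").strip().lstrip("vV")
        except (OSError, ValueError):  # unreadable or malformed file: fall through, like the shell
            continue
        if _VERSION.match(version):
            return version
    return default
```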
- name: Setup .NET SDK
|
||||
uses: actions/setup-dotnet@d4c94342e560b34958eacfc5d055d21461ed1c5d # v5.0.0
|
||||
uses: actions/setup-dotnet@2016bd2012dba4e32de620c46fe006a3ac9f0602 # v5.0.1
|
||||
with:
|
||||
dotnet-version: ${{ steps.detect-dotnet-version.outputs.detected-version }}
|
||||
|
||||
- name: Cache NuGet packages
|
||||
id: cache-nuget
|
||||
uses: ivuorinen/actions/common-cache@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
with:
|
||||
type: 'nuget'
|
||||
paths: '~/.nuget/packages'
|
||||
key-files: '**/*.csproj,**/*.props,**/*.targets'
|
||||
key-prefix: 'csharp-build'
|
||||
cache: true
|
||||
cache-dependency-path: '**/packages.lock.json'
|
||||
|
||||
- name: Restore Dependencies
|
||||
if: steps.cache-nuget.outputs.cache-hit != 'true'
|
||||
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3
|
||||
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3.0.4
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: ${{ inputs.max-retries }}
|
||||
@@ -79,17 +163,11 @@ runs:
|
||||
echo "Restoring .NET dependencies..."
|
||||
dotnet restore --verbosity normal
|
||||
|
||||
- name: Skip Restore (Cache Hit)
|
||||
if: steps.cache-nuget.outputs.cache-hit == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Cache hit - skipping dotnet restore"
|
||||
|
||||
- name: Build Solution
|
||||
id: build
|
||||
shell: bash
|
||||
shell: sh
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
echo "Building .NET solution..."
|
||||
if dotnet build --configuration Release --no-restore --verbosity normal; then
|
||||
echo "status=success" >> "$GITHUB_OUTPUT"
|
||||
@@ -102,9 +180,9 @@ runs:
|
||||
|
||||
- name: Run Tests
|
||||
id: test
|
||||
shell: bash
|
||||
shell: sh
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
echo "Running .NET tests..."
|
||||
if find . -name "*.csproj" | xargs grep -lE "(Microsoft\.NET\.Test\.Sdk|xunit|nunit)" | head -1 | grep -q .; then
|
||||
if dotnet test --configuration Release --no-build \
|
||||
@@ -125,7 +203,7 @@ runs:
|
||||
|
||||
- name: Upload Test Results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
with:
|
||||
name: csharp-test-results
|
||||
path: |
|
||||
|
||||
@@ -36,15 +36,15 @@ runs:
|
||||
steps:
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
DOTNET_VERSION: ${{ inputs.dotnet-version }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
# Validate .NET version format if provided
|
||||
if [[ -n "$DOTNET_VERSION" ]]; then
|
||||
if ! [[ "$DOTNET_VERSION" =~ ^[0-9]+(\.[0-9]+(\.[0-9]+)?)?$ ]]; then
|
||||
if [ -n "$DOTNET_VERSION" ]; then
|
||||
if ! printf '%s' "$DOTNET_VERSION" | grep -qE '^[0-9]+(\.[0-9]+(\.[0-9]+)?)?$'; then
|
||||
echo "::error::Invalid dotnet-version format: '$DOTNET_VERSION'. Expected format: X.Y or X.Y.Z (e.g., 7.0, 8.0.100)"
|
||||
exit 1
|
||||
fi
|
||||
@@ -66,28 +66,122 @@ runs:
|
||||
|
||||
- name: Detect .NET SDK Version
|
||||
id: detect-dotnet-version
|
||||
uses: ivuorinen/actions/language-version-detect@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
with:
|
||||
language: 'dotnet'
|
||||
default-version: ${{ inputs.dotnet-version || '7.0' }}
|
||||
shell: sh
|
||||
env:
|
||||
DEFAULT_VERSION: "${{ inputs.dotnet-version || '7.0' }}"
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
# Function to validate version format
|
||||
validate_version() {
|
||||
version=$1
|
||||
case "$version" in
|
||||
[0-9]* | [0-9]*\.[0-9]* | [0-9]*\.[0-9]*\.[0-9]*)
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Function to clean version string
|
||||
clean_version() {
|
||||
printf '%s' "$1" | sed 's/^[vV]//' | tr -d ' \n\r'
|
||||
}
|
||||
|
||||
detected_version=""
|
||||
|
||||
# Parse .tool-versions file
|
||||
if [ -f .tool-versions ]; then
|
||||
echo "Checking .tool-versions for dotnet..." >&2
|
||||
version=$(awk '/^dotnet[[:space:]]/ {gsub(/#.*/, ""); print $2; exit}' .tool-versions 2>/dev/null || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found .NET version in .tool-versions: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse Dockerfile
|
||||
if [ -z "$detected_version" ] && [ -f Dockerfile ]; then
|
||||
echo "Checking Dockerfile for dotnet..." >&2
|
||||
version=$(grep -iF "FROM" Dockerfile | grep -F "dotnet:" | head -1 | \
|
||||
sed -n -E "s/.*dotnet:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found .NET version in Dockerfile: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse devcontainer.json
|
||||
if [ -z "$detected_version" ] && [ -f .devcontainer/devcontainer.json ]; then
|
||||
echo "Checking devcontainer.json for dotnet..." >&2
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
version=$(jq -r '.image // empty' .devcontainer/devcontainer.json 2>/dev/null | sed -n -E "s/.*dotnet:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found .NET version in devcontainer: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "jq not found; skipping devcontainer.json parsing" >&2
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse global.json
|
||||
if [ -z "$detected_version" ] && [ -f global.json ]; then
|
||||
echo "Checking global.json..." >&2
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
version=$(jq -r '.sdk.version // empty' global.json 2>/dev/null || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found .NET version in global.json: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "jq not found; skipping global.json parsing" >&2
|
||||
fi
|
||||
fi
|
||||
|
||||
# Use default version if nothing detected
|
||||
if [ -z "$detected_version" ]; then
|
||||
detected_version="$DEFAULT_VERSION"
|
||||
echo "Using default .NET version: $detected_version" >&2
|
||||
fi
|
||||
|
||||
# Set output
|
||||
printf 'detected-version=%s\n' "$detected_version" >> "$GITHUB_OUTPUT"
|
||||
echo "Final detected .NET version: $detected_version" >&2
|
||||
|
||||
- name: Setup .NET SDK
|
||||
uses: actions/setup-dotnet@d4c94342e560b34958eacfc5d055d21461ed1c5d # v5.0.0
|
||||
uses: actions/setup-dotnet@2016bd2012dba4e32de620c46fe006a3ac9f0602 # v5.0.1
|
||||
with:
|
||||
dotnet-version: ${{ steps.detect-dotnet-version.outputs.detected-version }}
|
||||
cache: true
|
||||
cache-dependency-path: '**/packages.lock.json'
|
||||
|
||||
- name: Install dotnet-format
|
||||
shell: bash
|
||||
shell: sh
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
dotnet tool install --global dotnet-format --version 7.0.1
|
||||
|
||||
- name: Run dotnet-format
|
||||
id: dotnet-format
|
||||
shell: bash
|
||||
shell: sh
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
# Initialize counters
|
||||
errors_count=0
|
||||
@@ -112,6 +206,6 @@ runs:
|
||||
fi
|
||||
|
||||
- name: Upload SARIF Report
|
||||
uses: github/codeql-action/upload-sarif@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
|
||||
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
with:
|
||||
sarif_file: dotnet-format.sarif
|
||||
|
||||
@@ -8,11 +8,12 @@ Publishes a C# project to GitHub Packages.
|
||||
|
||||
### Inputs
|
||||
|
||||
| name | description | required | default |
|
||||
|------------------|----------------------------------------------------|----------|-------------|
|
||||
| `dotnet-version` | <p>Version of .NET SDK to use.</p> | `false` | `""` |
|
||||
| `namespace` | <p>GitHub namespace for the package.</p> | `true` | `ivuorinen` |
|
||||
| `token` | <p>GitHub token with package write permissions</p> | `false` | `""` |
|
||||
| name | description | required | default |
|
||||
|------------------|--------------------------------------------------------------------|----------|-------------|
|
||||
| `dotnet-version` | <p>Version of .NET SDK to use.</p> | `false` | `""` |
|
||||
| `namespace` | <p>GitHub namespace for the package.</p> | `true` | `ivuorinen` |
|
||||
| `token` | <p>GitHub token with package write permissions</p> | `false` | `""` |
|
||||
| `max-retries` | <p>Maximum number of retry attempts for dependency restoration</p> | `false` | `3` |
|
||||
|
||||
### Outputs
|
||||
|
||||
@@ -48,4 +49,10 @@ This action is a `composite` action.
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
|
||||
max-retries:
|
||||
# Maximum number of retry attempts for dependency restoration
|
||||
#
|
||||
# Required: false
|
||||
# Default: 3
|
||||
```
|
||||
|
||||
@@ -22,6 +22,10 @@ inputs:
|
||||
token:
|
||||
description: 'GitHub token with package write permissions'
|
||||
required: false
|
||||
max-retries:
|
||||
description: 'Maximum number of retry attempts for dependency restoration'
|
||||
required: false
|
||||
default: '3'
|
||||
|
||||
outputs:
|
||||
publish_status:
|
||||
@@ -38,7 +42,7 @@ runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Mask Secrets
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
API_KEY: ${{ inputs.token || github.token }}
|
||||
run: |
|
||||
@@ -51,7 +55,7 @@ runs:
|
||||
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
uses: ivuorinen/actions/validate-inputs@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
|
||||
with:
|
||||
action-type: 'csharp-publish'
|
||||
token: ${{ inputs.token }}
|
||||
@@ -60,57 +64,138 @@ runs:
|
||||
|
||||
- name: Detect .NET SDK Version
|
||||
id: detect-dotnet-version
|
||||
uses: ivuorinen/actions/language-version-detect@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
with:
|
||||
language: 'dotnet'
|
||||
default-version: '7.0'
|
||||
shell: sh
|
||||
env:
|
||||
DEFAULT_VERSION: '7.0'
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
# Function to validate version format
|
||||
validate_version() {
|
||||
version=$1
|
||||
case "$version" in
|
||||
[0-9]* | [0-9]*\.[0-9]* | [0-9]*\.[0-9]*\.[0-9]*)
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Function to clean version string
|
||||
clean_version() {
|
||||
printf '%s' "$1" | sed 's/^[vV]//' | tr -d ' \n\r'
|
||||
}
|
||||
|
||||
detected_version=""
|
||||
|
||||
# Parse .tool-versions file
|
||||
if [ -f .tool-versions ]; then
|
||||
echo "Checking .tool-versions for dotnet..." >&2
|
||||
version=$(awk '/^dotnet[[:space:]]/ {gsub(/#.*/, ""); print $2; exit}' .tool-versions 2>/dev/null || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found .NET version in .tool-versions: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse Dockerfile
|
||||
if [ -z "$detected_version" ] && [ -f Dockerfile ]; then
|
||||
echo "Checking Dockerfile for dotnet..." >&2
|
||||
version=$(grep -iF "FROM" Dockerfile | grep -F "dotnet:" | head -1 | \
|
||||
sed -n -E "s/.*dotnet:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found .NET version in Dockerfile: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse devcontainer.json
|
||||
if [ -z "$detected_version" ] && [ -f .devcontainer/devcontainer.json ]; then
|
||||
echo "Checking devcontainer.json for dotnet..." >&2
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
version=$(jq -r '.image // empty' .devcontainer/devcontainer.json 2>/dev/null | sed -n -E "s/.*dotnet:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found .NET version in devcontainer: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "jq not found; skipping devcontainer.json parsing" >&2
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse global.json
|
||||
if [ -z "$detected_version" ] && [ -f global.json ]; then
|
||||
echo "Checking global.json..." >&2
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
version=$(jq -r '.sdk.version // empty' global.json 2>/dev/null || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found .NET version in global.json: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "jq not found; skipping global.json parsing" >&2
|
||||
fi
|
||||
fi
|
||||
|
||||
# Use default version if nothing detected
|
||||
if [ -z "$detected_version" ]; then
|
||||
detected_version="$DEFAULT_VERSION"
|
||||
echo "Using default .NET version: $detected_version" >&2
|
||||
fi
|
||||
|
||||
# Set output
|
||||
printf 'detected-version=%s\n' "$detected_version" >> "$GITHUB_OUTPUT"
|
||||
echo "Final detected .NET version: $detected_version" >&2
|
||||
|
||||
- name: Setup .NET SDK
|
||||
uses: actions/setup-dotnet@d4c94342e560b34958eacfc5d055d21461ed1c5d # v5.0.0
|
||||
uses: actions/setup-dotnet@2016bd2012dba4e32de620c46fe006a3ac9f0602 # v5.0.1
|
||||
with:
|
||||
dotnet-version: ${{ inputs.dotnet-version || steps.detect-dotnet-version.outputs.detected-version }}
|
||||
|
||||
- name: Cache NuGet packages
|
||||
id: cache-nuget
|
||||
uses: ivuorinen/actions/common-cache@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
with:
|
||||
type: 'nuget'
|
||||
paths: '~/.nuget/packages'
|
||||
key-files: '**/*.csproj,**/*.props,**/*.targets'
|
||||
key-prefix: 'csharp-publish'
|
||||
cache: true
|
||||
cache-dependency-path: '**/packages.lock.json'
|
||||
|
||||
- name: Restore Dependencies
|
||||
shell: bash
|
||||
env:
|
||||
CACHE_HIT: ${{ steps.cache-nuget.outputs.cache-hit }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Always run dotnet restore to ensure project.assets.json is present
|
||||
if [[ "$CACHE_HIT" == 'true' ]]; then
|
||||
echo "Cache hit - running fast dotnet restore"
|
||||
fi
|
||||
dotnet restore
|
||||
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3.0.4
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: ${{ inputs.max-retries }}
|
||||
command: |
|
||||
echo "Restoring .NET dependencies..."
|
||||
dotnet restore --verbosity normal
|
||||
|
||||
- name: Build Solution
|
||||
shell: bash
|
||||
shell: sh
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
dotnet build --configuration Release --no-restore
|
||||
|
||||
- name: Pack Solution
|
||||
shell: bash
|
||||
shell: sh
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
dotnet pack --configuration Release --no-build --no-restore --output ./artifacts
|
||||
|
||||
- name: Extract Package Version
|
||||
id: extract-version
|
||||
shell: bash
|
||||
shell: sh
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
# Find the newest .nupkg file by modification time and extract version
|
||||
PACKAGE_FILE=$(find ./artifacts -name "*.nupkg" -type f -printf '%T@ %p\n' | sort -rn | head -n 1 | cut -d' ' -f2-)
|
||||
@@ -126,12 +211,12 @@ runs:
|
||||
|
||||
- name: Publish Package
|
||||
id: publish-package
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
API_KEY: ${{ inputs.token || github.token }}
|
||||
NAMESPACE: ${{ inputs.namespace }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
PACKAGE_URL="https://github.com/$NAMESPACE/packages/nuget"
|
||||
printf '%s\n' "package_url=$PACKAGE_URL" >> "$GITHUB_OUTPUT"
|
||||
@@ -156,7 +241,7 @@ runs:
|
||||
- name: Set publish status output
|
||||
if: always()
|
||||
id: set-status
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
PUBLISH_STATUS: ${{ steps.publish-package.outcome == 'success' && 'success' || 'failure' }}
|
||||
run: |-
|
||||
|
||||
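The composite action above now detects the .NET SDK version inline instead of calling the shared language-version-detect action: it checks .tool-versions, then a dotnet: base image in the Dockerfile, then .devcontainer/devcontainer.json, then global.json, and falls back to the default. A rough Python sketch of that precedence, for reference only (the devcontainer.json branch is omitted for brevity; the function names and regex are illustrative, not part of the action):

```python
import json
import re
from pathlib import Path

DEFAULT_VERSION = "7.0"
VERSION_RE = re.compile(r"\d+(\.\d+){0,2}")


def clean(version: str) -> str:
    # Strip a leading v/V and surrounding whitespace, as the shell helper does
    return version.strip().lstrip("vV")


def from_tool_versions(path: Path = Path(".tool-versions")) -> str | None:
    if not path.is_file():
        return None
    for line in path.read_text().splitlines():
        fields = line.split("#", 1)[0].split()  # drop comments, split on whitespace
        if len(fields) >= 2 and fields[0] == "dotnet":
            return clean(fields[1])
    return None


def from_dockerfile(path: Path = Path("Dockerfile")) -> str | None:
    if not path.is_file():
        return None
    for line in path.read_text().splitlines():
        if line.upper().startswith("FROM") and "dotnet:" in line:
            match = re.search(r"dotnet:(\d+(?:\.\d+)*)", line)
            if match:
                return clean(match.group(1))
    return None


def from_global_json(path: Path = Path("global.json")) -> str | None:
    if not path.is_file():
        return None
    try:
        data = json.loads(path.read_text())
    except json.JSONDecodeError:
        return None
    version = (data.get("sdk") or {}).get("version", "")
    return clean(version) or None


def detect_dotnet_version() -> str:
    # Same precedence as the composite step: first source with a valid-looking version wins
    for source in (from_tool_versions, from_dockerfile, from_global_json):
        version = source()
        if version and VERSION_RE.fullmatch(version):
            return version
    return DEFAULT_VERSION


if __name__ == "__main__":
    print(detect_dotnet_version())
```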
@@ -2,7 +2,7 @@
# Validation rules for csharp-publish action
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
# Schema version: 1.0
# Coverage: 100% (3/3 inputs)
# Coverage: 100% (4/4 inputs)
#
# This file defines validation rules for the csharp-publish GitHub Action.
# Rules are automatically applied by validate-inputs action when this
@@ -17,15 +17,17 @@ required_inputs:
- namespace
optional_inputs:
- dotnet-version
- max-retries
- token
conventions:
dotnet-version: dotnet_version
max-retries: numeric_range_1_10
namespace: namespace_with_lookahead
token: github_token
overrides: {}
statistics:
total_inputs: 3
validated_inputs: 3
total_inputs: 4
validated_inputs: 4
skipped_inputs: 0
coverage_percentage: 100
validation_coverage: 100
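These generated rules files map each input to a named validation convention; here max-retries picks up numeric_range_1_10 and coverage moves from 3/3 to 4/4 inputs. A minimal sketch of how such a rules file could be loaded and dispatched; the convention registry and function shapes below are assumptions for illustration, not the actual validate-inputs implementation:

```python
import re

import yaml  # PyYAML, assumed available

# Illustrative convention registry; the real validate-inputs action defines its own.
CONVENTIONS = {
    "numeric_range_1_10": lambda v: v.isdigit() and 1 <= int(v) <= 10,
    "dotnet_version": lambda v: re.fullmatch(r"\d+(\.\d+){0,2}", v) is not None,
    "github_token": lambda v: bool(v) and not v.isspace(),
}


def validate(rules_path: str, inputs: dict[str, str]) -> list[str]:
    """Return a list of error messages for the given inputs."""
    with open(rules_path) as fh:
        rules = yaml.safe_load(fh)

    errors: list[str] = []
    # Required inputs must be present and non-empty
    for name in rules.get("required_inputs", []):
        if not inputs.get(name):
            errors.append(f"required input missing: {name}")

    # Apply the convention mapped to each provided input
    for name, convention in rules.get("conventions", {}).items():
        value = inputs.get(name)
        check = CONVENTIONS.get(convention)
        if value and check and not check(value):
            errors.append(f"{name}={value!r} failed convention {convention}")
    return errors


if __name__ == "__main__":
    print(validate("validation-rules.yml", {"namespace": "ivuorinen", "max-retries": "3"}))
```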
@@ -65,35 +65,24 @@ class CustomValidator(BaseValidator):
|
||||
|
||||
# Validate image name
|
||||
if inputs.get("image-name"):
|
||||
result = self.docker_validator.validate_image_name(inputs["image-name"], "image-name")
|
||||
# Propagate errors from docker validator
|
||||
for error in self.docker_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.docker_validator.clear_errors()
|
||||
valid &= result
|
||||
valid &= self.validate_with(
|
||||
self.docker_validator, "validate_image_name", inputs["image-name"], "image-name"
|
||||
)
|
||||
|
||||
# Validate tag (singular - as per action.yml)
|
||||
if inputs.get("tag"):
|
||||
result = self.docker_validator.validate_docker_tag(inputs["tag"], "tag")
|
||||
# Propagate errors
|
||||
for error in self.docker_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.docker_validator.clear_errors()
|
||||
valid &= result
|
||||
valid &= self.validate_with(
|
||||
self.docker_validator, "validate_docker_tag", inputs["tag"], "tag"
|
||||
)
|
||||
|
||||
# Validate architectures/platforms
|
||||
if inputs.get("architectures"):
|
||||
result = self.docker_validator.validate_architectures(
|
||||
inputs["architectures"], "architectures"
|
||||
valid &= self.validate_with(
|
||||
self.docker_validator,
|
||||
"validate_architectures",
|
||||
inputs["architectures"],
|
||||
"architectures",
|
||||
)
|
||||
# Propagate errors
|
||||
for error in self.docker_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.docker_validator.clear_errors()
|
||||
valid &= result
|
||||
|
||||
# Validate build arguments
|
||||
if inputs.get("build-args"):
|
||||
@@ -101,12 +90,9 @@ class CustomValidator(BaseValidator):
|
||||
|
||||
# Validate push flag
|
||||
if inputs.get("push"):
|
||||
result = self.boolean_validator.validate_optional_boolean(inputs["push"], "push")
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
valid &= result
|
||||
valid &= self.validate_with(
|
||||
self.boolean_validator, "validate_optional_boolean", inputs["push"], "push"
|
||||
)
|
||||
|
||||
# Validate cache settings
|
||||
if inputs.get("cache-from"):
|
||||
@@ -117,22 +103,35 @@ class CustomValidator(BaseValidator):
|
||||
|
||||
# Validate cache-mode
|
||||
if inputs.get("cache-mode"):
|
||||
valid &= self.validate_cache_mode(inputs["cache-mode"])
|
||||
valid &= self.validate_enum(
|
||||
inputs["cache-mode"],
|
||||
"cache-mode",
|
||||
["min", "max", "inline"],
|
||||
case_sensitive=True,
|
||||
)
|
||||
|
||||
# Validate buildx-version
|
||||
if inputs.get("buildx-version"):
|
||||
valid &= self.validate_buildx_version(inputs["buildx-version"])
|
||||
version = inputs["buildx-version"]
|
||||
# Allow 'latest' as special value
|
||||
if version != "latest" and not self.is_github_expression(version):
|
||||
valid &= self.validate_with(
|
||||
self.version_validator,
|
||||
"validate_semantic_version",
|
||||
version,
|
||||
"buildx-version",
|
||||
)
|
||||
|
||||
# Validate parallel-builds
|
||||
if inputs.get("parallel-builds"):
|
||||
result = self.numeric_validator.validate_numeric_range(
|
||||
inputs["parallel-builds"], min_val=0, max_val=16, name="parallel-builds"
|
||||
valid &= self.validate_with(
|
||||
self.numeric_validator,
|
||||
"validate_numeric_range",
|
||||
inputs["parallel-builds"],
|
||||
min_val=0,
|
||||
max_val=16,
|
||||
name="parallel-builds",
|
||||
)
|
||||
for error in self.numeric_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.numeric_validator.clear_errors()
|
||||
valid &= result
|
||||
|
||||
# Validate boolean flags
|
||||
for bool_input in [
|
||||
@@ -144,29 +143,32 @@ class CustomValidator(BaseValidator):
|
||||
"auto-detect-platforms",
|
||||
]:
|
||||
if inputs.get(bool_input):
|
||||
result = self.boolean_validator.validate_optional_boolean(
|
||||
inputs[bool_input], bool_input
|
||||
valid &= self.validate_with(
|
||||
self.boolean_validator,
|
||||
"validate_optional_boolean",
|
||||
inputs[bool_input],
|
||||
bool_input,
|
||||
)
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
valid &= result
|
||||
|
||||
# Validate sbom-format
|
||||
if inputs.get("sbom-format"):
|
||||
valid &= self.validate_sbom_format(inputs["sbom-format"])
|
||||
valid &= self.validate_enum(
|
||||
inputs["sbom-format"],
|
||||
"sbom-format",
|
||||
["spdx-json", "cyclonedx-json", "syft-json"],
|
||||
case_sensitive=True,
|
||||
)
|
||||
|
||||
# Validate max-retries
|
||||
if inputs.get("max-retries"):
|
||||
result = self.numeric_validator.validate_numeric_range(
|
||||
inputs["max-retries"], min_val=0, max_val=10, name="max-retries"
|
||||
valid &= self.validate_with(
|
||||
self.numeric_validator,
|
||||
"validate_numeric_range",
|
||||
inputs["max-retries"],
|
||||
min_val=0,
|
||||
max_val=10,
|
||||
name="max-retries",
|
||||
)
|
||||
for error in self.numeric_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.numeric_validator.clear_errors()
|
||||
valid &= result
|
||||
|
||||
return valid
|
||||
|
||||
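The refactor in this hunk collapses the repeated call-propagate-clear pattern (run a sub-validator, copy its errors into self.errors, clear the sub-validator, AND the result into valid) into a single validate_with call. A hedged sketch of what such a helper might look like on the base class; the real BaseValidator in validate-inputs may differ:

```python
class BaseValidator:
    def __init__(self) -> None:
        self.errors: list[str] = []

    def add_error(self, message: str) -> None:
        # Deduplicate, like the old inline propagation loop did
        if message not in self.errors:
            self.errors.append(message)

    def clear_errors(self) -> None:
        self.errors.clear()

    def validate_with(self, validator: "BaseValidator", method: str, *args, **kwargs) -> bool:
        """Run a named check on a sub-validator and absorb its errors.

        Mirrors the old inline pattern: call the method, copy any errors it
        recorded into this validator, reset the sub-validator, and return the
        boolean result so callers can AND it into `valid`.
        """
        result = getattr(validator, method)(*args, **kwargs)
        for error in validator.errors:
            self.add_error(error)
        validator.clear_errors()
        return bool(result)
```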
@@ -209,19 +211,11 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(dockerfile):
|
||||
return True
|
||||
|
||||
# Use file validator for path validation
|
||||
result = self.file_validator.validate_file_path(dockerfile, "dockerfile")
|
||||
# Propagate errors
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.file_validator, "validate_file_path", dockerfile, "dockerfile"
|
||||
)
|
||||
|
||||
def validate_context(self, context: str) -> bool:
|
||||
"""Validate build context path.
|
||||
@@ -245,10 +239,9 @@ class CustomValidator(BaseValidator):
|
||||
# We allow path traversal for context as Docker needs to access parent directories
|
||||
# Only check for command injection patterns like ; | ` $()
|
||||
dangerous_chars = [";", "|", "`", "$(", "&&", "||"]
|
||||
for char in dangerous_chars:
|
||||
if char in context:
|
||||
self.add_error(f"Command injection detected in context: {context}")
|
||||
return False
|
||||
if any(char in context for char in dangerous_chars):
|
||||
self.add_error(f"Command injection detected in context: {context}")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@@ -261,15 +254,9 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Use docker validator for architectures
|
||||
result = self.docker_validator.validate_architectures(platforms, "platforms")
|
||||
# Propagate errors
|
||||
for error in self.docker_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.docker_validator.clear_errors()
|
||||
|
||||
return result
|
||||
return self.validate_with(
|
||||
self.docker_validator, "validate_architectures", platforms, "platforms"
|
||||
)
|
||||
|
||||
def validate_build_args(self, build_args: str) -> bool:
|
||||
"""Validate build arguments.
|
||||
@@ -353,78 +340,3 @@ class CustomValidator(BaseValidator):
|
||||
|
||||
# Check for security issues
|
||||
return self.validate_security_patterns(cache_to, "cache-to")
|
||||
|
||||
def validate_cache_mode(self, cache_mode: str) -> bool:
|
||||
"""Validate cache mode.
|
||||
|
||||
Args:
|
||||
cache_mode: Cache mode value
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(cache_mode):
|
||||
return True
|
||||
|
||||
# Valid cache modes
|
||||
valid_modes = ["min", "max", "inline"]
|
||||
if cache_mode.lower() not in valid_modes:
|
||||
self.add_error(f"Invalid cache-mode: {cache_mode}. Must be one of: min, max, inline")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def validate_buildx_version(self, version: str) -> bool:
|
||||
"""Validate buildx version.
|
||||
|
||||
Args:
|
||||
version: Buildx version
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(version):
|
||||
return True
|
||||
|
||||
# Allow 'latest'
|
||||
if version == "latest":
|
||||
return True
|
||||
|
||||
# Check for security issues (semicolon injection etc)
|
||||
if not self.validate_security_patterns(version, "buildx-version"):
|
||||
return False
|
||||
|
||||
# Basic version format validation (e.g., 0.12.0, v0.12.0)
|
||||
import re
|
||||
|
||||
if not re.match(r"^v?\d+\.\d+(\.\d+)?$", version):
|
||||
self.add_error(f"Invalid buildx-version format: {version}")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def validate_sbom_format(self, sbom_format: str) -> bool:
|
||||
"""Validate SBOM format.
|
||||
|
||||
Args:
|
||||
sbom_format: SBOM format value
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(sbom_format):
|
||||
return True
|
||||
|
||||
# Valid SBOM formats
|
||||
valid_formats = ["spdx-json", "cyclonedx-json", "syft-json"]
|
||||
if sbom_format.lower() not in valid_formats:
|
||||
self.add_error(
|
||||
f"Invalid sbom-format: {sbom_format}. "
|
||||
"Must be one of: spdx-json, cyclonedx-json, syft-json"
|
||||
)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
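In the same file, validate_cache_mode and validate_sbom_format are folded into a generic validate_enum, and validate_buildx_version is replaced by the semantic-version validator plus an explicit 'latest' escape hatch. A minimal sketch of an enum check consistent with the calls above; treat it as an assumption about the helper's shape rather than its actual code:

```python
import re


class EnumValidatorSketch:
    def __init__(self) -> None:
        self.errors: list[str] = []

    def add_error(self, message: str) -> None:
        self.errors.append(message)

    def is_github_expression(self, value: str) -> bool:
        # Values like ${{ inputs.cache-mode }} are resolved later by the runner
        return bool(re.fullmatch(r"\$\{\{.*\}\}", value.strip()))

    def validate_enum(
        self,
        value: str,
        name: str,
        allowed: list[str],
        case_sensitive: bool = False,
    ) -> bool:
        """Accept GitHub expressions; otherwise require membership in `allowed`."""
        if self.is_github_expression(value):
            return True
        candidate = value if case_sensitive else value.lower()
        choices = allowed if case_sensitive else [item.lower() for item in allowed]
        if candidate not in choices:
            self.add_error(f"Invalid {name}: {value}. Must be one of: {', '.join(allowed)}")
            return False
        return True


# Example: the cache-mode and sbom-format checks above reduce to
# validator.validate_enum("max", "cache-mode", ["min", "max", "inline"], case_sensitive=True)
```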
@@ -147,7 +147,7 @@ runs:
|
||||
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
uses: ivuorinen/actions/validate-inputs@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
|
||||
with:
|
||||
action-type: 'docker-build'
|
||||
image-name: ${{ inputs.image-name }}
|
||||
@@ -159,7 +159,7 @@ runs:
|
||||
parallel-builds: ${{ inputs.parallel-builds }}
|
||||
|
||||
- name: Check Dockerfile Exists
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
DOCKERFILE: ${{ inputs.dockerfile }}
|
||||
run: |
|
||||
@@ -175,7 +175,7 @@ runs:
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
with:
|
||||
version: ${{ inputs.buildx-version }}
|
||||
platforms: ${{ inputs.architectures }}
|
||||
@@ -186,12 +186,12 @@ runs:
|
||||
|
||||
- name: Detect Available Platforms
|
||||
id: detect-platforms
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
ARCHITECTURES: ${{ inputs.architectures }}
|
||||
AUTO_DETECT: ${{ inputs.auto-detect-platforms }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
# When auto-detect is enabled, try to detect available platforms
|
||||
if [ "$AUTO_DETECT" = "true" ]; then
|
||||
@@ -212,11 +212,11 @@ runs:
|
||||
|
||||
- name: Determine Image Name
|
||||
id: image-name
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
IMAGE_NAME: ${{ inputs.image-name }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
if [ -z "$IMAGE_NAME" ]; then
|
||||
repo_name=$(basename "${GITHUB_REPOSITORY}")
|
||||
@@ -227,16 +227,23 @@ runs:
|
||||
|
||||
- name: Parse Build Arguments
|
||||
id: build-args
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
BUILD_ARGS_INPUT: ${{ inputs.build-args }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
args=""
|
||||
if [ -n "$BUILD_ARGS_INPUT" ]; then
|
||||
IFS=',' read -ra BUILD_ARGS <<< "$BUILD_ARGS_INPUT"
|
||||
for arg in "${BUILD_ARGS[@]}"; do
|
||||
# Save IFS and use comma as delimiter
|
||||
old_ifs="$IFS"
|
||||
IFS=','
|
||||
# Use set -- to load comma-separated values into positional parameters
|
||||
set -- $BUILD_ARGS_INPUT
|
||||
IFS="$old_ifs"
|
||||
|
||||
# Iterate through positional parameters
|
||||
for arg; do
|
||||
args="$args --build-arg $arg"
|
||||
done
|
||||
fi
|
||||
@@ -244,16 +251,23 @@ runs:
|
||||
|
||||
- name: Parse Build Contexts
|
||||
id: build-contexts
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
BUILD_CONTEXTS: ${{ inputs.build-contexts }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
contexts=""
|
||||
if [ -n "$BUILD_CONTEXTS" ]; then
|
||||
IFS=',' read -ra CONTEXTS <<< "$BUILD_CONTEXTS"
|
||||
for ctx in "${CONTEXTS[@]}"; do
|
||||
# Save IFS and use comma as delimiter
|
||||
old_ifs="$IFS"
|
||||
IFS=','
|
||||
# Use set -- to load comma-separated values into positional parameters
|
||||
set -- $BUILD_CONTEXTS
|
||||
IFS="$old_ifs"
|
||||
|
||||
# Iterate through positional parameters
|
||||
for ctx; do
|
||||
contexts="$contexts --build-context $ctx"
|
||||
done
|
||||
fi
|
||||
@@ -261,36 +275,46 @@ runs:
|
||||
|
||||
- name: Parse Secrets
|
||||
id: secrets
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
INPUT_SECRETS: ${{ inputs.secrets }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
secrets=""
|
||||
if [ -n "$INPUT_SECRETS" ]; then
|
||||
IFS=',' read -ra SECRETS <<< "$INPUT_SECRETS"
|
||||
for secret in "${SECRETS[@]}"; do
|
||||
# Save IFS and use comma as delimiter
|
||||
old_ifs="$IFS"
|
||||
IFS=','
|
||||
# Use set -- to load comma-separated values into positional parameters
|
||||
set -- $INPUT_SECRETS
|
||||
IFS="$old_ifs"
|
||||
|
||||
# Iterate through positional parameters
|
||||
for secret; do
|
||||
# Trim whitespace
|
||||
secret=$(echo "$secret" | xargs)
|
||||
|
||||
if [[ "$secret" == *"="* ]]; then
|
||||
# Parse id=src format
|
||||
id="${secret%%=*}"
|
||||
src="${secret#*=}"
|
||||
case "$secret" in
|
||||
*=*)
|
||||
# Parse id=src format
|
||||
id="${secret%%=*}"
|
||||
src="${secret#*=}"
|
||||
|
||||
# Validate id and src are not empty
|
||||
if [[ -z "$id" || -z "$src" ]]; then
|
||||
echo "::error::Invalid secret format: '$secret'. Expected 'id=src' where both id and src are non-empty"
|
||||
# Validate id and src are not empty
|
||||
if [ -z "$id" ] || [ -z "$src" ]; then
|
||||
echo "::error::Invalid secret format: '$secret'. Expected 'id=src' where both id and src are non-empty"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
secrets="$secrets --secret id=$id,src=$src"
|
||||
;;
|
||||
*)
|
||||
# Handle legacy format - treat as id only (error for now)
|
||||
echo "::error::Invalid secret format: '$secret'. Expected 'id=src' format for Buildx compatibility"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
secrets="$secrets --secret id=$id,src=$src"
|
||||
else
|
||||
# Handle legacy format - treat as id only (error for now)
|
||||
echo "::error::Invalid secret format: '$secret'. Expected 'id=src' format for Buildx compatibility"
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
done
|
||||
fi
|
||||
echo "secrets=${secrets}" >> $GITHUB_OUTPUT
|
||||
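The bash-only `IFS=',' read -ra` loops in the build-args, build-contexts and secrets steps are rewritten with a saved IFS and `set --` so the scripts stay POSIX-sh compatible, and each secret must now be `id=src`. The same parsing and validation, sketched in Python for reference (the flag layout is taken from the script above; anything else is illustrative):

```python
def parse_secret_flags(raw: str) -> str:
    """Turn 'id1=path1, id2=path2' into '--secret id=id1,src=path1 ...'."""
    flags: list[str] = []
    for item in raw.split(","):
        item = item.strip()  # the shell version trims each entry with xargs
        if not item:
            continue
        if "=" not in item:
            raise ValueError(f"Invalid secret format: '{item}'. Expected 'id=src'")
        secret_id, src = item.split("=", 1)
        if not secret_id or not src:
            raise ValueError(f"Invalid secret format: '{item}'. Expected non-empty id and src")
        flags.append(f"--secret id={secret_id},src={src}")
    return " ".join(flags)


def parse_build_arg_flags(raw: str) -> str:
    """Turn 'FOO=1,BAR=2' into '--build-arg FOO=1 --build-arg BAR=2'."""
    return " ".join(f"--build-arg {arg}" for arg in raw.split(",") if arg)


# parse_secret_flags("token=./secrets/token.txt, npmrc=./.npmrc")
# -> '--secret id=token,src=./secrets/token.txt --secret id=npmrc,src=./.npmrc'
```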
@@ -305,7 +329,7 @@ runs:
|
||||
|
||||
- name: Set up Build Cache
|
||||
id: cache
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
CACHE_IMPORT: ${{ inputs.cache-import }}
|
||||
CACHE_FROM: ${{ inputs.cache-from }}
|
||||
@@ -314,7 +338,7 @@ runs:
|
||||
INPUT_TOKEN: ${{ inputs.token }}
|
||||
CACHE_MODE: ${{ inputs.cache-mode }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
# Use provided token or fall back to GITHUB_TOKEN
|
||||
TOKEN="${INPUT_TOKEN:-${GITHUB_TOKEN:-}}"
|
||||
@@ -335,7 +359,7 @@ runs:
|
||||
fi
|
||||
|
||||
# Registry cache configuration for better performance (only if authenticated)
|
||||
if [ "$PUSH" == "true" ] || [ -n "$TOKEN" ]; then
|
||||
if [ "$PUSH" = "true" ] || [ -n "$TOKEN" ]; then
|
||||
normalized_repo=$(echo "${GITHUB_REPOSITORY}" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9._\/-]/-/g')
|
||||
registry_cache_ref="ghcr.io/${normalized_repo}/cache:latest"
|
||||
cache_from="$cache_from --cache-from type=registry,ref=$registry_cache_ref"
|
||||
@@ -349,16 +373,21 @@ runs:
|
||||
|
||||
# Also include local cache as fallback
|
||||
cache_from="$cache_from --cache-from type=local,src=/tmp/.buildx-cache"
|
||||
if [[ "$cache_to" != *"type=local"* ]]; then
|
||||
cache_to="$cache_to --cache-to type=local,dest=/tmp/.buildx-cache-new,mode=${cache_mode}"
|
||||
fi
|
||||
case "$cache_to" in
|
||||
*"type=local"*)
|
||||
# Already has local cache, don't add
|
||||
;;
|
||||
*)
|
||||
cache_to="$cache_to --cache-to type=local,dest=/tmp/.buildx-cache-new,mode=${cache_mode}"
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "from=${cache_from}" >> $GITHUB_OUTPUT
|
||||
echo "to=${cache_to}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Build Multi-Architecture Docker Image
|
||||
id: build
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
AUTO_DETECT_PLATFORMS: ${{ inputs.auto-detect-platforms }}
|
||||
DETECTED_PLATFORMS: ${{ steps.detect-platforms.outputs.platforms }}
|
||||
@@ -378,7 +407,7 @@ runs:
|
||||
DOCKERFILE: ${{ inputs.dockerfile }}
|
||||
CONTEXT: ${{ inputs.context }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
# Track build start time
|
||||
build_start=$(date +%s)
|
||||
@@ -518,9 +547,9 @@ runs:
|
||||
- name: Process Scan Results
|
||||
id: scan-output
|
||||
if: inputs.scan-image == 'true' && inputs.dry-run != 'true'
|
||||
shell: bash
|
||||
shell: sh
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
# Read and format scan results for output
|
||||
scan_results=$(cat trivy-results.json | jq -c '.')
|
||||
@@ -539,12 +568,12 @@ runs:
|
||||
- name: Sign Image
|
||||
id: sign
|
||||
if: inputs.sign-image == 'true' && inputs.push == 'true' && inputs.dry-run != 'true'
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
IMAGE_NAME: ${{ steps.image-name.outputs.name }}
|
||||
IMAGE_TAG: ${{ inputs.tag }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
# Sign the image (using keyless signing with OIDC)
|
||||
export COSIGN_EXPERIMENTAL=1
|
||||
@@ -555,13 +584,13 @@ runs:
|
||||
- name: Verify Build
|
||||
id: verify
|
||||
if: inputs.dry-run != 'true'
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
PUSH: ${{ inputs.push }}
|
||||
IMAGE_NAME: ${{ steps.image-name.outputs.name }}
|
||||
IMAGE_TAG: ${{ inputs.tag }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
# Verify image exists
|
||||
if [ "$PUSH" == "true" ]; then
|
||||
@@ -584,9 +613,9 @@ runs:
|
||||
|
||||
- name: Cleanup
|
||||
if: always()
|
||||
shell: bash
|
||||
shell: sh
|
||||
run: |-
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
# Cleanup temporary files
|
||||
rm -rf /tmp/.buildx-cache*
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
# Validation rules for docker-build action
|
||||
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
|
||||
# Schema version: 1.0
|
||||
# Coverage: 63% (17/27 inputs)
|
||||
# Coverage: 100% (27/27 inputs)
|
||||
#
|
||||
# This file defines validation rules for the docker-build GitHub Action.
|
||||
# Rules are automatically applied by validate-inputs action when this
|
||||
@@ -45,17 +45,27 @@ optional_inputs:
|
||||
conventions:
|
||||
architectures: docker_architectures
|
||||
auto-detect-platforms: docker_architectures
|
||||
build-args: key_value_list
|
||||
build-contexts: key_value_list
|
||||
buildkit-version: semantic_version
|
||||
buildx-version: semantic_version
|
||||
cache-mode: boolean
|
||||
cache-export: cache_config
|
||||
cache-from: cache_config
|
||||
cache-import: cache_config
|
||||
cache-mode: cache_mode
|
||||
context: file_path
|
||||
dockerfile: file_path
|
||||
dry-run: boolean
|
||||
image-name: docker_image_name
|
||||
max-retries: numeric_range_1_10
|
||||
network: network_mode
|
||||
parallel-builds: numeric_range_0_16
|
||||
platform-build-args: json_format
|
||||
platform-fallback: docker_architectures
|
||||
sbom-format: report_format
|
||||
push: boolean
|
||||
sbom-format: sbom_format
|
||||
scan-image: boolean
|
||||
secrets: key_value_list
|
||||
sign-image: boolean
|
||||
tag: docker_tag
|
||||
token: github_token
|
||||
@@ -65,12 +75,12 @@ overrides:
|
||||
sbom-format: sbom_format
|
||||
statistics:
|
||||
total_inputs: 27
|
||||
validated_inputs: 17
|
||||
validated_inputs: 27
|
||||
skipped_inputs: 0
|
||||
coverage_percentage: 63
|
||||
validation_coverage: 63
|
||||
coverage_percentage: 100
|
||||
validation_coverage: 100
|
||||
auto_detected: true
|
||||
manual_review_required: true
|
||||
manual_review_required: false
|
||||
quality_indicators:
|
||||
has_required_inputs: true
|
||||
has_token_validation: true
|
||||
|
||||
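The statistics block records how many declared inputs have a convention attached; moving from 17/27 to 27/27 is what lifts coverage_percentage to 100 and lets manual_review_required drop to false. The percentage looks like a nearest-integer ratio (17/27 reported as 63, 8/11 as 73), sketched here as an assumption about the generator:

```python
def coverage_percentage(validated: int, total: int) -> int:
    """17/27 -> 63, 8/11 -> 73, 27/27 -> 100 (nearest whole percent)."""
    return round(validated * 100 / total) if total else 0
```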
@@ -11,6 +11,7 @@ This validator handles Docker publish-specific validation including:
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
import re
|
||||
import sys
|
||||
|
||||
# Add validate-inputs directory to path to import validators
|
||||
@@ -58,12 +59,9 @@ class CustomValidator(BaseValidator):
|
||||
|
||||
# Validate platforms
|
||||
if inputs.get("platforms"):
|
||||
result = self.docker_validator.validate_architectures(inputs["platforms"], "platforms")
|
||||
for error in self.docker_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.docker_validator.clear_errors()
|
||||
valid &= result
|
||||
valid &= self.validate_with(
|
||||
self.docker_validator, "validate_architectures", inputs["platforms"], "platforms"
|
||||
)
|
||||
|
||||
# Validate boolean flags
|
||||
for bool_input in [
|
||||
@@ -74,18 +72,18 @@ class CustomValidator(BaseValidator):
|
||||
"verbose",
|
||||
]:
|
||||
if inputs.get(bool_input):
|
||||
result = self.boolean_validator.validate_optional_boolean(
|
||||
inputs[bool_input], bool_input
|
||||
valid &= self.validate_with(
|
||||
self.boolean_validator,
|
||||
"validate_optional_boolean",
|
||||
inputs[bool_input],
|
||||
bool_input,
|
||||
)
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
valid &= result
|
||||
|
||||
# Validate cache-mode
|
||||
if inputs.get("cache-mode"):
|
||||
valid &= self.validate_cache_mode(inputs["cache-mode"])
|
||||
valid &= self.validate_enum(
|
||||
inputs["cache-mode"], "cache-mode", ["min", "max", "inline"]
|
||||
)
|
||||
|
||||
# Validate buildx-version
|
||||
if inputs.get("buildx-version"):
|
||||
@@ -96,24 +94,18 @@ class CustomValidator(BaseValidator):
|
||||
valid &= self.validate_username(inputs["dockerhub-username"])
|
||||
|
||||
if inputs.get("dockerhub-password"):
|
||||
# Use token validator for password/token
|
||||
result = self.token_validator.validate_docker_token(
|
||||
inputs["dockerhub-password"], "dockerhub-password"
|
||||
valid &= self.validate_with(
|
||||
self.token_validator,
|
||||
"validate_docker_token",
|
||||
inputs["dockerhub-password"],
|
||||
"dockerhub-password",
|
||||
)
|
||||
for error in self.token_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.token_validator.clear_errors()
|
||||
valid &= result
|
||||
|
||||
# Validate github-token
|
||||
if inputs.get("github-token"):
|
||||
result = self.token_validator.validate_github_token(inputs["github-token"])
|
||||
for error in self.token_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.token_validator.clear_errors()
|
||||
valid &= result
|
||||
valid &= self.validate_with(
|
||||
self.token_validator, "validate_github_token", inputs["github-token"]
|
||||
)
|
||||
|
||||
return valid
|
||||
|
||||
@@ -156,40 +148,7 @@ class CustomValidator(BaseValidator):
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(registry):
|
||||
return True
|
||||
|
||||
# Valid registry values according to action description
|
||||
valid_registries = ["dockerhub", "github", "both"]
|
||||
if registry.lower() not in valid_registries:
|
||||
self.add_error(
|
||||
f"Invalid registry: {registry}. Must be one of: dockerhub, github, or both"
|
||||
)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def validate_cache_mode(self, cache_mode: str) -> bool:
|
||||
"""Validate cache mode.
|
||||
|
||||
Args:
|
||||
cache_mode: Cache mode value
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
# Allow GitHub Actions expressions
|
||||
if self.is_github_expression(cache_mode):
|
||||
return True
|
||||
|
||||
# Valid cache modes
|
||||
valid_modes = ["min", "max", "inline"]
|
||||
if cache_mode.lower() not in valid_modes:
|
||||
self.add_error(f"Invalid cache-mode: {cache_mode}. Must be one of: min, max, inline")
|
||||
return False
|
||||
|
||||
return True
|
||||
return self.validate_enum(registry, "registry", ["dockerhub", "github", "both"])
|
||||
|
||||
def validate_buildx_version(self, version: str) -> bool:
|
||||
"""Validate buildx version.
|
||||
@@ -213,8 +172,6 @@ class CustomValidator(BaseValidator):
|
||||
return False
|
||||
|
||||
# Basic version format validation
|
||||
import re
|
||||
|
||||
if not re.match(r"^v?\d+\.\d+(\.\d+)?$", version):
|
||||
self.add_error(f"Invalid buildx-version format: {version}")
|
||||
return False
|
||||
@@ -244,8 +201,6 @@ class CustomValidator(BaseValidator):
|
||||
return False
|
||||
|
||||
# Docker Hub username rules: lowercase letters, digits, periods, hyphens, underscores
|
||||
import re
|
||||
|
||||
if not re.match(r"^[a-z0-9._-]+$", username.lower()):
|
||||
self.add_error(f"Invalid Docker Hub username format: {username}")
|
||||
return False
|
||||
|
||||
@@ -112,7 +112,7 @@ runs:
|
||||
dockerhub|github|both)
|
||||
;;
|
||||
*)
|
||||
echo "::error::Invalid registry value. Must be 'dockerhub', 'github', or 'both'"
|
||||
printf '%s\n' "::error::Invalid registry value. Must be 'dockerhub', 'github', or 'both'"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
@@ -120,7 +120,7 @@ runs:
|
||||
# Validate Docker Hub credentials if needed
|
||||
if [ "$INPUT_REGISTRY" = "dockerhub" ] || [ "$INPUT_REGISTRY" = "both" ]; then
|
||||
if [ -z "$INPUT_DOCKERHUB_USERNAME" ] || [ -z "$INPUT_DOCKERHUB_TOKEN" ]; then
|
||||
echo "::error::Docker Hub username and token are required when publishing to Docker Hub"
|
||||
printf '%s\n' "::error::Docker Hub username and token are required when publishing to Docker Hub"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
@@ -129,49 +129,80 @@ runs:
|
||||
if [ "$INPUT_REGISTRY" = "github" ] || [ "$INPUT_REGISTRY" = "both" ]; then
|
||||
token="${INPUT_TOKEN:-${GITHUB_TOKEN:-}}"
|
||||
if [ -z "$token" ]; then
|
||||
echo "::error::GitHub token is required when publishing to GitHub Packages"
|
||||
printf '%s\n' "::error::GitHub token is required when publishing to GitHub Packages"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Validate context input for security
|
||||
INPUT_CONTEXT="${INPUT_CONTEXT:-.}"
|
||||
|
||||
case "$INPUT_CONTEXT" in
|
||||
.|./*|*/*)
|
||||
# Relative paths are allowed
|
||||
# Check for path traversal attempts
|
||||
case "$INPUT_CONTEXT" in
|
||||
*/../*|../*|*/..)
|
||||
printf '%s\n' "::error::Context path contains path traversal: '$INPUT_CONTEXT'"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
/*)
|
||||
echo "::error::Context cannot be an absolute path: '$INPUT_CONTEXT'"
|
||||
echo "::error::Use relative paths (e.g., '.', './app') to prevent code injection"
|
||||
printf '%s\n' "::error::Context cannot be an absolute path: '$INPUT_CONTEXT'"
|
||||
printf '%s\n' "::error::Use relative paths (e.g., '.', './app')"
|
||||
exit 1
|
||||
;;
|
||||
*://*)
|
||||
echo "::warning::Context is a remote URL: '$INPUT_CONTEXT'"
|
||||
echo "::warning::Ensure this URL is from a trusted source to prevent code injection"
|
||||
git://*|git@*|https://*.git|https://github.com/*|https://gitlab.com/*)
|
||||
# Allow trusted git repository URLs
|
||||
printf '%s\n' "::notice::Using git repository URL for context"
|
||||
;;
|
||||
http://*|https://*)
|
||||
printf '%s\n' "::error::Context cannot be an arbitrary HTTP URL: '$INPUT_CONTEXT'"
|
||||
printf '%s\n' "::error::Only git repository URLs are allowed for remote contexts"
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
printf '%s\n' "::error::Invalid context format: '$INPUT_CONTEXT'"
|
||||
printf '%s\n' "::error::Must be a relative path or git repository URL"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Validate dockerfile input for security
|
||||
INPUT_DOCKERFILE="${INPUT_DOCKERFILE:-Dockerfile}"
|
||||
|
||||
case "$INPUT_DOCKERFILE" in
|
||||
Dockerfile|*/Dockerfile|*.dockerfile|*/*.dockerfile)
|
||||
# Common dockerfile patterns are allowed
|
||||
# Check for path traversal attempts
|
||||
case "$INPUT_DOCKERFILE" in
|
||||
*/../*|../*|*/..)
|
||||
printf '%s\n' "::error::Dockerfile path contains path traversal: '$INPUT_DOCKERFILE'"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
/*)
|
||||
echo "::error::Dockerfile path cannot be absolute: '$INPUT_DOCKERFILE'"
|
||||
echo "::error::Use relative paths (e.g., 'Dockerfile', './docker/Dockerfile')"
|
||||
printf '%s\n' "::error::Dockerfile path cannot be absolute: '$INPUT_DOCKERFILE'"
|
||||
printf '%s\n' "::error::Use relative paths (e.g., 'Dockerfile', './docker/Dockerfile')"
|
||||
exit 1
|
||||
;;
|
||||
*://*)
|
||||
echo "::error::Dockerfile path cannot be a URL: '$INPUT_DOCKERFILE'"
|
||||
printf '%s\n' "::error::Dockerfile path cannot be a URL: '$INPUT_DOCKERFILE'"
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
printf '%s\n' "::error::Invalid Dockerfile format: '$INPUT_DOCKERFILE'"
|
||||
printf '%s\n' "::error::Must be 'Dockerfile', '*/Dockerfile', '*.dockerfile', or '*/*.dockerfile'"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "Input validation completed successfully"
|
||||
printf '%s\n' "Input validation completed successfully"
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
|
||||
- name: Determine Image Names and Tags
|
||||
id: meta
|
||||
@@ -223,25 +254,25 @@ runs:
|
||||
# Output results
|
||||
printf 'image-name=%s\n' "$base_name" >> "$GITHUB_OUTPUT"
|
||||
{
|
||||
echo 'tags<<EOF'
|
||||
echo "$tags"
|
||||
echo 'EOF'
|
||||
printf '%s\n' 'tags<<EOF'
|
||||
printf '%s\n' "$tags"
|
||||
printf '%s\n' 'EOF'
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
echo "Image name: $base_name"
|
||||
echo "Tags:"
|
||||
echo "$tags"
|
||||
printf 'Image name: %s\n' "$base_name"
|
||||
printf '%s\n' "Tags:"
|
||||
printf '%s\n' "$tags"
|
||||
|
||||
- name: Login to Docker Hub
|
||||
if: inputs.registry == 'dockerhub' || inputs.registry == 'both'
|
||||
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
username: ${{ inputs.dockerhub-username }}
|
||||
password: ${{ inputs.dockerhub-token }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
if: inputs.registry == 'github' || inputs.registry == 'both'
|
||||
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@@ -249,7 +280,7 @@ runs:
|
||||
|
||||
- name: Build and Push Docker Image
|
||||
id: build
|
||||
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ inputs.context }}
|
||||
file: ${{ inputs.dockerfile }}
|
||||
|
||||
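The case statements in the docker-publish validation step implement the policy the CodeQL suppression refers to: relative contexts are allowed but rejected on `..` traversal, absolute paths are rejected, only git-style URLs are accepted as remote contexts, and Dockerfiles must match a small set of relative patterns. A Python sketch of the same decision table, slightly stricter than the shell globs and with illustrative helper names:

```python
def is_trusted_git_url(context: str) -> bool:
    """Trusted remote contexts, matching the shell globs above."""
    return (
        context.startswith(("git://", "git@", "https://github.com/", "https://gitlab.com/"))
        or (context.startswith("https://") and context.endswith(".git"))
    )


def classify_context(context: str) -> str:
    """Return 'local' or 'git'; raise ValueError for anything the step rejects."""
    context = context or "."
    if context.startswith("/"):
        raise ValueError(f"Context cannot be an absolute path: '{context}'")
    if "://" in context or context.startswith("git@"):
        if is_trusted_git_url(context):
            return "git"
        raise ValueError(f"Only git repository URLs are allowed for remote contexts: '{context}'")
    if ".." in context.split("/"):
        raise ValueError(f"Context path contains path traversal: '{context}'")
    return "local"


def validate_dockerfile_path(path: str) -> str:
    """Relative Dockerfile paths only: 'Dockerfile', '*/Dockerfile' or '*.dockerfile'."""
    path = path or "Dockerfile"
    if path.startswith("/") or "://" in path:
        raise ValueError(f"Dockerfile path must be a relative path: '{path}'")
    if ".." in path.split("/"):
        raise ValueError(f"Dockerfile path contains path traversal: '{path}'")
    if not (path == "Dockerfile" or path.endswith("/Dockerfile") or path.endswith(".dockerfile")):
        raise ValueError(f"Invalid Dockerfile format: '{path}'")
    return path
```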
@@ -2,7 +2,7 @@
|
||||
# Validation rules for docker-publish action
|
||||
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
|
||||
# Schema version: 1.0
|
||||
# Coverage: 73% (8/11 inputs)
|
||||
# Coverage: 100% (11/11 inputs)
|
||||
#
|
||||
# This file defines validation rules for the docker-publish GitHub Action.
|
||||
# Rules are automatically applied by validate-inputs action when this
|
||||
@@ -27,25 +27,27 @@ optional_inputs:
|
||||
- tags
|
||||
- token
|
||||
conventions:
|
||||
build-args: key_value_list
|
||||
context: file_path
|
||||
dockerfile: file_path
|
||||
dockerhub-token: github_token
|
||||
dockerhub-username: username
|
||||
image-name: docker_image_name
|
||||
platforms: docker_architectures
|
||||
registry: registry
|
||||
push: boolean
|
||||
registry: registry_enum
|
||||
tags: docker_tag
|
||||
token: github_token
|
||||
overrides:
|
||||
platforms: null
|
||||
registry: registry_enum
|
||||
statistics:
|
||||
total_inputs: 11
|
||||
validated_inputs: 8
|
||||
skipped_inputs: 1
|
||||
coverage_percentage: 73
|
||||
validation_coverage: 73
|
||||
validated_inputs: 11
|
||||
skipped_inputs: 0
|
||||
coverage_percentage: 100
|
||||
validation_coverage: 100
|
||||
auto_detected: true
|
||||
manual_review_required: true
|
||||
manual_review_required: false
|
||||
quality_indicators:
|
||||
has_required_inputs: false
|
||||
has_token_validation: true
|
||||
|
||||
@@ -97,7 +97,7 @@ runs:
|
||||
steps:
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
MODE: ${{ inputs.mode }}
|
||||
WORKING_DIRECTORY: ${{ inputs.working-directory }}
|
||||
@@ -113,7 +113,7 @@ runs:
|
||||
EMAIL: ${{ inputs.email }}
|
||||
USERNAME: ${{ inputs.username }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
# Validate mode
|
||||
case "$MODE" in
|
||||
@@ -133,44 +133,54 @@ runs:
|
||||
fi
|
||||
|
||||
# Validate working directory path security (prevent traversal)
|
||||
if [[ "$WORKING_DIRECTORY" == *".."* ]]; then
|
||||
echo "::error::Invalid working directory path: '$WORKING_DIRECTORY'. Path traversal not allowed"
|
||||
exit 1
|
||||
fi
|
||||
case "$WORKING_DIRECTORY" in
|
||||
*..*)
|
||||
echo "::error::Invalid working directory path: '$WORKING_DIRECTORY'. Path traversal not allowed"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Validate ESLint version format
|
||||
if [[ -n "$ESLINT_VERSION" ]] && [[ "$ESLINT_VERSION" != "latest" ]]; then
|
||||
if ! [[ "$ESLINT_VERSION" =~ ^[0-9]+(\.[0-9]+(\.[0-9]+)?)?(-[a-zA-Z0-9.-]+)?$ ]]; then
|
||||
echo "::error::Invalid eslint-version format: '$ESLINT_VERSION'. Expected format: X.Y.Z or 'latest' (e.g., 8.57.0, latest)"
|
||||
if [ -n "$ESLINT_VERSION" ] && [ "$ESLINT_VERSION" != "latest" ]; then
|
||||
if ! echo "$ESLINT_VERSION" | grep -Eq '^[0-9]+\.[0-9]+(\.[0-9]+)?(-[a-zA-Z0-9]+([.-][a-zA-Z0-9]+)*)?$'; then
|
||||
echo "::error::Invalid eslint-version format: '$ESLINT_VERSION'. Expected format: X.Y.Z or X.Y.Z-prerelease or 'latest' (e.g., 8.57.0, 8.57.0-rc.1, latest)"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Validate config file path if not default
|
||||
if [[ "$CONFIG_FILE" != ".eslintrc" ]] && [[ "$CONFIG_FILE" == *".."* ]]; then
|
||||
echo "::error::Invalid config file path: '$CONFIG_FILE'. Path traversal not allowed"
|
||||
exit 1
|
||||
if [ "$CONFIG_FILE" != ".eslintrc" ]; then
|
||||
case "$CONFIG_FILE" in
|
||||
*..*)
|
||||
echo "::error::Invalid config file path: '$CONFIG_FILE'. Path traversal not allowed"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Validate ignore file path if not default
|
||||
if [[ "$IGNORE_FILE" != ".eslintignore" ]] && [[ "$IGNORE_FILE" == *".."* ]]; then
|
||||
echo "::error::Invalid ignore file path: '$IGNORE_FILE'. Path traversal not allowed"
|
||||
exit 1
|
||||
if [ "$IGNORE_FILE" != ".eslintignore" ]; then
|
||||
case "$IGNORE_FILE" in
|
||||
*..*)
|
||||
echo "::error::Invalid ignore file path: '$IGNORE_FILE'. Path traversal not allowed"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Validate file extensions format
|
||||
if ! [[ "$FILE_EXTENSIONS" =~ ^(\.[a-zA-Z0-9]+)(,\.[a-zA-Z0-9]+)*$ ]]; then
|
||||
# Validate file extensions format (must start with . and contain letters/numbers)
|
||||
if ! echo "$FILE_EXTENSIONS" | grep -Eq '^\.[a-zA-Z0-9]+(,\.[a-zA-Z0-9]+)*$'; then
|
||||
echo "::error::Invalid file extensions format: '$FILE_EXTENSIONS'. Expected format: .js,.jsx,.ts,.tsx"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate boolean inputs
|
||||
validate_boolean() {
|
||||
local value="$1"
|
||||
local name="$2"
|
||||
value="$1"
|
||||
name="$2"
|
||||
|
||||
case "${value,,}" in
|
||||
true|false)
|
||||
case "$value" in
|
||||
true|True|TRUE|false|False|FALSE)
|
||||
;;
|
||||
*)
|
||||
echo "::error::Invalid boolean value for $name: '$value'. Expected: true or false"
|
||||
@@ -182,11 +192,13 @@ runs:
|
||||
validate_boolean "$CACHE" "cache"
|
||||
validate_boolean "$FAIL_ON_ERROR" "fail-on-error"
|
||||
|
||||
# Validate max warnings (positive integer)
|
||||
if ! [[ "$MAX_WARNINGS" =~ ^[0-9]+$ ]]; then
|
||||
echo "::error::Invalid max-warnings: '$MAX_WARNINGS'. Must be a non-negative integer"
|
||||
exit 1
|
||||
fi
|
||||
# Validate max warnings (non-negative integer)
|
||||
case "$MAX_WARNINGS" in
|
||||
''|*[!0-9]*)
|
||||
echo "::error::Invalid max-warnings: '$MAX_WARNINGS'. Must be a non-negative integer"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Validate report format
|
||||
case "$REPORT_FORMAT" in
|
||||
@@ -199,15 +211,22 @@ runs:
|
||||
esac
|
||||
|
||||
# Validate max retries
|
||||
if ! [[ "$MAX_RETRIES" =~ ^[0-9]+$ ]] || [ "$MAX_RETRIES" -le 0 ] || [ "$MAX_RETRIES" -gt 10 ]; then
|
||||
case "$MAX_RETRIES" in
|
||||
''|*[!0-9]*)
|
||||
echo "::error::Invalid max-retries: '$MAX_RETRIES'. Must be a positive integer between 1 and 10"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ "$MAX_RETRIES" -le 0 ] || [ "$MAX_RETRIES" -gt 10 ]; then
|
||||
echo "::error::Invalid max-retries: '$MAX_RETRIES'. Must be a positive integer between 1 and 10"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate email and username for fix mode
|
||||
if [ "$MODE" = "fix" ]; then
|
||||
if [[ "$EMAIL" != *"@"* ]] || [[ "$EMAIL" != *"."* ]]; then
|
||||
echo "::error::Invalid email format: '$EMAIL'. Expected valid email address"
|
||||
if ! echo "$EMAIL" | grep -Eq '^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'; then
|
||||
echo "::error::Invalid email format: '$EMAIL'. Expected valid email address (e.g., user@example.com)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@@ -219,20 +238,26 @@ runs:
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ "$username" =~ ^[a-zA-Z0-9-]+$ ]]; then
|
||||
echo "::error::Invalid username characters in '$username'. Only letters, digits, and hyphens allowed"
|
||||
exit 1
|
||||
fi
|
||||
case "$username" in
|
||||
*[!a-zA-Z0-9-]*)
|
||||
echo "::error::Invalid username characters in '$username'. Only letters, digits, and hyphens allowed"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
if [[ "$username" == -* ]] || [[ "$username" == *- ]]; then
|
||||
echo "::error::Invalid username '$username'. Cannot start or end with hyphen"
|
||||
exit 1
|
||||
fi
|
||||
case "$username" in
|
||||
-*|*-)
|
||||
echo "::error::Invalid username '$username'. Cannot start or end with hyphen"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
if [[ "$username" == *--* ]]; then
|
||||
echo "::error::Invalid username '$username'. Consecutive hyphens not allowed"
|
||||
exit 1
|
||||
fi
|
||||
case "$username" in
|
||||
*--*)
|
||||
echo "::error::Invalid username '$username'. Consecutive hyphens not allowed"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
echo "Input validation completed successfully"
|
||||
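For fix mode the script now validates the committer email with a grep -E pattern and the username with case globs (only letters, digits and hyphens; no leading, trailing or consecutive hyphens). The same rules expressed in Python, as a reference sketch of the checks above:

```python
import re

EMAIL_RE = re.compile(r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$")


def validate_committer(email: str, username: str) -> list[str]:
    """Return the same error messages the shell validation would emit."""
    errors: list[str] = []
    if not EMAIL_RE.match(email):
        errors.append(f"Invalid email format: '{email}'. Expected valid email address")
    if not re.fullmatch(r"[a-zA-Z0-9-]+", username):
        errors.append(
            f"Invalid username characters in '{username}'. Only letters, digits, and hyphens allowed"
        )
    elif username.startswith("-") or username.endswith("-"):
        errors.append(f"Invalid username '{username}'. Cannot start or end with hyphen")
    elif "--" in username:
        errors.append(f"Invalid username '{username}'. Consecutive hyphens not allowed")
    return errors


# validate_committer("user@example.com", "octo-cat") -> []
```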
@@ -242,26 +267,79 @@ runs:
|
||||
with:
|
||||
token: ${{ inputs.token || github.token }}
|
||||
|
||||
- name: Node Setup
|
||||
id: node-setup
|
||||
uses: ivuorinen/actions/node-setup@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
- name: Detect Package Manager
|
||||
id: detect-pm
|
||||
shell: sh
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
# Detect package manager from lockfiles
|
||||
if [ -f bun.lockb ]; then
|
||||
package_manager="bun"
|
||||
elif [ -f pnpm-lock.yaml ]; then
|
||||
package_manager="pnpm"
|
||||
elif [ -f yarn.lock ]; then
|
||||
package_manager="yarn"
|
||||
else
|
||||
package_manager="npm"
|
||||
fi
|
||||
|
||||
printf 'package-manager=%s\n' "$package_manager" >> "$GITHUB_OUTPUT"
|
||||
echo "Detected package manager: $package_manager"
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
|
||||
with:
|
||||
node-version: '24'
|
||||
|
||||
- name: Enable Corepack
|
||||
shell: sh
|
||||
run: |
|
||||
set -eu
|
||||
corepack enable
|
||||
|
||||
- name: Install Package Manager
|
||||
shell: sh
|
||||
env:
|
||||
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
case "$PACKAGE_MANAGER" in
|
||||
pnpm)
|
||||
corepack prepare pnpm@latest --activate
|
||||
;;
|
||||
yarn)
|
||||
corepack prepare yarn@stable --activate
|
||||
;;
|
||||
bun|npm)
|
||||
# Bun installed separately, npm built-in
|
||||
;;
|
||||
esac
|
||||
|
||||
- name: Setup Bun
|
||||
if: steps.detect-pm.outputs.package-manager == 'bun'
|
||||
uses: oven-sh/setup-bun@b7a1c7ccf290d58743029c4f6903da283811b979 # v2.1.0
|
||||
with:
|
||||
bun-version: latest
|
||||
|
||||
- name: Cache Node Dependencies
|
||||
id: cache
|
||||
uses: ivuorinen/actions/common-cache@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
type: 'npm'
|
||||
paths: 'node_modules'
|
||||
key-files: 'package-lock.json,yarn.lock,pnpm-lock.yaml,bun.lockb'
|
||||
key-prefix: 'eslint-lint-${{ inputs.mode }}-${{ steps.node-setup.outputs.package-manager }}'
|
||||
path: node_modules
|
||||
key: ${{ runner.os }}-eslint-lint-${{ inputs.mode }}-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-eslint-lint-${{ inputs.mode }}-${{ steps.detect-pm.outputs.package-manager }}-
|
||||
${{ runner.os }}-eslint-lint-${{ inputs.mode }}-
|
||||
|
||||
- name: Install Dependencies
|
||||
if: steps.cache.outputs.cache-hit != 'true'
|
||||
shell: bash
|
||||
shell: sh
|
||||
env:
|
||||
PACKAGE_MANAGER: ${{ steps.node-setup.outputs.package-manager }}
|
||||
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
echo "Installing dependencies using $PACKAGE_MANAGER..."
|
||||
|
||||
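The node-setup sub-action is replaced in this hunk by an explicit detection step: the first lockfile found among bun.lockb, pnpm-lock.yaml and yarn.lock picks the package manager, npm is the fallback, and that choice drives corepack, the cache key and the install command. A compact Python equivalent of the lockfile precedence, for reference:

```python
from pathlib import Path

LOCKFILES = [
    ("bun.lockb", "bun"),
    ("pnpm-lock.yaml", "pnpm"),
    ("yarn.lock", "yarn"),
]


def detect_package_manager(root: str = ".") -> str:
    """First matching lockfile wins; npm is the default, as in the shell step."""
    for lockfile, manager in LOCKFILES:
        if Path(root, lockfile).is_file():
            return manager
    return "npm"
```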
@@ -289,10 +367,10 @@ runs:
|
||||
- name: Run ESLint Check
|
||||
if: inputs.mode == 'check'
|
||||
id: check
|
||||
shell: bash
|
||||
shell: sh
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
env:
|
||||
PACKAGE_MANAGER: ${{ steps.node-setup.outputs.package-manager }}
|
||||
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
|
||||
ESLINT_VERSION: ${{ inputs.eslint-version }}
|
||||
CONFIG_FILE: ${{ inputs.config-file }}
|
||||
CACHE: ${{ inputs.cache }}
|
||||
@@ -301,12 +379,25 @@ runs:
|
||||
REPORT_FORMAT: ${{ inputs.report-format }}
|
||||
FILE_EXTENSIONS: ${{ inputs.file-extensions }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
echo "Running ESLint check mode..."
|
||||
|
||||
# Build ESLint command
|
||||
eslint_cmd="npx eslint ."
|
||||
# Build ESLint command based on package manager
|
||||
case "$PACKAGE_MANAGER" in
|
||||
"pnpm")
|
||||
eslint_cmd="pnpm exec eslint . --ext $FILE_EXTENSIONS"
|
||||
;;
|
||||
"yarn")
|
||||
eslint_cmd="yarn eslint . --ext $FILE_EXTENSIONS"
|
||||
;;
|
||||
"bun")
|
||||
eslint_cmd="bunx eslint . --ext $FILE_EXTENSIONS"
|
||||
;;
|
||||
"npm"|*)
|
||||
eslint_cmd="npx eslint . --ext $FILE_EXTENSIONS"
|
||||
;;
|
||||
esac
|
||||
|
||||
# Add config file if specified
|
||||
if [ "$CONFIG_FILE" != ".eslintrc" ] && [ -f "$CONFIG_FILE" ]; then
|
||||
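The check step now builds the ESLint invocation per package manager and always passes --ext with the validated extension list. The mapping, sketched for reference (helper name is illustrative):

```python
def eslint_command(package_manager: str, file_extensions: str) -> str:
    """Mirror the case statement: the runner prefix depends on the package manager."""
    runners = {
        "pnpm": "pnpm exec eslint",
        "yarn": "yarn eslint",
        "bun": "bunx eslint",
    }
    runner = runners.get(package_manager, "npx eslint")  # npm and anything else
    return f"{runner} . --ext {file_extensions}"


# eslint_command("pnpm", ".js,.jsx,.ts,.tsx") -> 'pnpm exec eslint . --ext .js,.jsx,.ts,.tsx'
```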
@@ -366,19 +457,20 @@ runs:
|
||||
|
||||
- name: Upload SARIF Report
|
||||
if: inputs.mode == 'check' && inputs.report-format == 'sarif' && always()
|
||||
uses: github/codeql-action/upload-sarif@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
|
||||
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
|
||||
with:
|
||||
sarif_file: ${{ inputs.working-directory }}/eslint-results.sarif
|
||||
|
||||
- name: Run ESLint Fix
|
||||
if: inputs.mode == 'fix'
|
||||
id: fix
|
||||
shell: bash
|
||||
shell: sh
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
env:
|
||||
PACKAGE_MANAGER: ${{ steps.node-setup.outputs.package-manager }}
|
||||
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
|
||||
FILE_EXTENSIONS: ${{ inputs.file-extensions }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
echo "Running ESLint fix mode..."
|
||||
|
||||
@@ -388,16 +480,16 @@ runs:
|
||||
# Run ESLint fix based on package manager
|
||||
case "$PACKAGE_MANAGER" in
|
||||
"pnpm")
|
||||
pnpm exec eslint . --fix || true
|
||||
pnpm exec eslint . --ext $FILE_EXTENSIONS --fix || true
|
||||
;;
|
||||
"yarn")
|
||||
yarn eslint . --fix || true
|
||||
yarn eslint . --ext $FILE_EXTENSIONS --fix || true
|
||||
;;
|
||||
"bun")
|
||||
bunx eslint . --fix || true
|
||||
bunx eslint . --ext $FILE_EXTENSIONS --fix || true
|
||||
;;
|
||||
"npm"|*)
|
||||
npx eslint . --fix || true
|
||||
npx eslint . --ext $FILE_EXTENSIONS --fix || true
|
||||
;;
|
||||
esac
|
||||
|
||||
@@ -416,7 +508,7 @@ runs:
|
||||
|
||||
- name: Commit and Push Fixes
|
||||
if: inputs.mode == 'fix' && success()
|
||||
uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
|
||||
uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
|
||||
with:
|
||||
commit_message: 'style: autofix ESLint violations'
|
||||
commit_user_name: ${{ inputs.username }}
|
||||
|
||||
@@ -44,7 +44,8 @@ conventions:
token: github_token
username: username
working-directory: file_path
overrides: {}
overrides:
mode: mode_enum
statistics:
total_inputs: 14
validated_inputs: 14
@@ -8,7 +8,6 @@ const { markdownTable } = require('markdown-table');
|
||||
// Category mappings
|
||||
const CATEGORIES = {
|
||||
// Setup & Environment
|
||||
'node-setup': 'Setup',
|
||||
'language-version-detect': 'Setup',
|
||||
|
||||
// Utilities
|
||||
@@ -29,8 +28,6 @@ const CATEGORIES = {
|
||||
|
||||
// Testing & Quality
|
||||
'php-tests': 'Testing',
|
||||
'php-laravel-phpunit': 'Testing',
|
||||
'php-composer': 'Testing',
|
||||
|
||||
// Build & Package
|
||||
'csharp-build': 'Build',
|
||||
@@ -47,20 +44,19 @@ const CATEGORIES = {
|
||||
'sync-labels': 'Repository',
|
||||
stale: 'Repository',
|
||||
'compress-images': 'Repository',
|
||||
'common-cache': 'Repository',
|
||||
'codeql-analysis': 'Repository',
|
||||
|
||||
// Security
|
||||
'security-scan': 'Security',
|
||||
|
||||
// Validation
|
||||
'validate-inputs': 'Validation',
|
||||
};
|
||||
|
||||
// Language support mappings
|
||||
const LANGUAGE_SUPPORT = {
|
||||
'node-setup': ['Node.js', 'JavaScript', 'TypeScript'],
|
||||
'language-version-detect': ['PHP', 'Python', 'Go', '.NET', 'Node.js'],
|
||||
'php-tests': ['PHP'],
|
||||
'php-laravel-phpunit': ['PHP', 'Laravel'],
|
||||
'php-composer': ['PHP'],
|
||||
'php-tests': ['PHP', 'Laravel'],
|
||||
'python-lint-fix': ['Python'],
|
||||
'go-lint': ['Go'],
|
||||
'go-build': ['Go'],
|
||||
@@ -85,7 +81,6 @@ const LANGUAGE_SUPPORT = {
|
||||
'release-monthly': ['GitHub Actions'],
|
||||
stale: ['GitHub Actions'],
|
||||
'compress-images': ['Images', 'PNG', 'JPEG'],
|
||||
'common-cache': ['Caching'],
|
||||
};
|
||||
|
||||
// Icon mapping for GitHub branding
|
||||
@@ -128,6 +123,7 @@ const CATEGORY_ICONS = {
|
||||
Build: '🏗️',
|
||||
Publishing: '🚀',
|
||||
Repository: '📦',
|
||||
Security: '🛡️',
|
||||
Validation: '✅',
|
||||
};
|
||||
|
||||
@@ -240,7 +236,7 @@ function generateCategoryTables(actions) {
|
||||
let output = '';
|
||||
|
||||
// Sort categories by priority
|
||||
const categoryOrder = ['Setup', 'Utilities', 'Linting', 'Testing', 'Build', 'Publishing', 'Repository', 'Validation'];
|
||||
const categoryOrder = ['Setup', 'Utilities', 'Linting', 'Testing', 'Build', 'Publishing', 'Repository', 'Security', 'Validation'];
|
||||
|
||||
for (const category of categoryOrder) {
|
||||
if (!categories[category]) continue;
|
||||
|
||||
@@ -54,29 +54,118 @@ runs:

- name: Detect Go Version
id: detect-go-version
uses: ivuorinen/actions/language-version-detect@0fa9a68f07a1260b321f814202658a6089a43d42
with:
language: 'go'
default-version: "${{ inputs.go-version || '1.21' }}"
shell: sh
env:
DEFAULT_VERSION: "${{ inputs.go-version || '1.24' }}"
run: |
set -eu

# Function to validate version format
validate_version() {
version=$1
case "$version" in
[0-9]*\.[0-9]* | [0-9]*\.[0-9]*\.[0-9]*)
return 0
;;
*)
return 1
;;
esac
}

# Function to clean version string
clean_version() {
printf '%s' "$1" | sed 's/^[vV]//' | tr -d ' \n\r'
}

detected_version=""

# Parse .tool-versions file
if [ -f .tool-versions ]; then
echo "Checking .tool-versions for golang..." >&2
version=$(awk '/^golang[[:space:]]/ {gsub(/#.*/, ""); print $2; exit}' .tool-versions 2>/dev/null || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
echo "Found Go version in .tool-versions: $version" >&2
detected_version="$version"
fi
fi
fi

# Parse Dockerfile
if [ -z "$detected_version" ] && [ -f Dockerfile ]; then
echo "Checking Dockerfile for golang..." >&2
version=$(grep -iF "FROM" Dockerfile | grep -F "golang:" | head -1 | \
sed -n -E "s/.*golang:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
echo "Found Go version in Dockerfile: $version" >&2
detected_version="$version"
fi
fi
fi

# Parse devcontainer.json
if [ -z "$detected_version" ] && [ -f .devcontainer/devcontainer.json ]; then
echo "Checking devcontainer.json for golang..." >&2
if command -v jq >/dev/null 2>&1; then
version=$(jq -r '.image // empty' .devcontainer/devcontainer.json 2>/dev/null | sed -n -E "s/.*golang:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
echo "Found Go version in devcontainer: $version" >&2
detected_version="$version"
fi
fi
fi
fi

# Parse .go-version file
if [ -z "$detected_version" ] && [ -f .go-version ]; then
echo "Checking .go-version..." >&2
version=$(tr -d '\r' < .go-version | head -1)
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
echo "Found Go version in .go-version: $version" >&2
detected_version="$version"
fi
fi
fi

# Parse go.mod
if [ -z "$detected_version" ] && [ -f go.mod ]; then
echo "Checking go.mod..." >&2
version=$(grep -E '^go[[:space:]]+[0-9]' go.mod | awk '{print $2}' | head -1 || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
echo "Found Go version in go.mod: $version" >&2
detected_version="$version"
fi
fi
fi

# Use default version if nothing detected
if [ -z "$detected_version" ]; then
detected_version="$DEFAULT_VERSION"
echo "Using default Go version: $detected_version" >&2
fi

# Set output
printf 'detected-version=%s\n' "$detected_version" >> "$GITHUB_OUTPUT"
echo "Final detected Go version: $detected_version" >&2

- name: Setup Go
uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
go-version: ${{ steps.detect-go-version.outputs.detected-version }}
cache: true

- name: Cache Go Dependencies
id: cache-go
uses: ivuorinen/actions/common-cache@0fa9a68f07a1260b321f814202658a6089a43d42
with:
type: 'go'
paths: '~/go/pkg/mod'
key-files: 'go.mod,go.sum'
key-prefix: 'go-build'

- name: Download Dependencies
if: steps.cache-go.outputs.cache-hit != 'true'
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3.0.4
with:
timeout_minutes: 10
max_attempts: ${{ inputs.max-retries }}
@@ -87,11 +176,11 @@ runs:

- name: Build Go Project
id: build
shell: bash
shell: sh
env:
DESTINATION: ${{ inputs.destination }}
run: |
set -euo pipefail
set -eu
echo "Building Go project..."

# Create destination directory
@@ -126,9 +215,9 @@ runs:

- name: Run Tests
id: test
shell: bash
shell: sh
run: |
set -euo pipefail
set -eu
echo "Running Go tests..."
if find . -name "*_test.go" | grep -q .; then
# Check if race detector is supported on this platform
@@ -164,7 +253,7 @@ runs:

- name: Upload Build Artifacts
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: go-build-artifacts
path: |

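For orientation, the inlined script above resolves the Go version from a fixed chain of sources (`.tool-versions`, `Dockerfile`, `.devcontainer/devcontainer.json`, `.go-version`, then `go.mod`) and falls back to `DEFAULT_VERSION` when none of them yields a valid version. A minimal local sketch for checking which candidate file a repository exposes first; this is illustrative only and not part of the action, which also falls through to later sources when the first file holds no usable version:

```sh
#!/bin/sh
# Sketch: list the first candidate file the go-build detection would consult.
# Same ordering as the inlined step above; purely a local sanity check.
set -eu
for candidate in .tool-versions Dockerfile .devcontainer/devcontainer.json .go-version go.mod; do
  if [ -f "$candidate" ]; then
    echo "first candidate present: $candidate"
    exit 0
  fi
done
echo "no candidate files found; the action would use DEFAULT_VERSION"
```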
@@ -37,105 +37,78 @@ class CustomValidator(BaseValidator):
|
||||
|
||||
# Validate working-directory if provided
|
||||
if inputs.get("working-directory"):
|
||||
result = self.file_validator.validate_file_path(
|
||||
inputs["working-directory"], "working-directory"
|
||||
valid &= self.validate_with(
|
||||
self.file_validator,
|
||||
"validate_file_path",
|
||||
inputs["working-directory"],
|
||||
"working-directory",
|
||||
)
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
# Validate golangci-lint-version if provided
|
||||
if inputs.get("golangci-lint-version"):
|
||||
value = inputs["golangci-lint-version"]
|
||||
# Accept 'latest' or version format
|
||||
if value != "latest" and not self.is_github_expression(value):
|
||||
result = self.version_validator.validate_semantic_version(
|
||||
value, "golangci-lint-version"
|
||||
valid &= self.validate_with(
|
||||
self.version_validator,
|
||||
"validate_semantic_version",
|
||||
value,
|
||||
"golangci-lint-version",
|
||||
)
|
||||
for error in self.version_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.version_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
# Validate go-version if provided
|
||||
if inputs.get("go-version"):
|
||||
value = inputs["go-version"]
|
||||
# Accept 'stable', 'oldstable' or version format
|
||||
if value not in ["stable", "oldstable"] and not self.is_github_expression(value):
|
||||
result = self.version_validator.validate_go_version(value, "go-version")
|
||||
for error in self.version_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.version_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.version_validator, "validate_go_version", value, "go-version"
|
||||
)
|
||||
|
||||
# Validate config-file if provided
|
||||
if inputs.get("config-file"):
|
||||
result = self.file_validator.validate_file_path(inputs["config-file"], "config-file")
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.file_validator, "validate_file_path", inputs["config-file"], "config-file"
|
||||
)
|
||||
|
||||
# Validate timeout if provided
|
||||
if inputs.get("timeout"):
|
||||
value = inputs["timeout"]
|
||||
# Validate timeout format (e.g., 5m, 1h, 30s)
|
||||
if not self.is_github_expression(value):
|
||||
timeout_pattern = r"^\d+[smh]$"
|
||||
if not re.match(timeout_pattern, value):
|
||||
self.add_error(
|
||||
f"Invalid timeout format: {value}. Expected format like '5m', '1h', '30s'"
|
||||
)
|
||||
valid = False
|
||||
if not self.is_github_expression(value) and not re.match(r"^\d+[smh]$", value):
|
||||
self.add_error(
|
||||
f"Invalid timeout format: {value}. Expected format like '5m', '1h', '30s'"
|
||||
)
|
||||
valid = False
|
||||
|
||||
# Validate boolean inputs
|
||||
for field in ["cache", "fail-on-error", "only-new-issues", "disable-all"]:
|
||||
if inputs.get(field):
|
||||
result = self.boolean_validator.validate_boolean(inputs[field], field)
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.boolean_validator, "validate_boolean", inputs[field], field
|
||||
)
|
||||
|
||||
# Validate report-format
|
||||
if inputs.get("report-format"):
|
||||
value = inputs["report-format"]
|
||||
valid_formats = ["json", "sarif", "github-actions", "colored-line-number", "tab"]
|
||||
if value not in valid_formats and not self.is_github_expression(value):
|
||||
self.add_error(
|
||||
f"Invalid report format: {value}. Must be one of: {', '.join(valid_formats)}"
|
||||
)
|
||||
valid = False
|
||||
valid &= self.validate_enum(
|
||||
inputs["report-format"],
|
||||
"report-format",
|
||||
["json", "sarif", "github-actions", "colored-line-number", "tab"],
|
||||
case_sensitive=True,
|
||||
)
|
||||
|
||||
# Validate max-retries
|
||||
if inputs.get("max-retries"):
|
||||
result = self.numeric_validator.validate_numeric_range(
|
||||
inputs["max-retries"], min_val=1, max_val=10, name="max-retries"
|
||||
valid &= self.validate_with(
|
||||
self.numeric_validator,
|
||||
"validate_numeric_range",
|
||||
inputs["max-retries"],
|
||||
min_val=1,
|
||||
max_val=10,
|
||||
name="max-retries",
|
||||
)
|
||||
for error in self.numeric_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.numeric_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
# Validate enable-linters and disable-linters
|
||||
for field in ["enable-linters", "disable-linters"]:
|
||||
if inputs.get(field):
|
||||
value = inputs[field]
|
||||
|
||||
# First check format - must be comma-separated without spaces
|
||||
if not self.is_github_expression(value):
|
||||
if " " in value:
|
||||
self.add_error(f"Invalid {field} format: spaces not allowed in linter list")
|
||||
@@ -145,15 +118,9 @@ class CustomValidator(BaseValidator):
|
||||
f"Invalid {field} format: must be comma-separated list of linters"
|
||||
)
|
||||
valid = False
|
||||
|
||||
# Then check for injection
|
||||
result = self.security_validator.validate_no_injection(value, field)
|
||||
for error in self.security_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.security_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.security_validator, "validate_no_injection", value, field
|
||||
)
|
||||
|
||||
return valid
|
||||
|
||||
|
||||
@@ -205,7 +205,7 @@ runs:
validate_linter_list "$DISABLE_LINTERS" "disable-linters"

- name: Setup Go
uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
go-version: ${{ inputs.go-version }}
cache: true
@@ -215,16 +215,17 @@ runs:
with:
token: ${{ inputs.token || github.token }}

- name: Set up Cache
- name: Cache golangci-lint
id: cache
if: inputs.cache == 'true'
uses: ivuorinen/actions/common-cache@0fa9a68f07a1260b321f814202658a6089a43d42
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
type: 'go'
paths: '~/.cache/golangci-lint,~/.cache/go-build'
key-prefix: 'golangci-${{ inputs.golangci-lint-version }}'
key-files: 'go.sum,${{ inputs.config-file }}'
restore-keys: '${{ runner.os }}-golangci-${{ inputs.golangci-lint-version }}-'
path: |
~/.cache/golangci-lint
~/.cache/go-build
key: ${{ runner.os }}-golangci-${{ inputs.golangci-lint-version }}-${{ hashFiles('go.sum', inputs.config-file) }}
restore-keys: |
${{ runner.os }}-golangci-${{ inputs.golangci-lint-version }}-

- name: Install golangci-lint
shell: sh
@@ -413,7 +414,7 @@ runs:

- name: Upload Lint Results
if: always() && inputs.report-format == 'sarif'
uses: github/codeql-action/upload-sarif@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
sarif_file: ${{ inputs.working-directory }}/reports/golangci-lint.sarif
category: golangci-lint

@@ -36,15 +36,17 @@ conventions:
disable-linters: linter_list
enable-linters: linter_list
fail-on-error: boolean
go-version: semantic_version
go-version: go_version
golangci-lint-version: semantic_version
max-retries: numeric_range_1_10
only-new-issues: branch_name
only-new-issues: boolean
report-format: report_format
timeout: numeric_range_1_3600
timeout: timeout_with_unit
token: github_token
working-directory: file_path
overrides:
disable-linters: linter_list
enable-linters: linter_list
go-version: go_version
only-new-issues: boolean
timeout: timeout_with_unit

@@ -4,7 +4,7 @@

### Description

Detects language version from project configuration files with support for PHP, Python, Go, and .NET.
DEPRECATED: This action is deprecated. Inline version detection directly in your actions instead. Detects language version from project configuration files with support for PHP, Python, Go, and .NET.

### Inputs

@@ -28,7 +28,7 @@ This action is a `composite` action.
### Usage

```yaml
- uses: ivuorinen/actions/language-version-detect@v2025
- uses: ivuorinen/actions/language-version-detect@main
with:
language:
# Language to detect version for (php, python, go, dotnet)

@@ -3,8 +3,9 @@
# - contents: read # Required for reading version files
---
name: Language Version Detect
description: 'Detects language version from project configuration files with support for PHP, Python, Go, and .NET.'
description: 'DEPRECATED: This action is deprecated. Inline version detection directly in your actions instead. Detects language version from project configuration files with support for PHP, Python, Go, and .NET.'
author: 'Ismo Vuorinen'
deprecated: true

branding:
icon: code
@@ -80,7 +81,7 @@ runs:
php)
# Validate PHP version format (X.Y or X.Y.Z)
case "$version" in
[0-9]*.[0-9]* | [0-9]*.[0-9]*.[0-9]*)
[0-9]*\.[0-9]* | [0-9]*\.[0-9]*\.[0-9]*)
;;
*)
echo "::error::Invalid PHP version format: '$version'. Expected format: X.Y or X.Y.Z (e.g., 8.4, 8.3.1)"
@@ -108,7 +109,7 @@ runs:
python)
# Validate Python version format
case "$version" in
[0-9]*.[0-9]* | [0-9]*.[0-9]*.[0-9]*)
[0-9]*\.[0-9]* | [0-9]*\.[0-9]*\.[0-9]*)
;;
*)
echo "::error::Invalid Python version format: '$version'. Expected format: X.Y or X.Y.Z (e.g., 3.12, 3.11.5)"
@@ -134,7 +135,7 @@ runs:
go)
# Validate Go version format
case "$version" in
[0-9]*.[0-9]* | [0-9]*.[0-9]*.[0-9]*)
[0-9]*\.[0-9]* | [0-9]*\.[0-9]*\.[0-9]*)
;;
*)
echo "::error::Invalid Go version format: '$version'. Expected format: X.Y or X.Y.Z (e.g., 1.21, 1.21.5)"
@@ -160,7 +161,7 @@ runs:
dotnet)
# Validate .NET version format
case "$version" in
[0-9]* | [0-9]*.[0-9]* | [0-9]*.[0-9]*.[0-9]*)
[0-9]* | [0-9]*\.[0-9]* | [0-9]*\.[0-9]*\.[0-9]*)
;;
*)
echo "::error::Invalid .NET version format: '$version'. Expected format: X, X.Y, or X.Y.Z (e.g., 7, 7.0, 7.0.1)"
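Since the action above is now flagged `deprecated: true` and its description tells consumers to inline the detection, a minimal sketch of such an inlined step body follows, modeled on the inlined scripts elsewhere in this changeset; the `golang` key and the `1.24` fallback are illustrative choices, not values mandated by the repository:

```sh
# Sketch: inline a .tool-versions lookup with a fallback instead of calling
# the deprecated language-version-detect action. Illustrative values only.
set -eu
version=$(awk '/^golang[[:space:]]/ {print $2; exit}' .tool-versions 2>/dev/null || echo "")
if [ -z "$version" ]; then
  version="1.24"  # assumed fallback; substitute your own default
fi
printf 'detected-version=%s\n' "$version" >> "$GITHUB_OUTPUT"
```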
@@ -186,11 +187,203 @@ runs:
|
||||
|
||||
- name: Parse Language Version
|
||||
id: parse-version
|
||||
uses: ivuorinen/actions/version-file-parser@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
with:
|
||||
language: ${{ inputs.language }}
|
||||
tool-versions-key: ${{ inputs.language == 'go' && 'golang' || inputs.language }}
|
||||
dockerfile-image: ${{ inputs.language == 'go' && 'golang' || inputs.language }}
|
||||
version-file: ${{ inputs.language == 'php' && '.php-version' || inputs.language == 'python' && '.python-version' || inputs.language == 'go' && '.go-version' || '' }}
|
||||
validation-regex: '^[0-9]+(\.[0-9]+(\.[0-9]+)?)?$'
|
||||
default-version: ${{ steps.validate.outputs.default_version || inputs.default-version }}
|
||||
shell: sh
|
||||
env:
|
||||
LANGUAGE: ${{ inputs.language }}
|
||||
DEFAULT_VERSION: ${{ steps.validate.outputs.default_version || inputs.default-version }}
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
# Map language to tool-versions key and dockerfile image
|
||||
case "$LANGUAGE" in
|
||||
go)
|
||||
TOOL_VERSIONS_KEY="golang"
|
||||
DOCKERFILE_IMAGE="golang"
|
||||
VERSION_FILE=".go-version"
|
||||
;;
|
||||
php)
|
||||
TOOL_VERSIONS_KEY="php"
|
||||
DOCKERFILE_IMAGE="php"
|
||||
VERSION_FILE=".php-version"
|
||||
;;
|
||||
python)
|
||||
TOOL_VERSIONS_KEY="python"
|
||||
DOCKERFILE_IMAGE="python"
|
||||
VERSION_FILE=".python-version"
|
||||
;;
|
||||
dotnet)
|
||||
TOOL_VERSIONS_KEY="dotnet"
|
||||
DOCKERFILE_IMAGE="dotnet"
|
||||
VERSION_FILE=""
|
||||
;;
|
||||
esac
|
||||
|
||||
# Function to validate version format
|
||||
validate_version() {
|
||||
version=$1
|
||||
# Use case pattern matching for POSIX compatibility
|
||||
case "$version" in
|
||||
[0-9]*\.[0-9]* | [0-9]*\.[0-9]*\.[0-9]*)
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Function to clean version string
|
||||
clean_version() {
|
||||
printf '%s' "$1" | sed 's/^[vV]//' | tr -d ' \n\r'
|
||||
}
|
||||
|
||||
# Initialize outputs
|
||||
printf 'detected-version=\n' >> "$GITHUB_OUTPUT"
|
||||
printf 'package-manager=\n' >> "$GITHUB_OUTPUT"
|
||||
|
||||
detected_version=""
|
||||
detected_package_manager=""
|
||||
|
||||
# Parse .tool-versions file
|
||||
if [ -f .tool-versions ]; then
|
||||
echo "Checking .tool-versions for $TOOL_VERSIONS_KEY..." >&2
|
||||
version=$(awk "/^$TOOL_VERSIONS_KEY[[:space:]]/ {gsub(/#.*/, \"\"); print \$2; exit}" .tool-versions 2>/dev/null || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found $LANGUAGE version in .tool-versions: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse Dockerfile
|
||||
if [ -z "$detected_version" ] && [ -f Dockerfile ]; then
|
||||
echo "Checking Dockerfile for $DOCKERFILE_IMAGE..." >&2
|
||||
version=$(grep -iF "FROM" Dockerfile | grep -F "$DOCKERFILE_IMAGE:" | head -1 | \
|
||||
sed -n "s/.*$DOCKERFILE_IMAGE:\([0-9]\+\(\.[0-9]\+\)*\)\(-[^:]*\)\?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found $LANGUAGE version in Dockerfile: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse devcontainer.json
|
||||
if [ -z "$detected_version" ] && [ -f .devcontainer/devcontainer.json ]; then
|
||||
echo "Checking devcontainer.json for $DOCKERFILE_IMAGE..." >&2
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
version=$(jq -r '.image // empty' .devcontainer/devcontainer.json 2>/dev/null | sed -n "s/.*$DOCKERFILE_IMAGE:\([0-9]\+\(\.[0-9]\+\)*\)\(-[^:]*\)\?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found $LANGUAGE version in devcontainer: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse language-specific version file
|
||||
if [ -z "$detected_version" ] && [ -n "$VERSION_FILE" ] && [ -f "$VERSION_FILE" ]; then
|
||||
echo "Checking $VERSION_FILE..." >&2
|
||||
version=$(tr -d '\r' < "$VERSION_FILE" | head -1)
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found $LANGUAGE version in $VERSION_FILE: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse language-specific configuration files
|
||||
if [ -z "$detected_version" ]; then
|
||||
case "$LANGUAGE" in
|
||||
php)
|
||||
# Check composer.json
|
||||
if [ -f composer.json ] && command -v jq >/dev/null 2>&1; then
|
||||
version=$(jq -r '.require.php // empty' composer.json 2>/dev/null | sed -n 's/[^0-9]*\([0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?\).*/\1/p')
|
||||
if [ -z "$version" ]; then
|
||||
version=$(jq -r '.config.platform.php // empty' composer.json 2>/dev/null | sed -n 's/[^0-9]*\([0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?\).*/\1/p')
|
||||
fi
|
||||
if [ -n "$version" ] && validate_version "$version"; then
|
||||
echo "Found PHP version in composer.json: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
# Detect package manager
|
||||
if [ -f composer.json ]; then
|
||||
detected_package_manager="composer"
|
||||
fi
|
||||
;;
|
||||
|
||||
python)
|
||||
# Check pyproject.toml
|
||||
if [ -f pyproject.toml ]; then
|
||||
if grep -q '^\[project\]' pyproject.toml; then
|
||||
version=$(grep -A 20 '^\[project\]' pyproject.toml | grep -E '^\s*requires-python[[:space:]]*=' | sed -n 's/[^0-9]*\([0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?\).*/\1/p' | head -1)
|
||||
if [ -n "$version" ] && validate_version "$version"; then
|
||||
echo "Found Python version in pyproject.toml: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
# Detect package manager
|
||||
if [ -f pyproject.toml ] && grep -q '\[tool\.poetry\]' pyproject.toml; then
|
||||
detected_package_manager="poetry"
|
||||
elif [ -f Pipfile ]; then
|
||||
detected_package_manager="pipenv"
|
||||
else
|
||||
detected_package_manager="pip"
|
||||
fi
|
||||
;;
|
||||
|
||||
go)
|
||||
# Check go.mod
|
||||
if [ -f go.mod ]; then
|
||||
version=$(grep -E '^go[[:space:]]+[0-9]' go.mod | awk '{print $2}' | head -1 || echo "")
|
||||
if [ -n "$version" ] && validate_version "$version"; then
|
||||
echo "Found Go version in go.mod: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
detected_package_manager="go"
|
||||
fi
|
||||
;;
|
||||
|
||||
dotnet)
|
||||
# Check global.json
|
||||
if [ -f global.json ] && command -v jq >/dev/null 2>&1; then
|
||||
version=$(jq -r '.sdk.version // empty' global.json 2>/dev/null || echo "")
|
||||
if [ -n "$version" ] && validate_version "$version"; then
|
||||
echo "Found .NET version in global.json: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
detected_package_manager="dotnet"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Use default version if nothing detected
|
||||
if [ -z "$detected_version" ]; then
|
||||
if [ -n "$DEFAULT_VERSION" ]; then
|
||||
detected_version="$DEFAULT_VERSION"
|
||||
echo "Using default $LANGUAGE version: $detected_version" >&2
|
||||
else
|
||||
echo "No $LANGUAGE version detected and no default provided" >&2
|
||||
fi
|
||||
fi
|
||||
|
||||
# Set outputs
|
||||
if [ -n "$detected_version" ]; then
|
||||
printf 'detected-version=%s\n' "$detected_version" >> "$GITHUB_OUTPUT"
|
||||
echo "Final detected $LANGUAGE version: $detected_version" >&2
|
||||
fi
|
||||
|
||||
if [ -n "$detected_package_manager" ]; then
|
||||
printf 'package-manager=%s\n' "$detected_package_manager" >> "$GITHUB_OUTPUT"
|
||||
echo "Detected package manager: $detected_package_manager" >&2
|
||||
fi
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
# Validation rules for language-version-detect action
|
||||
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
|
||||
# Schema version: 1.0
|
||||
# Coverage: 67% (2/3 inputs)
|
||||
# Coverage: 100% (3/3 inputs)
|
||||
#
|
||||
# This file defines validation rules for the language-version-detect GitHub Action.
|
||||
# Rules are automatically applied by validate-inputs action when this
|
||||
@@ -11,7 +11,8 @@
|
||||
|
||||
schema_version: '1.0'
|
||||
action: language-version-detect
|
||||
description: Detects language version from project configuration files with support for PHP, Python, Go, and .NET.
|
||||
description: 'DEPRECATED: This action is deprecated. Inline version detection directly in your actions instead. Detects language
|
||||
version from project configuration files with support for PHP, Python, Go, and .NET.'
|
||||
generator_version: 1.0.0
|
||||
required_inputs:
|
||||
- language
|
||||
@@ -20,16 +21,17 @@ optional_inputs:
|
||||
- token
|
||||
conventions:
|
||||
default-version: semantic_version
|
||||
language: language_enum
|
||||
token: github_token
|
||||
overrides: {}
|
||||
statistics:
|
||||
total_inputs: 3
|
||||
validated_inputs: 2
|
||||
validated_inputs: 3
|
||||
skipped_inputs: 0
|
||||
coverage_percentage: 67
|
||||
validation_coverage: 67
|
||||
coverage_percentage: 100
|
||||
validation_coverage: 100
|
||||
auto_detected: true
|
||||
manual_review_required: true
|
||||
manual_review_required: false
|
||||
quality_indicators:
|
||||
has_required_inputs: true
|
||||
has_token_validation: true
|
||||
|
||||
@@ -1,80 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Custom validator for node-setup action."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
import sys
|
||||
|
||||
# Add validate-inputs directory to path to import validators
|
||||
validate_inputs_path = Path(__file__).parent.parent / "validate-inputs"
|
||||
sys.path.insert(0, str(validate_inputs_path))
|
||||
|
||||
from validators.base import BaseValidator
|
||||
from validators.version import VersionValidator
|
||||
|
||||
|
||||
class CustomValidator(BaseValidator):
|
||||
"""Custom validator for node-setup action."""
|
||||
|
||||
def __init__(self, action_type: str = "node-setup") -> None:
|
||||
"""Initialize node-setup validator."""
|
||||
super().__init__(action_type)
|
||||
self.version_validator = VersionValidator()
|
||||
|
||||
def validate_inputs(self, inputs: dict[str, str]) -> bool:
|
||||
"""Validate node-setup action inputs."""
|
||||
valid = True
|
||||
|
||||
# Validate default-version if provided
|
||||
if "default-version" in inputs:
|
||||
value = inputs["default-version"]
|
||||
|
||||
# Empty string should fail validation
|
||||
if value == "":
|
||||
self.add_error("Node version cannot be empty")
|
||||
valid = False
|
||||
elif value:
|
||||
# Use the Node version validator
|
||||
result = self.version_validator.validate_node_version(value, "default-version")
|
||||
|
||||
# Propagate errors from the version validator
|
||||
for error in self.version_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
|
||||
# Clear the version validator's errors after propagating
|
||||
self.version_validator.clear_errors()
|
||||
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
# Validate package-manager if provided
|
||||
if "package-manager" in inputs:
|
||||
value = inputs["package-manager"]
|
||||
if value and value not in ["npm", "yarn", "pnpm", "bun"]:
|
||||
self.add_error(
|
||||
f"Invalid package manager: {value}. Must be one of: npm, yarn, pnpm, bun"
|
||||
)
|
||||
valid = False
|
||||
|
||||
return valid
|
||||
|
||||
def get_required_inputs(self) -> list[str]:
|
||||
"""Get list of required inputs."""
|
||||
return []
|
||||
|
||||
def get_validation_rules(self) -> dict:
|
||||
"""Get validation rules."""
|
||||
return {
|
||||
"default-version": {
|
||||
"type": "node_version",
|
||||
"required": False,
|
||||
"description": "Default Node.js version to use",
|
||||
},
|
||||
"package-manager": {
|
||||
"type": "string",
|
||||
"required": False,
|
||||
"description": "Package manager to use",
|
||||
},
|
||||
}
|
||||
@@ -1,72 +0,0 @@
|
||||
# ivuorinen/actions/node-setup
|
||||
|
||||
## Node Setup
|
||||
|
||||
### Description
|
||||
|
||||
Sets up Node.js environment with version detection and package manager configuration.
|
||||
|
||||
### Inputs
|
||||
|
||||
| name | description | required | default |
|
||||
|-------------------|--------------------------------------------------------------------------|----------|------------------------------|
|
||||
| `default-version` | <p>Default Node.js version to use if no configuration file is found.</p> | `false` | `22` |
|
||||
| `package-manager` | <p>Node.js package manager to use (npm, yarn, pnpm, bun, auto)</p> | `false` | `auto` |
|
||||
| `registry-url` | <p>Custom NPM registry URL</p> | `false` | `https://registry.npmjs.org` |
|
||||
| `token` | <p>Auth token for private registry</p> | `false` | `""` |
|
||||
| `node-mirror` | <p>Custom Node.js binary mirror</p> | `false` | `""` |
|
||||
| `force-version` | <p>Force specific Node.js version regardless of config files</p> | `false` | `""` |
|
||||
|
||||
### Outputs
|
||||
|
||||
| name | description |
|
||||
|-------------------|-------------------------------------|
|
||||
| `node-version` | <p>Installed Node.js version</p> |
|
||||
| `package-manager` | <p>Selected package manager</p> |
|
||||
| `node-path` | <p>Path to Node.js installation</p> |
|
||||
|
||||
### Runs
|
||||
|
||||
This action is a `composite` action.
|
||||
|
||||
### Usage
|
||||
|
||||
```yaml
|
||||
- uses: ivuorinen/actions/node-setup@main
|
||||
with:
|
||||
default-version:
|
||||
# Default Node.js version to use if no configuration file is found.
|
||||
#
|
||||
# Required: false
|
||||
# Default: 22
|
||||
|
||||
package-manager:
|
||||
# Node.js package manager to use (npm, yarn, pnpm, bun, auto)
|
||||
#
|
||||
# Required: false
|
||||
# Default: auto
|
||||
|
||||
registry-url:
|
||||
# Custom NPM registry URL
|
||||
#
|
||||
# Required: false
|
||||
# Default: https://registry.npmjs.org
|
||||
|
||||
token:
|
||||
# Auth token for private registry
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
|
||||
node-mirror:
|
||||
# Custom Node.js binary mirror
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
|
||||
force-version:
|
||||
# Force specific Node.js version regardless of config files
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
```
|
||||
@@ -1,242 +0,0 @@
|
||||
# yaml-language-server: $schema=https://json.schemastore.org/github-action.json
|
||||
# permissions:
|
||||
# - (none required) # Setup action, no repository writes
|
||||
---
|
||||
name: Node Setup
|
||||
description: 'Sets up Node.js environment with version detection and package manager configuration.'
|
||||
author: 'Ismo Vuorinen'
|
||||
|
||||
branding:
|
||||
icon: server
|
||||
color: green
|
||||
|
||||
inputs:
|
||||
default-version:
|
||||
description: 'Default Node.js version to use if no configuration file is found.'
|
||||
required: false
|
||||
default: '22'
|
||||
package-manager:
|
||||
description: 'Node.js package manager to use (npm, yarn, pnpm, bun, auto)'
|
||||
required: false
|
||||
default: 'auto'
|
||||
registry-url:
|
||||
description: 'Custom NPM registry URL'
|
||||
required: false
|
||||
default: 'https://registry.npmjs.org'
|
||||
token:
|
||||
description: 'Auth token for private registry'
|
||||
required: false
|
||||
node-mirror:
|
||||
description: 'Custom Node.js binary mirror'
|
||||
required: false
|
||||
force-version:
|
||||
description: 'Force specific Node.js version regardless of config files'
|
||||
required: false
|
||||
|
||||
outputs:
|
||||
node-version:
|
||||
description: 'Installed Node.js version'
|
||||
value: ${{ steps.setup.outputs.node-version }}
|
||||
package-manager:
|
||||
description: 'Selected package manager'
|
||||
value: ${{ steps.package-manager-resolution.outputs.final-package-manager }}
|
||||
node-path:
|
||||
description: 'Path to Node.js installation'
|
||||
value: ${{ steps.final-outputs.outputs.node-path }}
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
shell: bash
|
||||
env:
|
||||
DEFAULT_VERSION: ${{ inputs.default-version }}
|
||||
FORCE_VERSION: ${{ inputs.force-version }}
|
||||
PACKAGE_MANAGER: ${{ inputs.package-manager }}
|
||||
REGISTRY_URL: ${{ inputs.registry-url }}
|
||||
NODE_MIRROR: ${{ inputs.node-mirror }}
|
||||
AUTH_TOKEN: ${{ inputs.token }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Validate default-version format
|
||||
if [[ -n "$DEFAULT_VERSION" ]]; then
|
||||
if ! [[ "$DEFAULT_VERSION" =~ ^[0-9]+(\.[0-9]+(\.[0-9]+)?)?$ ]]; then
|
||||
echo "::error::Invalid default-version format: '$DEFAULT_VERSION'. Expected format: X or X.Y or X.Y.Z (e.g., 22, 20.9, 18.17.1)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check for reasonable version range (prevent malicious inputs)
|
||||
major_version=$(echo "$DEFAULT_VERSION" | cut -d'.' -f1)
|
||||
if [ "$major_version" -lt 14 ] || [ "$major_version" -gt 30 ]; then
|
||||
echo "::error::Invalid default-version: '$DEFAULT_VERSION'. Node.js major version should be between 14 and 30"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Validate force-version format if provided
|
||||
if [[ -n "$FORCE_VERSION" ]]; then
|
||||
if ! [[ "$FORCE_VERSION" =~ ^[0-9]+(\.[0-9]+(\.[0-9]+)?)?$ ]]; then
|
||||
echo "::error::Invalid force-version format: '$FORCE_VERSION'. Expected format: X or X.Y or X.Y.Z (e.g., 22, 20.9, 18.17.1)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check for reasonable version range
|
||||
major_version=$(echo "$FORCE_VERSION" | cut -d'.' -f1)
|
||||
if [ "$major_version" -lt 14 ] || [ "$major_version" -gt 30 ]; then
|
||||
echo "::error::Invalid force-version: '$FORCE_VERSION'. Node.js major version should be between 14 and 30"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Validate package-manager
|
||||
case "$PACKAGE_MANAGER" in
|
||||
"npm"|"yarn"|"pnpm"|"bun"|"auto")
|
||||
# Valid package managers
|
||||
;;
|
||||
*)
|
||||
echo "::error::Invalid package-manager: '$PACKAGE_MANAGER'. Must be one of: npm, yarn, pnpm, bun, auto"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Validate registry-url format (basic URL validation)
|
||||
if [[ "$REGISTRY_URL" != "https://"* ]] && [[ "$REGISTRY_URL" != "http://"* ]]; then
|
||||
echo "::error::Invalid registry-url: '$REGISTRY_URL'. Must be a valid HTTP/HTTPS URL"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate node-mirror format if provided
|
||||
if [[ -n "$NODE_MIRROR" ]]; then
|
||||
if [[ "$NODE_MIRROR" != "https://"* ]] && [[ "$NODE_MIRROR" != "http://"* ]]; then
|
||||
echo "::error::Invalid node-mirror: '$NODE_MIRROR'. Must be a valid HTTP/HTTPS URL"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Validate auth token format if provided (basic check for NPM tokens)
|
||||
if [[ -n "$AUTH_TOKEN" ]]; then
|
||||
if [[ "$AUTH_TOKEN" == *";"* ]] || [[ "$AUTH_TOKEN" == *"&&"* ]] || [[ "$AUTH_TOKEN" == *"|"* ]]; then
|
||||
echo "::error::Invalid token format: command injection patterns not allowed"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "Input validation completed successfully"
|
||||
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
|
||||
with:
|
||||
token: ${{ inputs.token || github.token }}
|
||||
|
||||
- name: Parse Node.js Version
|
||||
id: version
|
||||
uses: ivuorinen/actions/version-file-parser@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
with:
|
||||
language: 'node'
|
||||
tool-versions-key: 'nodejs'
|
||||
dockerfile-image: 'node'
|
||||
version-file: '.nvmrc'
|
||||
validation-regex: '^[0-9]+(\.[0-9]+)*$'
|
||||
default-version: ${{ inputs.force-version != '' && inputs.force-version || inputs.default-version }}
|
||||
|
||||
- name: Resolve Package Manager
|
||||
id: package-manager-resolution
|
||||
shell: bash
|
||||
env:
|
||||
INPUT_PM: ${{ inputs.package-manager }}
|
||||
DETECTED_PM: ${{ steps.version.outputs.package-manager }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
input_pm="$INPUT_PM"
|
||||
detected_pm="$DETECTED_PM"
|
||||
final_pm=""
|
||||
|
||||
if [ "$input_pm" = "auto" ]; then
|
||||
if [ -n "$detected_pm" ]; then
|
||||
final_pm="$detected_pm"
|
||||
echo "Auto-detected package manager: $final_pm"
|
||||
else
|
||||
final_pm="npm"
|
||||
echo "No package manager detected, using default: $final_pm"
|
||||
fi
|
||||
else
|
||||
final_pm="$input_pm"
|
||||
echo "Using specified package manager: $final_pm"
|
||||
fi
|
||||
|
||||
echo "final-package-manager=$final_pm" >> $GITHUB_OUTPUT
|
||||
echo "Final package manager: $final_pm"
|
||||
|
||||
- name: Setup Node.js
|
||||
id: setup
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
|
||||
with:
|
||||
node-version: ${{ steps.version.outputs.detected-version }}
|
||||
registry-url: ${{ inputs.registry-url }}
|
||||
|
||||
- name: Enable Corepack
|
||||
id: corepack
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "Enabling Corepack for package manager management..."
|
||||
corepack enable
|
||||
echo "✅ Corepack enabled successfully"
|
||||
|
||||
- name: Set Auth Token
|
||||
if: inputs.token != ''
|
||||
shell: bash
|
||||
env:
|
||||
TOKEN: ${{ inputs.token }}
|
||||
run: |
|
||||
# Sanitize token by removing newlines to prevent env var injection
|
||||
sanitized_token="$(echo "$TOKEN" | tr -d '\n\r')"
|
||||
printf 'NODE_AUTH_TOKEN=%s\n' "$sanitized_token" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Setup Package Manager
|
||||
shell: bash
|
||||
env:
|
||||
PACKAGE_MANAGER: ${{ steps.package-manager-resolution.outputs.final-package-manager }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
package_manager="$PACKAGE_MANAGER"
|
||||
echo "Setting up package manager: $package_manager"
|
||||
|
||||
case "$package_manager" in
|
||||
"pnpm")
|
||||
echo "Installing PNPM via Corepack..."
|
||||
corepack prepare pnpm@latest --activate
|
||||
echo "✅ PNPM installed successfully"
|
||||
;;
|
||||
"yarn")
|
||||
echo "Installing Yarn via Corepack..."
|
||||
corepack prepare yarn@stable --activate
|
||||
echo "✅ Yarn installed successfully"
|
||||
;;
|
||||
"bun")
|
||||
# Bun installation handled by separate step below
|
||||
echo "Bun will be installed via official setup-bun action"
|
||||
;;
|
||||
"npm")
|
||||
echo "Using built-in NPM"
|
||||
;;
|
||||
*)
|
||||
echo "::warning::Unknown package manager: $package_manager, using NPM"
|
||||
;;
|
||||
esac
|
||||
|
||||
- name: Setup Bun
|
||||
if: steps.package-manager-resolution.outputs.final-package-manager == 'bun'
|
||||
uses: oven-sh/setup-bun@735343b667d3e6f658f44d0eca948eb6282f2b76 # v2.0.2
|
||||
with:
|
||||
bun-version: latest
|
||||
|
||||
- name: Set Final Outputs
|
||||
id: final-outputs
|
||||
shell: bash
|
||||
run: |
|
||||
echo "node-path=$(which node)" >> $GITHUB_OUTPUT
|
||||
@@ -1,45 +0,0 @@
|
||||
---
|
||||
# Validation rules for node-setup action
|
||||
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
|
||||
# Schema version: 1.0
|
||||
# Coverage: 83% (5/6 inputs)
|
||||
#
|
||||
# This file defines validation rules for the node-setup GitHub Action.
|
||||
# Rules are automatically applied by validate-inputs action when this
|
||||
# action is used.
|
||||
#
|
||||
|
||||
schema_version: '1.0'
|
||||
action: node-setup
|
||||
description: Sets up Node.js environment with version detection and package manager configuration.
|
||||
generator_version: 1.0.0
|
||||
required_inputs: []
|
||||
optional_inputs:
|
||||
- default-version
|
||||
- force-version
|
||||
- node-mirror
|
||||
- package-manager
|
||||
- registry-url
|
||||
- token
|
||||
conventions:
|
||||
default-version: semantic_version
|
||||
force-version: semantic_version
|
||||
package-manager: boolean
|
||||
registry-url: url
|
||||
token: github_token
|
||||
overrides:
|
||||
package-manager: package_manager_enum
|
||||
statistics:
|
||||
total_inputs: 6
|
||||
validated_inputs: 5
|
||||
skipped_inputs: 0
|
||||
coverage_percentage: 83
|
||||
validation_coverage: 83
|
||||
auto_detected: true
|
||||
manual_review_required: false
|
||||
quality_indicators:
|
||||
has_required_inputs: false
|
||||
has_token_validation: true
|
||||
has_version_validation: true
|
||||
has_file_validation: false
|
||||
has_security_validation: true
|
||||
@@ -42,109 +42,40 @@ class CustomValidator(BaseValidator):
|
||||
self.add_error("Input 'npm_token' is required")
|
||||
valid = False
|
||||
elif inputs["npm_token"]:
|
||||
token = inputs["npm_token"]
|
||||
# Check for NPM classic token format first
|
||||
if token.startswith("npm_"):
|
||||
# NPM classic token format: npm_ followed by 36+ alphanumeric characters
|
||||
if not re.match(r"^npm_[a-zA-Z0-9]{36,}$", token):
|
||||
self.add_error("Invalid NPM token format")
|
||||
valid = False
|
||||
# Also check for injection
|
||||
result = self.security_validator.validate_no_injection(token, "npm_token")
|
||||
for error in self.security_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.security_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
else:
|
||||
# Otherwise validate as GitHub token
|
||||
result = self.token_validator.validate_github_token(token, required=True)
|
||||
for error in self.token_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.token_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self._validate_npm_token(inputs["npm_token"])
|
||||
|
||||
# Validate registry-url
|
||||
if inputs.get("registry-url"):
|
||||
url = inputs["registry-url"]
|
||||
if not self.is_github_expression(url):
|
||||
# Must be http or https URL
|
||||
if not url.startswith(("http://", "https://")):
|
||||
self.add_error("Registry URL must use http or https protocol")
|
||||
valid = False
|
||||
else:
|
||||
# Validate URL format
|
||||
result = self.network_validator.validate_url(url, "registry-url")
|
||||
for error in self.network_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.network_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self._validate_registry_url(inputs["registry-url"])
|
||||
|
||||
# Validate scope
|
||||
if inputs.get("scope"):
|
||||
scope = inputs["scope"]
|
||||
if not self.is_github_expression(scope):
|
||||
# Scope must start with @ and contain only valid characters
|
||||
if not scope.startswith("@"):
|
||||
self.add_error("Scope must start with @ symbol")
|
||||
valid = False
|
||||
elif not re.match(r"^@[a-z0-9][a-z0-9\-_.]*$", scope):
|
||||
self.add_error(
|
||||
"Invalid scope format: must be @org-name with lowercase "
|
||||
"letters, numbers, hyphens, dots, and underscores"
|
||||
)
|
||||
valid = False
|
||||
|
||||
# Check for injection
|
||||
result = self.security_validator.validate_no_injection(scope, "scope")
|
||||
for error in self.security_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.security_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self._validate_scope(inputs["scope"])
|
||||
|
||||
# Validate access
|
||||
if inputs.get("access"):
|
||||
access = inputs["access"]
|
||||
if not self.is_github_expression(access):
|
||||
valid_access = ["public", "restricted", "private"]
|
||||
if access and access not in valid_access:
|
||||
self.add_error(
|
||||
f"Invalid access level: {access}. Must be one of: {', '.join(valid_access)}"
|
||||
)
|
||||
valid = False
|
||||
valid &= self.validate_enum(
|
||||
inputs["access"], "access", ["public", "restricted", "private"]
|
||||
)
|
||||
|
||||
# Validate boolean inputs (only always-auth and include-merged-tags are strict)
|
||||
for field in ["always-auth", "include-merged-tags"]:
|
||||
if inputs.get(field):
|
||||
result = self.boolean_validator.validate_boolean(inputs[field], field)
|
||||
for error in self.boolean_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.boolean_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.boolean_validator, "validate_boolean", inputs[field], field
|
||||
)
|
||||
|
||||
# provenance and dry-run accept any value (npm handles them)
|
||||
# No validation needed for these
|
||||
|
||||
# Validate package-version
|
||||
if inputs.get("package-version"):
|
||||
result = self.version_validator.validate_semantic_version(
|
||||
inputs["package-version"], "package-version"
|
||||
valid &= self.validate_with(
|
||||
self.version_validator,
|
||||
"validate_semantic_version",
|
||||
inputs["package-version"],
|
||||
"package-version",
|
||||
)
|
||||
for error in self.version_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.version_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
|
||||
# Validate tag
|
||||
if inputs.get("tag"):
|
||||
@@ -161,16 +92,57 @@ class CustomValidator(BaseValidator):
|
||||
# Validate working-directory and ignore-scripts as file paths
|
||||
for field in ["working-directory", "ignore-scripts"]:
|
||||
if inputs.get(field):
|
||||
result = self.file_validator.validate_path(inputs[field], field)
|
||||
for error in self.file_validator.errors:
|
||||
if error not in self.errors:
|
||||
self.add_error(error)
|
||||
self.file_validator.clear_errors()
|
||||
if not result:
|
||||
valid = False
|
||||
valid &= self.validate_with(
|
||||
self.file_validator, "validate_path", inputs[field], field
|
||||
)
|
||||
|
||||
return valid
|
||||
|
||||
def _validate_npm_token(self, token: str) -> bool:
|
||||
"""Validate NPM token format."""
|
||||
# Check for NPM classic token format first
|
||||
if token.startswith("npm_"):
|
||||
# NPM classic token format: npm_ followed by 36+ alphanumeric characters
|
||||
if not re.match(r"^npm_[a-zA-Z0-9]{36,}$", token):
|
||||
self.add_error("Invalid NPM token format")
|
||||
return False
|
||||
# Also check for injection
|
||||
return self.validate_with(
|
||||
self.security_validator, "validate_no_injection", token, "npm_token"
|
||||
)
|
||||
# Otherwise validate as GitHub token
|
||||
return self.validate_with(
|
||||
self.token_validator, "validate_github_token", token, required=True
|
||||
)
|
||||
|
||||
def _validate_registry_url(self, url: str) -> bool:
|
||||
"""Validate registry URL format."""
|
||||
if self.is_github_expression(url):
|
||||
return True
|
||||
# Must be http or https URL
|
||||
if not url.startswith(("http://", "https://")):
|
||||
self.add_error("Registry URL must use http or https protocol")
|
||||
return False
|
||||
# Validate URL format
|
||||
return self.validate_with(self.network_validator, "validate_url", url, "registry-url")
|
||||
|
||||
def _validate_scope(self, scope: str) -> bool:
|
||||
"""Validate NPM scope format."""
|
||||
if self.is_github_expression(scope):
|
||||
return True
|
||||
# Scope must start with @ and contain only valid characters
|
||||
if not scope.startswith("@"):
|
||||
self.add_error("Scope must start with @ symbol")
|
||||
return False
|
||||
if not re.match(r"^@[a-z0-9][a-z0-9\-_.]*$", scope):
|
||||
self.add_error(
|
||||
"Invalid scope format: must be @org-name with lowercase "
|
||||
"letters, numbers, hyphens, dots, and underscores"
|
||||
)
|
||||
return False
|
||||
# Check for injection
|
||||
return self.validate_with(self.security_validator, "validate_no_injection", scope, "scope")
|
||||
|
||||
def get_required_inputs(self) -> list[str]:
|
||||
"""Get list of required inputs."""
|
||||
return ["npm_token"]
|
||||
|
||||
@@ -100,24 +100,76 @@ runs:
with:
token: ${{ inputs.token || github.token }}

- name: Detect Package Manager
id: detect-pm
shell: sh
run: |
set -eu

# Detect package manager from lockfiles
if [ -f bun.lockb ]; then
package_manager="bun"
elif [ -f pnpm-lock.yaml ]; then
package_manager="pnpm"
elif [ -f yarn.lock ]; then
package_manager="yarn"
else
package_manager="npm"
fi

printf 'package-manager=%s\n' "$package_manager" >> "$GITHUB_OUTPUT"
echo "Detected package manager: $package_manager"

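The step above hands its result to later steps through `$GITHUB_OUTPUT`, which is what makes `steps.detect-pm.outputs.package-manager` usable in the Corepack, Bun, and cache steps that follow. A rough emulation of that handoff outside a runner (the temporary file stands in for the output file the runner normally provides):

```sh
# Sketch: emulate the $GITHUB_OUTPUT handoff locally. On a real runner the
# file path is provided for you and the value becomes steps.<id>.outputs.<name>.
set -eu
GITHUB_OUTPUT=$(mktemp)
package_manager="npm"  # stand-in for the detected value
printf 'package-manager=%s\n' "$package_manager" >> "$GITHUB_OUTPUT"
cat "$GITHUB_OUTPUT"   # prints: package-manager=npm
rm -f "$GITHUB_OUTPUT"
```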
- name: Setup Node.js
id: node-setup
uses: ivuorinen/actions/node-setup@0fa9a68f07a1260b321f814202658a6089a43d42
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: '24'

- name: Enable Corepack
shell: sh
run: |
set -eu
corepack enable

- name: Install Package Manager
shell: sh
env:
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
run: |
set -eu

case "$PACKAGE_MANAGER" in
pnpm)
corepack prepare pnpm@latest --activate
;;
yarn)
corepack prepare yarn@stable --activate
;;
bun|npm)
# Bun installed separately, npm built-in
;;
esac

- name: Setup Bun
if: steps.detect-pm.outputs.package-manager == 'bun'
uses: oven-sh/setup-bun@b7a1c7ccf290d58743029c4f6903da283811b979 # v2.1.0
with:
bun-version: latest

- name: Cache Node Dependencies
id: cache
uses: ivuorinen/actions/common-cache@0fa9a68f07a1260b321f814202658a6089a43d42
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
type: 'npm'
paths: 'node_modules'
key-files: 'package-lock.json,yarn.lock,pnpm-lock.yaml,bun.lockb'
key-prefix: 'npm-publish-${{ steps.node-setup.outputs.package-manager }}'
path: node_modules
key: ${{ runner.os }}-npm-publish-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}
restore-keys: |
${{ runner.os }}-npm-publish-${{ steps.detect-pm.outputs.package-manager }}-

- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
shell: sh
env:
PACKAGE_MANAGER: ${{ steps.node-setup.outputs.package-manager }}
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
run: |
set -eu


@@ -22,7 +22,7 @@ optional_inputs:
- token
conventions:
npm_token: github_token
package-version: semantic_version
package-version: strict_semantic_version
registry-url: url
scope: scope
token: github_token

package-lock.json (generated)
@@ -13,7 +13,7 @@
|
||||
"js-yaml": "^4.1.0",
|
||||
"markdown-table": "^3.0.3",
|
||||
"markdown-table-formatter": "^1.6.0",
|
||||
"markdownlint-cli2": "^0.19.0",
|
||||
"markdownlint-cli2": "^0.20.0",
|
||||
"prettier": "^3.3.3",
|
||||
"yaml-lint": "^1.7.0"
|
||||
},
|
||||
@@ -661,10 +661,23 @@
|
||||
"node": "6.* || 8.* || >= 10.*"
|
||||
}
|
||||
},
|
||||
"node_modules/get-east-asian-width": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz",
|
||||
"integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/glob": {
|
||||
"version": "10.4.5",
|
||||
"resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz",
|
||||
"integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==",
|
||||
"version": "10.5.0",
|
||||
"resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz",
|
||||
"integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
@@ -1051,9 +1064,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/markdownlint": {
|
||||
"version": "0.39.0",
|
||||
"resolved": "https://registry.npmjs.org/markdownlint/-/markdownlint-0.39.0.tgz",
|
||||
"integrity": "sha512-Xt/oY7bAiHwukL1iru2np5LIkhwD19Y7frlsiDILK62v3jucXCD6JXlZlwMG12HZOR+roHIVuJZrfCkOhp6k3g==",
|
||||
"version": "0.40.0",
|
||||
"resolved": "https://registry.npmjs.org/markdownlint/-/markdownlint-0.40.0.tgz",
|
||||
"integrity": "sha512-UKybllYNheWac61Ia7T6fzuQNDZimFIpCg2w6hHjgV1Qu0w1TV0LlSgryUGzM0bkKQCBhy2FDhEELB73Kb0kAg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@@ -1064,7 +1077,8 @@
|
||||
"micromark-extension-gfm-footnote": "2.1.0",
|
||||
"micromark-extension-gfm-table": "2.1.1",
|
||||
"micromark-extension-math": "3.1.0",
|
||||
"micromark-util-types": "2.0.2"
|
||||
"micromark-util-types": "2.0.2",
|
||||
"string-width": "8.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
@@ -1074,9 +1088,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/markdownlint-cli2": {
|
||||
"version": "0.19.0",
|
||||
"resolved": "https://registry.npmjs.org/markdownlint-cli2/-/markdownlint-cli2-0.19.0.tgz",
|
||||
"integrity": "sha512-0+g7Fi/Y3qfvwfhJr77CpC/dEEoc4k7SvumlnL1tb68O+7fjKtIUG7aKzNUQIMXTVi8x63jcfXg4swz/ZYKyCw==",
|
||||
"version": "0.20.0",
|
||||
"resolved": "https://registry.npmjs.org/markdownlint-cli2/-/markdownlint-cli2-0.20.0.tgz",
|
||||
"integrity": "sha512-esPk+8Qvx/f0bzI7YelUeZp+jCtFOk3KjZ7s9iBQZ6HlymSXoTtWGiIRZP05/9Oy2ehIoIjenVwndxGtxOIJYQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
@@ -1085,7 +1099,7 @@
|
||||
"js-yaml": "4.1.1",
|
||||
"jsonc-parser": "3.3.1",
|
||||
"markdown-it": "14.1.0",
|
||||
"markdownlint": "0.39.0",
|
||||
"markdownlint": "0.40.0",
|
||||
"markdownlint-cli2-formatter-default": "0.0.6",
|
||||
"micromatch": "4.0.8"
|
||||
},
|
||||
@@ -1112,6 +1126,23 @@
|
||||
"markdownlint-cli2": ">=0.0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/markdownlint/node_modules/string-width": {
|
||||
"version": "8.1.0",
|
||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-8.1.0.tgz",
|
||||
"integrity": "sha512-Kxl3KJGb/gxkaUMOjRsQ8IrXiGW75O4E3RPjFIINOVH8AMl2SQ/yWdTzWwF3FevIX9LcMAjJW+GRwAlAbTSXdg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"get-east-asian-width": "^1.3.0",
|
||||
"strip-ansi": "^7.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/mdurl": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz",
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
"js-yaml": "^4.1.0",
|
||||
"markdown-table": "^3.0.3",
|
||||
"markdown-table-formatter": "^1.6.0",
|
||||
"markdownlint-cli2": "^0.19.0",
|
||||
"markdownlint-cli2": "^0.20.0",
|
||||
"prettier": "^3.3.3",
|
||||
"yaml-lint": "^1.7.0"
|
||||
},
|
||||
|
||||
@@ -1,228 +0,0 @@
#!/usr/bin/env python3
"""Custom validator for php-composer action."""

from __future__ import annotations

from pathlib import Path
import re
import sys

# Add validate-inputs directory to path to import validators
validate_inputs_path = Path(__file__).parent.parent / "validate-inputs"
sys.path.insert(0, str(validate_inputs_path))

from validators.base import BaseValidator
from validators.boolean import BooleanValidator
from validators.file import FileValidator
from validators.numeric import NumericValidator
from validators.security import SecurityValidator
from validators.token import TokenValidator
from validators.version import VersionValidator


class CustomValidator(BaseValidator):
    """Custom validator for php-composer action."""

    def __init__(self, action_type: str = "php-composer") -> None:
        """Initialize php-composer validator."""
        super().__init__(action_type)
        self.boolean_validator = BooleanValidator()
        self.file_validator = FileValidator()
        self.numeric_validator = NumericValidator()
        self.security_validator = SecurityValidator()
        self.token_validator = TokenValidator()
        self.version_validator = VersionValidator()

    def validate_inputs(self, inputs: dict[str, str]) -> bool:
        """Validate php-composer action inputs."""
        valid = True

        # Validate required input: php
        if "php" not in inputs or not inputs["php"]:
            self.add_error("Input 'php' is required")
            valid = False
        elif inputs["php"]:
            php_version = inputs["php"]
            if not self.is_github_expression(php_version):
                # PHP version validation with minimum version check
                result = self.version_validator.validate_php_version(php_version, "php")
                for error in self.version_validator.errors:
                    if error not in self.errors:
                        self.add_error(error)
                self.version_validator.clear_errors()
                if not result:
                    valid = False
                elif php_version and not php_version.startswith("$"):
                    # Additional check for minimum PHP version (7.0)
                    try:
                        parts = php_version.split(".")
                        major = int(parts[0])
                        minor = int(parts[1]) if len(parts) > 1 else 0
                        if major < 7 or (major == 7 and minor < 0):
                            self.add_error("PHP version must be 7.0 or higher")
                            valid = False
                    except (ValueError, IndexError):
                        pass # Already handled by validate_php_version

        # Validate extensions (empty string is invalid)
        if "extensions" in inputs:
            extensions = inputs["extensions"]
            if extensions == "":
                self.add_error("Extensions cannot be empty string")
                valid = False
            elif extensions:
                if not self.is_github_expression(extensions):
                    # Extensions should be comma-separated list (spaces allowed after commas)
                    if not re.match(r"^[a-zA-Z0-9_-]+(\s*,\s*[a-zA-Z0-9_-]+)*$", extensions):
                        self.add_error("Invalid extensions format: must be comma-separated list")
                        valid = False

                    # Check for injection
                    result = self.security_validator.validate_no_injection(extensions, "extensions")
                    for error in self.security_validator.errors:
                        if error not in self.errors:
                            self.add_error(error)
                    self.security_validator.clear_errors()
                    if not result:
                        valid = False

        # Validate tools (empty string is invalid)
        if "tools" in inputs:
            tools = inputs["tools"]
            if tools == "":
                self.add_error("Tools cannot be empty string")
                valid = False
            elif tools:
                if not self.is_github_expression(tools):
                    # Tools should be comma-separated list with optional version constraints
                    # Allow: letters, numbers, dash, underscore, colon, dot, caret, tilde, @, /
                    # @ symbol allows Composer stability flags like dev-master@dev
                    # / allows vendor/package format like monolog/monolog@dev
                    # spaces after commas
                    if not re.match(
                        r"^[a-zA-Z0-9_:.@/\-^~]+(\s*,\s*[a-zA-Z0-9_:.@/\-^~]+)*$", tools
                    ):
                        self.add_error("Invalid tools format: must be comma-separated list")
                        valid = False

                    # Check for injection
                    result = self.security_validator.validate_no_injection(tools, "tools")
                    for error in self.security_validator.errors:
                        if error not in self.errors:
                            self.add_error(error)
                    self.security_validator.clear_errors()
                    if not result:
                        valid = False

        # Validate composer-version (empty string is invalid, only 1 or 2 accepted)
        if "composer-version" in inputs:
            composer_version = inputs["composer-version"]
            if composer_version == "":
                self.add_error("Composer version cannot be empty string")
                valid = False
            elif composer_version:
                if not self.is_github_expression(composer_version) and composer_version not in [
                    "1",
                    "2",
                ]:
                    self.add_error("Composer version must be 1 or 2")
                    valid = False

        # Validate stability
        if inputs.get("stability"):
            stability = inputs["stability"]
            if not self.is_github_expression(stability):
                valid_stabilities = ["stable", "RC", "beta", "alpha", "dev", "snapshot"]
                if stability not in valid_stabilities:
                    self.add_error(
                        f"Invalid stability: {stability}. "
                        f"Must be one of: {', '.join(valid_stabilities)}"
                    )
                    valid = False

                # Check for injection
                result = self.security_validator.validate_no_injection(stability, "stability")
                for error in self.security_validator.errors:
                    if error not in self.errors:
                        self.add_error(error)
                self.security_validator.clear_errors()
                if not result:
                    valid = False

        # Validate cache-directories (empty string is invalid, accepts directory paths)
        if "cache-directories" in inputs:
            cache_dirs = inputs["cache-directories"]
            if cache_dirs == "":
                self.add_error("Cache directories cannot be empty string")
                valid = False
            elif cache_dirs:
                if not self.is_github_expression(cache_dirs):
                    # Should be comma-separated list of directories
                    dirs = cache_dirs.split(",")
                    for dir_path in dirs:
                        dir_path = dir_path.strip()
                        if dir_path:
                            result = self.file_validator.validate_file_path(
                                dir_path, "cache-directories"
                            )
                            for error in self.file_validator.errors:
                                if error not in self.errors:
                                    self.add_error(error)
                            self.file_validator.clear_errors()
                            if not result:
                                valid = False

        # Validate token (empty string is invalid)
        if "token" in inputs:
            token = inputs["token"]
            if token == "":
                self.add_error("Token cannot be empty string")
                valid = False
            elif token:
                result = self.token_validator.validate_github_token(token, required=False)
                for error in self.token_validator.errors:
                    if error not in self.errors:
                        self.add_error(error)
                self.token_validator.clear_errors()
                if not result:
                    valid = False

        # Validate max-retries
        if inputs.get("max-retries"):
            result = self.numeric_validator.validate_numeric_range(
                inputs["max-retries"], min_val=1, max_val=10, name="max-retries"
            )
            for error in self.numeric_validator.errors:
                if error not in self.errors:
                    self.add_error(error)
            self.numeric_validator.clear_errors()
            if not result:
                valid = False

        # Validate args (empty string is invalid, checks for injection if provided)
        if "args" in inputs:
            args = inputs["args"]
            if args == "":
                self.add_error("Args cannot be empty string")
                valid = False
            elif args:
                if not self.is_github_expression(args):
                    # Check for command injection patterns
                    result = self.security_validator.validate_no_injection(args, "args")
                    for error in self.security_validator.errors:
                        if error not in self.errors:
                            self.add_error(error)
                    self.security_validator.clear_errors()
                    if not result:
                        valid = False

        return valid

    def get_required_inputs(self) -> list[str]:
        """Get list of required inputs."""
        return ["php"]

    def get_validation_rules(self) -> dict:
        """Get validation rules."""
        rules_path = Path(__file__).parent / "rules.yml"
        return self.load_rules(rules_path)
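For reference, a validator like the one above can be exercised locally before it is wired into CI. The snippet below is a minimal sketch only: it assumes the deleted file was saved as `validator.py` inside the php-composer action directory (the filename is not shown in this diff) and that the `validate-inputs` package resolves via the `sys.path` insertion in the file itself; the input values are illustrative.

```python
# Minimal local smoke test for the php-composer CustomValidator.
# Assumption: the file above is importable as validator.py from its action directory.
from validator import CustomValidator

inputs = {
    "php": "8.4",
    "extensions": "mbstring, xml, zip",
    "composer-version": "2",
    "max-retries": "3",
}

checker = CustomValidator()
if not checker.validate_inputs(inputs):
    # Surface accumulated errors in GitHub Actions annotation format.
    for error in checker.errors:
        print(f"::error::{error}")
    raise SystemExit(1)
print("php-composer inputs look valid")
```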
@@ -1,94 +0,0 @@
|
||||
# ivuorinen/actions/php-composer
|
||||
|
||||
## Run Composer Install
|
||||
|
||||
### Description
|
||||
|
||||
Runs Composer install on a repository with advanced caching and configuration.
|
||||
|
||||
### Inputs
|
||||
|
||||
| name | description | required | default |
|
||||
|---------------------|---------------------------------------------------------------|----------|-----------------------------------------------------|
|
||||
| `php` | <p>PHP Version to use.</p> | `true` | `8.4` |
|
||||
| `extensions` | <p>Comma-separated list of PHP extensions to install</p> | `false` | `mbstring, xml, zip, curl, json` |
|
||||
| `tools` | <p>Comma-separated list of Composer tools to install</p> | `false` | `composer:v2` |
|
||||
| `args` | <p>Arguments to pass to Composer.</p> | `false` | `--no-progress --prefer-dist --optimize-autoloader` |
|
||||
| `composer-version` | <p>Composer version to use (1 or 2)</p> | `false` | `2` |
|
||||
| `stability` | <p>Minimum stability (stable, RC, beta, alpha, dev)</p> | `false` | `stable` |
|
||||
| `cache-directories` | <p>Additional directories to cache (comma-separated)</p> | `false` | `""` |
|
||||
| `token` | <p>GitHub token for private repository access</p> | `false` | `""` |
|
||||
| `max-retries` | <p>Maximum number of retry attempts for Composer commands</p> | `false` | `3` |
|
||||
|
||||
### Outputs
|
||||
|
||||
| name | description |
|
||||
|--------------------|-------------------------------------------------|
|
||||
| `lock` | <p>composer.lock or composer.json file hash</p> |
|
||||
| `php-version` | <p>Installed PHP version</p> |
|
||||
| `composer-version` | <p>Installed Composer version</p> |
|
||||
| `cache-hit` | <p>Indicates if there was a cache hit</p> |
|
||||
|
||||
### Runs
|
||||
|
||||
This action is a `composite` action.
|
||||
|
||||
### Usage
|
||||
|
||||
```yaml
|
||||
- uses: ivuorinen/actions/php-composer@main
|
||||
with:
|
||||
php:
|
||||
# PHP Version to use.
|
||||
#
|
||||
# Required: true
|
||||
# Default: 8.4
|
||||
|
||||
extensions:
|
||||
# Comma-separated list of PHP extensions to install
|
||||
#
|
||||
# Required: false
|
||||
# Default: mbstring, xml, zip, curl, json
|
||||
|
||||
tools:
|
||||
# Comma-separated list of Composer tools to install
|
||||
#
|
||||
# Required: false
|
||||
# Default: composer:v2
|
||||
|
||||
args:
|
||||
# Arguments to pass to Composer.
|
||||
#
|
||||
# Required: false
|
||||
# Default: --no-progress --prefer-dist --optimize-autoloader
|
||||
|
||||
composer-version:
|
||||
# Composer version to use (1 or 2)
|
||||
#
|
||||
# Required: false
|
||||
# Default: 2
|
||||
|
||||
stability:
|
||||
# Minimum stability (stable, RC, beta, alpha, dev)
|
||||
#
|
||||
# Required: false
|
||||
# Default: stable
|
||||
|
||||
cache-directories:
|
||||
# Additional directories to cache (comma-separated)
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
|
||||
token:
|
||||
# GitHub token for private repository access
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
|
||||
max-retries:
|
||||
# Maximum number of retry attempts for Composer commands
|
||||
#
|
||||
# Required: false
|
||||
# Default: 3
|
||||
```
|
||||
@@ -1,228 +0,0 @@
|
||||
# yaml-language-server: $schema=https://json.schemastore.org/github-action.json
|
||||
# permissions:
|
||||
# - contents: read # Required for checking out repository
|
||||
---
|
||||
name: Run Composer Install
|
||||
description: 'Runs Composer install on a repository with advanced caching and configuration.'
|
||||
author: 'Ismo Vuorinen'
|
||||
|
||||
branding:
|
||||
icon: server
|
||||
color: gray-dark
|
||||
|
||||
inputs:
|
||||
php:
|
||||
description: 'PHP Version to use.'
|
||||
required: true
|
||||
default: '8.4'
|
||||
extensions:
|
||||
description: 'Comma-separated list of PHP extensions to install'
|
||||
required: false
|
||||
default: 'mbstring, xml, zip, curl, json'
|
||||
tools:
|
||||
description: 'Comma-separated list of Composer tools to install'
|
||||
required: false
|
||||
default: 'composer:v2'
|
||||
args:
|
||||
description: 'Arguments to pass to Composer.'
|
||||
required: false
|
||||
default: '--no-progress --prefer-dist --optimize-autoloader'
|
||||
composer-version:
|
||||
description: 'Composer version to use (1 or 2)'
|
||||
required: false
|
||||
default: '2'
|
||||
stability:
|
||||
description: 'Minimum stability (stable, RC, beta, alpha, dev)'
|
||||
required: false
|
||||
default: 'stable'
|
||||
cache-directories:
|
||||
description: 'Additional directories to cache (comma-separated)'
|
||||
required: false
|
||||
default: ''
|
||||
token:
|
||||
description: 'GitHub token for private repository access'
|
||||
required: false
|
||||
default: ''
|
||||
max-retries:
|
||||
description: 'Maximum number of retry attempts for Composer commands'
|
||||
required: false
|
||||
default: '3'
|
||||
|
||||
outputs:
|
||||
lock:
|
||||
description: 'composer.lock or composer.json file hash'
|
||||
value: ${{ steps.hash.outputs.lock }}
|
||||
php-version:
|
||||
description: 'Installed PHP version'
|
||||
value: ${{ steps.php.outputs.version }}
|
||||
composer-version:
|
||||
description: 'Installed Composer version'
|
||||
value: ${{ steps.composer.outputs.version }}
|
||||
cache-hit:
|
||||
description: 'Indicates if there was a cache hit'
|
||||
value: ${{ steps.composer-cache.outputs.cache-hit }}
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Mask Secrets
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ inputs.token || github.token }}
|
||||
run: |
|
||||
echo "::add-mask::$GITHUB_TOKEN"
|
||||
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
|
||||
with:
|
||||
token: ${{ inputs.token || github.token }}
|
||||
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
uses: ivuorinen/actions/validate-inputs@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
with:
|
||||
action-type: php-composer
|
||||
|
||||
- name: Setup PHP
|
||||
id: php
|
||||
uses: shivammathur/setup-php@bf6b4fbd49ca58e4608c9c89fba0b8d90bd2a39f # 2.35.5
|
||||
with:
|
||||
php-version: ${{ inputs.php }}
|
||||
extensions: ${{ inputs.extensions }}
|
||||
tools: ${{ inputs.tools }}
|
||||
coverage: none
|
||||
ini-values: memory_limit=1G, max_execution_time=600
|
||||
fail-fast: true
|
||||
|
||||
- name: Get Dependency Hashes
|
||||
id: hash
|
||||
shell: bash
|
||||
env:
|
||||
CACHE_DIRECTORIES: ${{ inputs.cache-directories }}
|
||||
COMPOSER_LOCK_HASH: ${{ hashFiles('**/composer.lock') }}
|
||||
COMPOSER_JSON_HASH: ${{ hashFiles('**/composer.json') }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Function to calculate directory hash
|
||||
calculate_dir_hash() {
|
||||
local dir=$1
|
||||
if [ -d "$dir" ]; then
|
||||
find "$dir" -type f -exec sha256sum {} \; | sort | sha256sum | cut -d' ' -f1
|
||||
fi
|
||||
}
|
||||
|
||||
# Get composer.lock hash or composer.json hash
|
||||
if [ -f composer.lock ]; then
|
||||
echo "lock=$COMPOSER_LOCK_HASH" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "lock=$COMPOSER_JSON_HASH" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Calculate additional directory hashes
|
||||
if [ -n "$CACHE_DIRECTORIES" ]; then
|
||||
IFS=',' read -ra DIRS <<< "$CACHE_DIRECTORIES"
|
||||
for dir in "${DIRS[@]}"; do
|
||||
dir_hash=$(calculate_dir_hash "$dir")
|
||||
if [ -n "$dir_hash" ]; then
|
||||
echo "${dir}_hash=$dir_hash" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
- name: Configure Composer
|
||||
id: composer
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ inputs.token || github.token }}
|
||||
STABILITY: ${{ inputs.stability }}
|
||||
COMPOSER_VERSION: ${{ inputs.composer-version }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Configure Composer environment
|
||||
composer config --global process-timeout 600
|
||||
composer config --global allow-plugins true
|
||||
composer config --global github-oauth.github.com "$GITHUB_TOKEN"
|
||||
|
||||
if [ "$STABILITY" != "stable" ]; then
|
||||
composer config minimum-stability "$STABILITY"
|
||||
fi
|
||||
|
||||
# Verify Composer installation
|
||||
composer_full_version=$(composer --version | grep -oP 'Composer version \K[0-9]+\.[0-9]+\.[0-9]+')
|
||||
if [ -z "$composer_full_version" ]; then
|
||||
echo "::error::Failed to detect Composer version"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Extract major version for comparison
|
||||
composer_major_version=${composer_full_version%%.*}
|
||||
expected_version="$COMPOSER_VERSION"
|
||||
|
||||
echo "Detected Composer version: $composer_full_version (major: $composer_major_version)"
|
||||
|
||||
if [ "$composer_major_version" != "$expected_version" ]; then
|
||||
echo "::error::Composer major version mismatch. Expected $expected_version.x, got $composer_full_version"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Store full version for output
|
||||
echo "version=$composer_full_version" >> "$GITHUB_OUTPUT"
|
||||
|
||||
# Log Composer configuration
|
||||
echo "Composer Configuration:"
|
||||
composer config --list
|
||||
|
||||
- name: Cache Composer packages
|
||||
id: composer-cache
|
||||
uses: ivuorinen/actions/common-cache@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
with:
|
||||
type: 'composer'
|
||||
paths: vendor,~/.composer/cache${{ inputs.cache-directories != "" && format(",{0}", inputs.cache-directories) || "" }}
|
||||
key-prefix: 'php-${{ inputs.php }}-composer-${{ inputs.composer-version }}'
|
||||
key-files: 'composer.lock,composer.json'
|
||||
restore-keys: |
|
||||
${{ runner.os }}-php-${{ inputs.php }}-composer-${{ inputs.composer-version }}-
|
||||
${{ runner.os }}-php-${{ inputs.php }}-composer-
|
||||
${{ runner.os }}-php-${{ inputs.php }}-
|
||||
|
||||
- name: Clear Composer Cache Before Final Attempt
|
||||
if: steps.composer-cache.outputs.cache-hit != 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "Clearing Composer cache to ensure clean installation..."
|
||||
composer clear-cache
|
||||
|
||||
- name: Install Dependencies
|
||||
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3
|
||||
with:
|
||||
timeout_minutes: 10
|
||||
max_attempts: ${{ inputs.max-retries }}
|
||||
retry_wait_seconds: 30
|
||||
command: composer install ${{ inputs.args }}
|
||||
|
||||
- name: Verify Installation
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Verify vendor directory
|
||||
if [ ! -d "vendor" ]; then
|
||||
echo "::error::vendor directory not found"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Verify autoloader
|
||||
if [ ! -f "vendor/autoload.php" ]; then
|
||||
echo "::error::autoload.php not found"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Generate Optimized Autoloader
|
||||
if: success()
|
||||
shell: bash
|
||||
run: |-
|
||||
set -euo pipefail
|
||||
composer dump-autoload --optimize --classmap-authoritative
|
||||
@@ -1,47 +0,0 @@
---
# Validation rules for php-composer action
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
# Schema version: 1.0
# Coverage: 56% (5/9 inputs)
#
# This file defines validation rules for the php-composer GitHub Action.
# Rules are automatically applied by validate-inputs action when this
# action is used.
#

schema_version: '1.0'
action: php-composer
description: Runs Composer install on a repository with advanced caching and configuration.
generator_version: 1.0.0
required_inputs:
  - php
optional_inputs:
  - args
  - cache-directories
  - composer-version
  - extensions
  - max-retries
  - stability
  - token
  - tools
conventions:
  cache-directories: boolean
  composer-version: semantic_version
  max-retries: numeric_range_1_10
  php: semantic_version
  token: github_token
overrides: {}
statistics:
  total_inputs: 9
  validated_inputs: 5
  skipped_inputs: 0
  coverage_percentage: 56
  validation_coverage: 56
  auto_detected: true
  manual_review_required: true
quality_indicators:
  has_required_inputs: true
  has_token_validation: true
  has_version_validation: true
  has_file_validation: false
  has_security_validation: true
@@ -1,134 +0,0 @@
#!/usr/bin/env python3
"""Custom validator for php-laravel-phpunit action."""

from __future__ import annotations

from pathlib import Path
import sys

# Add validate-inputs directory to path to import validators
validate_inputs_path = Path(__file__).parent.parent / "validate-inputs"
sys.path.insert(0, str(validate_inputs_path))

from validators.base import BaseValidator
from validators.file import FileValidator
from validators.token import TokenValidator
from validators.version import VersionValidator


class CustomValidator(BaseValidator):
    """Custom validator for php-laravel-phpunit action."""

    def __init__(self, action_type: str = "php-laravel-phpunit") -> None:
        """Initialize php-laravel-phpunit validator."""
        super().__init__(action_type)
        self.version_validator = VersionValidator()
        self.file_validator = FileValidator()
        self.token_validator = TokenValidator()

    def validate_inputs(self, inputs: dict[str, str]) -> bool:
        """Validate php-laravel-phpunit action inputs."""
        valid = True

        # Validate php-version if provided and not empty
        if inputs.get("php-version"):
            value = inputs["php-version"]
            # Special case: "latest" is allowed
            if value != "latest":
                result = self.version_validator.validate_php_version(value, "php-version")

                # Propagate errors from the version validator
                for error in self.version_validator.errors:
                    if error not in self.errors:
                        self.add_error(error)

                self.version_validator.clear_errors()

                if not result:
                    valid = False
        # Validate php-version-file if provided
        if inputs.get("php-version-file"):
            result = self.file_validator.validate_file_path(
                inputs["php-version-file"], "php-version-file"
            )
            for error in self.file_validator.errors:
                if error not in self.errors:
                    self.add_error(error)
            self.file_validator.clear_errors()
            if not result:
                valid = False

        # Validate extensions if provided
        if inputs.get("extensions"):
            value = inputs["extensions"]
            # Basic validation for PHP extensions list
            if ";" in value and not value.startswith("${{"):
                self.add_error(f"Invalid extensions format in extensions: {value}")
                valid = False
            # Check for dangerous characters and invalid format (@ is not valid in PHP extensions)
            if any(char in value for char in ["`", "$", "&", "|", ">", "<", "@", "\n", "\r"]):
                self.add_error(f"Invalid characters in extensions: {value}")
                valid = False

        # Validate coverage if provided
        if inputs.get("coverage"):
            value = inputs["coverage"]
            # Valid coverage drivers for PHPUnit
            valid_coverage = ["none", "xdebug", "xdebug3", "pcov"]
            if value not in valid_coverage:
                # Check for command injection attempts
                if any(char in value for char in [";", "`", "$", "&", "|", ">", "<", "\n", "\r"]):
                    self.add_error(f"Command injection attempt in coverage: {value}")
                    valid = False
                elif value and not value.startswith("${{"):
                    self.add_error(
                        f"Invalid coverage driver: {value}. "
                        f"Must be one of: {', '.join(valid_coverage)}"
                    )
                    valid = False

        # Validate token if provided
        if inputs.get("token"):
            result = self.token_validator.validate_github_token(inputs["token"])
            for error in self.token_validator.errors:
                if error not in self.errors:
                    self.add_error(error)
            self.token_validator.clear_errors()
            if not result:
                valid = False

        return valid

    def get_required_inputs(self) -> list[str]:
        """Get list of required inputs."""
        return []

    def get_validation_rules(self) -> dict:
        """Get validation rules."""
        return {
            "php-version": {
                "type": "php_version",
                "required": False,
                "description": "PHP version to use",
            },
            "php-version-file": {
                "type": "file",
                "required": False,
                "description": "PHP version file",
            },
            "extensions": {
                "type": "string",
                "required": False,
                "description": "PHP extensions to install",
            },
            "coverage": {
                "type": "string",
                "required": False,
                "description": "Coverage driver",
            },
            "token": {
                "type": "token",
                "required": False,
                "description": "GitHub token",
            },
        }
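As with the php-composer validator, the class above can be smoke-tested outside a workflow. A minimal sketch, again assuming the deleted file is importable as `validator.py` from its action directory (an assumption, not shown in the diff): `"latest"` bypasses the version check, and an unknown coverage driver is rejected by the list check.

```python
# Illustrative only: exercises the special cases documented in the validator above.
from validator import CustomValidator

v = CustomValidator()
assert v.validate_inputs({"php-version": "latest", "coverage": "pcov"})
assert not v.validate_inputs({"coverage": "not-a-driver"})
print("php-laravel-phpunit validator behaves as documented")
```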
@@ -1,66 +0,0 @@
|
||||
# ivuorinen/actions/php-laravel-phpunit
|
||||
|
||||
## Laravel Setup and Composer test
|
||||
|
||||
### Description
|
||||
|
||||
Setup PHP, install dependencies, generate key, create database and run composer test
|
||||
|
||||
### Inputs
|
||||
|
||||
| name | description | required | default |
|
||||
|--------------------|-----------------------------------------------------------------------------------------------------------------------|----------|---------------------------------------------|
|
||||
| `php-version` | <p>PHP Version to use, see https://github.com/marketplace/actions/setup-php-action#php-version-optional</p> | `false` | `latest` |
|
||||
| `php-version-file` | <p>PHP Version file to use, see https://github.com/marketplace/actions/setup-php-action#php-version-file-optional</p> | `false` | `.php-version` |
|
||||
| `extensions` | <p>PHP extensions to install, see https://github.com/marketplace/actions/setup-php-action#extensions-optional</p> | `false` | `mbstring, intl, json, pdo_sqlite, sqlite3` |
|
||||
| `coverage` | <p>Specify code-coverage driver, see https://github.com/marketplace/actions/setup-php-action#coverage-optional</p> | `false` | `none` |
|
||||
| `token` | <p>GitHub token for authentication</p> | `false` | `""` |
|
||||
|
||||
### Outputs
|
||||
|
||||
| name | description |
|
||||
|--------------------|------------------------------------------------|
|
||||
| `php-version` | <p>The PHP version that was setup</p> |
|
||||
| `php-version-file` | <p>The PHP version file that was used</p> |
|
||||
| `extensions` | <p>The PHP extensions that were installed</p> |
|
||||
| `coverage` | <p>The code-coverage driver that was setup</p> |
|
||||
|
||||
### Runs
|
||||
|
||||
This action is a `composite` action.
|
||||
|
||||
### Usage
|
||||
|
||||
```yaml
|
||||
- uses: ivuorinen/actions/php-laravel-phpunit@main
|
||||
with:
|
||||
php-version:
|
||||
# PHP Version to use, see https://github.com/marketplace/actions/setup-php-action#php-version-optional
|
||||
#
|
||||
# Required: false
|
||||
# Default: latest
|
||||
|
||||
php-version-file:
|
||||
# PHP Version file to use, see https://github.com/marketplace/actions/setup-php-action#php-version-file-optional
|
||||
#
|
||||
# Required: false
|
||||
# Default: .php-version
|
||||
|
||||
extensions:
|
||||
# PHP extensions to install, see https://github.com/marketplace/actions/setup-php-action#extensions-optional
|
||||
#
|
||||
# Required: false
|
||||
# Default: mbstring, intl, json, pdo_sqlite, sqlite3
|
||||
|
||||
coverage:
|
||||
# Specify code-coverage driver, see https://github.com/marketplace/actions/setup-php-action#coverage-optional
|
||||
#
|
||||
# Required: false
|
||||
# Default: none
|
||||
|
||||
token:
|
||||
# GitHub token for authentication
|
||||
#
|
||||
# Required: false
|
||||
# Default: ""
|
||||
```
|
||||
@@ -1,135 +0,0 @@
|
||||
# yaml-language-server: $schema=https://json.schemastore.org/github-action.json
|
||||
# permissions:
|
||||
# - contents: read # Required for checking out repository
|
||||
---
|
||||
name: Laravel Setup and Composer test
|
||||
description: 'Setup PHP, install dependencies, generate key, create database and run composer test'
|
||||
author: 'Ismo Vuorinen'
|
||||
|
||||
branding:
|
||||
icon: 'terminal'
|
||||
color: 'blue'
|
||||
|
||||
inputs:
|
||||
php-version:
|
||||
description: 'PHP Version to use, see https://github.com/marketplace/actions/setup-php-action#php-version-optional'
|
||||
required: false
|
||||
default: 'latest'
|
||||
php-version-file:
|
||||
description: 'PHP Version file to use, see https://github.com/marketplace/actions/setup-php-action#php-version-file-optional'
|
||||
required: false
|
||||
default: '.php-version'
|
||||
extensions:
|
||||
description: 'PHP extensions to install, see https://github.com/marketplace/actions/setup-php-action#extensions-optional'
|
||||
required: false
|
||||
default: 'mbstring, intl, json, pdo_sqlite, sqlite3'
|
||||
coverage:
|
||||
description: 'Specify code-coverage driver, see https://github.com/marketplace/actions/setup-php-action#coverage-optional'
|
||||
required: false
|
||||
default: 'none'
|
||||
token:
|
||||
description: 'GitHub token for authentication'
|
||||
required: false
|
||||
default: ''
|
||||
|
||||
outputs:
|
||||
php-version:
|
||||
description: 'The PHP version that was setup'
|
||||
value: ${{ steps.setup-php.outputs.php-version }}
|
||||
php-version-file:
|
||||
description: 'The PHP version file that was used'
|
||||
value: ${{ steps.setup-php.outputs.php-version-file }}
|
||||
extensions:
|
||||
description: 'The PHP extensions that were installed'
|
||||
value: ${{ steps.setup-php.outputs.extensions }}
|
||||
coverage:
|
||||
description: 'The code-coverage driver that was setup'
|
||||
value: ${{ steps.setup-php.outputs.coverage }}
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Mask Secrets
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ inputs.token }}
|
||||
run: |
|
||||
if [ -n "$GITHUB_TOKEN" ]; then
|
||||
echo "::add-mask::$GITHUB_TOKEN"
|
||||
fi
|
||||
|
||||
- name: Detect PHP Version
|
||||
id: php-version
|
||||
uses: ivuorinen/actions/language-version-detect@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
with:
|
||||
language: 'php'
|
||||
default-version: ${{ inputs.php-version }}
|
||||
|
||||
- uses: shivammathur/setup-php@bf6b4fbd49ca58e4608c9c89fba0b8d90bd2a39f # 2.35.5
|
||||
id: setup-php
|
||||
with:
|
||||
php-version: ${{ steps.php-version.outputs.detected-version }}
|
||||
extensions: ${{ inputs.extensions }}
|
||||
coverage: ${{ inputs.coverage }}
|
||||
|
||||
- uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
|
||||
with:
|
||||
token: ${{ inputs.token != '' && inputs.token || github.token }}
|
||||
|
||||
- name: 'Check file existence'
|
||||
id: check_files
|
||||
uses: andstor/file-existence-action@076e0072799f4942c8bc574a82233e1e4d13e9d6 # v3.0.0
|
||||
with:
|
||||
files: 'package.json, artisan'
|
||||
|
||||
- name: Copy .env
|
||||
if: steps.check_files.outputs.files_exists == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
php -r "file_exists('.env') || copy('.env.example', '.env');"
|
||||
|
||||
- name: Install Dependencies
|
||||
if: steps.check_files.outputs.files_exists == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
composer install -q --no-ansi --no-interaction --no-scripts --no-progress --prefer-dist
|
||||
|
||||
- name: Generate key
|
||||
if: steps.check_files.outputs.files_exists == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
php artisan key:generate
|
||||
|
||||
- name: Directory Permissions
|
||||
if: steps.check_files.outputs.files_exists == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
chmod -R 777 storage bootstrap/cache
|
||||
|
||||
- name: Create Database
|
||||
if: steps.check_files.outputs.files_exists == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
mkdir -p database
|
||||
touch database/database.sqlite
|
||||
|
||||
- name: Execute composer test (Unit and Feature tests)
|
||||
if: steps.check_files.outputs.files_exists == 'true'
|
||||
shell: bash
|
||||
env:
|
||||
DB_CONNECTION: sqlite
|
||||
DB_DATABASE: database/database.sqlite
|
||||
run: |-
|
||||
set -euo pipefail
|
||||
|
||||
composer test
|
||||
@@ -1,43 +0,0 @@
---
# Validation rules for php-laravel-phpunit action
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
# Schema version: 1.0
# Coverage: 100% (5/5 inputs)
#
# This file defines validation rules for the php-laravel-phpunit GitHub Action.
# Rules are automatically applied by validate-inputs action when this
# action is used.
#

schema_version: '1.0'
action: php-laravel-phpunit
description: Setup PHP, install dependencies, generate key, create database and run composer test
generator_version: 1.0.0
required_inputs: []
optional_inputs:
  - coverage
  - extensions
  - php-version
  - php-version-file
  - token
conventions:
  coverage: coverage_driver
  extensions: php_extensions
  php-version: semantic_version
  php-version-file: file_path
  token: github_token
overrides: {}
statistics:
  total_inputs: 5
  validated_inputs: 5
  skipped_inputs: 0
  coverage_percentage: 100
  validation_coverage: 100
  auto_detected: true
  manual_review_required: false
quality_indicators:
  has_required_inputs: false
  has_token_validation: true
  has_version_validation: true
  has_file_validation: true
  has_security_validation: true
@@ -33,59 +33,31 @@ class CustomValidator(BaseValidator):
        # Validate token (optional)
        if inputs.get("token"):
            token = inputs["token"]
            result = self.token_validator.validate_github_token(token)
            for error in self.token_validator.errors:
                if error not in self.errors:
                    self.add_error(error)
            self.token_validator.clear_errors()
            if not result:
                valid = False

            valid &= self.validate_with(self.token_validator, "validate_github_token", token)
            # Also check for variable expansion
            if not self.is_github_expression(token):
                result = self.security_validator.validate_no_injection(token, "token")
                for error in self.security_validator.errors:
                    if error not in self.errors:
                        self.add_error(error)
                self.security_validator.clear_errors()
                if not result:
                    valid = False
                valid &= self.validate_with(
                    self.security_validator, "validate_no_injection", token, "token"
                )

        # Validate email (optional, empty means use default)
        if "email" in inputs and inputs["email"] and inputs["email"] != "":
        if inputs.get("email"):
            email = inputs["email"]
            result = self.network_validator.validate_email(email, "email")
            for error in self.network_validator.errors:
                if error not in self.errors:
                    self.add_error(error)
            self.network_validator.clear_errors()
            if not result:
                valid = False

            valid &= self.validate_with(self.network_validator, "validate_email", email, "email")
            # Also check for shell metacharacters (but allow @ and .)
            if not self.is_github_expression(email):
                # Only check for dangerous shell metacharacters, not @ or .
                dangerous_chars = [";", "&", "|", "`", "$", "(", ")", "<", ">", "\n", "\r"]
                for char in dangerous_chars:
                    if char in email:
                        self.add_error(f"email: Contains dangerous character '{char}'")
                        valid = False
                        break
                if any(char in email for char in dangerous_chars):
                    self.add_error("email: Contains dangerous shell metacharacter")
                    valid = False

        # Validate username (optional)
        if inputs.get("username"):
            username = inputs["username"]
            if not self.is_github_expression(username):
                # Check for injection
                result = self.security_validator.validate_no_injection(username, "username")
                for error in self.security_validator.errors:
                    if error not in self.errors:
                        self.add_error(error)
                self.security_validator.clear_errors()
                if not result:
                    valid = False

                # Check username length (GitHub usernames are max 39 characters)
                valid &= self.validate_with(
                    self.security_validator, "validate_no_injection", username, "username"
                )
                if len(username) > 39:
                    self.add_error("Username is too long (max 39 characters)")
                    valid = False

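The hunk above replaces the repeated propagate-errors/clear-errors/accumulate pattern with calls to a `validate_with` helper. That helper lives in the shared base validator and is not part of this hunk; judging only from the call sites, it plausibly looks like the sketch below (the body is an inference, not the repository's actual implementation).

```python
# Hypothetical shape of BaseValidator.validate_with(), inferred from the call
# sites in the hunk above; the real implementation may differ.
def validate_with(self, validator, method_name: str, *args) -> bool:
    """Run a sub-validator method, merge its errors into self, and return its result."""
    result = bool(getattr(validator, method_name)(*args))
    for error in validator.errors:
        if error not in self.errors:
            self.add_error(error)
    validator.clear_errors()
    return result
```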
@@ -4,24 +4,33 @@
|
||||
|
||||
### Description
|
||||
|
||||
Run PHPUnit tests on the repository
|
||||
Run PHPUnit tests with optional Laravel setup and Composer dependency management
|
||||
|
||||
### Inputs
|
||||
|
||||
| name | description | required | default |
|
||||
|------------|----------------------------------------|----------|-----------------------------|
|
||||
| `token` | <p>GitHub token for authentication</p> | `false` | `""` |
|
||||
| `username` | <p>GitHub username for commits</p> | `false` | `github-actions` |
|
||||
| `email` | <p>GitHub email for commits</p> | `false` | `github-actions@github.com` |
|
||||
| name | description | required | default |
|
||||
|-----------------|----------------------------------------------------------------------------------------------------------------|----------|-----------------------------------------------------|
|
||||
| `framework` | <p>Framework detection mode (auto=detect Laravel via artisan, laravel=force Laravel, generic=no framework)</p> | `false` | `auto` |
|
||||
| `php-version` | <p>PHP Version to use (latest, 8.4, 8.3, etc.)</p> | `false` | `latest` |
|
||||
| `extensions` | <p>PHP extensions to install (comma-separated)</p> | `false` | `mbstring, intl, json, pdo_sqlite, sqlite3` |
|
||||
| `coverage` | <p>Code-coverage driver (none, xdebug, pcov)</p> | `false` | `none` |
|
||||
| `composer-args` | <p>Arguments to pass to Composer install</p> | `false` | `--no-progress --prefer-dist --optimize-autoloader` |
|
||||
| `max-retries` | <p>Maximum number of retry attempts for Composer commands</p> | `false` | `3` |
|
||||
| `token` | <p>GitHub token for authentication</p> | `false` | `""` |
|
||||
| `username` | <p>GitHub username for commits</p> | `false` | `github-actions` |
|
||||
| `email` | <p>GitHub email for commits</p> | `false` | `github-actions@github.com` |
|
||||
|
||||
### Outputs
|
||||
|
||||
| name | description |
|
||||
|-----------------|--------------------------------------------------------|
|
||||
| `test_status` | <p>Test execution status (success/failure/skipped)</p> |
|
||||
| `tests_run` | <p>Number of tests executed</p> |
|
||||
| `tests_passed` | <p>Number of tests passed</p> |
|
||||
| `coverage_path` | <p>Path to coverage report</p> |
|
||||
| name | description |
|
||||
|--------------------|------------------------------------------------|
|
||||
| `framework` | <p>Detected framework (laravel or generic)</p> |
|
||||
| `php-version` | <p>The PHP version that was setup</p> |
|
||||
| `composer-version` | <p>Installed Composer version</p> |
|
||||
| `cache-hit` | <p>Indicates if there was a cache hit</p> |
|
||||
| `test-status` | <p>Test execution status (success/failure)</p> |
|
||||
| `tests-run` | <p>Number of tests executed</p> |
|
||||
| `tests-passed` | <p>Number of tests passed</p> |
|
||||
|
||||
### Runs
|
||||
|
||||
@@ -32,6 +41,42 @@ This action is a `composite` action.
|
||||
```yaml
|
||||
- uses: ivuorinen/actions/php-tests@main
|
||||
with:
|
||||
framework:
|
||||
# Framework detection mode (auto=detect Laravel via artisan, laravel=force Laravel, generic=no framework)
|
||||
#
|
||||
# Required: false
|
||||
# Default: auto
|
||||
|
||||
php-version:
|
||||
# PHP Version to use (latest, 8.4, 8.3, etc.)
|
||||
#
|
||||
# Required: false
|
||||
# Default: latest
|
||||
|
||||
extensions:
|
||||
# PHP extensions to install (comma-separated)
|
||||
#
|
||||
# Required: false
|
||||
# Default: mbstring, intl, json, pdo_sqlite, sqlite3
|
||||
|
||||
coverage:
|
||||
# Code-coverage driver (none, xdebug, pcov)
|
||||
#
|
||||
# Required: false
|
||||
# Default: none
|
||||
|
||||
composer-args:
|
||||
# Arguments to pass to Composer install
|
||||
#
|
||||
# Required: false
|
||||
# Default: --no-progress --prefer-dist --optimize-autoloader
|
||||
|
||||
max-retries:
|
||||
# Maximum number of retry attempts for Composer commands
|
||||
#
|
||||
# Required: false
|
||||
# Default: 3
|
||||
|
||||
token:
|
||||
# GitHub token for authentication
|
||||
#
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
# - contents: read # Required for checking out repository
|
||||
---
|
||||
name: PHP Tests
|
||||
description: Run PHPUnit tests on the repository
|
||||
description: Run PHPUnit tests with optional Laravel setup and Composer dependency management
|
||||
author: Ismo Vuorinen
|
||||
|
||||
branding:
|
||||
@@ -11,6 +11,30 @@ branding:
|
||||
color: green
|
||||
|
||||
inputs:
|
||||
framework:
|
||||
description: 'Framework detection mode (auto=detect Laravel via artisan, laravel=force Laravel, generic=no framework)'
|
||||
required: false
|
||||
default: 'auto'
|
||||
php-version:
|
||||
description: 'PHP Version to use (latest, 8.4, 8.3, etc.)'
|
||||
required: false
|
||||
default: 'latest'
|
||||
extensions:
|
||||
description: 'PHP extensions to install (comma-separated)'
|
||||
required: false
|
||||
default: 'mbstring, intl, json, pdo_sqlite, sqlite3'
|
||||
coverage:
|
||||
description: 'Code-coverage driver (none, xdebug, pcov)'
|
||||
required: false
|
||||
default: 'none'
|
||||
composer-args:
|
||||
description: 'Arguments to pass to Composer install'
|
||||
required: false
|
||||
default: '--no-progress --prefer-dist --optimize-autoloader'
|
||||
max-retries:
|
||||
description: 'Maximum number of retry attempts for Composer commands'
|
||||
required: false
|
||||
default: '3'
|
||||
token:
|
||||
description: 'GitHub token for authentication'
|
||||
required: false
|
||||
@@ -25,56 +49,123 @@ inputs:
|
||||
default: 'github-actions@github.com'
|
||||
|
||||
outputs:
|
||||
test_status:
|
||||
description: 'Test execution status (success/failure/skipped)'
|
||||
framework:
|
||||
description: 'Detected framework (laravel or generic)'
|
||||
value: ${{ steps.detect-framework.outputs.framework }}
|
||||
php-version:
|
||||
description: 'The PHP version that was setup'
|
||||
value: ${{ steps.setup-php.outputs.php-version }}
|
||||
composer-version:
|
||||
description: 'Installed Composer version'
|
||||
value: ${{ steps.composer-config.outputs.version }}
|
||||
cache-hit:
|
||||
description: 'Indicates if there was a cache hit'
|
||||
value: ${{ steps.composer-cache.outputs.cache-hit }}
|
||||
test-status:
|
||||
description: 'Test execution status (success/failure)'
|
||||
value: ${{ steps.test.outputs.status }}
|
||||
tests_run:
|
||||
tests-run:
|
||||
description: 'Number of tests executed'
|
||||
value: ${{ steps.test.outputs.tests_run }}
|
||||
tests_passed:
|
||||
tests-passed:
|
||||
description: 'Number of tests passed'
|
||||
value: ${{ steps.test.outputs.tests_passed }}
|
||||
coverage_path:
|
||||
description: 'Path to coverage report'
|
||||
value: 'coverage.xml'
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
shell: bash
|
||||
- name: Mask Secrets
|
||||
shell: sh
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ inputs.token }}
|
||||
run: |
|
||||
set -eu
|
||||
if [ -n "$GITHUB_TOKEN" ]; then
|
||||
echo "::add-mask::$GITHUB_TOKEN"
|
||||
fi
|
||||
|
||||
- name: Validate Inputs
|
||||
id: validate
|
||||
shell: sh
|
||||
env:
|
||||
FRAMEWORK: ${{ inputs.framework }}
|
||||
PHP_VERSION: ${{ inputs.php-version }}
|
||||
COVERAGE: ${{ inputs.coverage }}
|
||||
MAX_RETRIES: ${{ inputs.max-retries }}
|
||||
EMAIL: ${{ inputs.email }}
|
||||
USERNAME: ${{ inputs.username }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
set -eu
|
||||
|
||||
# Validate GitHub token format (basic validation)
|
||||
if [[ -n "$GITHUB_TOKEN" ]]; then
|
||||
# Skip validation for GitHub expressions (they'll be resolved at runtime)
|
||||
if ! [[ "$GITHUB_TOKEN" =~ ^gh[efpousr]_[a-zA-Z0-9]{36}$ ]] && ! [[ "$GITHUB_TOKEN" =~ ^\$\{\{ ]]; then
|
||||
echo "::warning::GitHub token format may be invalid. Expected format: gh*_36characters"
|
||||
fi
|
||||
# Validate framework mode
|
||||
case "$FRAMEWORK" in
|
||||
auto|laravel|generic)
|
||||
echo "Framework mode: $FRAMEWORK"
|
||||
;;
|
||||
*)
|
||||
echo "::error::Invalid framework: '$FRAMEWORK'. Must be 'auto', 'laravel', or 'generic'"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Validate PHP version format
|
||||
if [ "$PHP_VERSION" != "latest" ]; then
|
||||
case "$PHP_VERSION" in
|
||||
[0-9]*\.[0-9]*\.[0-9]*)
|
||||
# X.Y.Z format (e.g., 8.3.0)
|
||||
;;
|
||||
[0-9]*\.[0-9]*)
|
||||
# X.Y format (e.g., 8.4)
|
||||
;;
|
||||
*)
|
||||
echo "::error::Invalid php-version format: '$PHP_VERSION'. Expected format: X.Y or X.Y.Z (e.g., 8.4, 8.3.0)"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Validate email format (basic check)
|
||||
if [[ "$EMAIL" != *"@"* ]] || [[ "$EMAIL" != *"."* ]]; then
|
||||
echo "::error::Invalid email format: '$EMAIL'. Expected valid email address"
|
||||
# Validate coverage driver
|
||||
case "$COVERAGE" in
|
||||
none|xdebug|pcov)
|
||||
;;
|
||||
*)
|
||||
echo "::error::Invalid coverage driver: '$COVERAGE'. Must be 'none', 'xdebug', or 'pcov'"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Validate max retries (must be digits only)
|
||||
case "$MAX_RETRIES" in
|
||||
*[!0-9]*)
|
||||
echo "::error::Invalid max-retries: '$MAX_RETRIES'. Must be a positive integer between 1 and 10"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
# Validate max retries range
|
||||
if [ "$MAX_RETRIES" -le 0 ] || [ "$MAX_RETRIES" -gt 10 ]; then
|
||||
echo "::error::Invalid max-retries: '$MAX_RETRIES'. Must be a positive integer between 1 and 10"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate username format (prevent command injection)
|
||||
if [[ "$USERNAME" == *";"* ]] || [[ "$USERNAME" == *"&&"* ]] || [[ "$USERNAME" == *"|"* ]]; then
|
||||
echo "::error::Invalid username: '$USERNAME'. Command injection patterns not allowed"
|
||||
exit 1
|
||||
fi
|
||||
# Validate email format (must contain @ and .)
|
||||
case "$EMAIL" in
|
||||
*@*.*) ;;
|
||||
*)
|
||||
echo "::error::Invalid email format: '$EMAIL'. Expected valid email address"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Validate username length
|
||||
username="$USERNAME"
|
||||
if [ ${#username} -gt 39 ]; then
|
||||
echo "::error::Username too long: ${#username} characters. GitHub usernames are max 39 characters"
|
||||
# Validate username format (reject command injection patterns)
|
||||
case "$USERNAME" in
|
||||
*";"*|*"&&"*|*"|"*)
|
||||
echo "::error::Invalid username: '$USERNAME'. Command injection patterns not allowed"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ ${#USERNAME} -gt 39 ]; then
|
||||
echo "::error::Username too long: ${#USERNAME} characters. GitHub usernames are max 39 characters"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@@ -85,37 +176,328 @@ runs:
|
||||
with:
|
||||
token: ${{ inputs.token || github.token }}
|
||||
|
||||
- name: Composer Install
|
||||
uses: ivuorinen/actions/php-composer@0fa9a68f07a1260b321f814202658a6089a43d42
|
||||
- name: Detect Framework
|
||||
id: detect-framework
|
||||
shell: sh
|
||||
env:
|
||||
FRAMEWORK_MODE: ${{ inputs.framework }}
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
framework="generic"
|
||||
|
||||
if [ "$FRAMEWORK_MODE" = "laravel" ]; then
|
||||
framework="laravel"
|
||||
echo "Framework mode forced to Laravel"
|
||||
elif [ "$FRAMEWORK_MODE" = "auto" ]; then
|
||||
if [ -f "artisan" ]; then
|
||||
framework="laravel"
|
||||
echo "Detected Laravel framework (artisan file found)"
|
||||
else
|
||||
echo "No Laravel framework detected (no artisan file)"
|
||||
fi
|
||||
else
|
||||
echo "Framework mode set to generic"
|
||||
fi
|
||||
|
||||
printf 'framework=%s\n' "$framework" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Detect PHP Version
|
||||
id: detect-php-version
|
||||
shell: sh
|
||||
env:
|
||||
DEFAULT_VERSION: ${{ inputs.php-version }}
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
# Function to validate version format
|
||||
validate_version() {
|
||||
version=$1
|
||||
case "$version" in
|
||||
[0-9]*\.[0-9]* | [0-9]*\.[0-9]*\.[0-9]*)
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Function to clean version string
|
||||
clean_version() {
|
||||
printf '%s' "$1" | sed 's/^[vV]//' | tr -d ' \n\r'
|
||||
}
|
||||
|
||||
detected_version=""
|
||||
|
||||
# Parse .tool-versions file
|
||||
if [ -f .tool-versions ]; then
|
||||
echo "Checking .tool-versions for php..." >&2
|
||||
version=$(awk '/^php[[:space:]]/ {gsub(/#.*/, ""); print $2; exit}' .tool-versions 2>/dev/null || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found PHP version in .tool-versions: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse Dockerfile
|
||||
if [ -z "$detected_version" ] && [ -f Dockerfile ]; then
|
||||
echo "Checking Dockerfile for php..." >&2
|
||||
version=$(grep -iF "FROM" Dockerfile | grep -F "php:" | head -1 | \
|
||||
sed -n -E "s/.*php:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found PHP version in Dockerfile: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse devcontainer.json
|
||||
if [ -z "$detected_version" ] && [ -f .devcontainer/devcontainer.json ]; then
|
||||
echo "Checking devcontainer.json for php..." >&2
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
version=$(jq -r '.image // empty' .devcontainer/devcontainer.json 2>/dev/null | sed -n -E "s/.*php:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found PHP version in devcontainer: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "jq not found; skipping devcontainer.json parsing" >&2
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse .php-version file
|
||||
if [ -z "$detected_version" ] && [ -f .php-version ]; then
|
||||
echo "Checking .php-version..." >&2
|
||||
version=$(tr -d '\r' < .php-version | head -1)
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found PHP version in .php-version: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Parse composer.json
|
||||
if [ -z "$detected_version" ] && [ -f composer.json ]; then
|
||||
echo "Checking composer.json..." >&2
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
version=$(jq -r '.require.php // empty' composer.json 2>/dev/null | sed -n 's/[^0-9]*\([0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?\).*/\1/p')
|
||||
if [ -z "$version" ]; then
|
||||
version=$(jq -r '.config.platform.php // empty' composer.json 2>/dev/null | sed -n 's/[^0-9]*\([0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?\).*/\1/p')
|
||||
fi
|
||||
if [ -n "$version" ]; then
|
||||
version=$(clean_version "$version")
|
||||
if validate_version "$version"; then
|
||||
echo "Found PHP version in composer.json: $version" >&2
|
||||
detected_version="$version"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "jq not found; skipping composer.json parsing" >&2
|
||||
fi
|
||||
fi
|
||||
|
||||
# Use default version if nothing detected
|
||||
if [ -z "$detected_version" ]; then
|
||||
detected_version="$DEFAULT_VERSION"
|
||||
echo "Using default PHP version: $detected_version" >&2
|
||||
fi
|
||||
|
||||
# Set output
|
||||
printf 'detected-version=%s\n' "$detected_version" >> "$GITHUB_OUTPUT"
|
||||
echo "Final detected PHP version: $detected_version" >&2
|
||||
|
||||
- name: Setup PHP
|
||||
id: setup-php
|
||||
uses: shivammathur/setup-php@44454db4f0199b8b9685a5d763dc37cbf79108e1 # 2.36.0
|
||||
with:
|
||||
php-version: ${{ steps.detect-php-version.outputs.detected-version }}
|
||||
extensions: ${{ inputs.extensions }}
|
||||
coverage: ${{ inputs.coverage }}
|
||||
ini-values: memory_limit=1G, max_execution_time=600
|
||||
fail-fast: true
|
||||
|
||||
- name: Configure Composer
|
||||
id: composer-config
|
||||
shell: sh
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ inputs.token || github.token }}
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
# Configure Composer environment
|
||||
composer config --global process-timeout 600
|
||||
composer config --global allow-plugins true
|
||||
composer config --global github-oauth.github.com "$GITHUB_TOKEN"
|
||||
|
||||
# Verify Composer installation
|
||||
composer_full_version=$(composer --version | sed -n 's/.*Composer version \([0-9][0-9]*\.[0-9][0-9]*\.[0-9][0-9]*\).*/\1/p' || echo "")
|
||||
if [ -z "$composer_full_version" ]; then
|
||||
echo "::error::Failed to detect Composer version"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Detected Composer version: $composer_full_version"
|
||||
printf 'version=%s\n' "$composer_full_version" >> "$GITHUB_OUTPUT"
|
||||
|
||||
# Log Composer configuration
|
||||
echo "Composer Configuration:"
|
||||
composer config --list
|
||||
|
||||
- name: Cache Composer packages
|
||||
id: composer-cache
|
||||
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
path: |
|
||||
vendor
|
||||
~/.composer/cache
|
||||
key: ${{ runner.os }}-php-${{ steps.setup-php.outputs.php-version }}-composer-${{ hashFiles('composer.lock', 'composer.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-php-${{ steps.setup-php.outputs.php-version }}-composer-
|
||||
${{ runner.os }}-php-${{ steps.setup-php.outputs.php-version }}-
|
||||
${{ runner.os }}-php-
|
||||
|
||||
- name: Clear Composer Cache Before Install
|
||||
if: steps.composer-cache.outputs.cache-hit != 'true'
|
||||
shell: sh
|
||||
run: |
|
||||
set -eu
|
||||
echo "Clearing Composer cache to ensure clean installation..."
|
||||
composer clear-cache
|
||||
|
||||
- name: Install Composer Dependencies
uses: step-security/retry@e1d59ce1f574b32f0915e3a8df055cfe9f99be5d # v3.0.4
with:
timeout_minutes: 10
max_attempts: ${{ inputs.max-retries }}
retry_wait_seconds: 30
command: composer install ${{ inputs.composer-args }}

- name: Verify Composer Installation
shell: sh
run: |
set -eu

# Verify vendor directory
if [ ! -d "vendor" ]; then
echo "::error::vendor directory not found"
exit 1
fi

# Verify autoloader
if [ ! -f "vendor/autoload.php" ]; then
echo "::error::autoload.php not found"
exit 1
fi

echo "✅ Composer installation verified"

- name: Laravel Setup - Copy .env
if: steps.detect-framework.outputs.framework == 'laravel'
shell: sh
run: |
set -eu
php -r "file_exists('.env') || copy('.env.example', '.env');"
echo "✅ Laravel .env file configured"

- name: Laravel Setup - Generate Key
if: steps.detect-framework.outputs.framework == 'laravel'
shell: sh
run: |
set -eu
php artisan key:generate
echo "✅ Laravel application key generated"

- name: Laravel Setup - Directory Permissions
if: steps.detect-framework.outputs.framework == 'laravel'
shell: sh
run: |
set -eu
chmod -R 777 storage bootstrap/cache
echo "✅ Laravel directory permissions configured"

- name: Laravel Setup - Create Database
if: steps.detect-framework.outputs.framework == 'laravel'
shell: sh
run: |
set -eu
mkdir -p database
touch database/database.sqlite
echo "✅ Laravel SQLite database created"

- name: Run PHPUnit Tests
id: test
shell: bash
run: |-
set -euo pipefail
shell: sh
env:
IS_LARAVEL: ${{ steps.detect-framework.outputs.framework == 'laravel' }}
DB_CONNECTION: sqlite
DB_DATABASE: database/database.sqlite
run: |
set -eu

echo "Running PHPUnit tests..."

# Run PHPUnit and capture results
phpunit_exit_code=0
phpunit_output=$(vendor/bin/phpunit --verbose 2>&1) || phpunit_exit_code=$?
if [ "$IS_LARAVEL" = "true" ] && [ -f "composer.json" ] && grep -q '"test"' composer.json; then
echo "Running Laravel tests via composer test..."
phpunit_output=$(composer test 2>&1) || phpunit_exit_code=$?
elif [ -f "vendor/bin/phpunit" ]; then
echo "Running PHPUnit directly..."
phpunit_output=$(vendor/bin/phpunit 2>&1) || phpunit_exit_code=$?
else
echo "::error::PHPUnit not found. Ensure Composer dependencies are installed."
exit 1
fi

echo "$phpunit_output"

# Parse test results from output
tests_run=$(echo "$phpunit_output" | grep -E "Tests:|tests" | head -1 | grep -oE '[0-9]+' | head -1 || echo "0")
tests_passed=$(echo "$phpunit_output" | grep -oE 'OK.*[0-9]+ tests' | grep -oE '[0-9]+' || echo "0")
# Parse test results from output - handle various PHPUnit formats
tests_run="0"
tests_passed="0"

# Pattern 1: "OK (N test(s), M assertions)" - success case (handles both singular and plural)
if echo "$phpunit_output" | grep -qE 'OK \([0-9]+ tests?,'; then
tests_run=$(echo "$phpunit_output" | grep -oE 'OK \([0-9]+ tests?,' | grep -oE '[0-9]+' | head -1)
tests_passed="$tests_run"
# Pattern 2: "Tests: N" line - failure/error/skipped case
elif echo "$phpunit_output" | grep -qE '^Tests:'; then
tests_run=$(echo "$phpunit_output" | grep -E '^Tests:' | grep -oE '[0-9]+' | head -1)

# Calculate passed from failures and errors
failures=$(echo "$phpunit_output" | grep -oE 'Failures: [0-9]+' | grep -oE '[0-9]+' | head -1 || echo "0")
errors=$(echo "$phpunit_output" | grep -oE 'Errors: [0-9]+' | grep -oE '[0-9]+' | head -1 || echo "0")
tests_passed=$((tests_run - failures - errors))

# Ensure non-negative
if [ "$tests_passed" -lt 0 ]; then
tests_passed="0"
fi
fi

# Determine status
if [ $phpunit_exit_code -eq 0 ]; then
status="success"
echo "✅ Tests passed: $tests_passed/$tests_run"
else
status="failure"
echo "❌ Tests failed"
fi

# Output results
echo "tests_run=$tests_run" >> $GITHUB_OUTPUT
echo "tests_passed=$tests_passed" >> $GITHUB_OUTPUT
echo "status=$status" >> $GITHUB_OUTPUT
echo "coverage_path=coverage.xml" >> $GITHUB_OUTPUT
printf 'tests_run=%s\n' "$tests_run" >> "$GITHUB_OUTPUT"
printf 'tests_passed=%s\n' "$tests_passed" >> "$GITHUB_OUTPUT"
printf 'status=%s\n' "$status" >> "$GITHUB_OUTPUT"

# Exit with original code to maintain test failure behavior
exit $phpunit_exit_code

@@ -2,7 +2,7 @@
# Validation rules for php-tests action
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
# Schema version: 1.0
# Coverage: 100% (3/3 inputs)
# Coverage: 89% (8/9 inputs)
#
# This file defines validation rules for the php-tests GitHub Action.
# Rules are automatically applied by validate-inputs action when this
@@ -11,29 +11,40 @@

schema_version: '1.0'
action: php-tests
description: Run PHPUnit tests on the repository
description: Run PHPUnit tests with optional Laravel setup and Composer dependency management
generator_version: 1.0.0
required_inputs: []
optional_inputs:
- composer-args
- coverage
- email
- extensions
- framework
- max-retries
- php-version
- token
- username
conventions:
coverage: coverage_driver
email: email
extensions: php_extensions
framework: framework_mode
max-retries: numeric_range_1_10
php-version: semantic_version
token: github_token
username: username
overrides: {}
statistics:
total_inputs: 3
validated_inputs: 3
total_inputs: 9
validated_inputs: 8
skipped_inputs: 0
coverage_percentage: 100
validation_coverage: 100
coverage_percentage: 89
validation_coverage: 89
auto_detected: true
manual_review_required: false
quality_indicators:
has_required_inputs: false
has_token_validation: true
has_version_validation: false
has_version_validation: true
has_file_validation: false
has_security_validation: true

@@ -40,7 +40,7 @@ runs:
steps:
- name: Validate Inputs
id: validate
uses: ivuorinen/actions/validate-inputs@0fa9a68f07a1260b321f814202658a6089a43d42
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
with:
action-type: pr-lint
token: ${{ inputs.token }}
@@ -54,13 +54,9 @@ runs:
uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
with:
token: ${{ inputs.token || github.token }}
ref: ${{ github.event.pull_request.head.ref || github.head_ref || github.ref_name }}
ref: ${{ github.event.pull_request.head.sha || github.sha }}
persist-credentials: false

# If you use VALIDATE_ALL_CODEBASE = true, you can remove this line to
# improve performance
fetch-depth: 0

# ╭──────────────────────────────────────────────────────────╮
# │ Install packages for linting │
# ╰──────────────────────────────────────────────────────────╯
@@ -74,41 +70,129 @@ runs:

if [ -f package.json ]; then
printf '%s\n' "found=true" >> "$GITHUB_OUTPUT"

# Check if packageManager field is set (for corepack)
if command -v jq >/dev/null 2>&1; then
has_package_manager=$(jq -r '.packageManager // empty' package.json 2>/dev/null || printf '')
if [ -n "$has_package_manager" ]; then
printf '%s\n' "has-package-manager=true" >> "$GITHUB_OUTPUT"
printf 'Found packageManager field: %s\n' "$has_package_manager"
else
printf '%s\n' "has-package-manager=false" >> "$GITHUB_OUTPUT"
fi
else
# Fallback: check with grep if jq not available
# Use robust pattern to verify non-empty value
if grep -q '"packageManager"[[:space:]]*:[[:space:]]*"[^"]\+"' package.json 2>/dev/null; then
printf '%s\n' "has-package-manager=true" >> "$GITHUB_OUTPUT"
printf '%s\n' "Found packageManager field in package.json"
else
printf '%s\n' "has-package-manager=false" >> "$GITHUB_OUTPUT"
fi
fi
else
# Explicitly set has-package-manager to false when package.json doesn't exist
printf '%s\n' "has-package-manager=false" >> "$GITHUB_OUTPUT"
fi

- name: Setup Node.js environment
- name: Detect Package Manager
if: steps.detect-node.outputs.found == 'true'
id: node-setup
uses: ivuorinen/actions/node-setup@0fa9a68f07a1260b321f814202658a6089a43d42
id: detect-pm
shell: sh
run: |
set -eu

# Detect package manager from lockfiles
if [ -f bun.lockb ]; then
package_manager="bun"
elif [ -f pnpm-lock.yaml ]; then
package_manager="pnpm"
elif [ -f yarn.lock ]; then
package_manager="yarn"
else
package_manager="npm"
fi

printf 'package-manager=%s\n' "$package_manager" >> "$GITHUB_OUTPUT"
printf 'Detected package manager: %s\n' "$package_manager"

- name: Setup Node.js
if: steps.detect-node.outputs.found == 'true'
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: '24'

- name: Enable Corepack
if: steps.detect-node.outputs.found == 'true' && steps.detect-node.outputs.has-package-manager == 'true'
shell: sh
run: |
set -eu
corepack enable
printf '%s\n' "Corepack enabled - package manager will be installed automatically from package.json"

- name: Install Package Manager (Fallback)
if: steps.detect-node.outputs.found == 'true' && steps.detect-node.outputs.has-package-manager == 'false'
shell: sh
env:
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
run: |
set -eu

printf 'No packageManager field found, using detected package manager: %s\n' "$PACKAGE_MANAGER"

case "$PACKAGE_MANAGER" in
pnpm)
corepack enable
corepack prepare pnpm@latest --activate
;;
yarn)
corepack enable
corepack prepare yarn@stable --activate
;;
bun|npm)
# Bun installed separately, npm built-in
;;
esac

- name: Setup Bun
if: steps.detect-node.outputs.found == 'true' && steps.detect-pm.outputs.package-manager == 'bun'
uses: oven-sh/setup-bun@b7a1c7ccf290d58743029c4f6903da283811b979 # v2.1.0
with:
bun-version: latest

- name: Cache Node Dependencies
if: steps.detect-node.outputs.found == 'true'
id: node-cache
uses: ivuorinen/actions/common-cache@0fa9a68f07a1260b321f814202658a6089a43d42
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
type: 'npm'
paths: 'node_modules'
key-files: 'package-lock.json,yarn.lock,pnpm-lock.yaml,bun.lockb'
key-prefix: 'pr-lint-${{ steps.node-setup.outputs.package-manager }}'
path: node_modules
key: ${{ runner.os }}-pr-lint-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}
restore-keys: |
${{ runner.os }}-pr-lint-${{ steps.detect-pm.outputs.package-manager }}-

- name: Install Node Dependencies
if: steps.detect-node.outputs.found == 'true' && steps.node-cache.outputs.cache-hit != 'true'
shell: sh
env:
PACKAGE_MANAGER: ${{ steps.node-setup.outputs.package-manager }}
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
run: |
set -eu

echo "Installing dependencies using $PACKAGE_MANAGER..."
printf 'Installing dependencies using %s...\n' "$PACKAGE_MANAGER"

case "$PACKAGE_MANAGER" in
"pnpm")
pnpm install --frozen-lockfile
;;
"yarn")
if [ -f ".yarnrc.yml" ]; then
# Detect Yarn version by checking actual version output
# Yarn 2+ (Berry) uses --immutable, Yarn 1.x (Classic) uses --frozen-lockfile
yarn_version=$(yarn --version 2>/dev/null || printf '1.0.0')
if printf '%s' "$yarn_version" | grep -q '^[2-9]'; then
# Yarn 2+ (Berry) - use --immutable
yarn install --immutable
else
# Yarn 1.x (Classic) - use --frozen-lockfile
yarn install --frozen-lockfile
fi
;;
@@ -120,7 +204,7 @@ runs:
;;
esac

echo "✅ Dependencies installed successfully"
printf '✅ Dependencies installed successfully\n'

# PHP tests if composer.json exists
- name: Detect composer.json
@@ -136,19 +220,128 @@ runs:
- name: Detect PHP Version
if: steps.detect-php.outputs.found == 'true'
id: php-version
uses: ivuorinen/actions/language-version-detect@0fa9a68f07a1260b321f814202658a6089a43d42
with:
language: 'php'
shell: sh
env:
DEFAULT_VERSION: '8.4'
run: |
set -eu

# Function to validate version format
validate_version() {
version=$1
case "$version" in
[0-9]*\.[0-9]* | [0-9]*\.[0-9]*\.[0-9]*)
return 0
;;
*)
return 1
;;
esac
}

# Function to clean version string
clean_version() {
printf '%s' "$1" | sed 's/^[vV]//' | tr -d ' \n\r'
}

detected_version=""

# Parse .tool-versions file
if [ -f .tool-versions ]; then
printf 'Checking .tool-versions for php...\n' >&2
version=$(awk '/^php[[:space:]]/ {gsub(/#.*/, ""); print $2; exit}' .tool-versions 2>/dev/null || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
printf 'Found PHP version in .tool-versions: %s\n' "$version" >&2
detected_version="$version"
fi
fi
fi

# Parse Dockerfile
if [ -z "$detected_version" ] && [ -f Dockerfile ]; then
printf 'Checking Dockerfile for php...\n' >&2
version=$(grep -iF "FROM" Dockerfile | grep -F "php:" | head -1 | \
sed -n -E "s/.*php:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
printf 'Found PHP version in Dockerfile: %s\n' "$version" >&2
detected_version="$version"
fi
fi
fi

# Parse devcontainer.json
if [ -z "$detected_version" ] && [ -f .devcontainer/devcontainer.json ]; then
printf 'Checking devcontainer.json for php...\n' >&2
if command -v jq >/dev/null 2>&1; then
version=$(jq -r '.image // empty' .devcontainer/devcontainer.json 2>/dev/null | sed -n -E "s/.*php:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
printf 'Found PHP version in devcontainer: %s\n' "$version" >&2
detected_version="$version"
fi
fi
else
printf 'jq not found; skipping devcontainer.json parsing\n' >&2
fi
fi

# Parse .php-version file
if [ -z "$detected_version" ] && [ -f .php-version ]; then
printf 'Checking .php-version...\n' >&2
version=$(tr -d '\r' < .php-version | head -1)
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
printf 'Found PHP version in .php-version: %s\n' "$version" >&2
detected_version="$version"
fi
fi
fi

# Parse composer.json
if [ -z "$detected_version" ] && [ -f composer.json ]; then
printf 'Checking composer.json...\n' >&2
if command -v jq >/dev/null 2>&1; then
version=$(jq -r '.require.php // empty' composer.json 2>/dev/null | sed -n 's/[^0-9]*\([0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?\).*/\1/p')
if [ -z "$version" ]; then
version=$(jq -r '.config.platform.php // empty' composer.json 2>/dev/null | sed -n 's/[^0-9]*\([0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?\).*/\1/p')
fi
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
printf 'Found PHP version in composer.json: %s\n' "$version" >&2
detected_version="$version"
fi
fi
else
printf 'jq not found; skipping composer.json parsing\n' >&2
fi
fi

# Use default version if nothing detected
if [ -z "$detected_version" ]; then
detected_version="$DEFAULT_VERSION"
printf 'Using default PHP version: %s\n' "$detected_version" >&2
fi

# Set output
printf 'detected-version=%s\n' "$detected_version" >> "$GITHUB_OUTPUT"
printf 'Final detected PHP version: %s\n' "$detected_version" >&2

- name: Setup PHP
if: steps.detect-php.outputs.found == 'true'
uses: shivammathur/setup-php@bf6b4fbd49ca58e4608c9c89fba0b8d90bd2a39f # 2.35.5
uses: shivammathur/setup-php@44454db4f0199b8b9685a5d763dc37cbf79108e1 # 2.36.0
with:
php-version: ${{ steps.php-version.outputs.detected-version }}
tools: composer
coverage: none
env:
GITHUB_TOKEN: ${{ inputs.token }}
GITHUB_TOKEN: ${{ inputs.token || github.token }}

- name: Setup problem matchers for PHP
if: steps.detect-php.outputs.found == 'true'
@@ -158,7 +351,8 @@ runs:
run: |
set -eu

echo "::add-matcher::$RUNNER_TOOL_CACHE/php.json"
matcher_path=$(printf '%s' "$RUNNER_TOOL_CACHE/php.json" | tr -d '\n\r')
printf '%s\n' "::add-matcher::$matcher_path"

- name: Install PHP dependencies
if: steps.detect-php.outputs.found == 'true'
@@ -182,13 +376,117 @@ runs:
- name: Detect Python Version
if: steps.detect-python.outputs.found == 'true'
id: python-version
uses: ivuorinen/actions/language-version-detect@0fa9a68f07a1260b321f814202658a6089a43d42
with:
language: 'python'
shell: sh
env:
DEFAULT_VERSION: '3.14'
run: |
set -eu

# Function to validate version format
validate_version() {
version=$1
case "$version" in
[0-9]*\.[0-9]* | [0-9]*\.[0-9]*\.[0-9]*)
return 0
;;
*)
return 1
;;
esac
}

# Function to clean version string
clean_version() {
printf '%s' "$1" | sed 's/^[vV]//' | tr -d ' \n\r'
}

detected_version=""

# Parse .tool-versions file
if [ -f .tool-versions ]; then
printf 'Checking .tool-versions for python...\n' >&2
version=$(awk '/^python[[:space:]]/ {gsub(/#.*/, ""); print $2; exit}' .tool-versions 2>/dev/null || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
printf 'Found Python version in .tool-versions: %s\n' "$version" >&2
detected_version="$version"
fi
fi
fi

# Parse Dockerfile
if [ -z "$detected_version" ] && [ -f Dockerfile ]; then
printf 'Checking Dockerfile for python...\n' >&2
version=$(grep -iF "FROM" Dockerfile | grep -F "python:" | head -1 | \
sed -n -E "s/.*python:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
printf 'Found Python version in Dockerfile: %s\n' "$version" >&2
detected_version="$version"
fi
fi
fi

# Parse devcontainer.json
if [ -z "$detected_version" ] && [ -f .devcontainer/devcontainer.json ]; then
printf 'Checking devcontainer.json for python...\n' >&2
if command -v jq >/dev/null 2>&1; then
version=$(jq -r '.image // empty' .devcontainer/devcontainer.json 2>/dev/null | sed -n -E "s/.*python:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
printf 'Found Python version in devcontainer: %s\n' "$version" >&2
detected_version="$version"
fi
fi
else
printf 'jq not found; skipping devcontainer.json parsing\n' >&2
fi
fi

# Parse .python-version file
if [ -z "$detected_version" ] && [ -f .python-version ]; then
printf 'Checking .python-version...\n' >&2
version=$(tr -d '\r' < .python-version | head -1)
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
printf 'Found Python version in .python-version: %s\n' "$version" >&2
detected_version="$version"
fi
fi
fi

# Parse pyproject.toml
if [ -z "$detected_version" ] && [ -f pyproject.toml ]; then
printf 'Checking pyproject.toml...\n' >&2
if grep -q '^\[project\]' pyproject.toml; then
version=$(grep -A 20 '^\[project\]' pyproject.toml | grep -E '^\s*requires-python[[:space:]]*=' | sed -n 's/[^0-9]*\([0-9]\+\.[0-9]\+\(\.[0-9]\+\)\?\).*/\1/p' | head -1)
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
printf 'Found Python version in pyproject.toml: %s\n' "$version" >&2
detected_version="$version"
fi
fi
fi
fi

# Use default version if nothing detected
if [ -z "$detected_version" ]; then
detected_version="$DEFAULT_VERSION"
printf 'Using default Python version: %s\n' "$detected_version" >&2
fi

# Set output
printf 'detected-version=%s\n' "$detected_version" >> "$GITHUB_OUTPUT"
printf 'Final detected Python version: %s\n' "$detected_version" >&2

- name: Setup Python
if: steps.detect-python.outputs.found == 'true'
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ steps.python-version.outputs.detected-version }}
cache: 'pip'
@@ -215,13 +513,115 @@
- name: Detect Go Version
if: steps.detect-go.outputs.found == 'true'
id: go-version
uses: ivuorinen/actions/language-version-detect@0fa9a68f07a1260b321f814202658a6089a43d42
with:
language: 'go'
shell: sh
env:
DEFAULT_VERSION: '1.25'
run: |
set -eu

# Function to validate version format
validate_version() {
version=$1
case "$version" in
[0-9]*\.[0-9]* | [0-9]*\.[0-9]*\.[0-9]*)
return 0
;;
*)
return 1
;;
esac
}

# Function to clean version string
clean_version() {
printf '%s' "$1" | sed 's/^[vV]//' | tr -d ' \n\r'
}

detected_version=""

# Parse .tool-versions file
if [ -f .tool-versions ]; then
printf 'Checking .tool-versions for golang...\n' >&2
version=$(awk '/^golang[[:space:]]/ {gsub(/#.*/, ""); print $2; exit}' .tool-versions 2>/dev/null || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
printf 'Found Go version in .tool-versions: %s\n' "$version" >&2
detected_version="$version"
fi
fi
fi

# Parse Dockerfile
if [ -z "$detected_version" ] && [ -f Dockerfile ]; then
printf 'Checking Dockerfile for golang...\n' >&2
version=$(grep -iF "FROM" Dockerfile | grep -F "golang:" | head -1 | \
sed -n -E "s/.*golang:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
printf 'Found Go version in Dockerfile: %s\n' "$version" >&2
detected_version="$version"
fi
fi
fi

# Parse devcontainer.json
if [ -z "$detected_version" ] && [ -f .devcontainer/devcontainer.json ]; then
printf 'Checking devcontainer.json for golang...\n' >&2
if command -v jq >/dev/null 2>&1; then
version=$(jq -r '.image // empty' .devcontainer/devcontainer.json 2>/dev/null | sed -n -E "s/.*golang:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
printf 'Found Go version in devcontainer: %s\n' "$version" >&2
detected_version="$version"
fi
fi
else
printf 'jq not found; skipping devcontainer.json parsing\n' >&2
fi
fi

# Parse .go-version file
if [ -z "$detected_version" ] && [ -f .go-version ]; then
printf 'Checking .go-version...\n' >&2
version=$(tr -d '\r' < .go-version | head -1)
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
printf 'Found Go version in .go-version: %s\n' "$version" >&2
detected_version="$version"
fi
fi
fi

# Parse go.mod
if [ -z "$detected_version" ] && [ -f go.mod ]; then
printf 'Checking go.mod...\n' >&2
version=$(grep -E '^go[[:space:]]+[0-9]' go.mod | awk '{print $2}' | head -1 || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
printf 'Found Go version in go.mod: %s\n' "$version" >&2
detected_version="$version"
fi
fi
fi

# Use default version if nothing detected
if [ -z "$detected_version" ]; then
detected_version="$DEFAULT_VERSION"
printf 'Using default Go version: %s\n' "$detected_version" >&2
fi

# Set output
printf 'detected-version=%s\n' "$detected_version" >> "$GITHUB_OUTPUT"
printf 'Final detected Go version: %s\n' "$detected_version" >&2

- name: Setup Go
if: steps.detect-go.outputs.found == 'true'
uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
go-version: ${{ steps.go-version.outputs.detected-version }}
cache: true
@@ -232,7 +632,7 @@
- name: MegaLinter
# You can override MegaLinter flavor used to have faster performances
# More info at https://megalinter.io/latest/flavors/
uses: oxsecurity/megalinter/flavors/cupcake@62c799d895af9bcbca5eacfebca29d527f125a57 # v9.1.0
uses: oxsecurity/megalinter/flavors/cupcake@42bb470545e359597e7f12156947c436e4e3fb9a # v9.3.0
id: ml

# All available variables are described in documentation
@@ -250,11 +650,7 @@
# github.event_name == 'push' &&
# contains(fromJSON('["refs/heads/main", "refs/heads/master"]'), github.ref)
# }}
VALIDATE_ALL_CODEBASE: >-
${{
github.event_name == 'push' &&
contains(fromJSON('["refs/heads/main", "refs/heads/master"]'), github.ref)
}}
VALIDATE_ALL_CODEBASE: false

GITHUB_TOKEN: ${{ inputs.token || github.token }}

@@ -262,7 +658,7 @@
#
# When active, APPLY_FIXES must also be defined as environment variable
# (in .github/workflows/mega-linter.yml or other CI tool)
APPLY_FIXES: all
APPLY_FIXES: none

# Decide which event triggers application of fixes in a commit or a PR
# (pull_request, push, all)
@@ -278,124 +674,13 @@
# Uncomment to disable copy-paste and spell checks
DISABLE: COPYPASTE,SPELL

# Export env vars to make them available for subsequent expressions
- name: Export Apply Fixes Variables
shell: sh
run: |
echo "APPLY_FIXES_EVENT=pull_request" >> "$GITHUB_ENV"
echo "APPLY_FIXES_MODE=commit" >> "$GITHUB_ENV"

# Upload MegaLinter artifacts
- name: Archive production artifacts
if: success() || failure()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: MegaLinter reports
include-hidden-files: 'true'
path: |
megalinter-reports
mega-linter.log

# Set APPLY_FIXES_IF var for use in future steps
- name: Set APPLY_FIXES_IF var
shell: sh
env:
APPLY_FIXES_CONDITION: >-
${{
steps.ml.outputs.has_updated_sources == 1 &&
(env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) &&
(github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository)
}}
run: |
set -eu

# Sanitize by removing newlines to prevent env var injection
sanitized_condition="$(echo "$APPLY_FIXES_CONDITION" | tr -d '\n\r')"
printf 'APPLY_FIXES_IF=%s\n' "$sanitized_condition" >> "${GITHUB_ENV}"

# Set APPLY_FIXES_IF_* vars for use in future steps
- name: Set APPLY_FIXES_IF_* vars
shell: sh
env:
APPLY_FIXES_IF_PR_CONDITION: ${{ env.APPLY_FIXES_IF == 'true' && env.APPLY_FIXES_MODE == 'pull_request' }}
APPLY_FIXES_IF_COMMIT_CONDITION: ${{ env.APPLY_FIXES_IF == 'true' && env.APPLY_FIXES_MODE == 'commit' && (!contains(fromJSON('["refs/heads/main", "refs/heads/master"]'), github.ref)) }}
run: |
set -eu

# Sanitize by removing newlines to prevent env var injection
sanitized_pr="$(echo "$APPLY_FIXES_IF_PR_CONDITION" | tr -d '\n\r')"
sanitized_commit="$(echo "$APPLY_FIXES_IF_COMMIT_CONDITION" | tr -d '\n\r')"

printf 'APPLY_FIXES_IF_PR=%s\n' "$sanitized_pr" >> "${GITHUB_ENV}"
printf 'APPLY_FIXES_IF_COMMIT=%s\n' "$sanitized_commit" >> "${GITHUB_ENV}"

# Create pull request if applicable
# (for now works only on PR from same repository, not from forks)
- name: Create Pull Request with applied fixes
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
id: cpr
if: env.APPLY_FIXES_IF_PR == 'true'
with:
token: ${{ inputs.token || github.token }}
commit-message: 'style: apply linter fixes'
title: 'style: apply linter fixes'
labels: bot

- name: Create PR output
if: env.APPLY_FIXES_IF_PR == 'true'
shell: sh
env:
PR_NUMBER: ${{ steps.cpr.outputs.pull-request-number }}
PR_URL: ${{ steps.cpr.outputs.pull-request-url }}
run: |
set -eu

echo "PR Number - $PR_NUMBER"
echo "PR URL - $PR_URL"

# Push new commit if applicable
# (for now works only on PR from same repository, not from forks)
- name: Prepare commit
if: env.APPLY_FIXES_IF_COMMIT == 'true'
shell: sh
env:
BRANCH_REF: >-
${{
github.event.pull_request.head.ref ||
github.head_ref ||
github.ref_name
}}
run: |
set -eu

# Fix .git directory ownership after MegaLinter container execution
sudo chown -Rc "$UID" .git/

# Ensure we're on the correct branch (not in detached HEAD state)
# This is necessary because MegaLinter may leave the repo in a detached HEAD state
current_branch=$(git rev-parse --abbrev-ref HEAD)
if [ "$current_branch" = "HEAD" ]; then
echo "Repository is in detached HEAD state, checking out $BRANCH_REF"
# Validate branch reference to prevent command injection
if ! git check-ref-format --branch "$BRANCH_REF"; then
echo "::error::Invalid branch reference format: $BRANCH_REF"
exit 1
fi
git checkout "$BRANCH_REF"
else
echo "Repository is on branch: $current_branch"
fi

- name: Commit and push applied linter fixes
uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
if: env.APPLY_FIXES_IF_COMMIT == 'true'
with:
branch: >-
${{
github.event.pull_request.head.ref ||
github.head_ref ||
github.ref
}}
commit_message: 'style: apply linter fixes'
commit_user_name: ${{ inputs.username }}
commit_user_email: ${{ inputs.email }}

@@ -34,74 +34,45 @@ class CustomValidator(BaseValidator):

# Validate pre-commit-config if provided
if "pre-commit-config" in inputs:
result = self.file_validator.validate_file_path(
inputs["pre-commit-config"], "pre-commit-config"
valid &= self.validate_with(
self.file_validator,
"validate_file_path",
inputs["pre-commit-config"],
"pre-commit-config",
)
for error in self.file_validator.errors:
if error not in self.errors:
self.add_error(error)
self.file_validator.clear_errors()
if not result:
valid = False

# Validate base-branch if provided (just check for injection)
if inputs.get("base-branch"):
# Check for dangerous characters that could cause shell injection
result = self.security_validator.validate_no_injection(
inputs["base-branch"], "base-branch"
valid &= self.validate_with(
self.security_validator,
"validate_no_injection",
inputs["base-branch"],
"base-branch",
)
for error in self.security_validator.errors:
if error not in self.errors:
self.add_error(error)
self.security_validator.clear_errors()
if not result:
valid = False

# Validate token if provided
if inputs.get("token"):
result = self.token_validator.validate_github_token(inputs["token"])
for error in self.token_validator.errors:
if error not in self.errors:
self.add_error(error)
self.token_validator.clear_errors()
if not result:
valid = False
valid &= self.validate_with(
self.token_validator, "validate_github_token", inputs["token"]
)

# Validate commit_user if provided (allow spaces for Git usernames)
# Check both underscore and hyphen versions since inputs can have either
commit_user_key = (
"commit_user"
if "commit_user" in inputs
else "commit-user"
if "commit-user" in inputs
else None
)
commit_user_key = self.get_key_variant(inputs, "commit_user", "commit-user")
if commit_user_key and inputs[commit_user_key]:
# Check for dangerous injection patterns
value = inputs[commit_user_key]
if any(char in value for char in [";", "&", "|", "`", "$", "(", ")", "\n", "\r"]):
if any(c in value for c in [";", "&", "|", "`", "$", "(", ")", "\n", "\r"]):
self.add_error(f"{commit_user_key}: Contains potentially dangerous characters")
valid = False

# Validate commit_email if provided
# Check both underscore and hyphen versions
commit_email_key = (
"commit_email"
if "commit_email" in inputs
else "commit-email"
if "commit-email" in inputs
else None
)
commit_email_key = self.get_key_variant(inputs, "commit_email", "commit-email")
if commit_email_key and inputs[commit_email_key]:
result = self.network_validator.validate_email(
inputs[commit_email_key], commit_email_key
valid &= self.validate_with(
self.network_validator,
"validate_email",
inputs[commit_email_key],
commit_email_key,
)
for error in self.network_validator.errors:
if error not in self.errors:
self.add_error(error)
self.network_validator.clear_errors()
if not result:
valid = False

return valid

@@ -49,7 +49,7 @@ runs:

- name: Validate Inputs
id: validate
uses: ivuorinen/actions/validate-inputs@0fa9a68f07a1260b321f814202658a6089a43d42
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
with:
action-type: 'pre-commit'
token: ${{ inputs.token }}
@@ -83,7 +83,7 @@ runs:
- name: Push pre-commit fixes
id: push-fixes
if: always() # Push changes even when pre-commit fails
uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
with:
commit_message: 'style(pre-commit): autofix'
commit_user_name: ${{ inputs.commit_user }}

@@ -91,7 +91,7 @@ runs:
steps:
- name: Validate Inputs
id: validate
shell: bash
shell: sh
env:
MODE: ${{ inputs.mode }}
WORKING_DIRECTORY: ${{ inputs.working-directory }}
@@ -107,7 +107,7 @@ runs:
EMAIL: ${{ inputs.email }}
USERNAME: ${{ inputs.username }}
run: |
set -euo pipefail
set -eu

# Validate mode
case "$MODE" in
@@ -127,38 +127,52 @@ runs:
fi

# Validate working directory path security
if [[ "$WORKING_DIRECTORY" == *".."* ]]; then
echo "::error::Invalid working directory path: '$WORKING_DIRECTORY'. Path traversal not allowed"
exit 1
fi
case "$WORKING_DIRECTORY" in
*..*)
echo "::error::Invalid working directory path: '$WORKING_DIRECTORY'. Path traversal not allowed"
exit 1
;;
esac

# Validate Prettier version format
if [[ -n "$PRETTIER_VERSION" ]] && [[ "$PRETTIER_VERSION" != "latest" ]]; then
if ! [[ "$PRETTIER_VERSION" =~ ^[0-9]+(\.[0-9]+(\.[0-9]+)?)?(-[a-zA-Z0-9.-]+)?$ ]]; then
echo "::error::Invalid prettier-version format: '$PRETTIER_VERSION'. Expected format: X.Y.Z or 'latest'"
exit 1
fi
if [ -n "$PRETTIER_VERSION" ] && [ "$PRETTIER_VERSION" != "latest" ]; then
case "$PRETTIER_VERSION" in
[0-9]*.[0-9]*|[0-9]*.[0-9]*.[0-9]*|[0-9]*.[0-9]*.[0-9]*-*)
;;
*)
echo "::error::Invalid prettier-version format: '$PRETTIER_VERSION'. Expected format: X.Y.Z or 'latest'"
exit 1
;;
esac
fi

# Validate config file path
if [[ "$CONFIG_FILE" != ".prettierrc" ]] && [[ "$CONFIG_FILE" == *".."* ]]; then
echo "::error::Invalid config file path: '$CONFIG_FILE'. Path traversal not allowed"
exit 1
if [ "$CONFIG_FILE" != ".prettierrc" ]; then
case "$CONFIG_FILE" in
*..*)
echo "::error::Invalid config file path: '$CONFIG_FILE'. Path traversal not allowed"
exit 1
;;
esac
fi

# Validate ignore file path
if [[ "$IGNORE_FILE" != ".prettierignore" ]] && [[ "$IGNORE_FILE" == *".."* ]]; then
echo "::error::Invalid ignore file path: '$IGNORE_FILE'. Path traversal not allowed"
exit 1
if [ "$IGNORE_FILE" != ".prettierignore" ]; then
case "$IGNORE_FILE" in
*..*)
echo "::error::Invalid ignore file path: '$IGNORE_FILE'. Path traversal not allowed"
exit 1
;;
esac
fi

# Validate boolean inputs
validate_boolean() {
local value="$1"
local name="$2"
value="$1"
name="$2"

case "${value,,}" in
true|false)
case "$value" in
true|True|TRUE|false|False|FALSE)
;;
*)
echo "::error::Invalid boolean value for $name: '$value'. Expected: true or false"
@@ -181,17 +195,27 @@ runs:
esac

# Validate max retries
if ! [[ "$MAX_RETRIES" =~ ^[0-9]+$ ]] || [ "$MAX_RETRIES" -le 0 ] || [ "$MAX_RETRIES" -gt 10 ]; then
case "$MAX_RETRIES" in
''|*[!0-9]*)
echo "::error::Invalid max-retries: '$MAX_RETRIES'. Must be a positive integer between 1 and 10"
exit 1
;;
esac

if [ "$MAX_RETRIES" -le 0 ] || [ "$MAX_RETRIES" -gt 10 ]; then
echo "::error::Invalid max-retries: '$MAX_RETRIES'. Must be a positive integer between 1 and 10"
exit 1
fi

# Validate email and username for fix mode
if [ "$MODE" = "fix" ]; then
if [[ "$EMAIL" != *"@"* ]] || [[ "$EMAIL" != *"."* ]]; then
echo "::error::Invalid email format: '$EMAIL'. Expected valid email address"
exit 1
fi
case "$EMAIL" in
*@*.*) ;;
*)
echo "::error::Invalid email format: '$EMAIL'. Expected valid email address"
exit 1
;;
esac

username="$USERNAME"

@@ -200,20 +224,26 @@ runs:
exit 1
fi

if ! [[ "$username" =~ ^[a-zA-Z0-9-]+$ ]]; then
echo "::error::Invalid username characters in '$username'. Only letters, digits, and hyphens allowed"
exit 1
fi
case "$username" in
*[!a-zA-Z0-9-]*)
echo "::error::Invalid username characters in '$username'. Only letters, digits, and hyphens allowed"
exit 1
;;
esac

if [[ "$username" == -* ]] || [[ "$username" == *- ]]; then
echo "::error::Invalid username '$username'. Cannot start or end with hyphen"
exit 1
fi
case "$username" in
-*|*-)
echo "::error::Invalid username '$username'. Cannot start or end with hyphen"
exit 1
;;
esac

if [[ "$username" == *--* ]]; then
echo "::error::Invalid username '$username'. Consecutive hyphens not allowed"
exit 1
fi
case "$username" in
*--*)
echo "::error::Invalid username '$username'. Consecutive hyphens not allowed"
exit 1
;;
esac
fi

echo "Input validation completed successfully"
@@ -223,26 +253,79 @@ runs:
with:
token: ${{ inputs.token || github.token }}

- name: Node Setup
id: node-setup
uses: ivuorinen/actions/node-setup@0fa9a68f07a1260b321f814202658a6089a43d42
- name: Detect Package Manager
id: detect-pm
shell: sh
run: |
set -eu

# Detect package manager from lockfiles
if [ -f bun.lockb ]; then
package_manager="bun"
elif [ -f pnpm-lock.yaml ]; then
package_manager="pnpm"
elif [ -f yarn.lock ]; then
package_manager="yarn"
else
package_manager="npm"
fi

printf 'package-manager=%s\n' "$package_manager" >> "$GITHUB_OUTPUT"
echo "Detected package manager: $package_manager"

- name: Setup Node.js
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: '24'

- name: Enable Corepack
shell: sh
run: |
set -eu
corepack enable

- name: Install Package Manager
shell: sh
env:
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
run: |
set -eu

case "$PACKAGE_MANAGER" in
pnpm)
corepack prepare pnpm@latest --activate
;;
yarn)
corepack prepare yarn@stable --activate
;;
bun|npm)
# Bun installed separately, npm built-in
;;
esac

- name: Setup Bun
if: steps.detect-pm.outputs.package-manager == 'bun'
uses: oven-sh/setup-bun@b7a1c7ccf290d58743029c4f6903da283811b979 # v2.1.0
with:
bun-version: latest

- name: Cache Node Dependencies
id: cache
uses: ivuorinen/actions/common-cache@0fa9a68f07a1260b321f814202658a6089a43d42
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
type: 'npm'
paths: 'node_modules'
key-files: 'package-lock.json,yarn.lock,pnpm-lock.yaml,bun.lockb'
key-prefix: 'prettier-lint-${{ inputs.mode }}-${{ steps.node-setup.outputs.package-manager }}'
path: node_modules
key: ${{ runner.os }}-prettier-lint-${{ inputs.mode }}-${{ steps.detect-pm.outputs.package-manager }}-${{ hashFiles('package-lock.json', 'yarn.lock', 'pnpm-lock.yaml', 'bun.lockb') }}
restore-keys: |
${{ runner.os }}-prettier-lint-${{ inputs.mode }}-${{ steps.detect-pm.outputs.package-manager }}-
${{ runner.os }}-prettier-lint-${{ inputs.mode }}-

- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
shell: bash
shell: sh
env:
PACKAGE_MANAGER: ${{ steps.node-setup.outputs.package-manager }}
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
run: |
set -euo pipefail
set -eu

echo "Installing dependencies using $PACKAGE_MANAGER..."

@@ -269,12 +352,12 @@ runs:

- name: Install Prettier Plugins
if: inputs.plugins != ''
shell: bash
shell: sh
env:
PACKAGE_MANAGER: ${{ steps.node-setup.outputs.package-manager }}
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
PLUGINS: ${{ inputs.plugins }}
run: |
set -euo pipefail
set -eu

echo "Installing Prettier plugins: $PLUGINS"

@@ -301,16 +384,16 @@ runs:
- name: Run Prettier Check
if: inputs.mode == 'check'
id: check
shell: bash
shell: sh
working-directory: ${{ inputs.working-directory }}
env:
PACKAGE_MANAGER: ${{ steps.node-setup.outputs.package-manager }}
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
CONFIG_FILE: ${{ inputs.config-file }}
CACHE: ${{ inputs.cache }}
FAIL_ON_ERROR: ${{ inputs.fail-on-error }}
FILE_PATTERN: ${{ inputs.file-pattern }}
run: |
set -euo pipefail
set -eu

echo "Running Prettier check mode..."

@@ -358,13 +441,13 @@ runs:
- name: Run Prettier Fix
if: inputs.mode == 'fix'
id: fix
shell: bash
shell: sh
working-directory: ${{ inputs.working-directory }}
env:
PACKAGE_MANAGER: ${{ steps.node-setup.outputs.package-manager }}
PACKAGE_MANAGER: ${{ steps.detect-pm.outputs.package-manager }}
FILE_PATTERN: ${{ inputs.file-pattern }}
run: |
set -euo pipefail
set -eu

echo "Running Prettier fix mode..."

@@ -385,7 +468,7 @@ runs:

- name: Commit and Push Fixes
if: inputs.mode == 'fix' && success()
uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
with:
commit_message: 'style: autofix Prettier formatting'
commit_user_name: ${{ inputs.username }}

@@ -2,7 +2,7 @@
# Validation rules for prettier-lint action
# Generated by update-validators.py v1.0.0 - DO NOT EDIT MANUALLY
# Schema version: 1.0
# Coverage: 86% (12/14 inputs)
# Coverage: 100% (14/14 inputs)
#
# This file defines validation rules for the prettier-lint GitHub Action.
# Rules are automatically applied by validate-inputs action when this
@@ -34,21 +34,24 @@ conventions:
config-file: file_path
email: email
fail-on-error: boolean
file-pattern: path_list
ignore-file: file_path
max-retries: numeric_range_1_10
mode: mode_enum
plugins: linter_list
prettier-version: semantic_version
report-format: report_format
token: github_token
username: username
working-directory: file_path
overrides: {}
overrides:
mode: mode_enum
statistics:
total_inputs: 14
validated_inputs: 12
validated_inputs: 14
skipped_inputs: 0
coverage_percentage: 86
validation_coverage: 86
coverage_percentage: 100
validation_coverage: 100
auto_detected: true
manual_review_required: false
quality_indicators:

@@ -31,68 +31,42 @@ class CustomValidator(BaseValidator):
valid = True

# Validate python-version if provided
if "python-version" in inputs or "python_version" in inputs:
key = "python-version" if "python-version" in inputs else "python_version"
value = inputs[key]

# Empty string should fail validation
if value == "":
version_key = self.get_key_variant(inputs, "python-version", "python_version")
if version_key:
value = inputs[version_key]
if not value:
self.add_error("Python version cannot be empty")
valid = False
elif value:
result = self.version_validator.validate_python_version(value, key)

# Propagate errors from the version validator
for error in self.version_validator.errors:
if error not in self.errors:
self.add_error(error)

self.version_validator.clear_errors()

if not result:
valid = False
else:
valid &= self.validate_with(
self.version_validator, "validate_python_version", value, version_key
)

# Validate username
if "username" in inputs:
if inputs.get("username"):
username = inputs["username"]
if username:
# Check username length (GitHub usernames are max 39 characters)
if len(username) > 39:
self.add_error("Username is too long (max 39 characters)")
valid = False
# Check for command injection patterns
if ";" in username or "`" in username or "$" in username:
self.add_error("Username contains potentially dangerous characters")
valid = False
if len(username) > 39:
self.add_error("Username is too long (max 39 characters)")
valid = False
if ";" in username or "`" in username or "$" in username:
self.add_error("Username contains potentially dangerous characters")
valid = False

# Validate email
if "email" in inputs:
email = inputs["email"]
if email:
result = self.network_validator.validate_email(email, "email")
for error in self.network_validator.errors:
if error not in self.errors:
self.add_error(error)
self.network_validator.clear_errors()
if not result:
valid = False
if inputs.get("email"):
valid &= self.validate_with(
self.network_validator, "validate_email", inputs["email"], "email"
)

# Validate token
if "token" in inputs:
if inputs.get("token"):
token = inputs["token"]
if token:
# Check for variable expansion (but allow GitHub Actions expressions)
if "${" in token and not token.startswith("${{ ") and not token.endswith(" }}"):
self.add_error("Token contains potentially dangerous variable expansion")
valid = False
else:
result = self.token_validator.validate_github_token(token)
for error in self.token_validator.errors:
if error not in self.errors:
self.add_error(error)
self.token_validator.clear_errors()
if not result:
valid = False
# Check for variable expansion (but allow GitHub Actions expressions)
if "${" in token and not token.startswith("${{ ") and not token.endswith(" }}"):
self.add_error("Token contains potentially dangerous variable expansion")
valid = False
else:
valid &= self.validate_with(self.token_validator, "validate_github_token", token)

return valid

@@ -64,7 +64,7 @@ runs:
steps:
- name: Validate Inputs
id: validate
uses: ivuorinen/actions/validate-inputs@0fa9a68f07a1260b321f814202658a6089a43d42
uses: ivuorinen/actions/validate-inputs@5cc7373a22402ee8985376bc713f00e09b5b2edb
with:
action-type: 'python-lint-fix'
token: ${{ inputs.token }}
@@ -84,13 +84,147 @@ runs:

- name: Detect Python Version
id: python-version
uses: ivuorinen/actions/language-version-detect@0fa9a68f07a1260b321f814202658a6089a43d42
with:
language: 'python'
default-version: ${{ inputs.python-version }}
shell: sh
env:
DEFAULT_VERSION: "${{ inputs.python-version || '3.11' }}"
run: |
set -eu

- name: Setup Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
# Function to validate version format
validate_version() {
version=$1
case "$version" in
[0-9]*\.[0-9]* | [0-9]*\.[0-9]*\.[0-9]*)
return 0
;;
*)
return 1
;;
esac
}

# Function to clean version string
clean_version() {
printf '%s' "$1" | sed 's/^[vV]//' | tr -d ' \n\r'
}

detected_version=""

# Parse .tool-versions file
if [ -f .tool-versions ]; then
echo "Checking .tool-versions for python..." >&2
version=$(awk '/^python[[:space:]]/ {gsub(/#.*/, ""); print $2; exit}' .tool-versions 2>/dev/null || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
echo "Found Python version in .tool-versions: $version" >&2
detected_version="$version"
fi
fi
fi

# Parse Dockerfile
if [ -z "$detected_version" ] && [ -f Dockerfile ]; then
echo "Checking Dockerfile for python..." >&2
version=$(grep -iF "FROM" Dockerfile | grep -F "python:" | head -1 | \
sed -n -E "s/.*python:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
echo "Found Python version in Dockerfile: $version" >&2
detected_version="$version"
fi
fi
fi

# Parse devcontainer.json
if [ -z "$detected_version" ] && [ -f .devcontainer/devcontainer.json ]; then
echo "Checking devcontainer.json for python..." >&2
if command -v jq >/dev/null 2>&1; then
version=$(jq -r '.image // empty' .devcontainer/devcontainer.json 2>/dev/null | sed -n -E "s/.*python:([0-9]+(\.[0-9]+)*)(-[^:]*)?.*/\1/p" || echo "")
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
echo "Found Python version in devcontainer: $version" >&2
detected_version="$version"
fi
fi
else
echo "jq not found; skipping devcontainer.json parsing" >&2
fi
fi

# Parse .python-version file
if [ -z "$detected_version" ] && [ -f .python-version ]; then
echo "Checking .python-version..." >&2
version=$(tr -d '\r' < .python-version | head -1)
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
echo "Found Python version in .python-version: $version" >&2
detected_version="$version"
fi
fi
fi

# Parse pyproject.toml
if [ -z "$detected_version" ] && [ -f pyproject.toml ]; then
echo "Checking pyproject.toml..." >&2
if grep -q '^\[project\]' pyproject.toml; then
version=$(grep -A 20 '^\[project\]' pyproject.toml | grep -E '^[[:space:]]*requires-python[[:space:]]*=' | sed -n -E 's/[^0-9]*([0-9]+\.[0-9]+(\.[0-9]+)?).*/\1/p' | head -1)
if [ -n "$version" ]; then
version=$(clean_version "$version")
if validate_version "$version"; then
echo "Found Python version in pyproject.toml: $version" >&2
detected_version="$version"
fi
fi
fi
fi

# Use default version if nothing detected
if [ -z "$detected_version" ]; then
detected_version="$DEFAULT_VERSION"
echo "Using default Python version: $detected_version" >&2
fi

# Set output
printf 'detected-version=%s\n' "$detected_version" >> "$GITHUB_OUTPUT"
echo "Final detected Python version: $detected_version" >&2

- name: Detect Package Manager
id: package-manager
shell: sh
run: |
set -eu

# Detect Python package manager based on lock files and config
package_manager="pip"

if [ -f "uv.lock" ]; then
# uv uses pip-compatible caching, so we use 'pip' as cache type
package_manager="pip"
echo "Detected uv (using pip-compatible caching)" >&2
elif [ -f "poetry.lock" ]; then
package_manager="poetry"
echo "Detected Poetry" >&2
elif [ -f "Pipfile.lock" ] || [ -f "Pipfile" ]; then
package_manager="pipenv"
echo "Detected Pipenv" >&2
elif [ -f "requirements.txt" ] || [ -f "requirements-dev.txt" ] || [ -f "setup.py" ] || [ -f "pyproject.toml" ]; then
package_manager="pip"
echo "Detected pip" >&2
else
package_manager="pip"
echo "No package manager detected, defaulting to pip" >&2
fi

printf 'package-manager=%s\n' "$package_manager" >> "$GITHUB_OUTPUT"
echo "Using package manager: $package_manager" >&2

- name: Setup Python (pip)
if: steps.package-manager.outputs.package-manager == 'pip'
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ steps.python-version.outputs.detected-version }}
cache: 'pip'
@@ -99,6 +233,27 @@ runs:
**/requirements-dev.txt
**/pyproject.toml
**/setup.py
**/uv.lock

- name: Setup Python (pipenv)
if: steps.package-manager.outputs.package-manager == 'pipenv'
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ steps.python-version.outputs.detected-version }}
cache: 'pipenv'
cache-dependency-path: |
**/Pipfile
**/Pipfile.lock

- name: Setup Python (poetry)
if: steps.package-manager.outputs.package-manager == 'poetry'
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ steps.python-version.outputs.detected-version }}
cache: 'poetry'
cache-dependency-path: |
**/poetry.lock
**/pyproject.toml

- name: Check for Python Files
id: check-files
@@ -116,18 +271,8 @@ runs:
fi
printf '%s\n' "result=found" >> "$GITHUB_OUTPUT"

- name: Cache Python Dependencies
if: steps.check-files.outputs.result == 'found'
id: cache-pip
uses: ivuorinen/actions/common-cache@0fa9a68f07a1260b321f814202658a6089a43d42
with:
type: 'pip'
paths: '~/.cache/pip'
key-files: 'requirements*.txt,pyproject.toml,setup.py,setup.cfg'
key-prefix: 'python-lint-fix'

- name: Install Dependencies
if: steps.check-files.outputs.result == 'found' && steps.cache-pip.outputs.cache-hit != 'true'
if: steps.check-files.outputs.result == 'found'
id: install
shell: sh
env:
@@ -150,22 +295,6 @@ runs:
flake8 --version || exit 1
autopep8 --version || exit 1

- name: Activate Virtual Environment (Cache Hit)
if: steps.check-files.outputs.result == 'found' && steps.cache-pip.outputs.cache-hit == 'true'
shell: sh
env:
FLAKE8_VERSION: ${{ inputs.flake8-version }}
AUTOPEP8_VERSION: ${{ inputs.autopep8-version }}
run: |
set -eu

# Create virtual environment if it doesn't exist from cache
if [ ! -d ".venv" ]; then
python -m venv .venv
. .venv/bin/activate
pip install "flake8==$FLAKE8_VERSION" "flake8-sarif==0.6.0" "autopep8==$AUTOPEP8_VERSION"
fi

- name: Run flake8
if: steps.check-files.outputs.result == 'found'
id: lint
@@ -232,7 +361,7 @@ runs:

- name: Commit Fixes
if: ${{ fromJSON(steps.fix.outputs.fixed_count) > 0 }}
uses: stefanzweifel/git-auto-commit-action@be7095c202abcf573b09f20541e0ee2f6a3a9d9b # v5.0.1
uses: stefanzweifel/git-auto-commit-action@04702edda442b2e678b25b537cec683a1493fcb9 # v7.1.0
with:
commit_message: 'style: apply python lint fixes'
commit_user_name: ${{ inputs.username }}
@@ -241,7 +370,7 @@ runs:

- name: Upload SARIF Report
if: steps.check-files.outputs.result == 'found'
uses: github/codeql-action/upload-sarif@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
sarif_file: ${{ inputs.working-directory }}/reports/flake8.sarif
category: 'python-lint'
Some files were not shown because too many files have changed in this diff