fix(ci): combine workflows into security-suite — remove legacy GitHub Actions workflows and add the security-suite workflow (#58)

* fix(ci): combine workflows to security-suite
* fix(ci): tweak permissions
This commit is contained in:
2025-02-27 13:19:25 +02:00
committed by GitHub
parent 89b6c7942d
commit 2661996471
5 changed files with 320 additions and 944 deletions

View File

@@ -1,43 +0,0 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
name: Scorecard
on:
schedule:
- cron: '0 2 * * 0' # Run every Sunday at 2:00 AM
push:
branches: [main]
permissions: read-all
jobs:
analysis:
name: Scorecard analysis
runs-on: ubuntu-latest
permissions:
security-events: write
id-token: write
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Run analysis
uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1
with:
results_file: results.sarif
results_format: sarif
publish_results: true
- name: Upload artifact
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
with:
name: SARIF file
path: results.sarif
retention-days: 5
- name: Upload to code-scanning
uses: github/codeql-action/upload-sarif@b56ba49b26e50535fa1e7f7db0f4f7b4bf65d80d # v3.28.10
with:
sarif_file: results.sarif

View File

@@ -1,184 +0,0 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
name: Security Metrics Collection
on:
workflow_run:
workflows: ['Security Checks']
types:
- completed
schedule:
- cron: '0 0 * * 0' # Every Sunday at 00:00
permissions:
contents: read
actions: read
pull-requests: read
jobs:
collect-metrics:
runs-on: ubuntu-latest
permissions:
contents: write
issues: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Collect Metrics
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const fs = require('fs');
// Collects the past week's security metrics — workflow scan runs,
// vulnerability issues by severity label, and security-fix PRs — via
// the GitHub REST API, persists them to security-metrics.json and
// opens the weekly report issue.
// Runs inside actions/github-script, so `github`, `context` and `core`
// are injected globals; any failure marks the step as failed.
async function collectMetrics() {
const metrics = {
timestamp: new Date().toISOString(),
weekly: {
scans: 0,
vulnerabilities: {
critical: 0,
high: 0,
medium: 0,
low: 0
},
fixes: {
submitted: 0,
merged: 0
},
meanTimeToFix: null // null (not 0) distinguishes "no merged fixes" from "instant fixes"
}
};
try {
// Count runs of the security workflow over the last 7 days
const scans = await github.rest.actions.listWorkflowRuns({
owner: context.repo.owner,
repo: context.repo.repo,
workflow_id: 'security.yml',
created: `>${new Date(Date.now() - 7 * 24 * 60 * 60 * 1000).toISOString()}`
});
metrics.weekly.scans = scans.data.total_count;
// Collect vulnerability metrics from issues labelled 'security'
const vulnIssues = await github.rest.issues.listForRepo({
owner: context.repo.owner,
repo: context.repo.repo,
labels: 'security',
state: 'all',
since: new Date(Date.now() - 7 * 24 * 60 * 60 * 1000).toISOString()
});
// Tally issues into severity buckets based on their labels
vulnIssues.data.forEach(issue => {
if (issue.labels.find(l => l.name === 'critical')) metrics.weekly.vulnerabilities.critical++;
if (issue.labels.find(l => l.name === 'high')) metrics.weekly.vulnerabilities.high++;
if (issue.labels.find(l => l.name === 'medium')) metrics.weekly.vulnerabilities.medium++;
if (issue.labels.find(l => l.name === 'low')) metrics.weekly.vulnerabilities.low++;
});
// Calculate fix metrics.
// FIX: the REST "list pull requests" endpoint has no `labels`
// parameter (unlike issues.listForRepo), so the previous `labels`
// argument was silently ignored and ALL PRs were counted as security
// fixes. Filter by label client-side instead, matching the approach
// already used by the trends workflow in this repository.
const fixPRs = await github.rest.pulls.list({
owner: context.repo.owner,
repo: context.repo.repo,
state: 'all',
per_page: 100
});
const securityFixPRs = fixPRs.data.filter(pr =>
pr.labels.some(label => label.name === 'security-fix'));
metrics.weekly.fixes.submitted = securityFixPRs.length;
const mergedPRs = securityFixPRs.filter(pr => pr.merged_at);
metrics.weekly.fixes.merged = mergedPRs.length;
// Calculate mean time to fix only if there are merged PRs
if (mergedPRs.length > 0) {
const fixTimes = mergedPRs.map(pr => {
const mergedAt = new Date(pr.merged_at);
const createdAt = new Date(pr.created_at);
return mergedAt - createdAt;
});
const totalTime = fixTimes.reduce((a, b) => a + b, 0);
// Convert to hours and round to 2 decimal places
metrics.weekly.meanTimeToFix = Number((totalTime / (fixTimes.length * 3600000)).toFixed(2));
}
// Save metrics
fs.writeFileSync('security-metrics.json', JSON.stringify(metrics, null, 2));
// Generate report
const report = generateReport(metrics);
// Create/update metrics dashboard
await github.rest.issues.create({
owner: context.repo.owner,
repo: context.repo.repo,
title: '📊 Weekly Security Metrics Report',
body: report,
labels: ['metrics', 'security']
});
} catch (error) {
core.setFailed(`Failed to collect metrics: ${error.message}`);
}
}
// Renders the weekly security metrics issue body as Markdown.
// Interpolates scan counts, per-severity vulnerability totals and fix
// PR stats into a fixed template; the Summary section is produced by
// the sibling generateSummary helper defined below in this script.
function generateReport(metrics) {
// Formats mean-time-to-fix; null means "no merged fix PRs this week"
// and renders as N/A rather than 0 hours.
const formatDuration = (hours) => {
if (hours === null) return 'N/A';
return `${hours} hours`;
};
return `## 📊 Weekly Security Metrics Report
### Timeline
- Report Generated: ${new Date().toISOString()}
- Period: Last 7 days
### Security Scans
- Total Scans Run: ${metrics.weekly.scans}
### Vulnerabilities
- Critical: ${metrics.weekly.vulnerabilities.critical}
- High: ${metrics.weekly.vulnerabilities.high}
- Medium: ${metrics.weekly.vulnerabilities.medium}
- Low: ${metrics.weekly.vulnerabilities.low}
### Fixes
- PRs Submitted: ${metrics.weekly.fixes.submitted}
- PRs Merged: ${metrics.weekly.fixes.merged}
- Mean Time to Fix: ${formatDuration(metrics.weekly.meanTimeToFix)}
### Summary
${generateSummary(metrics)}
> This report was automatically generated by the security metrics workflow.`;
}
// Builds the newline-joined summary lines for the weekly metrics
// report: an all-clear or total-count line, an optional critical-count
// alert, an optional fix-rate line, and an optional mean-time-to-fix
// line.
function generateSummary(metrics) {
const { vulnerabilities, fixes, meanTimeToFix } = metrics.weekly;
const totalVulns = Object.values(vulnerabilities).reduce((sum, count) => sum + count, 0);
// 0 / 0 is NaN; `|| 0` collapses it to 0, matching the original behavior.
const mergeRatio = fixes.merged / fixes.submitted || 0;
const lines = [];
if (totalVulns === 0) {
lines.push('✅ No vulnerabilities detected this week.');
} else {
lines.push(`⚠️ Detected ${totalVulns} total vulnerabilities.`);
if (vulnerabilities.critical > 0) {
lines.push(`🚨 ${vulnerabilities.critical} critical vulnerabilities require immediate attention!`);
}
}
if (fixes.submitted > 0) {
lines.push(`🔧 Fix rate: ${(mergeRatio * 100).toFixed(1)}%`);
}
if (meanTimeToFix !== null) {
lines.push(`⏱️ Average time to fix: ${meanTimeToFix} hours`);
}
return lines.join('\n');
}
collectMetrics();

320
.github/workflows/security-suite.yml vendored Normal file
View File

@@ -0,0 +1,320 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
name: Security Suite
on:
schedule:
- cron: '55 23 * * 0' # Every Sunday at 23:55
workflow_dispatch:
pull_request:
paths:
- '**/package.json'
- '**/package-lock.json'
- '**/yarn.lock'
- '**/pnpm-lock.yaml'
- '**/requirements.txt'
- '**/Dockerfile'
- '**/*.py'
- '**/*.js'
- '**/*.ts'
- '**/workflows/*.yml'
merge_group:
push:
branches: [main]
permissions: read-all
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
jobs:
security-checks:
name: Security Checks
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
security-events: write
pull-requests: write
statuses: write
issues: write
id-token: write
steps:
- name: Check Required Secrets
id: check-secrets
shell: bash
run: |
{
echo "run_snyk=false"
echo "run_slack=false"
echo "run_sonarcloud=false"
} >> "$GITHUB_OUTPUT"
# Check secrets
if [ -n "${{ secrets.SNYK_TOKEN }}" ]; then
echo "run_snyk=true" >> "$GITHUB_OUTPUT"
else
echo "::warning::SNYK_TOKEN not set - Snyk scans will be skipped"
fi
if [ -n "${{ secrets.SLACK_WEBHOOK }}" ]; then
echo "run_slack=true" >> "$GITHUB_OUTPUT"
else
echo "::warning::SLACK_WEBHOOK not set - Slack notifications will be skipped"
fi
if [ -n "${{ secrets.SONAR_TOKEN }}" ]; then
echo "run_sonarcloud=true" >> "$GITHUB_OUTPUT"
else
echo "::warning::SONAR_TOKEN not set - SonarCloud analysis will be skipped"
fi
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
# OWASP Dependency Check
- name: Run OWASP Dependency Check
uses: dependency-check/Dependency-Check_Action@3102a65fd5f36d0000297576acc56a475b0de98d # main
with:
project: 'GitHub Actions'
path: '.'
format: 'SARIF'
out: 'reports'
args: >
--enableRetired
--enableExperimental
--failOnCVSS 7
- name: Upload OWASP Results
uses: github/codeql-action/upload-sarif@9e8d0789d4a0fa9ceb6b1738f7e269594bdd67f0 # v3.28.9
with:
sarif_file: reports/dependency-check-report.sarif
category: owasp-dependency-check
# Snyk Analysis
- name: Setup Node.js
if: steps.check-secrets.outputs.run_snyk == 'true'
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
with:
node-version: 'lts/*'
cache: 'npm'
- name: Run Snyk Scan
id: snyk
if: steps.check-secrets.outputs.run_snyk == 'true'
uses: snyk/actions/node@cdb760004ba9ea4d525f2e043745dfe85bb9077e # master
continue-on-error: true
env:
SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
with:
args: --all-projects --sarif-file-output=snyk-results.sarif
- name: Upload Snyk Results
if: steps.check-secrets.outputs.run_snyk == 'true'
uses: github/codeql-action/upload-sarif@9e8d0789d4a0fa9ceb6b1738f7e269594bdd67f0 # v3.28.9
with:
sarif_file: snyk-results.sarif
category: snyk
# OSSF Scorecard
- name: Run Scorecard
uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0
with:
results_file: scorecard-results.sarif
results_format: sarif
publish_results: true
- name: Upload Scorecard Results
uses: github/codeql-action/upload-sarif@9e8d0789d4a0fa9ceb6b1738f7e269594bdd67f0 # v3.28.9
with:
sarif_file: scorecard-results.sarif
category: scorecard
# Analysis and Metrics
- name: Analyze Results
id: analysis
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const fs = require('fs');
// Aggregates findings from the OWASP, Snyk and Scorecard SARIF files
// into one metrics object, writes security-results.json for the later
// report step, and exposes critical/high totals as step outputs.
async function analyzeResults() {
const metrics = {
timestamp: new Date().toISOString(),
vulnerabilities: { critical: 0, high: 0, medium: 0, low: 0 },
scorecard: null,
trends: {},
tools: {}
};
// Parses one SARIF file and tallies per-severity counts, also folding
// them into the shared metrics.vulnerabilities totals. Returns null
// when the file is missing or unparseable.
// NOTE: severity is derived from the SARIF `level` only (error→high,
// warning→medium, else low), so the `critical` bucket is never
// incremented here.
function analyzeSarif(file, tool) {
if (!fs.existsSync(file)) return null;
try {
const data = JSON.parse(fs.readFileSync(file, 'utf8'));
const results = {
total: 0,
bySeverity: { critical: 0, high: 0, medium: 0, low: 0 },
details: []
};
data.runs.forEach(run => {
if (!run.results) return;
run.results.forEach(result => {
results.total++;
const severity = result.level === 'error' ? 'high' :
result.level === 'warning' ? 'medium' : 'low';
results.bySeverity[severity]++;
metrics.vulnerabilities[severity]++;
results.details.push({
title: result.message?.text || 'Unnamed issue',
severity,
location: result.locations?.[0]?.physicalLocation?.artifactLocation?.uri || 'Unknown',
description: result.message?.text || '',
ruleId: result.ruleId || ''
});
});
});
return results;
} catch (error) {
console.error(`Error analyzing ${tool} results:`, error);
return null;
}
}
// Analyze all SARIF files
// The ${{ }} expression below is substituted with a literal true/false
// by the workflow renderer before this script executes.
metrics.tools = {
owasp: analyzeSarif('reports/dependency-check-report.sarif', 'OWASP'),
snyk: ${{ steps.check-secrets.outputs.run_snyk == 'true' }} ?
analyzeSarif('snyk-results.sarif', 'Snyk') : null,
scorecard: analyzeSarif('scorecard-results.sarif', 'Scorecard')
};
// Save results for other steps
fs.writeFileSync('security-results.json', JSON.stringify(metrics, null, 2));
// Set outputs for other steps
core.setOutput('total_critical', metrics.vulnerabilities.critical);
core.setOutput('total_high', metrics.vulnerabilities.high);
return metrics;
}
return await analyzeResults();
- name: Generate Reports
if: always()
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const fs = require('fs');
// Metrics written by the preceding "Analyze Results" step.
const metrics = JSON.parse(fs.readFileSync('security-results.json', 'utf8'));
// Find existing security report issue
const issues = await github.rest.issues.listForRepo({
owner: context.repo.owner,
repo: context.repo.repo,
state: 'open',
labels: ['security-report'],
per_page: 1
});
const severityEmoji = {
critical: '🚨',
high: '⚠️',
medium: '⚡',
low: '📝'
};
// Generate report body
// Nested template: one section per tool that produced results, with a
// detail line for each critical/high finding.
const report = `## Security Scan Report ${new Date().toISOString()}
### Summary
${Object.entries(metrics.vulnerabilities)
.map(([sev, count]) => `${severityEmoji[sev]} ${sev}: ${count}`)
.join('\n')}
### Tool Results
${Object.entries(metrics.tools)
.filter(([_, results]) => results)
.map(([tool, results]) => `
#### ${tool.toUpperCase()}
- Total issues: ${results.total}
${Object.entries(results.bySeverity)
.filter(([_, count]) => count > 0)
.map(([sev, count]) => `- ${sev}: ${count}`)
.join('\n')}
${results.details
.filter(issue => ['critical', 'high'].includes(issue.severity))
.map(issue => `- ${severityEmoji[issue.severity]} ${issue.title} (${issue.severity})
- Location: \`${issue.location}\`
- Rule: \`${issue.ruleId}\``)
.join('\n')}
`).join('\n')}
### Action Items
${metrics.vulnerabilities.critical + metrics.vulnerabilities.high > 0 ?
`- [ ] Address ${metrics.vulnerabilities.critical} critical and ${metrics.vulnerabilities.high} high severity issues
- [ ] Review automated fix PRs
- [ ] Update dependencies with known vulnerabilities` :
'✅ No critical or high severity issues found'}
### Links
- [Workflow Run](${process.env.GITHUB_SERVER_URL}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId})
- [Security Overview](${process.env.GITHUB_SERVER_URL}/${context.repo.owner}/${context.repo.repo}/security)
> Last updated: ${new Date().toISOString()}`;
// Update or create issue
// Reuse the newest open 'security-report' issue when one exists so the
// dashboard stays in a single issue instead of piling up new ones.
if (issues.data.length > 0) {
await github.rest.issues.update({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: issues.data[0].number,
body: report,
state: 'open'
});
} else {
await github.rest.issues.create({
owner: context.repo.owner,
repo: context.repo.repo,
title: '🔒 Security Scan Report',
body: report,
labels: ['security-report', 'automated'],
assignees: ['ivuorinen']
});
}
// Add summary to workflow
await core.summary
.addRaw(report)
.write();
- name: Archive Results
if: always()
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: security-results
path: |
reports/
*.sarif
security-results.json
retention-days: 30
- name: Notify on Failure
if: failure() && steps.check-secrets.outputs.run_slack == 'true'
run: |
curl -X POST -H 'Content-type: application/json' \
--data '{"text":"❌ Security checks failed! Check the logs for details."}' \
${{ secrets.SLACK_WEBHOOK }}

View File

@@ -1,229 +0,0 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
name: Security Trends Analysis
on:
workflow_run:
workflows: ['Security Checks']
types:
- completed
permissions:
contents: read
actions: read
pull-requests: read
jobs:
analyze-trends:
runs-on: ubuntu-latest
permissions:
contents: write
issues: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: main
fetch-depth: 0
- name: Download latest results
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: security-reports-${{ github.event.workflow_run.id }}
path: latest-results
- name: Analyze Trends
id: analyze
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const fs = require('fs');
const path = require('path');
const core = require('@actions/core');
// Builds weekly security metrics from the downloaded scan artifacts,
// maintains a rolling 12-entry history file, computes week-over-week
// vulnerability trends, and opens the weekly report issue.
// Relies on the `github`, `context` and `core` globals injected by
// actions/github-script; any failure fails the step and rethrows.
async function collectMetrics() {
const metrics = {
timestamp: new Date().toISOString(),
weekly: {
scans: 0,
vulnerabilities: {
critical: 0,
high: 0,
medium: 0,
low: 0
},
fixes: {
submitted: 0,
merged: 0
},
meanTimeToFix: null // Initialize as null instead of 0
},
history: [],
trends: {
vulnerabilities: {
trend: 'stable',
percentage: 0
},
fixRate: {
trend: 'stable',
percentage: 0
}
}
};
try {
// Check if the artifacts exist
const files = fs.readdirSync('latest-results');
const requiredFiles = ['vulnerability-summary.json', 'security-badge.json', 'security-report.md'];
const missingFiles = requiredFiles.filter(file => !files.includes(file));
if (missingFiles.length > 0) {
throw new Error(`Missing required files: ${missingFiles.join(', ')}`);
}
// Read the latest results
// NOTE(review): securityBadge is read but never used below — confirm
// whether it was meant to feed the report.
const vulnSummary = JSON.parse(fs.readFileSync('latest-results/vulnerability-summary.json', 'utf8'));
const securityBadge = JSON.parse(fs.readFileSync('latest-results/security-badge.json', 'utf8'));
const securityReport = fs.readFileSync('latest-results/security-report.md', 'utf8');
// Update metrics
metrics.weekly.vulnerabilities = vulnSummary.total;
// Fetch history data if it exists
try {
const historyFile = 'security-metrics-history.json';
if (fs.existsSync(historyFile)) {
const history = JSON.parse(fs.readFileSync(historyFile, 'utf8'));
metrics.history = history.slice(-12); // Keep 12 weeks of history
}
} catch (error) {
console.log('No existing history found, starting fresh');
}
// Collect PR metrics with security-fix label
// NOTE(review): `labels` and `since` are not documented parameters of
// the "list pull requests" endpoint, so they are presumably ignored;
// the client-side filter below does the real label filtering. Verify
// against the REST API docs.
const prs = await github.rest.pulls.list({
owner: context.repo.owner,
repo: context.repo.repo,
state: 'all',
labels: ['security-fix'],
since: new Date(Date.now() - 7 * 24 * 60 * 60 * 1000).toISOString()
});
// Compute metrics
const securityFixes = prs.data.filter(pr => pr.labels.some(label => label.name === 'security-fix'));
metrics.weekly.fixes = {
submitted: securityFixes.length,
merged: securityFixes.filter(pr => pr.merged_at !== null).length,
automated: securityFixes.filter(pr => pr.labels.some(label => label.name === 'automated-pr')).length
};
// Calculate trends
// Compare the current total vulnerability count against the most
// recent history entry to classify the trend direction.
if (metrics.history.length > 0) {
const lastWeek = metrics.history[metrics.history.length - 1];
const totalVulns = Object.values(metrics.weekly.vulnerabilities).reduce((a, b) => a + b, 0);
const lastWeekVulns = Object.values(lastWeek.weekly.vulnerabilities).reduce((a, b) => a + b, 0);
metrics.trends.vulnerabilities = {
trend: totalVulns < lastWeekVulns ? 'improving' : totalVulns > lastWeekVulns ? 'worsening' : 'stable',
percentage: lastWeekVulns ? ((totalVulns - lastWeekVulns) / lastWeekVulns * 100).toFixed(1) : 0
};
}
// Update history
metrics.history.push({
timestamp: metrics.timestamp,
weekly: metrics.weekly
});
// Save metrics and history
fs.writeFileSync('security-metrics-history.json', JSON.stringify(metrics.history, null, 2));
fs.writeFileSync('security-metrics.json', JSON.stringify(metrics, null, 2));
// Create the report
const report = generateEnhancedReport(metrics, securityReport);
await github.rest.issues.create({
owner: context.repo.owner,
repo: context.repo.repo,
title: '📊 Weekly Security Metrics Report',
body: report,
labels: ['metrics', 'security']
});
} catch (error) {
core.setFailed(`Failed to process security artifacts: ${error.message}`);
throw error;
}
}
// Renders the weekly trends report as Markdown, including an ASCII bar
// graph of the vulnerability history and week-over-week trend arrows.
// NOTE(review): the securityReport parameter is accepted but never
// interpolated into the template — confirm whether it should be
// appended.
function generateEnhancedReport(metrics, securityReport) {
// Map a trend keyword to its arrow emoji; unknown values fall back to
// the 'stable' arrow.
const formatTrend = (trend) => {
const icons = {
improving: '📈',
worsening: '📉',
stable: '➡️'
};
return icons[trend] || '➡️';
};
// null means "no merged fix PRs", rendered as N/A rather than 0 hours.
const formatDuration = (hours) => {
if (hours === null) return 'N/A';
return `${hours} hours`;
};
// Render the per-week totals as an ASCII bar graph (one bar per
// history entry, normalized to a height of 5 cells).
const generateTrendGraph = (history, metric) => {
const values = history.map(h => Object.values(h.weekly[metric]).reduce((a, b) => a + b, 0));
const max = Math.max(...values);
const min = Math.min(...values);
const range = max - min;
const height = 5;
return values.map(v => {
const normalized = range ? Math.floor((v - min) / range * height) : 0;
return '█'.repeat(normalized) + '░'.repeat(height - normalized);
}).join(' ');
};
return `## 📊 Weekly Security Metrics Report
### Timeline
- Report Generated: ${new Date().toISOString()}
- Period: Last 7 days
### Vulnerability Trends ${formatTrend(metrics.trends.vulnerabilities.trend)}
\`\`\`
${generateTrendGraph(metrics.history, 'vulnerabilities')}
\`\`\`
${metrics.trends.vulnerabilities.trend !== 'stable' ?
`Change: ${metrics.trends.vulnerabilities.percentage}% from last week` :
'No significant change from last week'}
### Current Status
- Critical: ${metrics.weekly.vulnerabilities.critical}
- High: ${metrics.weekly.vulnerabilities.high}
- Medium: ${metrics.weekly.vulnerabilities.medium}
- Low: ${metrics.weekly.vulnerabilities.low}
### Fix Metrics
- Security PRs Submitted: ${metrics.weekly.fixes.submitted}
- Automated Fixes: ${metrics.weekly.fixes.automated}
- Successfully Merged: ${metrics.weekly.fixes.merged}
- Mean Time to Fix: ${formatDuration(metrics.weekly.meanTimeToFix)}
### Detailed Reports
- [Full Security Report](${process.env.GITHUB_SERVER_URL}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId})
- [Latest Scan Results](${process.env.GITHUB_SERVER_URL}/${context.repo.owner}/${context.repo.repo}/security/advisories)
> This report was automatically generated by the security metrics workflow.`;
}
collectMetrics();
- name: Cleanup
if: always()
shell: bash
run: |
# Remove temporary files but keep the history
rm -f trend-report.md security-trends.svg
echo "Cleaned up temporary files"

View File

@@ -1,488 +0,0 @@
---
# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
name: Security Checks
on:
schedule:
- cron: '0 0 * * *' # Every day at 00:00
workflow_dispatch:
pull_request:
paths:
- '**/package.json'
- '**/package-lock.json'
- '**/yarn.lock'
- '**/pnpm-lock.yaml'
- '**/requirements.txt'
- '**/Dockerfile'
- '**/*.py'
- '**/*.js'
- '**/*.ts'
merge_group:
permissions:
contents: read
actions: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
jobs:
security:
name: Security Analysis
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
security-events: write
issues: write
pull-requests: write
steps:
- name: Check Required Secrets
id: check-secrets
shell: bash
run: |
# Initialize flags
{
echo "run_snyk=false"
echo "run_slack=false"
echo "run_sonarcloud=false"
} >> "$GITHUB_OUTPUT"
# Check secrets
if [ -n "${{ secrets.SNYK_TOKEN }}" ]; then
echo "run_snyk=true" >> "$GITHUB_OUTPUT"
echo "Snyk token available"
else
echo "::warning::SNYK_TOKEN not set - Snyk scans will be skipped"
fi
if [ -n "${{ secrets.SLACK_WEBHOOK }}" ]; then
echo "run_slack=true" >> "$GITHUB_OUTPUT"
echo "Slack webhook available"
else
echo "::warning::SLACK_WEBHOOK not set - Slack notifications will be skipped"
fi
if [ -n "${{ secrets.SONAR_TOKEN }}" ]; then
echo "run_sonarcloud=true" >> "$GITHUB_OUTPUT"
echo "SonarCloud token available"
else
echo "::warning::SONAR_TOKEN not set - SonarCloud analysis will be skipped"
fi
- name: Checkout Repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0 # Full history for better analysis
- name: Run OWASP Dependency Check
uses: dependency-check/Dependency-Check_Action@3102a65fd5f36d0000297576acc56a475b0de98d # main
with:
project: 'GitHub Actions'
path: '.'
format: 'SARIF'
out: 'reports'
args: >
--enableRetired
--enableExperimental
--failOnCVSS 7
- name: Upload OWASP Results
uses: github/codeql-action/upload-sarif@b56ba49b26e50535fa1e7f7db0f4f7b4bf65d80d # v3.28.10
with:
sarif_file: reports/dependency-check-report.sarif
category: owasp-dependency-check
- name: Setup Node.js
if: steps.check-secrets.outputs.run_snyk == 'true'
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
with:
node-version: 'lts/*'
cache: 'npm'
- name: Run Snyk Scan
id: snyk
if: steps.check-secrets.outputs.run_snyk == 'true'
uses: snyk/actions/node@cdb760004ba9ea4d525f2e043745dfe85bb9077e # master
continue-on-error: true # Don't fail the workflow, we'll handle results
env:
SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
with:
args: --all-projects --sarif-file-output=snyk-results.sarif
- name: Upload Snyk Results
if: steps.check-secrets.outputs.run_snyk == 'true'
uses: github/codeql-action/upload-sarif@b56ba49b26e50535fa1e7f7db0f4f7b4bf65d80d # v3.28.10
with:
sarif_file: snyk-results.sarif
category: snyk
- name: Analyze Vulnerabilities
id: vuln-analysis
if: always()
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
const fs = require('fs');
// Parses a SARIF report and tallies findings into severity buckets.
// Returns { critical, high, medium, low } or null when the file is
// missing or cannot be parsed (parse errors are logged, not thrown).
// Severity mapping: Snyk encodes severity in the ruleId string; for
// every other tool it is derived from the SARIF `level`
// (error → high, warning → medium, anything else → low).
async function analyzeSarif(filePath, tool) {
if (!fs.existsSync(filePath)) return null;
try {
const counts = { critical: 0, high: 0, medium: 0, low: 0 };
const sarif = JSON.parse(fs.readFileSync(filePath, 'utf8'));
for (const run of sarif.runs) {
for (const result of run.results ?? []) {
let bucket;
if (tool === 'snyk') {
if (result.ruleId.includes('critical')) bucket = 'critical';
else if (result.ruleId.includes('high')) bucket = 'high';
else if (result.ruleId.includes('medium')) bucket = 'medium';
else bucket = 'low';
} else if (result.level === 'error') {
bucket = 'high';
} else if (result.level === 'warning') {
bucket = 'medium';
} else {
bucket = 'low';
}
counts[bucket] += 1;
}
}
return counts;
} catch (error) {
console.error(`Error analyzing ${tool} results:`, error);
return null;
}
}
// Aggregate per-tool SARIF counts, persist summary/badge/report files,
// and fail the step when any critical or high findings are present.
try {
// Analyze results from different tools
// The ${{ }} expression below is substituted with a literal true/false
// by the workflow renderer before this script executes.
const results = {
owasp: await analyzeSarif('reports/dependency-check-report.sarif', 'owasp'),
snyk: ${{ steps.check-secrets.outputs.run_snyk == 'true' }} ?
await analyzeSarif('snyk-results.sarif', 'snyk') : null
};
// Calculate totals
// `r?.x || 0` tolerates tools that were skipped (null) or failed to parse.
const summary = {
timestamp: new Date().toISOString(),
results,
total: {
critical: Object.values(results).reduce((sum, r) => sum + (r?.critical || 0), 0),
high: Object.values(results).reduce((sum, r) => sum + (r?.high || 0), 0),
medium: Object.values(results).reduce((sum, r) => sum + (r?.medium || 0), 0),
low: Object.values(results).reduce((sum, r) => sum + (r?.low || 0), 0)
}
};
// Save summary
fs.writeFileSync('vulnerability-summary.json', JSON.stringify(summary, null, 2));
// Set outputs for other steps
core.setOutput('critical_count', summary.total.critical);
core.setOutput('high_count', summary.total.high);
// Create/update status badge
// (schemaVersion/label/message/color — presumably a shields.io
// endpoint badge payload; confirm the consumer.)
const badge = {
schemaVersion: 1,
label: 'vulnerabilities',
message: `critical: ${summary.total.critical} high: ${summary.total.high}`,
color: summary.total.critical > 0 ? 'red' :
summary.total.high > 0 ? 'orange' : 'green'
};
fs.writeFileSync('security-badge.json', JSON.stringify(badge));
// Generate markdown report
const report = `## Security Scan Results
### Summary
- Critical: ${summary.total.critical}
- High: ${summary.total.high}
- Medium: ${summary.total.medium}
- Low: ${summary.total.low}
### Tool-specific Results
${Object.entries(results)
.filter(([_, r]) => r)
.map(([tool, r]) => `
#### ${tool.toUpperCase()}
- Critical: ${r.critical}
- High: ${r.high}
- Medium: ${r.medium}
- Low: ${r.low}
`).join('\n')}
`;
fs.writeFileSync('security-report.md', report);
// Write job summary
await core.summary
.addRaw(report)
.write();
// Exit with error if critical/high vulnerabilities found
if (summary.total.critical > 0 || summary.total.high > 0) {
core.setFailed(`Found ${summary.total.critical} critical and ${summary.total.high} high severity vulnerabilities`);
}
} catch (error) {
core.setFailed(`Analysis failed: ${error.message}`);
}
- name: Archive Security Reports
if: always()
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
with:
name: security-reports-${{ github.run_id }}
path: |
reports/
snyk-results.sarif
vulnerability-summary.json
security-report.md
security-badge.json
retention-days: 30
- name: Create Fix PRs
if: always() && (steps.vuln-analysis.outputs.critical_count > 0 || steps.vuln-analysis.outputs.high_count > 0)
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
continue-on-error: true
with:
script: |
const fs = require('fs');
// Creates a branch, applies an automated dependency fix for the given
// vulnerability, pushes it, and opens a labelled PR against main.
// Returns the new PR's HTML URL, or null when there were no changes to
// commit or when any git/API call failed (errors are logged, not
// rethrown, so one failed fix does not abort the rest).
async function createFixPR(vulnerability) {
const branchName = `security/fix-${vulnerability.id}`;
try {
// Create branch
await exec.exec('git', ['checkout', '-b', branchName]);
// Apply fixes based on vulnerability type
if (vulnerability.tool === 'snyk') {
await exec.exec('npx', ['snyk', 'fix']);
} else if (vulnerability.tool === 'owasp') {
// Update dependencies to fixed versions
if (fs.existsSync('package.json')) {
await exec.exec('npm', ['audit', 'fix']);
}
}
// Check if there are changes
// `git status --porcelain` is empty when the fix tooling changed nothing.
const { stdout: status } = await exec.getExecOutput('git', ['status', '--porcelain']);
if (!status) {
console.log('No changes to commit');
return null;
}
// Commit changes
await exec.exec('git', ['config', 'user.name', 'fiximus']);
await exec.exec('git', ['config', 'user.email', 'github-bot@ivuorinen.net']);
await exec.exec('git', ['add', '.']);
await exec.exec('git', ['commit', '-m', `fix: ${vulnerability.title}`]);
await exec.exec('git', ['push', 'origin', branchName]);
// Create PR
const pr = await github.rest.pulls.create({
owner: context.repo.owner,
repo: context.repo.repo,
title: `🔒 Security: ${vulnerability.title}`,
body: generatePRBody(vulnerability),
head: branchName,
base: 'main'
});
// Add labels
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pr.data.number,
labels: ['security-fix', 'automated-pr', 'dependencies']
});
return pr.data.html_url;
} catch (error) {
console.error(`Failed to create fix PR: ${error.message}`);
return null;
}
}
// Renders the Markdown body for an automatically generated security
// fix PR. When `fixes` is empty, a generic description of the
// dependency updates is substituted.
function generatePRBody(vulnerability) {
const { id, severity, tool, fixes } = vulnerability;
const changeNotes = fixes || 'Dependency updates to resolve security vulnerabilities';
return `## Security Fix
### Vulnerability Details
- ID: ${id}
- Severity: ${severity}
- Tool: ${tool}
### Changes Made
${changeNotes}
### Testing
- [ ] Verify fix resolves the vulnerability
- [ ] Run security scan to confirm fix
- [ ] Test affected functionality
### Notes
- This PR was automatically generated
- Please review changes carefully
- Additional manual changes may be needed
> Generated by security workflow`;
}
// Collect high-severity findings from both SARIF reports and open one
// automated fix PR per finding via createFixPR (defined above in this
// script). Errors are logged only — this step must not fail the job.
try {
// Process vulnerabilities from both tools
const vulnFiles = ['snyk-results.sarif', 'reports/dependency-check-report.sarif'];
const fixableVulnerabilities = [];
for (const file of vulnFiles) {
if (fs.existsSync(file)) {
const sarif = JSON.parse(fs.readFileSync(file, 'utf8'));
const tool = file.includes('snyk') ? 'snyk' : 'owasp';
sarif.runs.forEach(run => {
run.results?.forEach(result => {
// NOTE(review): SARIF 2.1.0 defines `level` as none/note/warning/error,
// so the 'critical' comparison likely never matches — confirm intent.
if (result.level === 'error' || result.level === 'critical') {
fixableVulnerabilities.push({
id: result.ruleId,
title: result.message.text,
severity: result.level,
tool,
fixes: result.fixes
});
}
});
});
}
}
// Create PRs for fixable vulnerabilities
// Sequential on purpose: each createFixPR call mutates the working
// tree and current branch.
const prUrls = [];
for (const vuln of fixableVulnerabilities) {
const prUrl = await createFixPR(vuln);
if (prUrl) prUrls.push(prUrl);
}
core.setOutput('fix_prs', prUrls.join('\n'));
if (prUrls.length > 0) {
console.log(`Created ${prUrls.length} fix PRs:`);
prUrls.forEach(url => console.log(`- ${url}`));
}
} catch (error) {
console.error('Failed to process vulnerabilities:', error);
}
- name: Notify on Failure
if: failure()
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
// Posts failure notifications: opens a GitHub issue summarizing the
// failed security checks (including vulnerability counts when the
// summary artifact exists) and pings Slack when SLACK_WEBHOOK is set.
const fs = require('fs');
try {
const { repo, owner } = context.repo;
const runUrl = `${process.env.GITHUB_SERVER_URL}/${owner}/${repo}/actions/runs/${process.env.GITHUB_RUN_ID}`;
// Read vulnerability summary if available
let vulnSummary = '';
if (fs.existsSync('vulnerability-summary.json')) {
const summary = JSON.parse(fs.readFileSync('vulnerability-summary.json', 'utf8'));
vulnSummary = `
### Vulnerability Counts
- Critical: ${summary.total.critical}
- High: ${summary.total.high}
- Medium: ${summary.total.medium}
- Low: ${summary.total.low}
`;
}
const message = `## 🚨 Security Check Failure
Security checks have failed in the workflow run.
### Details
- Run: [View Results](${runUrl})
- Timestamp: ${new Date().toISOString()}
${vulnSummary}
### Reports
Security scan reports are available in the workflow artifacts.
### Next Steps
1. Review the security reports
2. Address identified vulnerabilities
3. Re-run security checks
> This issue was automatically created by the security workflow.`;
// Create GitHub issue
const issue = await github.rest.issues.create({
owner,
repo,
title: `🚨 Security Check Failure - ${new Date().toISOString().split('T')[0]}`,
body: message,
labels: ['security', 'automated-issue', 'high-priority'],
assignees: ['ivuorinen']
});
// Send Slack notification if configured
if (process.env.SLACK_WEBHOOK) {
// FIX: `require('node-fetch')` is not installed in the
// actions/github-script runtime and threw at runtime, so the Slack
// notification never fired. Node 18+ (github-script v7 runs on
// Node 20) ships a global fetch — use it directly.
await fetch(process.env.SLACK_WEBHOOK, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
text: `🚨 Security checks failed in ${owner}/${repo}\nDetails: ${issue.data.html_url}`
})
});
}
} catch (error) {
console.error('Failed to send notifications:', error);
core.setFailed(`Notification failed: ${error.message}`);
}
- name: Cleanup Old Issues
if: always()
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
// Closes all but the three newest open automated security issues,
// leaving an explanatory comment on each closed one. Failures are
// logged only so cleanup never fails the job.
try {
const { repo, owner } = context.repo;
// Newest first, so slice(3) below keeps the three most recent.
const oldIssues = await github.rest.issues.listForRepo({
owner,
repo,
state: 'open',
labels: 'automated-issue,security',
sort: 'created',
direction: 'desc'
});
// Keep only the latest 3 issues
const issuesToClose = oldIssues.data.slice(3);
for (const issue of issuesToClose) {
await github.rest.issues.update({
owner,
repo,
issue_number: issue.number,
state: 'closed',
state_reason: 'completed'
});
await github.rest.issues.createComment({
owner,
repo,
issue_number: issue.number,
body: '🔒 Auto-closing this issue as newer security check results are available.'
});
}
} catch (error) {
console.error('Failed to cleanup old issues:', error);
}