---
# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
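# Runs dependency, static-analysis, and secret scans against pull requests and
# posts a consolidated summary comment on the PR.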
name: PR Security Analysis
on:
pull_request:
paths:
- '**/package.json'
- '**/package-lock.json'
- '**/yarn.lock'
- '**/pnpm-lock.yaml'
- '**/requirements.txt'
- '**/Dockerfile'
- '**/*.py'
- '**/*.js'
- '**/*.ts'
- '**/*.yml'
- '**/*.yaml'
- '.github/workflows/**'
permissions:
contents: read
pull-requests: write
issues: write
actions: read
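# Cancel any in-progress run for the same PR when new commits are pushed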
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
jobs:
security-analysis:
name: Security Analysis
runs-on: ubuntu-latest
steps:
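# Check out the PR head so scanners analyze the proposed changes; full history
# (fetch-depth: 0) keeps earlier commits available for diffing against the base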
- name: Checkout PR
uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v6-beta
with:
fetch-depth: 0
repository: ${{ github.event.pull_request.head.repo.full_name }}
ref: ${{ github.event.pull_request.head.sha }}
- name: Fetch PR Base
run: |
set -euo pipefail
# Fetch the base ref from base repository with authentication (works for private repos and forked PRs)
# Using ref instead of SHA because git fetch requires ref names, not raw commit IDs
# Use authenticated URL to avoid 403/404 on private repositories
git fetch --no-tags --depth=1 \
"https://x-access-token:${{ github.token }}@github.com/${{ github.event.pull_request.base.repo.full_name }}" \
${{ github.event.pull_request.base.ref }}:refs/remotes/origin-base/${{ github.event.pull_request.base.ref }}
# Record the base commit for diffing without checking it out
# Keep PR head checked out so scanners analyze the new changes
BASE_REF="refs/remotes/origin-base/${{ github.event.pull_request.base.ref }}"
echo "BASE_REF=${BASE_REF}" >> $GITHUB_ENV
echo "Base ref: ${BASE_REF}"
git log -1 --oneline "${BASE_REF}"
- name: OWASP Dependency Check
# Run only on pull_request, never pull_request_target, so third-party actions
# are not executed against untrusted fork code with elevated permissions
if: github.event_name == 'pull_request'
uses: dependency-check/Dependency-Check_Action@3102a65fd5f36d0000297576acc56a475b0de98d # main
with:
project: 'PR Security Analysis'
path: '.'
format: 'JSON'
out: 'reports'
args: >
--enableRetired --enableExperimental --failOnCVSS 0
continue-on-error: true
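# Semgrep's 'auto' config selects rules for the detected languages; the SARIF
# report (semgrep.sarif) is parsed by the analysis step below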
- name: Semgrep Static Analysis
uses: semgrep/semgrep-action@713efdd345f3035192eaa63f56867b88e63e4e5d # v1
with:
config: 'auto'
generateSarif: 'true'
env:
SEMGREP_APP_TOKEN: ${{ github.event_name != 'pull_request_target' && secrets.SEMGREP_APP_TOKEN || '' }}
continue-on-error: true
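# Scan only the commits introduced by this PR (BASE_REF..HEAD) for verified
# secrets; NDJSON results are written to /tmp for the analysis step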
- name: TruffleHog Secret Scan
uses: trufflesecurity/trufflehog@0f58ae7c5036094a1e3e750d18772af92821b503
with:
path: ./
base: ${{ env.BASE_REF }}
head: HEAD
extra_args: --debug --only-verified --json --output /tmp/trufflehog_output.json
continue-on-error: true
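# Aggregate all scanner outputs into one PR comment and expose the number of
# critical findings as a step output for the gate step below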
- name: Analyze Security Results
id: analyze
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |-
const fs = require('fs');
const path = require('path');
const findings = {
permissions: [],
actions: [],
secrets: [],
vulnerabilities: [],
dependencies: []
};
// Analyze GitHub Actions permission changes
const { execSync } = require('child_process');
const baseRef = process.env.BASE_REF;
try {
const changedWorkflows = execSync(
`git diff --name-only ${baseRef}...HEAD | grep -E "\\.github/workflows/.*\\.ya?ml$" || true`,
{ encoding: 'utf8' }
).trim().split('\n').filter(Boolean);
for (const workflow of changedWorkflows) {
if (!workflow) continue;
try {
const oldContent = execSync(`git show ${baseRef}:${workflow}`, { encoding: 'utf8' });
const newContent = fs.readFileSync(workflow, 'utf8');
// Simple permission extraction (could be enhanced with YAML parsing)
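// e.g. for "permissions:\n  contents: read" the capture group is the indented
// block, which is compared between base and head as a raw string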
const oldPerms = oldContent.match(/permissions:\s*\n([\s\S]*?)(?=\n\w|\n$|$)/);
const newPerms = newContent.match(/permissions:\s*\n([\s\S]*?)(?=\n\w|\n$|$)/);
if (oldPerms?.[1] !== newPerms?.[1]) {
findings.permissions.push({
file: workflow,
old: oldPerms?.[1]?.trim() || 'None',
new: newPerms?.[1]?.trim() || 'None'
});
}
// Check for new actions
const oldActions = [...oldContent.matchAll(/uses:\s*([^\s\n]+)/g)].map(m => m[1]);
const newActions = [...newContent.matchAll(/uses:\s*([^\s\n]+)/g)].map(m => m[1]);
const addedActions = newActions.filter(action => !oldActions.includes(action));
if (addedActions.length > 0) {
findings.actions.push({
file: workflow,
added: addedActions
});
}
} catch (error) {
console.log(`Could not analyze ${workflow}: ${error.message}`);
}
}
} catch (error) {
console.log('Could not diff workflow files:', error.message);
}
// Parse OWASP Dependency Check results
try {
const owaspResults = JSON.parse(fs.readFileSync('reports/dependency-check-report.json', 'utf8'));
if (owaspResults.dependencies) {
owaspResults.dependencies.forEach(dep => {
if (dep.vulnerabilities && dep.vulnerabilities.length > 0) {
dep.vulnerabilities.forEach(vuln => {
findings.dependencies.push({
file: dep.fileName || 'Unknown',
cve: vuln.name,
severity: vuln.severity || 'Unknown',
description: vuln.description || 'No description'
});
});
}
});
}
} catch (error) {
console.log('No OWASP results found');
}
// Parse Semgrep SARIF results
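// SARIF results live under runs[0].results; each entry carries a ruleId, a level,
// and a physicalLocation, while rule metadata sits under runs[0].tool.driver.rules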
try {
if (fs.existsSync('semgrep.sarif')) {
const sarifContent = JSON.parse(fs.readFileSync('semgrep.sarif', 'utf8'));
if (sarifContent.runs && sarifContent.runs[0] && sarifContent.runs[0].results) {
const run = sarifContent.runs[0];
const rules = run.tool?.driver?.rules || [];
run.results.forEach(result => {
const rule = rules.find(r => r.id === result.ruleId);
findings.vulnerabilities.push({
file: result.locations?.[0]?.physicalLocation?.artifactLocation?.uri || 'Unknown',
line: result.locations?.[0]?.physicalLocation?.region?.startLine || 0,
rule: result.ruleId,
severity: result.level?.toUpperCase() || 'INFO',
message: result.message?.text || rule?.shortDescription?.text || 'No description'
});
});
}
}
} catch (error) {
console.log('Could not parse Semgrep SARIF results:', error.message);
}
// Parse TruffleHog results (NDJSON format - one JSON object per line)
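// Illustrative line shape (abbreviated to the fields read below):
// {"SourceMetadata":{"Data":{"Git":{"file":"app.py","line":12}}},"DetectorName":"AWS","Verified":true}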
try {
// Read the report directly so a missing file falls through to the catch below
const truffleOutput = fs.readFileSync('/tmp/trufflehog_output.json', 'utf8');
const truffleLines = truffleOutput.trim().split('\n').filter(line => line.length > 0);
truffleLines.forEach((line, index) => {
try {
const result = JSON.parse(line);
findings.secrets.push({
// Location metadata is nested under Git or Filesystem depending on the scan source
file: result.SourceMetadata?.Data?.Git?.file || result.SourceMetadata?.Data?.Filesystem?.file || 'Unknown',
line: result.SourceMetadata?.Data?.Git?.line || result.SourceMetadata?.Data?.Filesystem?.line || 0,
detector: result.DetectorName,
verified: result.Verified || false
});
} catch (parseError) {
// Log only safe metadata to avoid leaking secrets
console.log('Failed to parse TruffleHog line at index', index, '- Error:', parseError.message, '(line length:', line.length, 'chars)');
}
});
if (truffleLines.length === 0) {
console.log('No secrets detected');
}
} catch (error) {
console.log('No TruffleHog output file found');
}
// Generate clean comment sections
const sections = [];
// GitHub Actions Permissions Changes
if (findings.permissions.length > 0) {
const permSection = ['## 🔐 GitHub Actions Permissions Changes'];
findings.permissions.forEach(change => {
permSection.push(`**${change.file}**:`);
permSection.push('```diff');
permSection.push(`- ${change.old}`);
permSection.push(`+ ${change.new}`);
permSection.push('```');
});
sections.push(permSection.join('\n'));
}
// New/Changed Actions
if (findings.actions.length > 0) {
const actionSection = ['## 🎯 New GitHub Actions'];
findings.actions.forEach(change => {
actionSection.push(`**${change.file}**:`);
change.added.forEach(action => {
actionSection.push(`- \`${action}\``);
});
});
sections.push(actionSection.join('\n'));
}
// Secrets Detected
if (findings.secrets.length > 0) {
const secretSection = ['## 🔑 Secrets Detected'];
findings.secrets.forEach(secret => {
const verified = secret.verified ? '🚨 **VERIFIED**' : '⚠️ Potential';
secretSection.push(`- ${verified} ${secret.detector} in \`${secret.file}:${secret.line}\``);
});
sections.push(secretSection.join('\n'));
}
// Security Vulnerabilities
if (findings.vulnerabilities.length > 0) {
const vulnSection = ['## ⚠️ Security Vulnerabilities'];
const groupedBySeverity = findings.vulnerabilities.reduce((acc, vuln) => {
const sev = vuln.severity.toUpperCase();
if (!acc[sev]) acc[sev] = [];
acc[sev].push(vuln);
return acc;
}, {});
['ERROR', 'WARNING', 'INFO'].forEach(severity => {
if (groupedBySeverity[severity]) {
vulnSection.push(`\n**${severity} Severity:**`);
groupedBySeverity[severity].forEach(vuln => {
vulnSection.push(`- \`${vuln.file}:${vuln.line}\` - ${vuln.message}`);
vulnSection.push(` - Rule: \`${vuln.rule}\``);
});
}
});
sections.push(vulnSection.join('\n'));
}
// Dependency Issues
if (findings.dependencies.length > 0) {
const depSection = ['## 📦 Dependency Vulnerabilities'];
const groupedBySeverity = findings.dependencies.reduce((acc, dep) => {
const sev = dep.severity.toUpperCase();
if (!acc[sev]) acc[sev] = [];
acc[sev].push(dep);
return acc;
}, {});
['CRITICAL', 'HIGH', 'MEDIUM', 'LOW'].forEach(severity => {
if (groupedBySeverity[severity]) {
depSection.push(`\n**${severity} Severity:**`);
groupedBySeverity[severity].forEach(dep => {
depSection.push(`- **${dep.cve}** in \`${dep.file}\``);
depSection.push(` - ${dep.description.substring(0, 100)}...`);
});
}
});
sections.push(depSection.join('\n'));
}
// Count critical issues for output
const criticalCount =
findings.secrets.filter(s => s.verified).length +
(findings.vulnerabilities.filter(v => v.severity.toUpperCase() === 'ERROR').length || 0) +
(findings.dependencies.filter(d => d.severity.toUpperCase() === 'CRITICAL').length || 0);
// Export critical count as output
core.setOutput('critical_issues', criticalCount.toString());
// Generate final comment
let comment = '## ✅ Security Analysis\n\n';
if (sections.length === 0) {
comment += 'No security issues detected in this PR.';
} else {
comment += sections.join('\n\n');
}
// Find existing security comment
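// Updating the previous bot comment in place keeps the PR from accumulating one
// comment per push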
const { data: comments } = await github.rest.issues.listComments({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number
});
const existingComment = comments.find(existing =>
existing.body.includes('Security Analysis') ||
existing.body.includes('🔐 GitHub Actions Permissions')
);
if (existingComment) {
// Update existing comment
await github.rest.issues.updateComment({
owner: context.repo.owner,
repo: context.repo.repo,
comment_id: existingComment.id,
body: comment
});
} else {
// Create new comment
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
body: comment
});
}
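# Fail the job if the analysis reported verified secrets, ERROR-level static
# analysis findings, or CRITICAL dependency vulnerabilities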
- name: Check Critical Issues
if: always()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
CRITICAL_COUNT: ${{ steps.analyze.outputs.critical_issues || '0' }}
with:
script: |-
const criticalCount = parseInt(process.env.CRITICAL_COUNT || '0', 10);
if (criticalCount > 0) {
core.setFailed(`Found ${criticalCount} critical security issue(s). Please review and address them before merging.`);
} else {
console.log('No critical security issues found.');
}