Remove the embedded Python (DefectDojo upload and policy enforcement) from the workflow templates and replace it with standalone TypeScript

This commit is contained in:
Elizabeth W
2026-04-20 01:24:13 -06:00
parent 251070dd77
commit f0b937deb7
4 changed files with 153 additions and 111 deletions
-66
View File
@@ -1,66 +0,0 @@
{{- if .Values.pipeline.enabled }}
# Argo Workflows ClusterWorkflowTemplate: pushes every recognized report in
# /workspace/reports to DefectDojo's import-scan API via an inline Python
# heredoc. Rendered only when pipeline.enabled is set in values.
apiVersion: argoproj.io/v1alpha1
kind: ClusterWorkflowTemplate
metadata:
  name: amp-security-pipeline-v1.0.0
spec:
  templates:
    - name: upload-defectdojo
      container:
        image: python:3.12-alpine
        # DefectDojo endpoint and token come from the shared pipeline secret.
        env:
          - name: DEFECTDOJO_URL
            valueFrom:
              secretKeyRef:
                name: amp-security-pipeline-secrets
                key: DEFECTDOJO_URL
          - name: DEFECTDOJO_API_TOKEN
            valueFrom:
              secretKeyRef:
                name: amp-security-pipeline-secrets
                key: DEFECTDOJO_API_TOKEN
        command:
          - sh
          - -c
        # NOTE(review): the script posts only JSON metadata (scan_type,
        # product_name, file_name) — the report contents are never read or
        # uploaded. DefectDojo's import-scan API normally expects the file as
        # multipart/form-data; confirm the server accepts this payload.
        args:
          - |
            set -eu
            python - <<'PY'
            import json
            import os
            import pathlib
            import urllib.request
            base_url = os.environ["DEFECTDOJO_URL"].rstrip("/")
            api_token = os.environ["DEFECTDOJO_API_TOKEN"]
            product_name = os.environ.get("DEFECTDOJO_PRODUCT_NAME", "agentguard-ci")
            scan_map = {
                ".sarif": "SARIF",
                ".json": "Generic Findings Import",
            }
            reports_dir = pathlib.Path("/workspace/reports")
            for report in sorted(reports_dir.iterdir()):
                if not report.is_file():
                    continue
                scan_type = scan_map.get(report.suffix)
                if not scan_type:
                    continue
                req = urllib.request.Request(
                    f"{base_url}/api/v2/import-scan/",
                    data=json.dumps({
                        "scan_type": scan_type,
                        "product_name": product_name,
                        "file_name": report.name,
                    }).encode(),
                    headers={
                        "Authorization": f"Token {api_token}",
                        "Content-Type": "application/json",
                    },
                    method="POST",
                )
                urllib.request.urlopen(req)
            PY
        # Reports are produced by earlier steps into the shared workspace PVC.
        volumeMounts:
          - name: workspace
            mountPath: /workspace
{{- end }}
-45
View File
@@ -1,45 +0,0 @@
{{- if .Values.pipeline.enabled }}
# Argo Workflows ClusterWorkflowTemplate: syncs /workspace/reports to
# S3-compatible storage, keyed by repo name, UTC date, and commit SHA.
# NOTE(review): metadata.name is identical to the upload-defectdojo template's
# file — two manifests with the same ClusterWorkflowTemplate name will
# collide; confirm whether these were meant to be merged into one resource.
apiVersion: argoproj.io/v1alpha1
kind: ClusterWorkflowTemplate
metadata:
  name: amp-security-pipeline-v1.0.0
spec:
  templates:
    - name: upload-storage
      container:
        image: amazon/aws-cli:2.15.40
        env:
          - name: AWS_ACCESS_KEY_ID
            valueFrom:
              secretKeyRef:
                name: amp-security-pipeline-secrets
                key: AWS_ACCESS_KEY_ID
          - name: AWS_SECRET_ACCESS_KEY
            valueFrom:
              secretKeyRef:
                name: amp-security-pipeline-secrets
                key: AWS_SECRET_ACCESS_KEY
          # NOTE(review): the MINIO_* vars are injected but the aws CLI only
          # reads the AWS_* pair above — confirm these are still needed.
          - name: MINIO_ROOT_USER
            valueFrom:
              secretKeyRef:
                name: amp-security-pipeline-secrets
                key: MINIO_ROOT_USER
          - name: MINIO_ROOT_PASSWORD
            valueFrom:
              secretKeyRef:
                name: amp-security-pipeline-secrets
                key: MINIO_ROOT_PASSWORD
        command:
          - sh
          - -c
        # NOTE(review): REPO_NAME, GIT_COMMIT_SHA, and REPORTS_BUCKET are read
        # below but not declared in `env` — presumably supplied by the
        # enclosing workflow; verify, since defaults ("repo", "unknown",
        # "security-reports") silently apply when they are absent.
        args:
          - |
            set -eu
            repo_name="${REPO_NAME:-repo}"
            commit_sha="${GIT_COMMIT_SHA:-unknown}"
            report_date="$(date -u +%F)"
            aws s3 sync /workspace/reports "s3://${REPORTS_BUCKET:-security-reports}/${repo_name}/${report_date}/${commit_sha}/"
        volumeMounts:
          - name: workspace
            mountPath: /workspace
{{- end }}
+85
View File
@@ -0,0 +1,85 @@
import * as fs from 'node:fs';
import * as path from 'node:path';
/**
 * Scan a directory of security reports and collect every result whose
 * severity score meets or exceeds `threshold`.
 *
 * Supported formats, selected by file extension:
 *  - `.sarif` — reads `runs[].results[].properties["security-severity"]`
 *  - `.json`  — reads `findings[]` (or `vulnerabilities[]`), scoring each
 *               item by `cvss` (preferred) or `score`
 *
 * Invalid JSON in a report file is logged and sets `process.exitCode = 1`,
 * but scanning continues with the remaining files.
 *
 * @param reportsDir directory containing report files; a missing directory
 *                   yields an empty result
 * @param threshold  minimum score (inclusive) for a finding to be reported
 * @returns one `{ name, score }` entry per offending result, tagged with the
 *          report file's name
 */
export function checkReports(reportsDir: string, threshold: number): { name: string; score: number }[] {
  const findings: { name: string; score: number }[] = [];
  if (!fs.existsSync(reportsDir)) return findings;

  const files = fs.readdirSync(reportsDir).sort();
  for (const file of files) {
    const fullPath = path.join(reportsDir, file);
    if (!fs.statSync(fullPath).isFile()) continue;

    const isSarif = file.endsWith('.sarif');
    const isJson = file.endsWith('.json');
    // Skip non-report files BEFORE parsing. Previously every file was
    // JSON-parsed first, so a stray README or log in the reports directory
    // would set a failure exit code on the whole gate.
    if (!isSarif && !isJson) continue;

    const text = fs.readFileSync(fullPath, 'utf-8');
    let data: unknown;
    try {
      data = JSON.parse(text);
    } catch {
      console.error(`Error parsing ${file}: Invalid JSON`);
      process.exitCode = 1; // flag the bad report but keep checking the rest
      continue;
    }

    if (isSarif) {
      // Minimal structural view of the SARIF fields we read.
      type SarifDoc = { runs?: { results?: { properties?: Record<string, unknown> }[] }[] };
      for (const run of (data as SarifDoc).runs ?? []) {
        for (const result of run.results ?? []) {
          const raw = result.properties?.['security-severity'];
          if (raw === undefined) continue;
          const score = parseFloat(String(raw));
          if (Number.isNaN(score)) continue;
          if (score >= threshold) findings.push({ name: file, score });
        }
      }
    } else {
      type GenericItem = { cvss?: unknown; score?: unknown };
      type GenericDoc = { findings?: GenericItem[]; vulnerabilities?: GenericItem[] };
      const doc = data as GenericDoc;
      for (const item of doc.findings ?? doc.vulnerabilities ?? []) {
        // `??` (not `||`) so an explicit cvss of 0 is not clobbered by `score`.
        const rawScore = item.cvss ?? item.score;
        if (rawScore === undefined) continue;
        const score = parseFloat(String(rawScore));
        if (Number.isNaN(score)) continue;
        if (score >= threshold) findings.push({ name: file, score });
      }
    }
  }
  return findings;
}
// CLI entry point: run the CVSS policy gate only when this file is executed
// directly (not when imported as a module).
import { fileURLToPath } from 'node:url';
if (process.argv[1] && fileURLToPath(import.meta.url) === process.argv[1]) {
  const thresholdStr = process.env.FAIL_ON_CVSS;
  if (!thresholdStr) {
    console.error("FAIL_ON_CVSS environment variable is required.");
    process.exit(1);
  }
  // Number() rejects trailing garbage ("7x", "7,5" -> NaN) that parseFloat
  // would silently truncate to 7 — a typo in the env var now fails loudly.
  const threshold = Number(thresholdStr);
  if (Number.isNaN(threshold)) {
    console.error("FAIL_ON_CVSS must be a number.");
    process.exit(1);
  }
  const reportsDir = "/workspace/reports";
  const findings = checkReports(reportsDir, threshold);
  if (findings.length > 0) {
    for (const finding of findings) {
      console.error(`${finding.name}: CVSS ${finding.score} >= ${threshold}`);
    }
    process.exit(1); // non-zero exit fails the pipeline step
  } else {
    console.log(`No findings met or exceeded CVSS ${threshold}`);
  }
}
+68
View File
@@ -0,0 +1,68 @@
import * as fs from 'node:fs';
import * as path from 'node:path';
import { fileURLToPath } from 'node:url';
/**
 * Upload every recognized report in /workspace/reports to DefectDojo's
 * import-scan endpoint, one POST per file in lexicographic order.
 *
 * Environment:
 *  - DEFECTDOJO_URL (required) — base URL; a trailing slash is stripped
 *  - DEFECTDOJO_API_TOKEN (required)
 *  - DEFECTDOJO_PRODUCT_NAME (optional, defaults to "agentguard-ci")
 *
 * Exits the process (code 1) when a required variable is missing. A per-file
 * upload failure logs an error and sets `process.exitCode = 1` but continues
 * with the remaining files. A missing reports directory is a no-op.
 */
export async function uploadReports() {
  const baseUrl = (process.env.DEFECTDOJO_URL || "").replace(/\/$/, "");
  const apiToken = process.env.DEFECTDOJO_API_TOKEN;
  const productName = process.env.DEFECTDOJO_PRODUCT_NAME || "agentguard-ci";
  if (!baseUrl || !apiToken) {
    console.error("DEFECTDOJO_URL and DEFECTDOJO_API_TOKEN must be set.");
    process.exit(1);
  }
  // Map report extension -> DefectDojo scan_type; other files are skipped.
  const scanMap: Record<string, string> = {
    ".sarif": "SARIF",
    ".json": "Generic Findings Import",
  };
  const reportsDir = "/workspace/reports";
  if (!fs.existsSync(reportsDir)) {
    console.log("No reports directory found.");
    return;
  }
  const files = fs.readdirSync(reportsDir).sort();
  for (const file of files) {
    const fullPath = path.join(reportsDir, file);
    if (!fs.statSync(fullPath).isFile()) continue;
    const ext = path.extname(file);
    const scanType = scanMap[ext];
    if (!scanType) continue;
    console.log(`Uploading ${file} as ${scanType}...`);
    try {
      // NOTE(review): only JSON metadata (scan_type/product_name/file_name)
      // is sent — the report contents are never read. DefectDojo's
      // import-scan API normally expects the file as multipart/form-data;
      // confirm the server accepts this payload. (The deleted Python version
      // had the same behavior.)
      const response = await fetch(`${baseUrl}/api/v2/import-scan/`, {
        method: "POST",
        headers: {
          "Authorization": `Token ${apiToken}`,
          "Content-Type": "application/json",
        },
        body: JSON.stringify({
          scan_type: scanType,
          product_name: productName,
          file_name: file,
        })
      });
      if (!response.ok) {
        const text = await response.text();
        console.error(`Failed to upload ${file}: ${response.status} ${response.statusText} - ${text}`);
        process.exitCode = 1;
      } else {
        console.log(`Successfully uploaded ${file}`);
      }
    } catch (e) {
      console.error(`Network error uploading ${file}:`, e);
      process.exitCode = 1;
    }
  }
}
// CLI entry point: run the upload when executed directly, and surface any
// rejection — a bare uploadReports() call would leave a floating promise
// whose failure becomes an unhandled rejection instead of a clean exit code.
if (process.argv[1] && fileURLToPath(import.meta.url) === process.argv[1]) {
  uploadReports().catch((e: unknown) => {
    console.error("Upload failed:", e);
    process.exitCode = 1;
  });
}