Add weekly vulnerability report upload feature
Implements a comprehensive system for uploading and processing weekly vulnerability reports that automatically splits multiple CVE IDs in a single cell into separate rows for easier filtering and analysis.

Backend Changes:
- Add weekly_reports table with migration
- Create Excel processor helper using Python child_process
- Implement API routes for upload, list, download, delete
- Mount routes in server.js after multer initialization
- Move split_cve_report.py to backend/scripts/

Frontend Changes:
- Add WeeklyReportModal component with phase-based UI
- Add "Weekly Report" button next to NVD Sync
- Integrate modal into App.js with state management
- Display existing reports with current report indicator
- Download buttons for original and processed files

Features:
- Upload .xlsx files (editor/admin only)
- Automatic CVE ID splitting via Python script
- Store metadata in database + files on filesystem
- Auto-archive previous reports (mark one as current)
- Download both original and processed versions
- Audit logging for all operations
- Security: file validation, auth checks, path sanitization

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
93
backend/helpers/excelProcessor.js
Normal file
93
backend/helpers/excelProcessor.js
Normal file
@@ -0,0 +1,93 @@
|
||||
const { spawn } = require('child_process');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
|
||||
/**
|
||||
* Process vulnerability report Excel file by splitting CVE IDs into separate rows
|
||||
* @param {string} inputPath - Path to original Excel file
|
||||
* @param {string} outputPath - Path for processed Excel file
|
||||
* @returns {Promise<{original_rows: number, processed_rows: number, output_path: string}>}
|
||||
*/
|
||||
/**
 * Process a vulnerability report Excel file by splitting CVE IDs into separate rows.
 *
 * Spawns the bundled Python script (scripts/split_cve_report.py) and parses its
 * stdout for the "Original rows:" / "New rows:" counters the script prints.
 *
 * @param {string} inputPath - Path to the original Excel (.xlsx) file.
 * @param {string} outputPath - Path where the processed Excel file will be written.
 * @returns {Promise<{original_rows: number, processed_rows: number, output_path: string}>}
 *   Resolves with the row counts and the output path; rejects with a
 *   human-readable Error on any failure (missing script/input, Python missing,
 *   script error, timeout, or unparseable output).
 */
function processVulnerabilityReport(inputPath, outputPath) {
  return new Promise((resolve, reject) => {
    const scriptPath = path.join(__dirname, '..', 'scripts', 'split_cve_report.py');

    // Verify script exists before spawning anything.
    if (!fs.existsSync(scriptPath)) {
      return reject(new Error(`Python script not found: ${scriptPath}`));
    }

    // Verify input file exists.
    if (!fs.existsSync(inputPath)) {
      return reject(new Error(`Input file not found: ${inputPath}`));
    }

    const python = spawn('python3', [scriptPath, inputPath, outputPath]);

    let stdout = '';
    let stderr = '';
    let settled = false;

    // Remove a partially written output file so failures never leave a stale
    // artifact behind for later downloads.
    const cleanupOutput = () => {
      try {
        if (fs.existsSync(outputPath)) fs.unlinkSync(outputPath);
      } catch (_) {
        // best-effort cleanup only
      }
    };

    // Single rejection path; the settled flag prevents a double settle when
    // both 'error' and 'close' fire, or when the timeout races the exit.
    const fail = (err) => {
      if (settled) return;
      settled = true;
      cleanupOutput();
      reject(err);
    };

    // 30 second timeout.
    const timeout = setTimeout(() => {
      // SIGKILL: a wedged interpreter (e.g. stuck in native pandas code) can
      // ignore the default SIGTERM.
      python.kill('SIGKILL');
      fail(new Error('Processing timed out. File may be too large or corrupted.'));
    }, 30000);

    python.stdout.on('data', (data) => {
      stdout += data.toString();
    });

    python.stderr.on('data', (data) => {
      stderr += data.toString();
    });

    python.on('close', (code) => {
      clearTimeout(timeout);

      if (settled) return;

      if (code !== 0) {
        // Translate known Python error messages into friendlier ones.
        if (stderr.includes('Sheet') && stderr.includes('not found')) {
          return fail(new Error('Invalid Excel file. Expected "Vulnerabilities" sheet with "CVE ID" column.'));
        }
        if (stderr.includes('pandas') || stderr.includes('openpyxl')) {
          return fail(new Error('Python dependencies missing. Run: pip3 install pandas openpyxl'));
        }
        return fail(new Error(`Python script failed: ${stderr || 'Unknown error'}`));
      }

      // Parse stdout for row counts emitted by the script.
      const originalMatch = stdout.match(/Original rows:\s*(\d+)/);
      const newMatch = stdout.match(/New rows:\s*(\d+)/);

      if (!originalMatch || !newMatch) {
        return fail(new Error('Failed to parse row counts from Python output'));
      }

      // Verify the output file was actually created.
      if (!fs.existsSync(outputPath)) {
        return fail(new Error('Processed file was not created'));
      }

      settled = true;
      resolve({
        original_rows: Number.parseInt(originalMatch[1], 10),
        processed_rows: Number.parseInt(newMatch[1], 10),
        output_path: outputPath
      });
    });

    python.on('error', (err) => {
      clearTimeout(timeout);
      if (err.code === 'ENOENT') {
        fail(new Error('Python 3 is required but not found. Please install Python.'));
      } else {
        fail(err);
      }
    });
  });
}
|
||||
|
||||
module.exports = { processVulnerabilityReport };
|
||||
59
backend/migrations/add_weekly_reports_table.js
Normal file
59
backend/migrations/add_weekly_reports_table.js
Normal file
@@ -0,0 +1,59 @@
|
||||
// Migration: Add weekly_reports table for vulnerability report uploads

const sqlite3 = require('sqlite3').verbose();
const path = require('path');

const dbPath = path.join(__dirname, '..', 'cve_database.db');
const db = new sqlite3.Database(dbPath);

console.log('Running migration: add_weekly_reports_table');

/**
 * Execute one migration statement.
 * Logs `errLabel` and aborts the process on failure; invokes `onSuccess`
 * otherwise. Statement order is guaranteed by db.serialize() below.
 */
function step(sql, errLabel, onSuccess) {
  db.run(sql, (err) => {
    if (err) {
      console.error(errLabel, err);
      process.exit(1);
    }
    onSuccess();
  });
}

db.serialize(() => {
  step(
    `
    CREATE TABLE IF NOT EXISTS weekly_reports (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      upload_date DATE NOT NULL,
      week_label VARCHAR(50),
      original_filename VARCHAR(255),
      processed_filename VARCHAR(255),
      original_file_path VARCHAR(500),
      processed_file_path VARCHAR(500),
      row_count_original INTEGER,
      row_count_processed INTEGER,
      uploaded_by INTEGER,
      uploaded_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
      is_current BOOLEAN DEFAULT 0,
      FOREIGN KEY (uploaded_by) REFERENCES users(id)
    )
    `,
    'Error creating weekly_reports table:',
    () => console.log('✓ Created weekly_reports table')
  );

  step(
    `
    CREATE INDEX IF NOT EXISTS idx_weekly_reports_date
    ON weekly_reports(upload_date DESC)
    `,
    'Error creating date index:',
    () => console.log('✓ Created index on upload_date')
  );

  step(
    `
    CREATE INDEX IF NOT EXISTS idx_weekly_reports_current
    ON weekly_reports(is_current)
    `,
    'Error creating current index:',
    () => {
      console.log('✓ Created index on is_current');
      console.log('\nMigration completed successfully!');
      db.close();
    }
  );
});
|
||||
261
backend/routes/weeklyReports.js
Normal file
261
backend/routes/weeklyReports.js
Normal file
@@ -0,0 +1,261 @@
|
||||
const express = require('express');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const { requireAuth, requireRole } = require('../middleware/auth');
|
||||
const logAudit = require('../helpers/auditLog');
|
||||
const { processVulnerabilityReport } = require('../helpers/excelProcessor');
|
||||
|
||||
/**
 * Create the Express router for weekly vulnerability report management.
 *
 * Routes:
 *   POST   /upload              - upload + process a report (editor/admin)
 *   GET    /                    - list all reports (any authenticated user)
 *   GET    /:id/download/:type  - download 'original' or 'processed' file
 *   DELETE /:id                 - delete a report and its files (admin only)
 *
 * @param {object} db - sqlite3 database handle
 * @param {object} upload - configured multer instance (temp-file storage)
 * @returns {object} Express router
 */
function createWeeklyReportsRouter(db, upload) {
  const router = express.Router();

  // Strip null bytes, '..' sequences and path separators so a user-supplied
  // filename can never traverse outside the reports directory.
  function sanitizePathSegment(segment) {
    if (!segment || typeof segment !== 'string') return '';
    return segment
      .replace(/\0/g, '')
      .replace(/\.\./g, '')
      .replace(/[\/\\]/g, '')
      .trim();
  }

  // Human-readable label for a report's age.
  // NOTE(review): new Date('YYYY-MM-DD') is parsed as UTC midnight, so in
  // non-UTC timezones the 7/14-day boundaries can be off by up to a day —
  // confirm whether that matters for these labels.
  function getWeekLabel(date) {
    const now = new Date();
    const uploadDate = new Date(date);
    const daysDiff = Math.floor((now - uploadDate) / (1000 * 60 * 60 * 24));

    if (daysDiff < 7) {
      return "This week's report";
    } else if (daysDiff < 14) {
      return "Last week's report";
    } else {
      const month = uploadDate.getMonth() + 1;
      const day = uploadDate.getDate();
      const year = uploadDate.getFullYear();
      return `Week of ${month.toString().padStart(2, '0')}/${day.toString().padStart(2, '0')}/${year}`;
    }
  }

  // Move a file, falling back to copy+unlink when source and destination live
  // on different filesystems: renameSync fails with EXDEV across mounts, e.g.
  // when multer's temp dir is a separate tmpfs.
  function moveFileSync(src, dest) {
    try {
      fs.renameSync(src, dest);
    } catch (err) {
      if (err.code !== 'EXDEV') throw err;
      fs.copyFileSync(src, dest);
      fs.unlinkSync(src);
    }
  }

  // POST /api/weekly-reports/upload - upload and process vulnerability report
  router.post('/upload', requireAuth(db), requireRole(db, 'editor', 'admin'), upload.single('file'), async (req, res) => {
    const uploadedFile = req.file;

    if (!uploadedFile) {
      return res.status(400).json({ error: 'No file uploaded' });
    }

    // Validate the file extension before doing any work.
    const ext = path.extname(uploadedFile.originalname).toLowerCase();
    if (ext !== '.xlsx') {
      fs.unlinkSync(uploadedFile.path); // clean up multer's temp file
      return res.status(400).json({ error: 'Only .xlsx files are allowed' });
    }

    const timestamp = Date.now();
    const sanitizedName = sanitizePathSegment(uploadedFile.originalname);
    const reportsDir = path.join(__dirname, '..', 'uploads', 'weekly_reports');

    // Create the storage directory on first use.
    if (!fs.existsSync(reportsDir)) {
      fs.mkdirSync(reportsDir, { recursive: true });
    }

    const originalFilename = `${timestamp}_original_${sanitizedName}`;
    const processedFilename = `${timestamp}_processed_${sanitizedName}`;
    const originalPath = path.join(reportsDir, originalFilename);
    const processedPath = path.join(reportsDir, processedFilename);

    try {
      // Move the uploaded file to its permanent location (EXDEV-safe).
      moveFileSync(uploadedFile.path, originalPath);

      // Run the Python CVE-splitting script.
      const result = await processVulnerabilityReport(originalPath, processedPath);

      const uploadDate = new Date().toISOString().split('T')[0];
      const weekLabel = getWeekLabel(uploadDate);

      const insertSql = `
        INSERT INTO weekly_reports (
          upload_date, week_label, original_filename, processed_filename,
          original_file_path, processed_file_path, row_count_original,
          row_count_processed, uploaded_by, is_current
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 1)
      `;

      // serialize() guarantees the archive UPDATE completes before the INSERT;
      // without it node-sqlite3 may interleave the two statements and leave
      // more than one report flagged as current.
      db.serialize(() => {
        db.run('UPDATE weekly_reports SET is_current = 0 WHERE is_current = 1', (err) => {
          if (err) {
            // Best-effort: a failed archive should not block the new upload.
            console.error('Error updating previous current reports:', err);
          }
        });

        db.run(
          insertSql,
          [
            uploadDate,
            weekLabel,
            sanitizedName,
            processedFilename,
            originalPath,
            processedPath,
            result.original_rows,
            result.processed_rows,
            req.user.id
          ],
          function (err) {
            if (err) {
              console.error('Error inserting weekly report:', err);
              return res.status(500).json({ error: 'Failed to save report metadata' });
            }

            // Audit trail for the upload.
            logAudit(
              db,
              req.user.id,
              req.user.username,
              'UPLOAD_WEEKLY_REPORT',
              'weekly_reports',
              this.lastID,
              JSON.stringify({ filename: sanitizedName, rows: result.processed_rows }),
              req.ip
            );

            res.json({
              success: true,
              id: this.lastID,
              original_rows: result.original_rows,
              processed_rows: result.processed_rows,
              week_label: weekLabel
            });
          }
        );
      });
    } catch (error) {
      // Clean up any files written before the failure.
      if (fs.existsSync(originalPath)) fs.unlinkSync(originalPath);
      if (fs.existsSync(processedPath)) fs.unlinkSync(processedPath);

      console.error('Error processing vulnerability report:', error);
      res.status(500).json({ error: error.message || 'Failed to process report' });
    }
  });

  // GET /api/weekly-reports - list all reports, newest first
  router.get('/', requireAuth(db), (req, res) => {
    const sql = `
      SELECT id, upload_date, week_label, original_filename, processed_filename,
             row_count_original, row_count_processed, is_current, uploaded_at
      FROM weekly_reports
      ORDER BY upload_date DESC, uploaded_at DESC
    `;

    db.all(sql, [], (err, rows) => {
      if (err) {
        console.error('Error fetching weekly reports:', err);
        return res.status(500).json({ error: 'Failed to fetch reports' });
      }

      res.json(rows);
    });
  });

  // GET /api/weekly-reports/:id/download/:type - download report file
  // File paths come from the database (server-generated), not from the URL,
  // so no extra path sanitization is needed here.
  router.get('/:id/download/:type', requireAuth(db), (req, res) => {
    const { id, type } = req.params;

    if (type !== 'original' && type !== 'processed') {
      return res.status(400).json({ error: 'Invalid download type. Use "original" or "processed"' });
    }

    const sql = `SELECT original_file_path, processed_file_path, original_filename FROM weekly_reports WHERE id = ?`;

    db.get(sql, [id], (err, row) => {
      if (err) {
        console.error('Error fetching report:', err);
        return res.status(500).json({ error: 'Failed to fetch report' });
      }

      if (!row) {
        return res.status(404).json({ error: 'Report not found' });
      }

      const filePath = type === 'original' ? row.original_file_path : row.processed_file_path;

      if (!fs.existsSync(filePath)) {
        return res.status(404).json({ error: 'File not found on disk' });
      }

      // Audit trail for the download.
      logAudit(
        db,
        req.user.id,
        req.user.username,
        'DOWNLOAD_WEEKLY_REPORT',
        'weekly_reports',
        id,
        JSON.stringify({ type }),
        req.ip
      );

      // Anchor the replacement to the end of the name so a '.xlsx' substring
      // appearing earlier in the filename is never rewritten.
      const downloadName = type === 'original'
        ? row.original_filename
        : row.original_filename.replace(/\.xlsx$/i, '_processed.xlsx');

      res.setHeader('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet');
      res.setHeader('Content-Disposition', `attachment; filename="${downloadName}"`);
      res.sendFile(filePath);
    });
  });

  // DELETE /api/weekly-reports/:id - delete report (admin only)
  router.delete('/:id', requireAuth(db), requireRole(db, 'admin'), (req, res) => {
    const { id } = req.params;

    const sql = 'SELECT original_file_path, processed_file_path FROM weekly_reports WHERE id = ?';

    db.get(sql, [id], (err, row) => {
      if (err) {
        console.error('Error fetching report for deletion:', err);
        return res.status(500).json({ error: 'Failed to fetch report' });
      }

      if (!row) {
        return res.status(404).json({ error: 'Report not found' });
      }

      // Delete the database record first; files are removed afterwards so a
      // failed DELETE never orphans the metadata row.
      db.run('DELETE FROM weekly_reports WHERE id = ?', [id], (err) => {
        if (err) {
          console.error('Error deleting report:', err);
          return res.status(500).json({ error: 'Failed to delete report' });
        }

        // Remove both stored files (best effort — paths may already be gone).
        if (fs.existsSync(row.original_file_path)) {
          fs.unlinkSync(row.original_file_path);
        }
        if (fs.existsSync(row.processed_file_path)) {
          fs.unlinkSync(row.processed_file_path);
        }

        // Audit trail for the deletion.
        logAudit(
          db,
          req.user.id,
          req.user.username,
          'DELETE_WEEKLY_REPORT',
          'weekly_reports',
          id,
          null,
          req.ip
        );

        res.json({ success: true });
      });
    });
  });

  return router;
}
|
||||
|
||||
module.exports = createWeeklyReportsRouter;
|
||||
2
backend/scripts/requirements.txt
Normal file
2
backend/scripts/requirements.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
pandas>=2.0.0
|
||||
openpyxl>=3.0.0
|
||||
83
backend/scripts/split_cve_report.py
Executable file
83
backend/scripts/split_cve_report.py
Executable file
@@ -0,0 +1,83 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
CVE Report Splitter
|
||||
Splits multiple CVE IDs in a single row into separate rows for easier filtering and analysis.
|
||||
"""
|
||||
|
||||
import pandas as pd
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
def split_cve_report(input_file, output_file=None, sheet_name='Vulnerabilities', cve_column='CVE ID'):
    """
    Split comma-separated CVE IDs into separate rows.

    Reads `sheet_name` from the input workbook, splits each `cve_column` cell
    on commas, and writes one row per CVE ID to `output_file`.

    Progress lines ("Original rows: N" / "New rows: N") are printed to stdout
    because the Node.js wrapper (excelProcessor.js) parses them; error
    diagnostics go to stderr so that same wrapper can classify failures.

    Args:
        input_file: Path to input Excel file
        output_file: Path to output file (default: adds '_Split' to input filename)
        sheet_name: Name of sheet with vulnerability data (default: 'Vulnerabilities')
        cve_column: Name of column containing CVE IDs (default: 'CVE ID')

    Returns:
        The path the processed workbook was written to.

    Exits:
        Status 1 when the input file, sheet, or CVE column cannot be found.
    """
    input_path = Path(input_file)

    if not input_path.exists():
        # stderr, not stdout: the Node wrapper only inspects stderr for errors.
        print(f"Error: File not found: {input_file}", file=sys.stderr)
        sys.exit(1)

    if output_file is None:
        output_file = input_path.parent / f"{input_path.stem}_Split{input_path.suffix}"

    print(f"Reading: {input_file}")

    try:
        df = pd.read_excel(input_file, sheet_name=sheet_name)
    except ValueError:
        # The Node wrapper matches 'Sheet' and 'not found' against *stderr*;
        # printing these to stdout made it fall back to a generic error.
        print(f"Error: Sheet '{sheet_name}' not found in workbook", file=sys.stderr)
        print(f"Available sheets: {pd.ExcelFile(input_file).sheet_names}", file=sys.stderr)
        sys.exit(1)

    if cve_column not in df.columns:
        print(f"Error: Column '{cve_column}' not found", file=sys.stderr)
        print(f"Available columns: {list(df.columns)}", file=sys.stderr)
        sys.exit(1)

    original_rows = len(df)
    print(f"Original rows: {original_rows}")

    # Split CVE IDs by comma, then explode so each ID gets its own row.
    df[cve_column] = df[cve_column].astype(str).str.split(',')
    df_exploded = df.explode(cve_column)

    # Clean up: trim whitespace and drop empty entries.
    # (astype(str) turns NaN into the literal string 'nan', hence that filter.)
    df_exploded[cve_column] = df_exploded[cve_column].str.strip()
    df_exploded = df_exploded[df_exploded[cve_column].notna()]
    df_exploded = df_exploded[df_exploded[cve_column] != 'nan']
    df_exploded = df_exploded[df_exploded[cve_column] != '']

    # Reset index so the output rows are numbered contiguously.
    df_exploded = df_exploded.reset_index(drop=True)

    new_rows = len(df_exploded)
    print(f"New rows: {new_rows}")
    print(f"Added {new_rows - original_rows} rows from splitting CVEs")

    # Save output.
    df_exploded.to_excel(output_file, index=False, sheet_name=sheet_name)
    print(f"\n✓ Success! Saved to: {output_file}")

    return output_file
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: require an input workbook; the output path is optional.
    args = sys.argv[1:]
    if not args:
        print("Usage: python3 split_cve_report.py <input_file.xlsx> [output_file.xlsx]")
        print("\nExample:")
        print(" python3 split_cve_report.py 'Vulnerability Workbook.xlsx'")
        print(" python3 split_cve_report.py 'input.xlsx' 'output.xlsx'")
        sys.exit(1)

    in_file = args[0]
    out_file = args[1] if len(args) > 1 else None

    split_cve_report(in_file, out_file)
|
||||
@@ -18,6 +18,7 @@ const createUsersRouter = require('./routes/users');
|
||||
const createAuditLogRouter = require('./routes/auditLog');
|
||||
const logAudit = require('./helpers/auditLog');
|
||||
const createNvdLookupRouter = require('./routes/nvdLookup');
|
||||
const createWeeklyReportsRouter = require('./routes/weeklyReports');
|
||||
|
||||
const app = express();
|
||||
const PORT = process.env.PORT || 3001;
|
||||
@@ -167,6 +168,9 @@ const upload = multer({
|
||||
limits: { fileSize: 10 * 1024 * 1024 } // 10MB limit
|
||||
});
|
||||
|
||||
// Weekly reports routes (editor/admin for upload, all authenticated for download)
|
||||
app.use('/api/weekly-reports', createWeeklyReportsRouter(db, upload));
|
||||
|
||||
// ========== CVE ENDPOINTS ==========
|
||||
|
||||
// Get all CVEs with optional filters (authenticated users)
|
||||
|
||||
Reference in New Issue
Block a user