feat: security hardening, monitoring improvements, and API guards

- Hardened cron endpoints with coordination and auth improvements
- Added API guards and input validation layer
- Security observability and secrets health checks
- Monitoring types and service improvements
- PDF URL validation and newsletter unsubscribe security
- Unit tests for security-critical paths

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Martin Porwoll 2026-02-17 11:42:56 +00:00
parent 4d1456fe98
commit e3987e50dc
35 changed files with 2330 additions and 809 deletions

View file

@ -39,6 +39,8 @@ META_REDIRECT_URI=http://localhost:3000/api/auth/meta/callback
# Cron Jobs (required in production)
CRON_SECRET=your-64-char-hex
CRON_LOCK_TTL_MS=600000
CRON_IDEMPOTENCY_TTL_MS=900000
# PDF Security
PDF_ALLOWED_HOSTS=example.com,.example.com
@ -48,6 +50,23 @@ PDF_ALLOW_HTTP_URLS=false
# Scheduler
# Disabled by default in production to avoid duplicate executions in multi-instance deployments
ENABLE_IN_PROCESS_SCHEDULER=false
SCHEDULER_MODE=external
# Security Observability
SECURITY_METRICS_WINDOW_MS=300000
SECURITY_ALERT_COOLDOWN_MS=900000
SECURITY_ALERT_THRESHOLD_DEFAULT=25
SECURITY_ALERT_THRESHOLD_CRON_AUTH_REJECTED=10
SECURITY_ALERT_THRESHOLD_PDF_SSRF_BLOCKED=5
SECURITY_ALERT_THRESHOLD_RATE_LIMIT_BLOCKED=50
# Secret Lifecycle Monitoring
SECRET_EXPIRY_WARNING_DAYS=14
SECRET_ROTATION_MAX_DAYS=90
PAYLOAD_SECRET_ROTATED_AT=2026-02-01T00:00:00Z
PAYLOAD_SECRET_EXPIRES_AT=2026-08-01T00:00:00Z
CRON_SECRET_ROTATED_AT=2026-02-01T00:00:00Z
CRON_SECRET_EXPIRES_AT=2026-08-01T00:00:00Z
# Tests
EMAIL_DELIVERY_DISABLED=false

View file

@ -0,0 +1,24 @@
# Git History Security Scan
- Generated: 2026-02-17 11:42:09 UTC
- Repository: payload-cms
## Summary
- `backup.sql` is not tracked in current HEAD.
- `backup.sql` exists in git history and must be treated as potentially sensitive.
- `gitleaks` available: no (install recommended for full-history secret scanning).
## backup.sql Commit History
```text
063dae4 2026-02-17 security: harden payload endpoints and access controls
0a8e191 2025-12-05 chore: add database backup for server migration
```
## Recommended Actions
1. Rotate DB credentials if `backup.sql` contained production or staging data.
2. Rotate SMTP/API/OAuth secrets if dumps included integration credentials.
3. If required by compliance, rewrite history for `backup.sql` (e.g. `git filter-repo`) and force-push.
4. Enable periodic full-history scans in CI using gitleaks.

View file

@ -0,0 +1,62 @@
#!/usr/bin/env bash
# Generates a markdown security-scan report about backup.sql in git history.
# Usage: <script> [report-path]   (default path is the dated docs/reports file)
set -euo pipefail
# Resolve the repository root relative to this script's location (two levels up)
# so the script works regardless of the caller's current directory.
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$REPO_ROOT"
# Report destination: first CLI argument, falling back to a dated default.
REPORT_PATH="${1:-docs/reports/2026-02-17-history-scan.md}"
NOW_UTC="$(date -u +"%Y-%m-%d %H:%M:%S UTC")"
mkdir -p "$(dirname "$REPORT_PATH")"
# The entire grouped block below is redirected into the report file in one write.
{
echo "# Git History Security Scan"
echo
echo "- Generated: ${NOW_UTC}"
echo "- Repository: payload-cms"
echo
echo "## Summary"
echo
# Critical finding: backup.sql still tracked in the current HEAD.
if git ls-files --error-unmatch backup.sql >/dev/null 2>&1; then
echo "- \`backup.sql\` is still tracked in current HEAD (critical)."
else
echo "- \`backup.sql\` is not tracked in current HEAD."
fi
# Collect full-history commits touching backup.sql; '|| true' keeps set -e /
# pipefail from aborting when git log finds nothing.
backup_history="$(git log --all --date=short --pretty=format:'%h %ad %s' -- backup.sql || true)"
if [[ -n "${backup_history}" ]]; then
echo "- \`backup.sql\` exists in git history and must be treated as potentially sensitive."
else
echo "- No git history entries found for \`backup.sql\`."
fi
# Report whether gitleaks is installed for full-history secret scanning.
if command -v gitleaks >/dev/null 2>&1; then
echo "- \`gitleaks\` available: yes (run with: \`gitleaks git --redact --verbose\`)."
else
echo "- \`gitleaks\` available: no (install recommended for full-history secret scanning)."
fi
echo
echo "## backup.sql Commit History"
echo
# Emit the commit list as a fenced code block, or a placeholder when empty.
if [[ -n "${backup_history}" ]]; then
echo '```text'
echo "${backup_history}"
echo '```'
else
echo "_No entries found._"
fi
echo
echo "## Recommended Actions"
echo
echo "1. Rotate DB credentials if \`backup.sql\` contained production or staging data."
echo "2. Rotate SMTP/API/OAuth secrets if dumps included integration credentials."
echo "3. If required by compliance, rewrite history for \`backup.sql\` (e.g. \`git filter-repo\`) and force-push."
echo "4. Enable periodic full-history scans in CI using gitleaks."
} > "$REPORT_PATH"
echo "History scan report written to ${REPORT_PATH}"

View file

@ -23,77 +23,80 @@ import { NextRequest, NextResponse } from 'next/server'
import { logLoginFailed, logRateLimit } from '@/lib/audit/audit-service'
import {
authLimiter,
rateLimitHeaders,
getClientIpFromRequest,
isIpBlocked,
validateCsrf,
runApiGuards,
} from '@/lib/security'
import {
asObject,
requiredString,
validateJsonBody,
validationErrorResponse,
type ApiValidationResult,
} from '@/lib/validation'
/**
* Extrahiert Client-Informationen aus dem Request für Audit-Logging
*/
function getClientInfo(req: NextRequest): { ipAddress: string; userAgent: string } {
const forwarded = req.headers.get('x-forwarded-for')
const realIp = req.headers.get('x-real-ip')
const ipAddress =
(forwarded ? forwarded.split(',')[0]?.trim() : undefined) || realIp || 'unknown'
function getClientInfo(req: NextRequest, ipAddress?: string): { ipAddress: string; userAgent: string } {
const userAgent = req.headers.get('user-agent') || 'unknown'
return { ipAddress, userAgent }
return { ipAddress: ipAddress || 'unknown', userAgent }
}
/** Expected shape of the JSON body for POST /api/auth/login. */
interface LoginBody {
  email: string
  password: string
}
/**
 * Validates a parsed login request body.
 *
 * Both fields are checked before returning so the client receives every
 * validation issue in a single response rather than one at a time.
 */
function validateLoginBody(input: unknown): ApiValidationResult<LoginBody> {
  const parsed = asObject(input)
  if (!parsed.valid) {
    return parsed as ApiValidationResult<LoginBody>
  }
  const emailField = requiredString(parsed.data, 'email')
  const passwordField = requiredString(parsed.data, 'password')
  // Merge issues from both fields; an empty array means both are present.
  const issues = [
    ...(!emailField.valid ? emailField.issues : []),
    ...(!passwordField.valid ? passwordField.issues : []),
  ]
  if (issues.length > 0) {
    return { valid: false, issues }
  }
  return {
    valid: true,
    data: {
      email: emailField.data,
      password: passwordField.data,
    },
  }
}
export async function POST(req: NextRequest): Promise<NextResponse> {
try {
// IP-Blocklist prüfen
const clientIp = getClientIpFromRequest(req)
if (isIpBlocked(clientIp)) {
return NextResponse.json(
{ success: false, error: 'Access denied' },
{ status: 403 },
)
}
// CSRF-Schutz für Browser-basierte Requests
const csrfResult = validateCsrf(req)
if (!csrfResult.valid) {
return NextResponse.json(
{ success: false, error: 'CSRF validation failed' },
{ status: 403 },
)
}
// Rate-Limiting prüfen (Anti-Brute-Force)
const rateLimit = await authLimiter.check(clientIp)
if (!rateLimit.allowed) {
const payload = await getPayload({ config: configPromise })
await logRateLimit(payload, '/api/auth/login', undefined, undefined)
return NextResponse.json(
{
success: false,
error: 'Too many login attempts. Please try again later.',
},
{
status: 429,
headers: rateLimitHeaders(rateLimit, 5),
},
)
}
const payload = await getPayload({ config: configPromise })
const body = await req.json()
const { email, password } = body
// Validierung
if (!email || !password) {
return NextResponse.json(
{ error: 'E-Mail und Passwort sind erforderlich' },
{ status: 400 },
)
const guardResult = await runApiGuards(req, {
endpoint: '/api/auth/login',
blocklistOnly: true,
csrf: 'browser',
rateLimiter: authLimiter,
rateLimitMax: 5,
onRateLimit: async () => {
await logRateLimit(payload, '/api/auth/login', undefined, undefined)
},
})
if (!guardResult.ok) {
return guardResult.response
}
const bodyResult = await validateJsonBody(req, validateLoginBody)
if (!bodyResult.valid) {
return validationErrorResponse(bodyResult.issues)
}
const { email, password } = bodyResult.data
try {
// Versuche Login über Payload
const result = await payload.login({
@ -152,7 +155,7 @@ export async function POST(req: NextRequest): Promise<NextResponse> {
}
// Client-Info für Audit-Log extrahieren
const clientInfo = getClientInfo(req)
const clientInfo = getClientInfo(req, guardResult.ip)
// Audit-Log für fehlgeschlagenen Login mit vollem Client-Context
await logLoginFailed(payload, email, reason, clientInfo)

View file

@ -8,7 +8,7 @@ import {
type SupportedPlatform,
type UnifiedSyncOptions,
} from '@/lib/jobs/UnifiedSyncService'
import { requireCronAuth } from '@/lib/security'
import { requireCronAuth, withCronExecution } from '@/lib/security'
/**
* GET /api/cron/community-sync
@ -21,90 +21,85 @@ import { requireCronAuth } from '@/lib/security'
* - maxItems: Maximale Items pro Account (default: 100)
*/
export async function GET(request: NextRequest) {
const authError = requireCronAuth(request)
if (authError) {
console.warn('[Cron] Unauthorized request to community-sync')
return authError
}
return withCronExecution(request, { endpoint: 'community-sync' }, async () => {
// Query-Parameter parsen
const searchParams = request.nextUrl.searchParams
const options: UnifiedSyncOptions = {}
// Query-Parameter parsen
const searchParams = request.nextUrl.searchParams
const options: UnifiedSyncOptions = {}
// Plattform-Filter
const platformsParam = searchParams.get('platforms')
if (platformsParam) {
const platforms = platformsParam.split(',').map((p) => p.trim().toLowerCase())
const validPlatforms = platforms.filter((p): p is SupportedPlatform =>
['youtube', 'facebook', 'instagram'].includes(p)
)
if (validPlatforms.length > 0) {
options.platforms = validPlatforms
}
}
// Account-ID Filter
const accountIdsParam = searchParams.get('accountIds')
if (accountIdsParam) {
const ids = accountIdsParam
.split(',')
.map((id) => parseInt(id.trim(), 10))
.filter((id) => !isNaN(id))
if (ids.length > 0) {
options.accountIds = ids
}
}
// AI-Analyse
const analyzeParam = searchParams.get('analyzeWithAI')
if (analyzeParam !== null) {
options.analyzeWithAI = analyzeParam !== 'false'
}
// Max Items
const maxItemsParam = searchParams.get('maxItems')
if (maxItemsParam) {
const maxItems = parseInt(maxItemsParam, 10)
if (!isNaN(maxItems) && maxItems > 0) {
options.maxItemsPerAccount = maxItems
}
}
console.log('[Cron] Starting community sync', { options })
const result = await runUnifiedSync(options)
if (result.success) {
// Gesamtstatistiken berechnen
let totalNew = 0
let totalUpdated = 0
let totalErrors = 0
for (const platform of Object.values(result.platforms)) {
if (platform) {
totalNew += platform.totalNewComments
totalUpdated += platform.totalUpdatedComments
totalErrors += platform.totalErrors
// Plattform-Filter
const platformsParam = searchParams.get('platforms')
if (platformsParam) {
const platforms = platformsParam.split(',').map((p) => p.trim().toLowerCase())
const validPlatforms = platforms.filter((p): p is SupportedPlatform =>
['youtube', 'facebook', 'instagram'].includes(p)
)
if (validPlatforms.length > 0) {
options.platforms = validPlatforms
}
}
return NextResponse.json({
success: true,
message: `Sync completed: ${totalNew} new, ${totalUpdated} updated comments across ${result.results.length} accounts`,
duration: result.duration,
platforms: result.platforms,
results: result.results,
})
}
// Account-ID Filter
const accountIdsParam = searchParams.get('accountIds')
if (accountIdsParam) {
const ids = accountIdsParam
.split(',')
.map((id) => parseInt(id.trim(), 10))
.filter((id) => !isNaN(id))
if (ids.length > 0) {
options.accountIds = ids
}
}
return NextResponse.json(
{
// AI-Analyse
const analyzeParam = searchParams.get('analyzeWithAI')
if (analyzeParam !== null) {
options.analyzeWithAI = analyzeParam !== 'false'
}
// Max Items
const maxItemsParam = searchParams.get('maxItems')
if (maxItemsParam) {
const maxItems = parseInt(maxItemsParam, 10)
if (!isNaN(maxItems) && maxItems > 0) {
options.maxItemsPerAccount = maxItems
}
}
console.log('[Cron] Starting community sync', { options })
const result = await runUnifiedSync(options)
if (result.success) {
// Gesamtstatistiken berechnen
let totalNew = 0
let totalUpdated = 0
let totalErrors = 0
for (const platform of Object.values(result.platforms)) {
if (platform) {
totalNew += platform.totalNewComments
totalUpdated += platform.totalUpdatedComments
totalErrors += platform.totalErrors
}
}
return NextResponse.json({
success: true,
message: `Sync completed: ${totalNew} new, ${totalUpdated} updated comments across ${result.results.length} accounts`,
duration: result.duration,
platforms: result.platforms,
results: result.results,
})
}
return NextResponse.json({
success: false,
errors: result.errors,
partialResults: result.results,
},
{ status: 500 }
)
{ status: 500 },
)
})
}
/**
@ -112,62 +107,57 @@ export async function GET(request: NextRequest) {
* Manueller Sync-Trigger mit erweiterten Optionen
*/
export async function POST(request: NextRequest) {
const authError = requireCronAuth(request)
if (authError) {
console.warn('[Cron] Unauthorized POST to community-sync')
return authError
}
return withCronExecution(request, { endpoint: 'community-sync' }, async () => {
try {
const body = await request.json()
const options: UnifiedSyncOptions = {}
try {
const body = await request.json()
const options: UnifiedSyncOptions = {}
// Optionen aus Body übernehmen
if (body.platforms && Array.isArray(body.platforms)) {
options.platforms = body.platforms.filter((p: string): p is SupportedPlatform =>
['youtube', 'facebook', 'instagram'].includes(p)
)
}
if (body.accountIds && Array.isArray(body.accountIds)) {
options.accountIds = body.accountIds.filter(
(id: unknown) => typeof id === 'number' && id > 0
)
}
if (typeof body.analyzeWithAI === 'boolean') {
options.analyzeWithAI = body.analyzeWithAI
}
if (typeof body.maxItemsPerAccount === 'number' && body.maxItemsPerAccount > 0) {
options.maxItemsPerAccount = body.maxItemsPerAccount
}
if (body.sinceDate) {
const date = new Date(body.sinceDate)
if (!isNaN(date.getTime())) {
options.sinceDate = date
// Optionen aus Body übernehmen
if (body.platforms && Array.isArray(body.platforms)) {
options.platforms = body.platforms.filter((p: string): p is SupportedPlatform =>
['youtube', 'facebook', 'instagram'].includes(p)
)
}
if (body.accountIds && Array.isArray(body.accountIds)) {
options.accountIds = body.accountIds.filter(
(id: unknown) => typeof id === 'number' && id > 0,
)
}
if (typeof body.analyzeWithAI === 'boolean') {
options.analyzeWithAI = body.analyzeWithAI
}
if (typeof body.maxItemsPerAccount === 'number' && body.maxItemsPerAccount > 0) {
options.maxItemsPerAccount = body.maxItemsPerAccount
}
if (body.sinceDate) {
const date = new Date(body.sinceDate)
if (!isNaN(date.getTime())) {
options.sinceDate = date
}
}
console.log('[Cron] Manual community sync triggered', { options })
const result = await runUnifiedSync(options)
return NextResponse.json({
success: result.success,
startedAt: result.startedAt,
completedAt: result.completedAt,
duration: result.duration,
platforms: result.platforms,
results: result.results,
errors: result.errors,
})
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error'
console.error('[Cron] POST error:', error)
return NextResponse.json({ error: message }, { status: 500 })
}
console.log('[Cron] Manual community sync triggered', { options })
const result = await runUnifiedSync(options)
return NextResponse.json({
success: result.success,
startedAt: result.startedAt,
completedAt: result.completedAt,
duration: result.duration,
platforms: result.platforms,
results: result.results,
errors: result.errors,
})
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error'
console.error('[Cron] POST error:', error)
return NextResponse.json({ error: message }, { status: 500 })
}
})
}
/**
@ -175,7 +165,7 @@ export async function POST(request: NextRequest) {
* Status-Check für Monitoring
*/
export async function HEAD(request: NextRequest) {
const authError = requireCronAuth(request)
const authError = requireCronAuth(request, 'community-sync')
if (authError) {
return new NextResponse(null, { status: authError.status })
}

View file

@ -5,13 +5,51 @@ import { NextRequest, NextResponse } from 'next/server'
import { getPayload } from 'payload'
import config from '@payload-config'
import { ReportGeneratorService, ReportSchedule } from '@/lib/services/ReportGeneratorService'
import { requireCronAuth } from '@/lib/security'
import { requireCronAuth, withCronExecution } from '@/lib/security'
import {
asObject,
optionalNumber,
validateJsonBody,
validationIssue,
validationErrorResponse,
type ApiValidationResult,
} from '@/lib/validation'
// Status für Monitoring
let isRunning = false
let lastRunAt: Date | null = null
let lastResult: { success: boolean; sent: number; failed: number } | null = null
/** Expected shape of the JSON body for POST /api/cron/send-reports. */
interface SendReportsBody {
  scheduleId: number
}
/**
 * Validates a parsed send-reports request body.
 *
 * `scheduleId` is read via optionalNumber so a wrong type yields the field
 * validator's own issue, while a missing value is rejected explicitly with
 * a dedicated "required" issue below.
 */
function validateSendReportsBody(input: unknown): ApiValidationResult<SendReportsBody> {
  const parsed = asObject(input)
  if (!parsed.valid) {
    return parsed as ApiValidationResult<SendReportsBody>
  }
  const scheduleIdField = optionalNumber(parsed.data, 'scheduleId')
  if (!scheduleIdField.valid) {
    return scheduleIdField as ApiValidationResult<SendReportsBody>
  }
  const scheduleId = scheduleIdField.data
  // optionalNumber accepts absence; this endpoint does not.
  if (scheduleId === undefined) {
    return {
      valid: false,
      issues: [validationIssue('scheduleId', 'required', 'scheduleId is required')],
    }
  }
  return { valid: true, data: { scheduleId } }
}
/**
* GET /api/cron/send-reports
* Prüft und sendet fällige Reports
@ -19,93 +57,89 @@ let lastResult: { success: boolean; sent: number; failed: number } | null = null
* Läuft stündlich und prüft welche Reports gesendet werden müssen
*/
export async function GET(request: NextRequest) {
const authError = requireCronAuth(request)
if (authError) {
console.warn('[Cron] Unauthorized request to send-reports')
return authError
}
if (isRunning) {
return NextResponse.json(
{ error: 'Report sending already in progress' },
{ status: 423 }
)
}
isRunning = true
const startedAt = new Date()
let sent = 0
let failed = 0
try {
const payload = await getPayload({ config })
const reportService = new ReportGeneratorService(payload)
// Fällige Reports finden
const now = new Date()
const dueSchedules = await findDueSchedules(payload, now)
console.log(`[Cron] Found ${dueSchedules.length} due report schedules`)
for (const schedule of dueSchedules) {
try {
console.log(`[Cron] Generating report: ${schedule.name}`)
// Report generieren
const report = await reportService.generateReport(schedule)
if (!report.success) {
console.error(`[Cron] Report generation failed: ${report.error}`)
failed++
await updateScheduleError(payload, schedule.id, report.error || 'Generation failed')
continue
}
// Report senden
const sendSuccess = await reportService.sendReport(schedule, report)
if (sendSuccess) {
sent++
console.log(`[Cron] Report sent successfully: ${schedule.name}`)
// Nächsten Versandzeitpunkt berechnen
await updateNextScheduledTime(payload, schedule)
} else {
failed++
console.error(`[Cron] Report send failed: ${schedule.name}`)
}
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
console.error(`[Cron] Error processing schedule ${schedule.name}:`, error)
await updateScheduleError(payload, schedule.id, message)
failed++
}
return withCronExecution(request, { endpoint: 'send-reports' }, async () => {
if (isRunning) {
return NextResponse.json(
{ error: 'Report sending already in progress' },
{ status: 423 },
)
}
lastRunAt = startedAt
lastResult = { success: failed === 0, sent, failed }
isRunning = true
const startedAt = new Date()
let sent = 0
let failed = 0
return NextResponse.json({
success: failed === 0,
message: `Reports processed: ${sent} sent, ${failed} failed`,
processed: dueSchedules.length,
sent,
failed,
duration: Date.now() - startedAt.getTime(),
})
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
console.error('[Cron] send-reports error:', error)
try {
const payload = await getPayload({ config })
const reportService = new ReportGeneratorService(payload)
lastResult = { success: false, sent, failed: failed + 1 }
// Fällige Reports finden
const now = new Date()
const dueSchedules = await findDueSchedules(payload, now)
return NextResponse.json(
{ error: message, sent, failed },
{ status: 500 }
)
} finally {
isRunning = false
}
console.log(`[Cron] Found ${dueSchedules.length} due report schedules`)
for (const schedule of dueSchedules) {
try {
console.log(`[Cron] Generating report: ${schedule.name}`)
// Report generieren
const report = await reportService.generateReport(schedule)
if (!report.success) {
console.error(`[Cron] Report generation failed: ${report.error}`)
failed++
await updateScheduleError(payload, schedule.id, report.error || 'Generation failed')
continue
}
// Report senden
const sendSuccess = await reportService.sendReport(schedule, report)
if (sendSuccess) {
sent++
console.log(`[Cron] Report sent successfully: ${schedule.name}`)
// Nächsten Versandzeitpunkt berechnen
await updateNextScheduledTime(payload, schedule)
} else {
failed++
console.error(`[Cron] Report send failed: ${schedule.name}`)
}
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
console.error(`[Cron] Error processing schedule ${schedule.name}:`, error)
await updateScheduleError(payload, schedule.id, message)
failed++
}
}
lastRunAt = startedAt
lastResult = { success: failed === 0, sent, failed }
return NextResponse.json({
success: failed === 0,
message: `Reports processed: ${sent} sent, ${failed} failed`,
processed: dueSchedules.length,
sent,
failed,
duration: Date.now() - startedAt.getTime(),
})
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
console.error('[Cron] send-reports error:', error)
lastResult = { success: false, sent, failed: failed + 1 }
return NextResponse.json(
{ error: message, sent, failed },
{ status: 500 },
)
} finally {
isRunning = false
}
})
}
/**
@ -113,59 +147,54 @@ export async function GET(request: NextRequest) {
* Manuelles Senden eines bestimmten Reports
*/
export async function POST(request: NextRequest) {
const authError = requireCronAuth(request)
if (authError) {
console.warn('[Cron] Unauthorized POST to send-reports')
return authError
}
return withCronExecution(request, { endpoint: 'send-reports' }, async () => {
try {
const bodyResult = await validateJsonBody(request, validateSendReportsBody)
if (!bodyResult.valid) {
return validationErrorResponse(bodyResult.issues)
}
const { scheduleId } = bodyResult.data
try {
const body = await request.json()
const { scheduleId } = body
const payload = await getPayload({ config })
if (!scheduleId) {
return NextResponse.json({ error: 'scheduleId required' }, { status: 400 })
// Schedule laden
const scheduleDoc = await payload.findByID({
collection: 'report-schedules',
id: scheduleId,
depth: 2,
})
if (!scheduleDoc) {
return NextResponse.json({ error: 'Schedule not found' }, { status: 404 })
}
const schedule = transformSchedule(scheduleDoc)
const reportService = new ReportGeneratorService(payload)
// Report generieren und senden
const report = await reportService.generateReport(schedule)
if (!report.success) {
return NextResponse.json(
{ success: false, error: report.error },
{ status: 500 },
)
}
const sendSuccess = await reportService.sendReport(schedule, report)
return NextResponse.json({
success: sendSuccess,
scheduleName: schedule.name,
format: schedule.format,
recipients: schedule.recipients.length,
})
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
console.error('[Cron] POST send-reports error:', error)
return NextResponse.json({ error: message }, { status: 500 })
}
const payload = await getPayload({ config })
// Schedule laden
const scheduleDoc = await payload.findByID({
collection: 'report-schedules',
id: scheduleId,
depth: 2,
})
if (!scheduleDoc) {
return NextResponse.json({ error: 'Schedule not found' }, { status: 404 })
}
const schedule = transformSchedule(scheduleDoc)
const reportService = new ReportGeneratorService(payload)
// Report generieren und senden
const report = await reportService.generateReport(schedule)
if (!report.success) {
return NextResponse.json(
{ success: false, error: report.error },
{ status: 500 }
)
}
const sendSuccess = await reportService.sendReport(schedule, report)
return NextResponse.json({
success: sendSuccess,
scheduleName: schedule.name,
format: schedule.format,
recipients: schedule.recipients.length,
})
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
console.error('[Cron] POST send-reports error:', error)
return NextResponse.json({ error: message }, { status: 500 })
}
})
}
/**
@ -173,7 +202,7 @@ export async function POST(request: NextRequest) {
* Status-Check für Monitoring
*/
export async function HEAD(request: NextRequest) {
const authError = requireCronAuth(request)
const authError = requireCronAuth(request, 'send-reports')
if (authError) {
return new NextResponse(null, { status: authError.status })
}

View file

@ -8,7 +8,7 @@ import {
type TokenRefreshOptions,
type TokenPlatform,
} from '@/lib/jobs/TokenRefreshService'
import { requireCronAuth } from '@/lib/security'
import { requireCronAuth, withCronExecution } from '@/lib/security'
/**
* GET /api/cron/token-refresh
@ -21,75 +21,71 @@ import { requireCronAuth } from '@/lib/security'
* - dryRun: true/false - nur prüfen, nicht erneuern
*/
export async function GET(request: NextRequest) {
const authError = requireCronAuth(request)
if (authError) {
console.warn('[Cron] Unauthorized request to token-refresh')
return authError
}
return withCronExecution(request, { endpoint: 'token-refresh' }, async () => {
// Query-Parameter parsen
const searchParams = request.nextUrl.searchParams
const options: TokenRefreshOptions = {}
// Query-Parameter parsen
const searchParams = request.nextUrl.searchParams
const options: TokenRefreshOptions = {}
// Plattform-Filter
const platformsParam = searchParams.get('platforms')
if (platformsParam) {
const platforms = platformsParam.split(',').map((p) => p.trim().toLowerCase())
const validPlatforms = platforms.filter((p): p is TokenPlatform =>
['youtube', 'facebook', 'instagram'].includes(p)
)
if (validPlatforms.length > 0) {
options.platforms = validPlatforms
}
}
// Plattform-Filter
const platformsParam = searchParams.get('platforms')
if (platformsParam) {
const platforms = platformsParam.split(',').map((p) => p.trim().toLowerCase())
const validPlatforms = platforms.filter((p): p is TokenPlatform =>
['youtube', 'facebook', 'instagram'].includes(p)
// Threshold Days
const thresholdParam = searchParams.get('thresholdDays')
if (thresholdParam) {
const days = parseInt(thresholdParam, 10)
if (!isNaN(days) && days > 0) {
options.refreshThresholdDays = days
}
}
// Include Expired
const includeExpiredParam = searchParams.get('includeExpired')
if (includeExpiredParam === 'true') {
options.includeExpired = true
}
// Dry Run
const dryRunParam = searchParams.get('dryRun')
if (dryRunParam === 'true') {
options.dryRun = true
}
console.log('[Cron] Starting token refresh', { options })
const result = await runTokenRefresh(options)
if (result.success) {
return NextResponse.json({
success: true,
message: `Token refresh completed: ${result.stats.refreshed} refreshed, ${result.stats.skipped} skipped`,
duration: result.duration,
stats: result.stats,
notifications: result.notifications.filter((n) => n.type !== 'info'),
})
}
// Teilerfolg oder Fehler
return NextResponse.json(
{
success: false,
message: `Token refresh completed with issues: ${result.stats.failed} failed, ${result.stats.expired} expired`,
duration: result.duration,
stats: result.stats,
notifications: result.notifications,
results: result.results.filter((r) => r.action !== 'skipped'),
},
{ status: result.stats.failed > 0 || result.stats.expired > 0 ? 207 : 200 },
)
if (validPlatforms.length > 0) {
options.platforms = validPlatforms
}
}
// Threshold Days
const thresholdParam = searchParams.get('thresholdDays')
if (thresholdParam) {
const days = parseInt(thresholdParam, 10)
if (!isNaN(days) && days > 0) {
options.refreshThresholdDays = days
}
}
// Include Expired
const includeExpiredParam = searchParams.get('includeExpired')
if (includeExpiredParam === 'true') {
options.includeExpired = true
}
// Dry Run
const dryRunParam = searchParams.get('dryRun')
if (dryRunParam === 'true') {
options.dryRun = true
}
console.log('[Cron] Starting token refresh', { options })
const result = await runTokenRefresh(options)
if (result.success) {
return NextResponse.json({
success: true,
message: `Token refresh completed: ${result.stats.refreshed} refreshed, ${result.stats.skipped} skipped`,
duration: result.duration,
stats: result.stats,
notifications: result.notifications.filter((n) => n.type !== 'info'),
})
}
// Teilerfolg oder Fehler
return NextResponse.json(
{
success: false,
message: `Token refresh completed with issues: ${result.stats.failed} failed, ${result.stats.expired} expired`,
duration: result.duration,
stats: result.stats,
notifications: result.notifications,
results: result.results.filter((r) => r.action !== 'skipped'),
},
{ status: result.stats.failed > 0 || result.stats.expired > 0 ? 207 : 200 }
)
})
}
/**
@ -97,59 +93,55 @@ export async function GET(request: NextRequest) {
* Manueller Token-Refresh mit erweiterten Optionen
*/
export async function POST(request: NextRequest) {
const authError = requireCronAuth(request)
if (authError) {
console.warn('[Cron] Unauthorized POST to token-refresh')
return authError
}
return withCronExecution(request, { endpoint: 'token-refresh' }, async () => {
try {
const body = await request.json()
const options: TokenRefreshOptions = {}
try {
const body = await request.json()
const options: TokenRefreshOptions = {}
// Optionen aus Body übernehmen
if (body.platforms && Array.isArray(body.platforms)) {
options.platforms = body.platforms.filter((p: string): p is TokenPlatform =>
['youtube', 'facebook', 'instagram'].includes(p)
)
}
// Optionen aus Body übernehmen
if (body.platforms && Array.isArray(body.platforms)) {
options.platforms = body.platforms.filter((p: string): p is TokenPlatform =>
['youtube', 'facebook', 'instagram'].includes(p)
)
if (body.accountIds && Array.isArray(body.accountIds)) {
options.accountIds = body.accountIds.filter(
(id: unknown) => typeof id === 'number' && id > 0,
)
}
if (typeof body.refreshThresholdDays === 'number' && body.refreshThresholdDays > 0) {
options.refreshThresholdDays = body.refreshThresholdDays
}
if (typeof body.includeExpired === 'boolean') {
options.includeExpired = body.includeExpired
}
if (typeof body.dryRun === 'boolean') {
options.dryRun = body.dryRun
}
console.log('[Cron] Manual token refresh triggered', { options })
const result = await runTokenRefresh(options)
return NextResponse.json({
success: result.success,
startedAt: result.startedAt,
completedAt: result.completedAt,
duration: result.duration,
stats: result.stats,
results: result.results,
notifications: result.notifications,
})
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error'
console.error('[Cron] POST error:', error)
return NextResponse.json({ error: message }, { status: 500 })
}
if (body.accountIds && Array.isArray(body.accountIds)) {
options.accountIds = body.accountIds.filter(
(id: unknown) => typeof id === 'number' && id > 0
)
}
if (typeof body.refreshThresholdDays === 'number' && body.refreshThresholdDays > 0) {
options.refreshThresholdDays = body.refreshThresholdDays
}
if (typeof body.includeExpired === 'boolean') {
options.includeExpired = body.includeExpired
}
if (typeof body.dryRun === 'boolean') {
options.dryRun = body.dryRun
}
console.log('[Cron] Manual token refresh triggered', { options })
const result = await runTokenRefresh(options)
return NextResponse.json({
success: result.success,
startedAt: result.startedAt,
completedAt: result.completedAt,
duration: result.duration,
stats: result.stats,
results: result.results,
notifications: result.notifications,
})
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error'
console.error('[Cron] POST error:', error)
return NextResponse.json({ error: message }, { status: 500 })
}
})
}
/**
@ -157,7 +149,7 @@ export async function POST(request: NextRequest) {
* Status-Check für Monitoring
*/
export async function HEAD(request: NextRequest) {
const authError = requireCronAuth(request)
const authError = requireCronAuth(request, 'token-refresh')
if (authError) {
return new NextResponse(null, { status: authError.status })
}

View file

@ -6,7 +6,7 @@ import { getPayload } from 'payload'
import config from '@payload-config'
import { ChannelMetricsSyncService } from '@/lib/integrations/youtube/ChannelMetricsSyncService'
import { requireCronAuth } from '@/lib/security'
import { requireCronAuth, withCronExecution } from '@/lib/security'
// Monitoring state
let isRunning = false
@ -18,51 +18,47 @@ let lastRunAt: Date | null = null
* Scheduled daily at 04:00 UTC via Vercel Cron.
*/
export async function GET(request: NextRequest): Promise<NextResponse> {
const authError = requireCronAuth(request)
if (authError) {
console.warn('[Cron] Unauthorized request to youtube-channel-sync')
return authError
}
return withCronExecution(request, { endpoint: 'youtube-channel-sync' }, async () => {
if (isRunning) {
return NextResponse.json(
{ error: 'Channel metrics sync already in progress' },
{ status: 423 },
)
}
if (isRunning) {
return NextResponse.json(
{ error: 'Channel metrics sync already in progress' },
{ status: 423 },
)
}
isRunning = true
const startedAt = new Date()
isRunning = true
const startedAt = new Date()
try {
const payload = await getPayload({ config })
const service = new ChannelMetricsSyncService(payload)
try {
const payload = await getPayload({ config })
const service = new ChannelMetricsSyncService(payload)
console.log('[Cron] Starting channel metrics sync')
console.log('[Cron] Starting channel metrics sync')
const result = await service.syncAllChannels()
const result = await service.syncAllChannels()
lastRunAt = startedAt
const duration = Date.now() - startedAt.getTime()
lastRunAt = startedAt
const duration = Date.now() - startedAt.getTime()
console.log(
`[Cron] Channel metrics sync completed: ${result.channelsSynced} channels synced, ${result.errors.length} errors, ${duration}ms`,
)
console.log(
`[Cron] Channel metrics sync completed: ${result.channelsSynced} channels synced, ${result.errors.length} errors, ${duration}ms`,
)
return NextResponse.json({
success: result.success,
channelsSynced: result.channelsSynced,
errors: result.errors,
duration,
})
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
console.error('[Cron] youtube-channel-sync error:', error)
return NextResponse.json({
success: result.success,
channelsSynced: result.channelsSynced,
errors: result.errors,
duration,
})
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
console.error('[Cron] youtube-channel-sync error:', error)
return NextResponse.json({ error: message }, { status: 500 })
} finally {
isRunning = false
}
return NextResponse.json({ error: message }, { status: 500 })
} finally {
isRunning = false
}
})
}
/**
@ -70,7 +66,7 @@ export async function GET(request: NextRequest): Promise<NextResponse> {
* Status check for monitoring.
*/
export async function HEAD(request: NextRequest): Promise<NextResponse> {
const authError = requireCronAuth(request)
const authError = requireCronAuth(request, 'youtube-channel-sync')
if (authError) {
return new NextResponse(null, { status: authError.status })
}

View file

@ -6,7 +6,7 @@ import { getPayload } from 'payload'
import config from '@payload-config'
import { VideoMetricsSyncService } from '@/lib/integrations/youtube/VideoMetricsSyncService'
import { requireCronAuth } from '@/lib/security'
import { requireCronAuth, withCronExecution } from '@/lib/security'
// Monitoring state
let isRunning = false
@ -18,80 +18,76 @@ let lastRunAt: Date | null = null
* Scheduled every 6 hours via Vercel Cron.
*/
export async function GET(request: NextRequest): Promise<NextResponse> {
const authError = requireCronAuth(request)
if (authError) {
console.warn('[Cron] Unauthorized request to youtube-metrics-sync')
return authError
}
if (isRunning) {
return NextResponse.json(
{ error: 'Video metrics sync already in progress' },
{ status: 423 },
)
}
isRunning = true
const startedAt = new Date()
try {
const payload = await getPayload({ config })
const service = new VideoMetricsSyncService(payload)
// Find all active YouTube channels
const channels = await payload.find({
collection: 'youtube-channels',
where: {
status: { equals: 'active' },
},
limit: 0,
depth: 0,
})
console.log(`[Cron] Starting video metrics sync for ${channels.docs.length} channels`)
let totalSynced = 0
const errors: string[] = []
for (const channel of channels.docs) {
const channelName = (channel as any).name ?? `ID ${channel.id}`
try {
const result = await service.syncMetrics(channel.id as number)
totalSynced += result.syncedCount
if (result.errors.length > 0) {
errors.push(`Channel "${channelName}": ${result.errors.join('; ')}`)
}
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
errors.push(`Channel "${channelName}": ${message}`)
}
return withCronExecution(request, { endpoint: 'youtube-metrics-sync' }, async () => {
if (isRunning) {
return NextResponse.json(
{ error: 'Video metrics sync already in progress' },
{ status: 423 },
)
}
lastRunAt = startedAt
const duration = Date.now() - startedAt.getTime()
isRunning = true
const startedAt = new Date()
console.log(
`[Cron] Video metrics sync completed: ${totalSynced} videos synced, ${errors.length} errors, ${duration}ms`,
)
try {
const payload = await getPayload({ config })
const service = new VideoMetricsSyncService(payload)
return NextResponse.json({
success: errors.length === 0,
totalSynced,
channels: channels.docs.length,
errors,
duration,
})
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
console.error('[Cron] youtube-metrics-sync error:', error)
// Find all active YouTube channels
const channels = await payload.find({
collection: 'youtube-channels',
where: {
status: { equals: 'active' },
},
limit: 0,
depth: 0,
})
return NextResponse.json({ error: message }, { status: 500 })
} finally {
isRunning = false
}
console.log(`[Cron] Starting video metrics sync for ${channels.docs.length} channels`)
let totalSynced = 0
const errors: string[] = []
for (const channel of channels.docs) {
const channelName = (channel as any).name ?? `ID ${channel.id}`
try {
const result = await service.syncMetrics(channel.id as number)
totalSynced += result.syncedCount
if (result.errors.length > 0) {
errors.push(`Channel "${channelName}": ${result.errors.join('; ')}`)
}
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
errors.push(`Channel "${channelName}": ${message}`)
}
}
lastRunAt = startedAt
const duration = Date.now() - startedAt.getTime()
console.log(
`[Cron] Video metrics sync completed: ${totalSynced} videos synced, ${errors.length} errors, ${duration}ms`,
)
return NextResponse.json({
success: errors.length === 0,
totalSynced,
channels: channels.docs.length,
errors,
duration,
})
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
console.error('[Cron] youtube-metrics-sync error:', error)
return NextResponse.json({ error: message }, { status: 500 })
} finally {
isRunning = false
}
})
}
/**
@ -99,7 +95,7 @@ export async function GET(request: NextRequest): Promise<NextResponse> {
* Status check for monitoring.
*/
export async function HEAD(request: NextRequest): Promise<NextResponse> {
const authError = requireCronAuth(request)
const authError = requireCronAuth(request, 'youtube-metrics-sync')
if (authError) {
return new NextResponse(null, { status: authError.status })
}

View file

@ -2,41 +2,37 @@
import { NextRequest, NextResponse } from 'next/server'
import { runSync, getSyncStatus } from '@/lib/jobs/syncAllComments'
import { requireCronAuth } from '@/lib/security'
import { requireCronAuth, withCronExecution } from '@/lib/security'
/**
* GET /api/cron/youtube-sync
* Wird von externem Cron-Job aufgerufen (z.B. Vercel Cron, cron-job.org)
*/
export async function GET(request: NextRequest) {
const authError = requireCronAuth(request)
if (authError) {
console.warn('[Cron] Unauthorized request to youtube-sync')
return authError
}
return withCronExecution(request, { endpoint: 'youtube-sync' }, async () => {
console.log('[Cron] Starting scheduled YouTube sync')
console.log('[Cron] Starting scheduled YouTube sync')
const result = await runSync()
const result = await runSync()
if (result.success) {
const totalNew = result.results?.reduce((sum, r) => sum + r.newComments, 0) || 0
const totalUpdated = result.results?.reduce((sum, r) => sum + r.updatedComments, 0) || 0
if (result.success) {
const totalNew = result.results?.reduce((sum, r) => sum + r.newComments, 0) || 0
const totalUpdated = result.results?.reduce((sum, r) => sum + r.updatedComments, 0) || 0
return NextResponse.json({
success: true,
message: `Sync completed: ${totalNew} new, ${totalUpdated} updated comments`,
results: result.results,
})
}
return NextResponse.json({
success: true,
message: `Sync completed: ${totalNew} new, ${totalUpdated} updated comments`,
results: result.results,
})
}
return NextResponse.json(
{
success: false,
error: result.error,
},
{ status: 500 }
)
return NextResponse.json(
{
success: false,
error: result.error,
},
{ status: 500 },
)
})
}
/**
@ -44,7 +40,7 @@ export async function GET(request: NextRequest) {
* Status-Check für Monitoring
*/
export async function HEAD(request: NextRequest) {
const authError = requireCronAuth(request)
const authError = requireCronAuth(request, 'youtube-sync')
if (authError) {
return new NextResponse(null, { status: authError.status })
}

View file

@ -14,22 +14,104 @@ import { logAccessDenied } from '@/lib/audit/audit-service'
import {
publicApiLimiter,
rateLimitHeaders,
validateIpAccess,
createSafeLogger,
validateCsrf,
runApiGuards,
createApiErrorResponse,
} from '@/lib/security'
import {
asObject,
validateJsonBody,
validationIssue,
validationErrorResponse,
type ApiValidationResult,
} from '@/lib/validation'
const RATE_LIMIT_MAX = 10
const logger = createSafeLogger('API:GeneratePdf')
/** Minimal shape of an authenticated Payload user with tenant memberships. */
interface UserWithTenants {
  id: number
  email?: string
  // NOTE(review): presumably super admins bypass per-tenant checks — confirm
  // against userHasAccessToTenant, which is defined outside this view.
  isSuperAdmin?: boolean
  // Tenant memberships; `tenant` may be a populated object or a raw numeric ID.
  tenants?: Array<{
    tenant: { id: number } | number
  }>
}

/**
 * Normalized POST /api/generate-pdf request payload, as produced by
 * validateGeneratePdfBody from the raw untrusted JSON body.
 */
interface GeneratePdfBody {
  tenantId: number
  source: 'html' | 'url'
  html?: string // required by the validator when source === 'html'
  url?: string // required by the validator when source === 'url'
  options: Record<string, unknown> // renderer options, passed through as-is
  queued: boolean // defaults to true when absent from the request
  documentType?: string
  filename?: string
  priority?: 'high' | 'normal' | 'low'
}
/**
 * Validates and normalizes the POST /api/generate-pdf request body.
 *
 * Rules:
 * - `tenantId` must be a positive integer, given as a number or numeric string.
 * - `source` must be "html" or "url"; the matching `html`/`url` field is required.
 * - `options` falls back to `{}` unless it is a plain object; `queued` defaults to true.
 * - Malformed optional fields (`documentType`, `filename`, `priority`) are dropped,
 *   not rejected.
 *
 * @param input Raw, untrusted parsed JSON body.
 * @returns Valid result with normalized data, or the collected field issues.
 */
function validateGeneratePdfBody(input: unknown): ApiValidationResult<GeneratePdfBody> {
  const objectResult = asObject(input)
  if (!objectResult.valid) {
    return objectResult as ApiValidationResult<GeneratePdfBody>
  }

  const data = objectResult.data
  const issues: ReturnType<typeof validationIssue>[] = []

  // Only accept real numbers or non-empty numeric strings. A bare Number()
  // coercion would silently turn null/''/[] into 0 and true into 1, letting
  // nonsense tenant IDs pass validation.
  const rawTenantId = data.tenantId
  let tenantIdValue = Number.NaN
  if (typeof rawTenantId === 'number') {
    tenantIdValue = rawTenantId
  } else if (typeof rawTenantId === 'string' && rawTenantId.trim() !== '') {
    tenantIdValue = Number(rawTenantId)
  }
  if (!Number.isInteger(tenantIdValue) || tenantIdValue <= 0) {
    issues.push(validationIssue('tenantId', 'invalid_type', 'tenantId must be a positive integer'))
  }

  const source = data.source
  if (source !== 'html' && source !== 'url') {
    issues.push(validationIssue('source', 'invalid_value', 'source must be "html" or "url"'))
  }

  const html = typeof data.html === 'string' ? data.html : undefined
  const url = typeof data.url === 'string' ? data.url : undefined

  // The selected source determines which payload field is mandatory.
  if (source === 'html' && !html) {
    issues.push(validationIssue('html', 'required', 'html is required when source="html"'))
  }
  if (source === 'url' && !url) {
    issues.push(validationIssue('url', 'required', 'url is required when source="url"'))
  }

  const options =
    data.options && typeof data.options === 'object' && !Array.isArray(data.options)
      ? (data.options as Record<string, unknown>)
      : {}

  const queued = typeof data.queued === 'boolean' ? data.queued : true
  const documentType = typeof data.documentType === 'string' ? data.documentType : undefined
  const filename = typeof data.filename === 'string' ? data.filename : undefined
  const priority = ['high', 'normal', 'low'].includes(String(data.priority))
    ? (data.priority as 'high' | 'normal' | 'low')
    : undefined

  if (issues.length > 0) {
    return {
      valid: false,
      issues,
    }
  }

  return {
    valid: true,
    data: {
      tenantId: tenantIdValue,
      source,
      html,
      url,
      options,
      queued,
      documentType,
      filename,
      priority,
    },
  }
}
/**
* Prüft ob User Zugriff auf den angegebenen Tenant hat
*/
@ -65,106 +147,62 @@ function userHasAccessToTenant(user: UserWithTenants, tenantId: number): boolean
*/
export async function POST(req: NextRequest) {
try {
// IP-Allowlist prüfen
const ipCheck = validateIpAccess(req, 'generatePdf')
if (!ipCheck.allowed) {
logger.warn(`IP blocked: ${ipCheck.ip}`, { reason: ipCheck.reason })
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
// CSRF-Schutz
const csrfResult = validateCsrf(req)
if (!csrfResult.valid) {
logger.warn('CSRF validation failed', { reason: csrfResult.reason })
return NextResponse.json({ error: 'CSRF validation failed' }, { status: 403 })
}
const payload = await getPayload({ config })
// Authentifizierung prüfen
const { user } = await payload.auth({ headers: req.headers })
if (!user) {
return NextResponse.json({ error: 'Unauthorized - Login required' }, { status: 401 })
const guardResult = await runApiGuards(req, {
endpoint: '/api/generate-pdf',
ipEndpoint: 'generatePdf',
csrf: 'browser',
authProvider: payload,
requireUser: true,
rateLimiter: publicApiLimiter,
rateLimitMax: RATE_LIMIT_MAX,
rateLimitIdentifier: ({ user, ip }) =>
typeof user === 'object' && user && 'id' in user
? String((user as { id: unknown }).id)
: ip,
})
if (!guardResult.ok) {
return guardResult.response
}
const typedUser = user as UserWithTenants
const typedUser = guardResult.user as UserWithTenants
// Rate Limiting
const rateLimit = await publicApiLimiter.check(String(typedUser.id))
if (!rateLimit.allowed) {
return NextResponse.json(
{
error: 'Rate limit exceeded',
message: `Maximum ${RATE_LIMIT_MAX} requests per minute.`,
},
{
status: 429,
headers: rateLimitHeaders(rateLimit, RATE_LIMIT_MAX),
},
)
const bodyResult = await validateJsonBody(req, validateGeneratePdfBody)
if (!bodyResult.valid) {
return validationErrorResponse(bodyResult.issues)
}
const body = await req.json()
const {
tenantId,
source,
html,
url,
options = {},
queued = true,
options,
queued,
documentType,
filename,
priority,
} = body
// Validierung
if (!tenantId) {
return NextResponse.json({ error: 'Missing required field: tenantId' }, { status: 400 })
}
const numericTenantId = Number(tenantId)
if (isNaN(numericTenantId)) {
return NextResponse.json({ error: 'Invalid tenantId: must be a number' }, { status: 400 })
}
if (!source || !['html', 'url'].includes(source)) {
return NextResponse.json(
{ error: 'Invalid source: must be "html" or "url"' },
{ status: 400 },
)
}
if (source === 'html' && !html) {
return NextResponse.json(
{ error: 'Missing required field: html (for source="html")' },
{ status: 400 },
)
}
if (source === 'url' && !url) {
return NextResponse.json(
{ error: 'Missing required field: url (for source="url")' },
{ status: 400 },
)
}
} = bodyResult.data
// Zugriffskontrolle
if (!userHasAccessToTenant(typedUser, numericTenantId)) {
if (!userHasAccessToTenant(typedUser, tenantId)) {
await logAccessDenied(
payload,
`/api/generate-pdf (tenantId: ${numericTenantId})`,
`/api/generate-pdf (tenantId: ${tenantId})`,
typedUser.id,
user.email as string,
typedUser.email as string,
)
return NextResponse.json(
{ error: 'Forbidden - You do not have access to this tenant' },
{ status: 403 },
return createApiErrorResponse(
403,
'FORBIDDEN',
'You do not have access to this tenant',
)
}
const rlHeaders = rateLimitHeaders(rateLimit, RATE_LIMIT_MAX)
const rlHeaders = guardResult.rateLimit
? rateLimitHeaders(guardResult.rateLimit, RATE_LIMIT_MAX)
: undefined
// Queued PDF Generation (async)
if (queued) {
@ -177,7 +215,7 @@ export async function POST(req: NextRequest) {
}
const job = await enqueuePdf({
tenantId: numericTenantId,
tenantId,
source,
html,
url,
@ -203,9 +241,9 @@ export async function POST(req: NextRequest) {
// Direct PDF Generation (sync)
let result
if (source === 'html') {
result = await generatePdfFromHtml(html, options)
result = await generatePdfFromHtml(html || '', options)
} else {
result = await generatePdfFromUrl(url, options)
result = await generatePdfFromUrl(url || '', options)
}
if (!result.success) {
@ -299,12 +337,14 @@ export async function GET(req: NextRequest) {
}
const payload = await getPayload({ config })
// Authentifizierung prüfen
const { user } = await payload.auth({ headers: req.headers })
if (!user) {
return NextResponse.json({ error: 'Unauthorized - Login required' }, { status: 401 })
const guardResult = await runApiGuards(req, {
endpoint: '/api/generate-pdf',
csrf: 'none',
authProvider: payload,
requireUser: true,
})
if (!guardResult.ok) {
return guardResult.response
}
// Job-Status abrufen

View file

@ -4,6 +4,65 @@ import { NextResponse } from 'next/server'
import { getPayload } from 'payload'
import config from '@payload-config'
import { createNewsletterService } from '@/lib/email/newsletter-service'
import { createApiErrorResponse } from '@/lib/security'
import {
asObject,
optionalString,
optionalNumber,
validateJsonBody,
validationIssue,
validationErrorResponse,
type ApiValidationResult,
} from '@/lib/validation'
/**
 * Parsed newsletter unsubscribe request: either a signed token, or an
 * email address combined with a tenantId (enforced by validateUnsubscribeBody).
 */
interface UnsubscribeBody {
  token?: string
  email?: string
  tenantId?: number
}
/**
 * Validates the POST /api/newsletter/unsubscribe body.
 *
 * Accepts either a `token`, or an `email` together with a `tenantId`.
 * Field-level type errors are reported first; only when all fields are
 * well-typed is the "at least one identification method" rule checked.
 *
 * @param input Raw, untrusted parsed JSON body.
 * @returns Valid result with the optional fields, or the collected issues.
 */
function validateUnsubscribeBody(input: unknown): ApiValidationResult<UnsubscribeBody> {
  const objectResult = asObject(input)
  if (!objectResult.valid) {
    return objectResult as ApiValidationResult<UnsubscribeBody>
  }

  const fields = objectResult.data
  const token = optionalString(fields, 'token')
  const email = optionalString(fields, 'email')
  const tenant = optionalNumber(fields, 'tenantId')

  const fieldIssues = [
    ...(token.valid ? [] : token.issues),
    ...(email.valid ? [] : email.issues),
    ...(tenant.valid ? [] : tenant.issues),
  ]
  if (fieldIssues.length > 0) {
    return { valid: false, issues: fieldIssues }
  }

  // A request must identify the subscriber somehow: token alone, or the
  // email + tenantId pair. tenantId is compared against undefined so that
  // a (falsy) value of 0 still counts as provided.
  const hasToken = Boolean(token.data)
  const hasEmailPair = Boolean(email.data) && tenant.data !== undefined
  if (!hasToken && !hasEmailPair) {
    return {
      valid: false,
      issues: [
        validationIssue(
          'token|email+tenantId',
          'required',
          'Either token or email+tenantId must be provided',
        ),
      ],
    }
  }

  return {
    valid: true,
    data: {
      token: token.data,
      email: email.data,
      tenantId: tenant.data,
    },
  }
}
/**
* GET /api/newsletter/unsubscribe?token=<token>
@ -44,7 +103,12 @@ export async function GET(request: Request): Promise<Response> {
*/
export async function POST(request: Request): Promise<Response> {
try {
const body = await request.json()
const bodyResult = await validateJsonBody(request, validateUnsubscribeBody)
if (!bodyResult.valid) {
return validationErrorResponse(bodyResult.issues)
}
const body = bodyResult.data
const payload = await getPayload({ config })
const newsletterService = createNewsletterService(payload)
@ -57,7 +121,7 @@ export async function POST(request: Request): Promise<Response> {
}
// E-Mail-basierte Abmeldung (erfordert Tenant-ID)
if (body.email && body.tenantId) {
if (body.email && body.tenantId !== undefined) {
// Subscriber finden
const subscriber = await payload.find({
collection: 'newsletter-subscribers',
@ -103,13 +167,10 @@ export async function POST(request: Request): Promise<Response> {
status: result.success ? 200 : 400,
})
}
return NextResponse.json(
{
success: false,
message: 'Token oder E-Mail-Adresse fehlt.',
},
{ status: 400 },
return createApiErrorResponse(
400,
'VALIDATION_FAILED',
'Token oder E-Mail-Adresse fehlt.',
)
} catch (error) {
console.error('[Newsletter Unsubscribe] Error:', error)

View file

@ -17,40 +17,7 @@
import { getPayload } from 'payload'
import config from '@payload-config'
import { NextRequest, NextResponse } from 'next/server'
// Lazy imports für Security-Module um Initialisierungsfehler zu vermeiden
type SecurityModules = {
authLimiter?: { check: (ip: string) => Promise<{ allowed: boolean; remaining: number; resetIn: number }> }
rateLimitHeaders?: (result: unknown, max: number) => Record<string, string>
getClientIpFromRequest?: (req: NextRequest) => string
isIpBlocked?: (ip: string) => boolean
validateCsrf?: (req: NextRequest) => { valid: boolean; reason?: string }
}
let securityModules: SecurityModules = {}
let securityModulesLoaded = false
async function getSecurityModules(): Promise<SecurityModules> {
if (securityModulesLoaded) return securityModules
try {
const security = await import('@/lib/security')
securityModules = {
authLimiter: security.authLimiter,
rateLimitHeaders: security.rateLimitHeaders as SecurityModules['rateLimitHeaders'],
getClientIpFromRequest: security.getClientIpFromRequest,
isIpBlocked: security.isIpBlocked,
validateCsrf: security.validateCsrf,
}
} catch (err) {
console.warn('[Login] Security modules not available:', err)
securityModules = {}
}
securityModulesLoaded = true
return securityModules
}
import { authLimiter, runApiGuards } from '@/lib/security'
// Lazy import für Audit-Service
type AuditService = {
@ -101,68 +68,13 @@ function getClientInfo(req: NextRequest, ipAddress: string): { ipAddress: string
}
export async function POST(req: NextRequest): Promise<NextResponse> {
const security = await getSecurityModules()
if (
!security.getClientIpFromRequest ||
!security.isIpBlocked ||
!security.validateCsrf ||
!security.authLimiter ||
!security.rateLimitHeaders
) {
console.error('[Login] Required security modules are unavailable')
return NextResponse.json(
{ errors: [{ message: 'Security checks unavailable' }] },
{ status: 503 },
)
}
const clientIp = security.getClientIpFromRequest(req)
// 1. IP-Blocklist prüfen
try {
if (security.isIpBlocked(clientIp)) {
return NextResponse.json(
{ errors: [{ message: 'Access denied' }] },
{ status: 403 },
)
}
} catch (err) {
console.error('[Login] IP blocklist check failed:', err)
return NextResponse.json(
{ errors: [{ message: 'Security check failed' }] },
{ status: 503 },
)
}
// 2. CSRF-Schutz (nur für Browser-Requests)
// API-Requests ohne Origin-Header (CLI, Server-to-Server) brauchen kein CSRF
const origin = req.headers.get('origin')
const isApiRequest = !origin && req.headers.get('content-type')?.includes('application/json')
if (!isApiRequest) {
try {
const csrfResult = security.validateCsrf(req)
if (!csrfResult.valid) {
console.log('[Login] CSRF validation failed:', csrfResult.reason)
return NextResponse.json(
{ errors: [{ message: 'CSRF validation failed' }] },
{ status: 403 },
)
}
} catch (err) {
console.error('[Login] CSRF check failed:', err)
return NextResponse.json(
{ errors: [{ message: 'Security check failed' }] },
{ status: 503 },
)
}
}
// 3. Rate-Limiting prüfen
try {
const rateLimit = await security.authLimiter.check(clientIp)
if (!rateLimit.allowed) {
// Optionally log rate limit hit
const guardResult = await runApiGuards(req, {
endpoint: '/api/users/login',
blocklistOnly: true,
csrf: 'browser',
rateLimiter: authLimiter,
rateLimitMax: 5,
onRateLimit: async () => {
try {
const payload = await getPayload({ config })
const audit = await getAuditService()
@ -170,20 +82,12 @@ export async function POST(req: NextRequest): Promise<NextResponse> {
} catch {
// Ignore audit logging errors
}
const headers = security.rateLimitHeaders(rateLimit, 5)
return NextResponse.json(
{ errors: [{ message: 'Too many login attempts. Please try again later.' }] },
{ status: 429, headers },
)
}
} catch (err) {
console.error('[Login] Rate limiting check failed:', err)
return NextResponse.json(
{ errors: [{ message: 'Security check failed' }] },
{ status: 503 },
)
},
})
if (!guardResult.ok) {
return guardResult.response
}
const clientIp = guardResult.ip
// 4. Parse Request Body
let email: string | undefined
@ -283,14 +187,14 @@ export async function POST(req: NextRequest): Promise<NextResponse> {
reason = errorMessage
}
// Audit-Log für fehlgeschlagenen Login (optional, fail-safe)
try {
const payload = await getPayload({ config })
const audit = await getAuditService()
const clientInfo = getClientInfo(req, clientIp)
await audit.logLoginFailed?.(payload, email, reason, clientInfo)
console.log(`[Audit:Auth] Login failed for ${email}: ${reason} (IP: ${clientInfo.ipAddress})`)
} catch (auditErr) {
// Audit-Log für fehlgeschlagenen Login (optional, fail-safe)
try {
const payload = await getPayload({ config })
const audit = await getAuditService()
const clientInfo = getClientInfo(req, clientIp)
await audit.logLoginFailed?.(payload, email, reason, clientInfo)
console.log(`[Audit:Auth] Login failed for ${email}: ${reason} (IP: ${clientInfo.ipAddress})`)
} catch (auditErr) {
console.warn('[Login] Audit logging failed:', auditErr)
// Continue - don't let audit failure affect response
}

View file

@ -35,6 +35,7 @@ export const MonitoringLogs: CollectionConfig = {
{ label: 'Email', value: 'email' },
{ label: 'OAuth', value: 'oauth' },
{ label: 'Sync', value: 'sync' },
{ label: 'Security', value: 'security' },
],
admin: { readOnly: true },
},

View file

@ -63,6 +63,8 @@ export const MonitoringSnapshots: CollectionConfig = {
{ name: 'metaOAuth', type: 'json', admin: { readOnly: true } },
{ name: 'youtubeOAuth', type: 'json', admin: { readOnly: true } },
{ name: 'cronJobs', type: 'json', admin: { readOnly: true } },
{ name: 'secrets', type: 'json', admin: { readOnly: true } },
{ name: 'securityEvents', type: 'json', admin: { readOnly: true } },
],
},
{

View file

@ -15,15 +15,28 @@ export async function register() {
config: config.default,
})
const enableInProcessScheduler =
process.env.ENABLE_IN_PROCESS_SCHEDULER === 'true' || process.env.NODE_ENV !== 'production'
const schedulerMode = (
process.env.SCHEDULER_MODE ||
(process.env.NODE_ENV === 'production' ? 'external' : 'in-process')
).toLowerCase()
const explicitInProcess = process.env.ENABLE_IN_PROCESS_SCHEDULER === 'true'
const shouldRunInProcessScheduler =
schedulerMode === 'in-process' ||
(explicitInProcess && schedulerMode !== 'external')
if (enableInProcessScheduler) {
if (process.env.NODE_ENV === 'production' && shouldRunInProcessScheduler) {
console.warn(
'[Instrumentation] In-process scheduler is enabled in production. ' +
'Use SCHEDULER_MODE=external for multi-instance safe scheduling.',
)
}
if (shouldRunInProcessScheduler) {
const { initScheduledJobs } = await import('./jobs/scheduler')
initScheduledJobs(payload)
} else {
console.log(
'[Instrumentation] In-process scheduler disabled (set ENABLE_IN_PROCESS_SCHEDULER=true to enable).',
'[Instrumentation] In-process scheduler disabled (SCHEDULER_MODE=external).',
)
}

View file

@ -61,6 +61,25 @@ function validateEnvVar(name: string, value: string | undefined): string {
return value.trim()
}
/**
 * Fails fast on a misconfigured or expired secret lifecycle marker.
 *
 * Reads `<secretName>_EXPIRES_AT` from the environment. When it is unset this
 * is a no-op. A malformed timestamp always throws; a timestamp at or before
 * "now" throws only in production (development keeps running so local setups
 * with stale fixtures do not brick the server).
 *
 * @param secretName Base name of the secret env var (e.g. "PAYLOAD_SECRET").
 * @throws Error when the expiry value is not a valid date, or (in production)
 *         when the secret is already expired.
 */
function validateSecretExpiry(secretName: string): void {
  const envKey = `${secretName}_EXPIRES_AT`
  const rawExpiry = process.env[envKey]
  if (!rawExpiry) return

  const expiryMs = new Date(rawExpiry).getTime()
  if (Number.isNaN(expiryMs)) {
    throw new Error(
      `FATAL: ${envKey} must be a valid ISO timestamp (example: 2026-12-31T23:59:59Z).`,
    )
  }

  const isProduction = process.env.NODE_ENV === 'production'
  if (isProduction && expiryMs <= Date.now()) {
    throw new Error(
      `FATAL: ${secretName} is expired according to ${envKey}. Rotate secret before startup.`,
    )
  }
}
/**
* Validiert alle erforderlichen Environment-Variablen.
* Wirft einen Fehler und beendet den Server-Start, wenn Variablen fehlen.
@ -71,6 +90,10 @@ export function validateRequiredEnvVars(): RequiredEnvVars {
validateEnvVar('CRON_SECRET', process.env.CRON_SECRET)
}
validateSecretExpiry('PAYLOAD_SECRET')
validateSecretExpiry('CRON_SECRET')
validateSecretExpiry('CSRF_SECRET')
return {
PAYLOAD_SECRET: validateEnvVar('PAYLOAD_SECRET', process.env.PAYLOAD_SECRET),
DATABASE_URI: validateEnvVar('DATABASE_URI', process.env.DATABASE_URI),

View file

@ -17,9 +17,13 @@ import type {
OAuthTokenStatus,
CronStatuses,
CronJobStatus,
SecretsHealthStatus,
SecurityMetricsStatus,
PerformanceMetrics,
SystemMetrics,
} from './types.js'
import { checkSecretsHealth } from '../security/secrets-health.js'
import { getSecurityMetricsSnapshot } from '../security/security-observability.js'
// ============================================================================
// System Health
@ -339,7 +343,7 @@ export async function checkCronJobs(): Promise<CronStatuses> {
* are isolated and return safe defaults instead of failing the whole collection.
*/
export async function collectMetrics(): Promise<Omit<SystemMetrics, 'timestamp'>> {
const [system, redis, postgresql, pgbouncer, smtp, oauth, cronJobs] = await Promise.allSettled([
const [system, redis, postgresql, pgbouncer, smtp, oauth, cronJobs, secrets, securityEvents] = await Promise.allSettled([
checkSystemHealth(),
checkRedis(),
checkPostgresql(),
@ -347,6 +351,8 @@ export async function collectMetrics(): Promise<Omit<SystemMetrics, 'timestamp'>
checkSmtp(),
checkOAuthTokens(),
checkCronJobs(),
Promise.resolve(checkSecretsHealth()),
Promise.resolve(getSecurityMetricsSnapshot()),
])
// Load performance tracker lazily to avoid circular dependencies
@ -388,6 +394,20 @@ export async function collectMetrics(): Promise<Omit<SystemMetrics, 'timestamp'>
youtubeSync: { lastRun: '', status: 'unknown' },
}
const secretsDefaults: SecretsHealthStatus = {
status: 'critical',
checkedAt: new Date().toISOString(),
missing: [],
expiringSoon: [],
expired: [],
rotationOverdue: [],
}
const securityEventsDefaults: SecurityMetricsStatus = {
windowMs: 300000,
counters: [],
}
const systemDefaults: SystemHealth = {
cpuUsagePercent: 0,
memoryUsedMB: 0,
@ -417,6 +437,8 @@ export async function collectMetrics(): Promise<Omit<SystemMetrics, 'timestamp'>
metaOAuth: oauthResult.metaOAuth,
youtubeOAuth: oauthResult.youtubeOAuth,
cronJobs: settled(cronJobs, cronDefaults),
secrets: settled(secrets, secretsDefaults),
securityEvents: settled(securityEvents, securityEventsDefaults),
},
performance,
}

View file

@ -79,11 +79,48 @@ export interface CronStatuses {
youtubeSync: CronJobStatus
}
/** A secret approaching its configured expiry timestamp. */
export interface SecretExpiringSoon {
  name: string
  expiresAt: string // ISO timestamp
  daysRemaining: number
}

/** A secret whose configured expiry timestamp has already passed. */
export interface SecretExpired {
  name: string
  expiresAt: string // ISO timestamp
}

/** A secret whose last rotation is older than the allowed maximum age. */
export interface SecretRotationOverdue {
  name: string
  rotatedAt: string // ISO timestamp of the last rotation
  ageDays: number
}

/**
 * Aggregate secret-lifecycle health snapshot (produced by checkSecretsHealth).
 * collectMetrics falls back to status 'critical' with empty lists when the
 * check itself fails.
 */
export interface SecretsHealthStatus {
  status: 'ok' | 'warning' | 'critical'
  checkedAt: string // ISO timestamp of when the check ran
  missing: string[]
  expiringSoon: SecretExpiringSoon[]
  expired: SecretExpired[]
  rotationOverdue: SecretRotationOverdue[]
}

/**
 * Rolling-window security event counters (see getSecurityMetricsSnapshot).
 */
export interface SecurityMetricsStatus {
  windowMs: number // length of the aggregation window in milliseconds
  counters: Array<{
    eventType: string // e.g. 'pdf_ssrf_blocked' as emitted by recordSecurityEvent
    count: number
    windowStart: string // ISO timestamp of the window start
  }>
}

/** Health statuses of external/peripheral systems, assembled by collectMetrics. */
export interface ExternalStatuses {
  smtp: SmtpStatus
  metaOAuth: OAuthTokenStatus
  youtubeOAuth: OAuthTokenStatus
  cronJobs: CronStatuses
  secrets: SecretsHealthStatus
  securityEvents: SecurityMetricsStatus
}
// === Performance ===
@ -136,7 +173,14 @@ export interface LogEvent {
export type AlertCondition = 'gt' | 'lt' | 'eq' | 'gte' | 'lte'
export type AlertSeverity = 'warning' | 'error' | 'critical'
export type LogLevel = 'debug' | 'info' | 'warn' | 'error' | 'fatal'
export type LogSource = 'payload' | 'queue-worker' | 'cron' | 'email' | 'oauth' | 'sync'
export type LogSource =
| 'payload'
| 'queue-worker'
| 'cron'
| 'email'
| 'oauth'
| 'sync'
| 'security'
// === Performance Tracker Entry ===
export interface PerformanceEntry {

View file

@ -10,6 +10,7 @@ import * as fs from 'fs/promises'
import * as path from 'path'
import { lookup } from 'dns/promises'
import { isIP } from 'net'
import { recordSecurityEvent } from '../security/security-observability'
// Umgebungsvariablen
const PDF_OUTPUT_DIR = process.env.PDF_OUTPUT_DIR || '/tmp/payload-pdfs'
@ -69,40 +70,54 @@ function hostnameMatchesAllowedHost(hostname: string): boolean {
})
}
async function validatePdfSourceUrl(rawUrl: string): Promise<{ valid: boolean; reason?: string }> {
/**
 * Records a rejected PDF source URL as an SSRF security event and builds
 * the failed-validation result returned to the caller.
 */
function denyPdfSourceUrl(rawUrl: string, reason: string, host?: string): { valid: boolean; reason: string } {
  const eventContext = {
    endpoint: '/api/generate-pdf',
    reason,
    targetUrl: rawUrl,
    host,
  }
  recordSecurityEvent('pdf_ssrf_blocked', eventContext)
  return { valid: false, reason }
}
export async function validatePdfSourceUrl(rawUrl: string): Promise<{ valid: boolean; reason?: string }> {
let parsedUrl: URL
try {
parsedUrl = new URL(rawUrl)
} catch {
return { valid: false, reason: 'Invalid URL format' }
return denyPdfSourceUrl(rawUrl, 'Invalid URL format')
}
const isHttps = parsedUrl.protocol === 'https:'
const isAllowedHttp = parsedUrl.protocol === 'http:' && PDF_ALLOW_HTTP_URLS
if (!isHttps && !isAllowedHttp) {
return { valid: false, reason: 'Only HTTPS URLs are allowed (or HTTP with PDF_ALLOW_HTTP_URLS=true in non-production)' }
return denyPdfSourceUrl(
rawUrl,
'Only HTTPS URLs are allowed (or HTTP with PDF_ALLOW_HTTP_URLS=true in non-production)',
parsedUrl.hostname,
)
}
if (parsedUrl.username || parsedUrl.password) {
return { valid: false, reason: 'URL credentials are not allowed' }
return denyPdfSourceUrl(rawUrl, 'URL credentials are not allowed', parsedUrl.hostname)
}
const hostname = parsedUrl.hostname.trim().toLowerCase()
if (!hostname) {
return { valid: false, reason: 'URL host is required' }
return denyPdfSourceUrl(rawUrl, 'URL host is required')
}
if (hostname === 'localhost' || hostname === '0.0.0.0') {
return { valid: false, reason: 'Localhost is not allowed' }
return denyPdfSourceUrl(rawUrl, 'Localhost is not allowed', hostname)
}
if (!hostnameMatchesAllowedHost(hostname)) {
return { valid: false, reason: 'Host is not in PDF_ALLOWED_HOSTS allowlist' }
return denyPdfSourceUrl(rawUrl, 'Host is not in PDF_ALLOWED_HOSTS allowlist', hostname)
}
if (isIP(hostname)) {
if (isPrivateOrLoopbackIp(hostname)) {
return { valid: false, reason: 'Private or loopback IP addresses are not allowed' }
return denyPdfSourceUrl(rawUrl, 'Private or loopback IP addresses are not allowed', hostname)
}
return { valid: true }
}
@ -110,14 +125,14 @@ async function validatePdfSourceUrl(rawUrl: string): Promise<{ valid: boolean; r
try {
const records = await lookup(hostname, { all: true, verbatim: true })
if (!records.length) {
return { valid: false, reason: 'Host could not be resolved' }
return denyPdfSourceUrl(rawUrl, 'Host could not be resolved', hostname)
}
if (records.some((record) => isPrivateOrLoopbackIp(record.address))) {
return { valid: false, reason: 'Host resolves to private or loopback addresses' }
return denyPdfSourceUrl(rawUrl, 'Host resolves to private or loopback addresses', hostname)
}
} catch {
return { valid: false, reason: 'Host lookup failed' }
return denyPdfSourceUrl(rawUrl, 'Host lookup failed', hostname)
}
return { valid: true }

View file

@ -0,0 +1,233 @@
import { NextRequest, NextResponse } from 'next/server'
import { validateCsrf } from './csrf'
import { getClientIpFromRequest, isIpBlocked, validateIpAccess } from './ip-allowlist'
import { rateLimitHeaders, type RateLimitResult } from './rate-limiter'
import { recordSecurityEvent } from './security-observability'
// IP allowlist endpoint groups understood by validateIpAccess.
type IpEndpoint = 'sendEmail' | 'admin' | 'generatePdf' | 'webhooks'
// Minimal auth interface so the guards stay decoupled from the auth backend.
type AuthProvider = {
  auth: (args: { headers: Headers }) => Promise<{ user?: unknown | null }>
}
// Minimal rate limiter interface: check() one identifier per request.
type RateLimiter = {
  check: (identifier: string) => Promise<RateLimitResult>
}
/** Machine-readable error codes embedded in guard failure responses. */
export type ApiErrorCode =
  | 'ACCESS_DENIED'
  | 'UNAUTHORIZED'
  | 'FORBIDDEN'
  | 'CSRF_INVALID'
  | 'RATE_LIMITED'
  | 'SECURITY_UNAVAILABLE'
  | 'VALIDATION_FAILED'
  | 'INVALID_REQUEST'
/** Configuration for runApiGuards. */
export interface ApiGuardOptions {
  // Logical endpoint name used in security event context.
  endpoint: string
  // IP allowlist group to enforce; omit to skip allowlist checks.
  ipEndpoint?: IpEndpoint
  // Only consult the IP blocklist (no allowlist) when true.
  blocklistOnly?: boolean
  // 'browser' (default) skips CSRF for requests without Origin/Referer;
  // 'required' always validates; 'none' disables CSRF checks.
  csrf?: 'none' | 'browser' | 'required'
  authProvider?: AuthProvider
  // Respond 401 when no authenticated user is resolved.
  requireUser?: boolean
  rateLimiter?: RateLimiter
  // When set, X-RateLimit-* headers are attached to 429 responses.
  rateLimitMax?: number
  // Rate-limit bucket key; defaults to user id when available, otherwise IP.
  rateLimitIdentifier?: string | ((args: { ip: string; user: unknown | null }) => string)
  // Optional hook on rate-limit rejection; its errors are swallowed.
  onRateLimit?: (args: {
    endpoint: string
    ip: string
    user: unknown | null
    result: RateLimitResult
  }) => Promise<void> | void
}
/** All guards passed; carries the resolved ip/user and rate-limit state. */
export interface ApiGuardSuccess {
  ok: true
  ip: string
  user: unknown | null
  rateLimit?: RateLimitResult
}
/** A guard failed; response is ready to return from the route handler. */
export interface ApiGuardFailure {
  ok: false
  response: NextResponse
}
export type ApiGuardResult = ApiGuardSuccess | ApiGuardFailure
/**
 * Builds the standard JSON error envelope ({ error: { code, message, details } })
 * shared by all guarded API routes.
 */
export function createApiErrorResponse(
  status: number,
  code: ApiErrorCode,
  message: string,
  details?: unknown,
  headers?: HeadersInit,
): NextResponse {
  const body = {
    error: { code, message, details },
  }
  return NextResponse.json(body, { status, headers })
}
/**
 * Heuristic: browser-originated requests carry an Origin and/or Referer
 * header, so a request with neither is treated as server-to-server.
 */
export function isServerToServerRequest(req: NextRequest): boolean {
  const origin = req.headers.get('origin')
  const referer = req.headers.get('referer')
  // De Morgan of (!origin && !referer); empty strings also count as absent.
  return !(origin || referer)
}
/**
 * Runs the configured security checks for an API route, in order:
 * IP blocklist/allowlist, CSRF, authentication, then rate limiting.
 * Records a security event for every rejection and fails closed with a
 * 503 if any guard itself throws.
 *
 * @param req     Incoming request.
 * @param options See ApiGuardOptions; csrf defaults to 'browser'.
 * @returns Either { ok: true, ip, user, rateLimit } or { ok: false, response }.
 */
export async function runApiGuards(
  req: NextRequest,
  options: ApiGuardOptions,
): Promise<ApiGuardResult> {
  const csrfMode = options.csrf || 'browser'
  let ip = 'unknown'
  let user: unknown | null = null
  try {
    // Blocklist-only mode: reject known-bad IPs without allowlist checks.
    if (options.blocklistOnly) {
      ip = getClientIpFromRequest(req)
      if (isIpBlocked(ip)) {
        recordSecurityEvent('ip_access_denied', {
          endpoint: options.endpoint,
          ip,
          reason: `IP ${ip} is blocked`,
        })
        return {
          ok: false,
          response: createApiErrorResponse(403, 'ACCESS_DENIED', 'Access denied'),
        }
      }
    }
    // Full allowlist validation for the configured endpoint group.
    if (options.ipEndpoint) {
      const ipCheck = validateIpAccess(req, options.ipEndpoint)
      ip = ipCheck.ip
      if (!ipCheck.allowed) {
        recordSecurityEvent('ip_access_denied', {
          endpoint: options.endpoint,
          ip,
          reason: ipCheck.reason,
        })
        return {
          ok: false,
          response: createApiErrorResponse(403, 'ACCESS_DENIED', 'Access denied'),
        }
      }
    }
    // CSRF: in 'browser' mode, server-to-server requests (no Origin/Referer)
    // are exempt; 'required' validates unconditionally.
    if (csrfMode !== 'none') {
      const shouldValidateCsrf = csrfMode === 'required' || !isServerToServerRequest(req)
      if (shouldValidateCsrf) {
        const csrfResult = validateCsrf(req)
        if (!csrfResult.valid) {
          recordSecurityEvent('csrf_blocked', {
            endpoint: options.endpoint,
            ip,
            reason: csrfResult.reason,
          })
          return {
            ok: false,
            response: createApiErrorResponse(403, 'CSRF_INVALID', 'CSRF validation failed'),
          }
        }
      }
    }
    // Resolve the authenticated user (if an auth provider is configured).
    if (options.authProvider) {
      const authResult = await options.authProvider.auth({ headers: req.headers })
      user = authResult.user || null
    }
    if (options.requireUser && !user) {
      recordSecurityEvent('auth_blocked', {
        endpoint: options.endpoint,
        ip,
        reason: 'Missing authenticated user',
      })
      return {
        ok: false,
        response: createApiErrorResponse(401, 'UNAUTHORIZED', 'Unauthorized'),
      }
    }
    // Rate limiting: bucket key is (in priority order) the custom identifier,
    // the authenticated user's id, then the client IP.
    let rateLimit: RateLimitResult | undefined
    if (options.rateLimiter) {
      const rateLimitIdentifier =
        typeof options.rateLimitIdentifier === 'function'
          ? options.rateLimitIdentifier({ ip, user })
          : options.rateLimitIdentifier ||
            (typeof user === 'object' && user && 'id' in user ? String((user as { id: unknown }).id) : ip)
      rateLimit = await options.rateLimiter.check(rateLimitIdentifier)
      if (!rateLimit.allowed) {
        recordSecurityEvent('rate_limit_blocked', {
          endpoint: options.endpoint,
          ip,
          limiter: options.rateLimiter?.constructor?.name || 'custom',
        })
        if (options.onRateLimit) {
          try {
            await options.onRateLimit({
              endpoint: options.endpoint,
              ip,
              user,
              result: rateLimit,
            })
          } catch {
            // Guard responses must not fail because optional callbacks fail.
          }
        }
        // X-RateLimit-* headers only make sense when the max is known.
        const headers =
          options.rateLimitMax && options.rateLimitMax > 0
            ? rateLimitHeaders(rateLimit, options.rateLimitMax)
            : undefined
        return {
          ok: false,
          response: createApiErrorResponse(
            429,
            'RATE_LIMITED',
            'Rate limit exceeded',
            {
              remaining: rateLimit.remaining,
              retryAfter: rateLimit.retryAfter,
            },
            headers,
          ),
        }
      }
    }
    return {
      ok: true,
      ip,
      user,
      rateLimit,
    }
  } catch (error) {
    // Fail closed: an exception in any guard denies the request with 503.
    recordSecurityEvent('auth_blocked', {
      endpoint: options.endpoint,
      ip,
      reason: error instanceof Error ? error.message : String(error),
    }, 'error')
    return {
      ok: false,
      response: createApiErrorResponse(
        503,
        'SECURITY_UNAVAILABLE',
        'Security checks unavailable',
      ),
    }
  }
}

View file

@ -1,5 +1,6 @@
import { timingSafeEqual } from 'crypto'
import { NextRequest, NextResponse } from 'next/server'
import { recordSecurityEvent } from './security-observability'
function safeTokenEquals(left: string, right: string): boolean {
const leftBuffer = Buffer.from(left)
@ -16,21 +17,33 @@ function safeTokenEquals(left: string, right: string): boolean {
* Enforces authorization for cron endpoints.
* Returns a response when auth fails, otherwise null.
*/
export function requireCronAuth(request: NextRequest): NextResponse | null {
export function requireCronAuth(request: NextRequest, endpoint = 'unknown'): NextResponse | null {
const secret = process.env.CRON_SECRET?.trim()
// Fail closed when secret is missing.
if (!secret) {
recordSecurityEvent('cron_secret_missing', {
endpoint,
reason: 'CRON_SECRET is not configured',
}, 'error')
return NextResponse.json({ error: 'Service unavailable' }, { status: 503 })
}
const authorization = request.headers.get('authorization')
if (!authorization?.startsWith('Bearer ')) {
recordSecurityEvent('cron_auth_rejected', {
endpoint,
reason: 'Missing or invalid Authorization header',
})
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const token = authorization.slice('Bearer '.length).trim()
if (!token || !safeTokenEquals(token, secret)) {
recordSecurityEvent('cron_auth_rejected', {
endpoint,
reason: 'Bearer token mismatch',
})
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}

View file

@ -0,0 +1,185 @@
import { randomUUID } from 'crypto'
import { NextRequest, NextResponse } from 'next/server'
import { getRedisClient, isRedisAvailable, setRedisAvailable } from '../redis'
import { requireCronAuth } from './cron-auth'
import { recordSecurityEvent } from './security-observability'
/** Per-invocation options for withCronExecution. */
interface CronExecutionOptions {
  // Endpoint name; namespaces the lock and idempotency keys.
  endpoint: string
  lockTtlMs?: number
  idempotencyTtlMs?: number
}
// TTL defaults are read once at module load; fallbacks applied in the getters below.
const DEFAULT_LOCK_TTL_MS = parseInt(process.env.CRON_LOCK_TTL_MS || '600000', 10)
const DEFAULT_IDEMPOTENCY_TTL_MS = parseInt(process.env.CRON_IDEMPOTENCY_TTL_MS || '900000', 10)
// In-memory fallbacks used when Redis is unavailable (single-instance only).
const memoryLocks = new Map<string, { token: string; expiresAt: number }>()
const memoryIdempotency = new Map<string, number>()
/** Lock TTL in ms; falls back to 10 minutes when the env value is unusable. */
function getLockTtlMs(): number {
  const configured = DEFAULT_LOCK_TTL_MS
  if (Number.isFinite(configured) && configured > 0) {
    return configured
  }
  return 600000
}
/** Idempotency-key TTL in ms; falls back to 15 minutes when misconfigured. */
function getIdempotencyTtlMs(): number {
  const configured = DEFAULT_IDEMPOTENCY_TTL_MS
  if (Number.isFinite(configured) && configured > 0) {
    return configured
  }
  return 900000
}
/** Drops expired entries from both in-memory fallback stores. */
function cleanupMemoryStores(now: number): void {
  for (const [key, lock] of memoryLocks) {
    if (lock.expiresAt <= now) memoryLocks.delete(key)
  }
  for (const [key, expiry] of memoryIdempotency) {
    if (expiry <= now) memoryIdempotency.delete(key)
  }
}
/**
 * Tries to take the cross-instance execution lock.
 * Uses Redis SET NX PX when available; otherwise falls back to the
 * in-memory store (only safe for single-instance deployments).
 *
 * @returns true when this caller now holds the lock.
 */
async function acquireExecutionLock(lockKey: string, token: string, ttlMs: number): Promise<boolean> {
  const now = Date.now()
  cleanupMemoryStores(now)
  if (isRedisAvailable()) {
    try {
      const redis = getRedisClient()
      if (redis) {
        // NX: set only when absent; PX: auto-expire so a crashed worker
        // cannot hold the lock forever.
        const result = await redis.set(lockKey, token, 'PX', ttlMs, 'NX')
        return result === 'OK'
      }
    } catch {
      // Mark Redis unhealthy and degrade to the in-memory fallback.
      setRedisAvailable(false)
    }
  }
  const existing = memoryLocks.get(lockKey)
  if (existing && existing.expiresAt > now) {
    return false
  }
  memoryLocks.set(lockKey, {
    token,
    expiresAt: now + ttlMs,
  })
  return true
}
/**
 * Releases the execution lock, but only when the stored token matches, so
 * one worker cannot delete a lock that has expired and been re-acquired by
 * another worker.
 * NOTE(review): the Redis get/compare/del sequence is not atomic; a Lua
 * script would close the small race window — confirm whether that matters.
 */
async function releaseExecutionLock(lockKey: string, token: string): Promise<void> {
  if (isRedisAvailable()) {
    try {
      const redis = getRedisClient()
      if (redis) {
        const current = await redis.get(lockKey)
        if (current === token) {
          await redis.del(lockKey)
        }
      }
    } catch {
      setRedisAvailable(false)
    }
  }
  // Always clear the in-memory fallback entry if we own it.
  const inMemory = memoryLocks.get(lockKey)
  if (inMemory?.token === token) {
    memoryLocks.delete(lockKey)
  }
}
/**
 * Marks an idempotency key as consumed for ttlMs.
 * Redis SET NX is the authoritative store; the in-memory map is the
 * single-instance fallback.
 *
 * @returns true when the key was fresh (first use), false for a duplicate.
 */
async function markIdempotencyKey(idempotencyKey: string, ttlMs: number): Promise<boolean> {
  const now = Date.now()
  cleanupMemoryStores(now)
  if (isRedisAvailable()) {
    try {
      const redis = getRedisClient()
      if (redis) {
        const result = await redis.set(idempotencyKey, String(now), 'PX', ttlMs, 'NX')
        return result === 'OK'
      }
    } catch {
      setRedisAvailable(false)
    }
  }
  // Memory fallback stores the expiry timestamp per key.
  const existing = memoryIdempotency.get(idempotencyKey)
  if (existing && existing > now) {
    return false
  }
  memoryIdempotency.set(idempotencyKey, now + ttlMs)
  return true
}
/**
 * Reads the caller-supplied idempotency key from the x-idempotency-key
 * header, falling back to the idempotencyKey query parameter.
 */
function readIdempotencyKey(request: NextRequest): string | null {
  const fromHeader = request.headers.get('x-idempotency-key')?.trim()
  const fromQuery = request.nextUrl.searchParams.get('idempotencyKey')?.trim()
  return fromHeader || fromQuery || null
}
export async function withCronExecution(
request: NextRequest,
options: CronExecutionOptions,
handler: () => Promise<NextResponse>,
): Promise<NextResponse> {
const authError = requireCronAuth(request, options.endpoint)
if (authError) {
return authError
}
const idempotencyTtlMs = options.idempotencyTtlMs ?? getIdempotencyTtlMs()
const idempotencyValue = readIdempotencyKey(request)
if (idempotencyValue) {
const idempotencyKey = `cron:idempotency:${options.endpoint}:${idempotencyValue}`
const marked = await markIdempotencyKey(idempotencyKey, idempotencyTtlMs)
if (!marked) {
recordSecurityEvent('cron_duplicate_ignored', {
endpoint: options.endpoint,
idempotencyKey: idempotencyValue,
}, 'info')
return NextResponse.json(
{
success: true,
duplicate: true,
message: 'Duplicate cron request ignored',
},
{ status: 202 },
)
}
}
const lockKey = `cron:lock:${options.endpoint}`
const lockToken = randomUUID()
const lockTtlMs = options.lockTtlMs ?? getLockTtlMs()
const acquired = await acquireExecutionLock(lockKey, lockToken, lockTtlMs)
if (!acquired) {
recordSecurityEvent('cron_execution_locked', {
endpoint: options.endpoint,
reason: 'Another worker still holds the execution lock',
})
return NextResponse.json(
{ error: 'Cron job already running' },
{ status: 423 },
)
}
try {
return await handler()
} finally {
await releaseExecutionLock(lockKey, lockToken)
}
}
/** Test helper: clears the in-memory lock and idempotency fallback stores. */
export function resetCronCoordinationStateForTests(): void {
  memoryLocks.clear()
  memoryIdempotency.clear()
}

View file

@ -55,3 +55,28 @@ export {
// Cron Endpoint Authentication
export { requireCronAuth } from './cron-auth'
export {
withCronExecution,
resetCronCoordinationStateForTests,
} from './cron-coordination'
export { checkSecretsHealth } from './secrets-health'
// API Route Guards
export {
runApiGuards,
createApiErrorResponse,
isServerToServerRequest,
type ApiGuardOptions,
type ApiGuardResult,
type ApiErrorCode,
} from './api-guards'
// Security Observability
export {
recordSecurityEvent,
getSecurityMetricsSnapshot,
resetSecurityObservabilityForTests,
type SecurityEventType,
type SecurityEventLevel,
type SecurityEventContext,
} from './security-observability'

View file

@ -6,6 +6,7 @@
*/
import { NextRequest } from 'next/server'
import { recordSecurityEvent } from './security-observability'
/**
* Prüft ob eine IP-Adresse in einem CIDR-Block liegt
@ -273,6 +274,11 @@ export function validateIpAccess(
// Erst Blocklist prüfen
if (isIpBlocked(ip)) {
recordSecurityEvent('ip_access_denied', {
ip,
endpoint,
reason: `IP ${ip} is blocked`,
})
return {
allowed: false,
ip,
@ -282,6 +288,13 @@ export function validateIpAccess(
// Dann Allowlist prüfen
const allowlistResult = isIpAllowed(ip, endpoint)
if (!allowlistResult.allowed) {
recordSecurityEvent('ip_access_denied', {
ip,
endpoint,
reason: allowlistResult.reason,
})
}
return {
...allowlistResult,

View file

@ -12,6 +12,7 @@
*/
import { getRedisClient, isRedisAvailable, setRedisAvailable } from '../redis'
import { recordSecurityEvent } from './security-observability'
export interface RateLimitConfig {
/** Eindeutiger Name für diesen Limiter */
@ -164,6 +165,10 @@ async function checkRateLimitRedis(
const resetIn = windowStart + config.windowMs - now
if ((count as number) > config.maxRequests) {
recordSecurityEvent('rate_limit_blocked', {
limiter: config.name,
identifier: key,
})
return {
allowed: false,
remaining: 0,
@ -209,6 +214,10 @@ function checkRateLimitMemory(
// Limit erreicht
if (entry.count >= config.maxRequests) {
const resetIn = entry.windowStart + config.windowMs - now
recordSecurityEvent('rate_limit_blocked', {
limiter: config.name,
identifier,
})
return {
allowed: false,
remaining: 0,

View file

@ -0,0 +1,112 @@
import type { SecretsHealthStatus } from '../monitoring/types'
/** One secret to monitor plus whether its absence is critical in production. */
interface SecretCheckConfig {
  name: string
  requiredInProduction: boolean
}
// Known secrets; expiry/rotation metadata is read from the companion env
// vars <NAME>_EXPIRES_AT and <NAME>_ROTATED_AT.
const SECRET_CONFIGS: SecretCheckConfig[] = [
  { name: 'PAYLOAD_SECRET', requiredInProduction: true },
  { name: 'CRON_SECRET', requiredInProduction: true },
  { name: 'CSRF_SECRET', requiredInProduction: false },
  { name: 'SMTP_PASS', requiredInProduction: false },
  { name: 'GOOGLE_CLIENT_SECRET', requiredInProduction: false },
  { name: 'META_APP_SECRET', requiredInProduction: false },
  { name: 'REDIS_PASSWORD', requiredInProduction: false },
]
/** Parses a date string; returns null for undefined, empty, or invalid input. */
function parseDate(value: string | undefined): Date | null {
  if (!value) return null
  const candidate = new Date(value)
  if (Number.isNaN(candidate.getTime())) {
    return null
  }
  return candidate
}
/** Warning window (days) before secret expiry; env override, default 14. */
function getWarningDays(): number {
  const parsed = parseInt(process.env.SECRET_EXPIRY_WARNING_DAYS || '14', 10)
  if (Number.isNaN(parsed) || parsed < 1) {
    return 14
  }
  return parsed
}
/** Maximum allowed secret age (days) before rotation is overdue; default 90. */
function getMaxRotationDays(): number {
  const parsed = parseInt(process.env.SECRET_ROTATION_MAX_DAYS || '90', 10)
  if (Number.isNaN(parsed) || parsed < 1) {
    return 90
  }
  return parsed
}
/** True when the env var is unset, empty, or whitespace-only. */
function isMissingSecret(name: string): boolean {
  const value = process.env[name]?.trim()
  return !value
}
/**
 * Evaluates configured secrets for presence, upcoming expiry, and overdue
 * rotation.
 *
 * - "missing" is reported only in production and only for secrets flagged
 *   requiredInProduction.
 * - Expiry and rotation-age checks run in EVERY environment whenever the
 *   secret is present and has <NAME>_EXPIRES_AT / <NAME>_ROTATED_AT metadata.
 *
 * Fix: secrets with requiredInProduction=true were previously skipped
 * entirely outside production, so their expiry/rotation metadata was never
 * evaluated in development or staging.
 *
 * @param referenceDate Clock reference, injectable for tests (defaults to now).
 */
export function checkSecretsHealth(referenceDate = new Date()): SecretsHealthStatus {
  const warningDays = getWarningDays()
  const maxRotationDays = getMaxRotationDays()
  const isProduction = process.env.NODE_ENV === 'production'
  const missing: string[] = []
  const expiringSoon: SecretsHealthStatus['expiringSoon'] = []
  const expired: SecretsHealthStatus['expired'] = []
  const rotationOverdue: SecretsHealthStatus['rotationOverdue'] = []
  const msPerDay = 24 * 60 * 60 * 1000
  for (const config of SECRET_CONFIGS) {
    if (isMissingSecret(config.name)) {
      // Absence is only critical for production-required secrets in
      // production; otherwise there is nothing further to check.
      if (isProduction && config.requiredInProduction) {
        missing.push(config.name)
      }
      continue
    }
    const expiresAt = parseDate(process.env[`${config.name}_EXPIRES_AT`])
    if (expiresAt) {
      const diffMs = expiresAt.getTime() - referenceDate.getTime()
      const daysRemaining = Math.floor(diffMs / msPerDay)
      if (diffMs <= 0) {
        expired.push({
          name: config.name,
          expiresAt: expiresAt.toISOString(),
        })
      } else if (daysRemaining <= warningDays) {
        expiringSoon.push({
          name: config.name,
          expiresAt: expiresAt.toISOString(),
          daysRemaining,
        })
      }
    }
    const rotatedAt = parseDate(process.env[`${config.name}_ROTATED_AT`])
    if (rotatedAt) {
      const ageDays = Math.floor(
        (referenceDate.getTime() - rotatedAt.getTime()) / msPerDay,
      )
      if (ageDays > maxRotationDays) {
        rotationOverdue.push({
          name: config.name,
          rotatedAt: rotatedAt.toISOString(),
          ageDays,
        })
      }
    }
  }
  // Any missing or expired secret is critical; warnings for the soft cases.
  const status: SecretsHealthStatus['status'] =
    missing.length > 0 || expired.length > 0
      ? 'critical'
      : expiringSoon.length > 0 || rotationOverdue.length > 0
        ? 'warning'
        : 'ok'
  return {
    status,
    checkedAt: referenceDate.toISOString(),
    missing,
    expiringSoon,
    expired,
    rotationOverdue,
  }
}

View file

@ -0,0 +1,177 @@
import { createMonitoringLogger } from '../monitoring/monitoring-logger'
/** Categories of security events tracked by the windowed counters below. */
export type SecurityEventType =
  | 'cron_auth_rejected'
  | 'cron_secret_missing'
  | 'cron_execution_locked'
  | 'cron_duplicate_ignored'
  | 'pdf_ssrf_blocked'
  | 'rate_limit_blocked'
  | 'ip_access_denied'
  | 'csrf_blocked'
  | 'auth_blocked'
  | 'request_validation_failed'
/** Log level used when recording an event. */
export type SecurityEventLevel = 'info' | 'warn' | 'error'
/** Structured context merged into the security event log entry. */
export interface SecurityEventContext {
  endpoint?: string
  reason?: string
  ip?: string
  limiter?: string
  requestId?: string
  [key: string]: unknown
}
// Event count within the current rolling window.
interface SecurityCounterState {
  count: number
  windowStart: number
}
const logger = createMonitoringLogger('security')
// Per-event-type counters and last-alert timestamps (module-level state).
const counters = new Map<SecurityEventType, SecurityCounterState>()
const lastAlertAt = new Map<SecurityEventType, number>()
// Read once at module load; validated and defaulted by the getters below.
const DEFAULT_WINDOW_MS = parseInt(process.env.SECURITY_METRICS_WINDOW_MS || '300000', 10)
const DEFAULT_ALERT_THRESHOLD = parseInt(process.env.SECURITY_ALERT_THRESHOLD_DEFAULT || '25', 10)
const ALERT_COOLDOWN_MS = parseInt(process.env.SECURITY_ALERT_COOLDOWN_MS || '900000', 10)
// Cached Payload init promise; reset to null on failure so callers can retry.
let cachedPayloadPromise: Promise<any> | null = null
/** Metrics window length in ms; falls back to 5 minutes when misconfigured. */
function getWindowMs(): number {
  const configured = DEFAULT_WINDOW_MS
  if (Number.isFinite(configured) && configured > 0) {
    return configured
  }
  return 300000
}
/**
 * Resolves the alert threshold for an event type, preferring the
 * SECURITY_ALERT_THRESHOLD_<EVENT_TYPE> env override over the default.
 */
function getAlertThreshold(eventType: SecurityEventType): number {
  const override = process.env[`SECURITY_ALERT_THRESHOLD_${eventType.toUpperCase()}`]
  if (!override) return DEFAULT_ALERT_THRESHOLD
  const parsed = parseInt(override, 10)
  return Number.isNaN(parsed) || parsed <= 0 ? DEFAULT_ALERT_THRESHOLD : parsed
}
/**
 * Returns the live counter for an event type, starting a fresh window when
 * none exists or the current window has fully elapsed.
 */
function getCounter(eventType: SecurityEventType, now: number): SecurityCounterState {
  const windowMs = getWindowMs()
  const current = counters.get(eventType)
  if (current && now - current.windowStart < windowMs) {
    return current
  }
  const fresh: SecurityCounterState = { count: 0, windowStart: now }
  counters.set(eventType, fresh)
  return fresh
}
/** Routes a security event to the matching monitoring-logger level. */
function logEvent(level: SecurityEventLevel, message: string, context: SecurityEventContext): void {
  switch (level) {
    case 'error':
      logger.error(message, context)
      break
    case 'warn':
      logger.warn(message, context)
      break
    default:
      logger.info(message, context)
  }
}
/**
 * Lazily resolves and caches the Payload instance used for alerting.
 * The cached promise is cleared on failure so a later call retries
 * initialization instead of reusing a rejected promise.
 */
async function getPayloadInstance(): Promise<any> {
  if (!cachedPayloadPromise) {
    cachedPayloadPromise = (async () => {
      // Dynamic imports keep Payload out of the module graph until an alert fires.
      const { getPayload } = await import('payload')
      const config = (await import(/* @vite-ignore */ '@payload-config')).default
      return getPayload({ config })
    })().catch((error) => {
      cachedPayloadPromise = null
      throw error
    })
  }
  return cachedPayloadPromise
}
/**
 * Sends an alert via the alert service once an event type crosses its
 * threshold, rate-limited by a per-event-type cooldown (ALERT_COOLDOWN_MS).
 * Failures are logged and swallowed so observability never breaks callers.
 */
async function sendThresholdAlert(
  eventType: SecurityEventType,
  count: number,
  threshold: number,
  context: SecurityEventContext,
): Promise<void> {
  const now = Date.now()
  const previousAlert = lastAlertAt.get(eventType)
  // Suppress repeat alerts for the same event type during the cooldown.
  if (previousAlert && now - previousAlert < ALERT_COOLDOWN_MS) {
    return
  }
  lastAlertAt.set(eventType, now)
  try {
    const payload = await getPayloadInstance()
    const { sendAlert } = await import('../alerting/alert-service')
    await sendAlert(payload, {
      // Escalate from 'warning' to 'error' at double the threshold.
      level: count >= threshold * 2 ? 'error' : 'warning',
      title: `Security threshold reached: ${eventType}`,
      message: `Security event "${eventType}" occurred ${count} times in ${Math.round(getWindowMs() / 1000)}s.`,
      details: {
        eventType,
        count,
        threshold,
        ...context,
      },
    })
  } catch (error) {
    logger.error('Failed to send security threshold alert', {
      eventType,
      threshold,
      error: error instanceof Error ? error.message : String(error),
    })
  }
}
/**
 * Records a security event: increments its windowed counter, logs it, and
 * fires a threshold alert (fire-and-forget) once the window count reaches
 * the configured threshold.
 *
 * @param eventType Security event category.
 * @param context   Structured context merged into the log entry.
 * @param level     Log level (defaults to 'warn').
 */
export function recordSecurityEvent(
  eventType: SecurityEventType,
  context: SecurityEventContext = {},
  level: SecurityEventLevel = 'warn',
): void {
  const now = Date.now()
  const counter = getCounter(eventType, now)
  counter.count++
  logEvent(level, `Security event: ${eventType}`, {
    eventType,
    count: counter.count,
    windowStart: new Date(counter.windowStart).toISOString(),
    ...context,
  })
  const threshold = getAlertThreshold(eventType)
  if (counter.count >= threshold) {
    // void: alerting is async and must not block or fail the caller.
    void sendThresholdAlert(eventType, counter.count, threshold, context)
  }
}
/**
 * Returns the configured window length and all event counters, sorted by
 * count (highest first) with ISO window-start timestamps.
 */
export function getSecurityMetricsSnapshot(): {
  windowMs: number
  counters: Array<{
    eventType: SecurityEventType
    count: number
    windowStart: string
  }>
} {
  const snapshot = [...counters.entries()]
    .map(([eventType, state]) => ({
      eventType,
      count: state.count,
      windowStart: new Date(state.windowStart).toISOString(),
    }))
    .sort((a, b) => b.count - a.count)
  return {
    windowMs: getWindowMs(),
    counters: snapshot,
  }
}
/** Test helper: clears counters, alert cooldowns, and the Payload cache. */
export function resetSecurityObservabilityForTests(): void {
  counters.clear()
  lastAlertAt.clear()
  cachedPayloadPromise = null
}

View file

@ -0,0 +1,166 @@
import { NextResponse } from 'next/server'
import { recordSecurityEvent } from '../security/security-observability'
/** One field-level validation problem reported back to the client. */
export interface ApiValidationIssue {
  field: string
  code: 'required' | 'invalid_type' | 'invalid_value' | 'invalid_json'
  message: string
}
/** Discriminated result: either the parsed data or the list of issues. */
export type ApiValidationResult<T> =
  | { valid: true; data: T }
  | { valid: false; issues: ApiValidationIssue[] }
/** Validator mapping unknown input to a typed validation result. */
export type ApiValidator<T> = (input: unknown) => ApiValidationResult<T>
/** Wraps a value in a passing validation result. */
function success<T>(data: T): ApiValidationResult<T> {
  const result: ApiValidationResult<T> = { valid: true, data }
  return result
}
/** Wraps a list of issues in a failing validation result. */
function failure<T>(issues: ApiValidationIssue[]): ApiValidationResult<T> {
  const result: ApiValidationResult<T> = { valid: false, issues }
  return result
}
/** Convenience constructor for a single validation issue. */
export function validationIssue(
  field: string,
  code: ApiValidationIssue['code'],
  message: string,
): ApiValidationIssue {
  const issue: ApiValidationIssue = { field, code, message }
  return issue
}
/**
 * Ensures the input is a plain (non-null, non-array) object.
 * @param field Name used in the reported issue (defaults to 'body').
 */
export function asObject(input: unknown, field = 'body'): ApiValidationResult<Record<string, unknown>> {
  const isPlainObject = typeof input === 'object' && input !== null && !Array.isArray(input)
  if (!isPlainObject) {
    return failure([
      validationIssue(field, 'invalid_type', 'Expected a JSON object'),
    ])
  }
  return success(input as Record<string, unknown>)
}
/**
 * Requires input[key] to be a present, non-empty string ('' counts as missing).
 */
export function requiredString(
  input: Record<string, unknown>,
  key: string,
  field = key,
): ApiValidationResult<string> {
  const value = input[key]
  if (value == null || value === '') {
    return failure([validationIssue(field, 'required', `${field} is required`)])
  }
  if (typeof value === 'string') {
    return success(value)
  }
  return failure([validationIssue(field, 'invalid_type', `${field} must be a string`)])
}
/**
 * Reads an optional string; undefined, null, and '' all mean "not provided".
 */
export function optionalString(
  input: Record<string, unknown>,
  key: string,
  field = key,
): ApiValidationResult<string | undefined> {
  const value = input[key]
  if (value == null || value === '') {
    return success(undefined)
  }
  if (typeof value === 'string') {
    return success(value)
  }
  return failure([validationIssue(field, 'invalid_type', `${field} must be a string`)])
}
/**
 * Reads an optional boolean; undefined and null mean "not provided".
 * No truthiness coercion — only actual booleans are accepted.
 */
export function optionalBoolean(
  input: Record<string, unknown>,
  key: string,
  field = key,
): ApiValidationResult<boolean | undefined> {
  const value = input[key]
  if (value == null) {
    return success(undefined)
  }
  if (typeof value === 'boolean') {
    return success(value)
  }
  return failure([validationIssue(field, 'invalid_type', `${field} must be a boolean`)])
}
/**
 * Reads an optional numeric value. Accepts a finite number, or a string that
 * parses to a finite number; undefined/null/'' mean "not provided".
 *
 * Fix: the previous bare Number(value) coercion accepted booleans, arrays,
 * and non-finite values (true -> 1, [] -> 0, 'Infinity' -> Infinity), which
 * defeated the type check for untrusted request bodies.
 */
export function optionalNumber(
  input: Record<string, unknown>,
  key: string,
  field = key,
): ApiValidationResult<number | undefined> {
  const value = input[key]
  if (value === undefined || value === null || value === '') {
    return success(undefined)
  }
  // Only numbers and numeric strings are eligible; everything else is a type error.
  if (typeof value !== 'number' && typeof value !== 'string') {
    return failure([validationIssue(field, 'invalid_type', `${field} must be a number`)])
  }
  const numericValue = typeof value === 'number' ? value : Number(value)
  if (!Number.isFinite(numericValue)) {
    return failure([validationIssue(field, 'invalid_type', `${field} must be a number`)])
  }
  return success(numericValue)
}
/**
 * Reads an optional array, mapping each entry with the supplied mapper.
 * Entries for which the mapper returns null are dropped rather than failing
 * the whole validation.
 * NOTE(review): silently discarding unmappable entries is assumed intentional — confirm.
 */
export function optionalArray<T>(
  input: Record<string, unknown>,
  key: string,
  mapper: (value: unknown, index: number) => T | null,
  field = key,
): ApiValidationResult<T[] | undefined> {
  const value = input[key]
  if (value == null) {
    return success(undefined)
  }
  if (!Array.isArray(value)) {
    return failure([validationIssue(field, 'invalid_type', `${field} must be an array`)])
  }
  const mapped: T[] = []
  value.forEach((entry, index) => {
    const mappedEntry = mapper(entry, index)
    if (mappedEntry !== null) {
      mapped.push(mappedEntry)
    }
  })
  return success(mapped)
}
/**
 * Parses the request body as JSON and runs the supplied validator on it.
 * Reports an invalid_json issue when the body cannot be parsed.
 */
export async function validateJsonBody<T>(
  request: Request,
  validator: ApiValidator<T>,
): Promise<ApiValidationResult<T>> {
  let parsed: unknown
  try {
    parsed = await request.json()
  } catch {
    return failure([
      validationIssue('body', 'invalid_json', 'Request body must contain valid JSON'),
    ])
  }
  return validator(parsed)
}
/**
 * Builds the standard 400-style response for failed request validation and
 * records a request_validation_failed security event (field/code only, no
 * raw values, at 'info' level).
 */
export function validationErrorResponse(
  issues: ApiValidationIssue[],
  status = 400,
  context: { endpoint?: string } = {},
): NextResponse {
  recordSecurityEvent('request_validation_failed', {
    endpoint: context.endpoint,
    issues: issues.map((issue) => ({
      field: issue.field,
      code: issue.code,
    })),
  }, 'info')
  return NextResponse.json(
    {
      error: {
        code: 'VALIDATION_FAILED',
        message: 'Request validation failed',
        issues,
      },
    },
    { status },
  )
}

View file

@ -10,3 +10,18 @@ export {
generateUniqueSlug,
type SlugValidationOptions,
} from './slug-validation'
export {
asObject,
requiredString,
optionalString,
optionalBoolean,
optionalNumber,
optionalArray,
validateJsonBody,
validationIssue,
validationErrorResponse,
type ApiValidationIssue,
type ApiValidationResult,
type ApiValidator,
} from './api-validation'

View file

@ -0,0 +1,61 @@
import { describe, it, expect } from 'vitest'
import { Users } from '@/collections/Users'
// Access function under test; optional chaining because access may be undefined.
const updateAccess = Users.access?.update
// Verifies collection-level update access and field-level isSuperAdmin access.
describe('Users collection access controls', () => {
  it('allows super admins to update any account', async () => {
    expect(updateAccess).toBeTypeOf('function')
    const result = await (updateAccess as any)({
      req: { user: { id: 1, isSuperAdmin: true } },
      id: 999,
    })
    expect(result).toBe(true)
  })
  it('allows users to update their own account only', async () => {
    const ownResult = await (updateAccess as any)({
      req: { user: { id: 42, isSuperAdmin: false } },
      id: 42,
    })
    const foreignResult = await (updateAccess as any)({
      req: { user: { id: 42, isSuperAdmin: false } },
      id: 99,
    })
    expect(ownResult).toBe(true)
    expect(foreignResult).toBe(false)
  })
  it('denies anonymous updates', async () => {
    const result = await (updateAccess as any)({
      req: { user: null },
      id: 1,
    })
    expect(result).toBe(false)
  })
  it('restricts isSuperAdmin field read/create/update to super admins', async () => {
    // Locate the field definition; 'name' guard because fields can be layout-only.
    const superAdminField = Users.fields.find(
      (field) => 'name' in field && field.name === 'isSuperAdmin',
    ) as any
    expect(superAdminField).toBeDefined()
    expect(superAdminField.access).toBeDefined()
    const superAdminReq = { req: { user: { id: 1, isSuperAdmin: true } } }
    const regularReq = { req: { user: { id: 2, isSuperAdmin: false } } }
    expect(await superAdminField.access.read(superAdminReq)).toBe(true)
    expect(await superAdminField.access.create(superAdminReq)).toBe(true)
    expect(await superAdminField.access.update(superAdminReq)).toBe(true)
    expect(await superAdminField.access.read(regularReq)).toBe(false)
    expect(await superAdminField.access.create(regularReq)).toBe(false)
    expect(await superAdminField.access.update(regularReq)).toBe(false)
  })
})

View file

@ -55,6 +55,18 @@ describe('Monitoring Types', () => {
tokenRefresh: { lastRun: new Date().toISOString(), status: 'ok' },
youtubeSync: { lastRun: new Date().toISOString(), status: 'ok' },
},
secrets: {
status: 'ok',
checkedAt: new Date().toISOString(),
missing: [],
expiringSoon: [],
expired: [],
rotationOverdue: [],
},
securityEvents: {
windowMs: 300000,
counters: [],
},
},
performance: {
avgResponseTimeMs: 120,
@ -87,7 +99,7 @@ describe('Monitoring Types', () => {
})
it('LogSource covers all system components', () => {
const sources: LogSource[] = ['payload', 'queue-worker', 'cron', 'email', 'oauth', 'sync']
expect(sources).toHaveLength(6)
const sources: LogSource[] = ['payload', 'queue-worker', 'cron', 'email', 'oauth', 'sync', 'security']
expect(sources).toHaveLength(7)
})
})

View file

@ -0,0 +1,111 @@
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'
import { NextRequest, NextResponse } from 'next/server'
/**
 * Builds a NextRequest for cron-endpoint tests, optionally attaching an
 * Authorization header on top of any extra headers.
 */
function createCronRequest(
  url = 'https://example.com/api/cron/test',
  authHeader?: string,
  extraHeaders: Record<string, string> = {},
): NextRequest {
  const requestHeaders = new Headers(extraHeaders)
  if (authHeader) requestHeaders.set('authorization', authHeader)
  return new NextRequest(url, { headers: requestHeaders })
}
// Covers cron-auth bearer validation plus coordination (idempotency + lock).
describe('cron auth and coordination', () => {
  beforeEach(() => {
    // resetModules so module-level state (locks, counters) starts fresh.
    vi.resetModules()
    vi.stubEnv('REDIS_ENABLED', 'false')
    vi.stubEnv('CRON_SECRET', 'top-secret-token')
  })
  afterEach(() => {
    vi.unstubAllEnvs()
  })
  it('denies requests when CRON_SECRET is missing', async () => {
    vi.stubEnv('CRON_SECRET', '')
    const { requireCronAuth } = await import('@/lib/security/cron-auth')
    const response = requireCronAuth(createCronRequest())
    expect(response).not.toBeNull()
    // Fails closed with 503, not 401, when the secret is unconfigured.
    expect(response?.status).toBe(503)
  })
  it('denies requests with missing bearer header', async () => {
    const { requireCronAuth } = await import('@/lib/security/cron-auth')
    const response = requireCronAuth(createCronRequest())
    expect(response).not.toBeNull()
    expect(response?.status).toBe(401)
  })
  it('allows requests with valid bearer header', async () => {
    const { requireCronAuth } = await import('@/lib/security/cron-auth')
    const response = requireCronAuth(
      createCronRequest('https://example.com/api/cron/test', 'Bearer top-secret-token'),
    )
    // null means "no auth error"; the route handler may proceed.
    expect(response).toBeNull()
  })
  it('ignores duplicate idempotency-key requests', async () => {
    const { withCronExecution, resetCronCoordinationStateForTests } = await import(
      '@/lib/security/cron-coordination'
    )
    resetCronCoordinationStateForTests()
    const req1 = createCronRequest(
      'https://example.com/api/cron/test?idempotencyKey=abc',
      'Bearer top-secret-token',
    )
    const req2 = createCronRequest(
      'https://example.com/api/cron/test?idempotencyKey=abc',
      'Bearer top-secret-token',
    )
    const first = await withCronExecution(req1, { endpoint: 'test-cron' }, async () =>
      NextResponse.json({ ok: true }),
    )
    const second = await withCronExecution(req2, { endpoint: 'test-cron' }, async () =>
      NextResponse.json({ ok: true }),
    )
    expect(first.status).toBe(200)
    // Duplicate is acknowledged (202) but the handler must not run again.
    expect(second.status).toBe(202)
  })
  it('enforces execution lock per endpoint', async () => {
    const { withCronExecution, resetCronCoordinationStateForTests } = await import(
      '@/lib/security/cron-coordination'
    )
    resetCronCoordinationStateForTests()
    // Handler blocks until releaseHandler fires, keeping the lock held.
    let releaseHandler: (() => void) | null = null
    const firstRun = withCronExecution(
      createCronRequest('https://example.com/api/cron/test', 'Bearer top-secret-token'),
      { endpoint: 'locked-cron', lockTtlMs: 120000 },
      async () =>
        new Promise<NextResponse>((resolve) => {
          releaseHandler = () => resolve(NextResponse.json({ ok: true }))
        }),
    )
    const second = await withCronExecution(
      createCronRequest('https://example.com/api/cron/test', 'Bearer top-secret-token'),
      { endpoint: 'locked-cron', lockTtlMs: 120000 },
      async () => NextResponse.json({ ok: true }),
    )
    // 423 Locked while the first invocation still holds the lock.
    expect(second.status).toBe(423)
    releaseHandler?.()
    const first = await firstRun
    expect(first.status).toBe(200)
  })
})

View file

@ -0,0 +1,87 @@
import { describe, it, expect, vi } from 'vitest'
import { NewsletterService } from '@/lib/email/newsletter-service'
// Stub out real email delivery: these tests only assert on subscriber
// state transitions, never on a message actually being sent.
vi.mock('@/lib/email/tenant-email-service', () => ({
  sendTenantEmail: vi.fn().mockResolvedValue(undefined),
}))
/** Lifecycle states a newsletter subscriber can be in. */
type MockSubscriberStatus = 'pending' | 'confirmed' | 'unsubscribed'

/** Minimal in-memory shape of a subscriber document used by the mock payload. */
interface MockSubscriber {
  id: number
  email: string
  firstName?: string
  status: MockSubscriberStatus
  // Single-use token; the tests expect the service to rotate it on each state change.
  confirmationToken: string
  subscribedAt?: string
  confirmedAt?: string | null
  unsubscribedAt?: string | null
  // Tenant ID; the mock's findByID answers with a stub tenant under this ID.
  tenant: number
}
/**
 * Builds a minimal Payload CMS stand-in backed by a single mutable subscriber.
 *
 * - `find` resolves the subscriber only on an exact `confirmationToken` match,
 *   mirroring the token-based lookup the service performs.
 * - `update` mutates the shared subscriber object so tests can assert on
 *   state transitions (status changes, token rotation).
 * - `findByID` returns a stub tenant record.
 */
function createMockPayload(subscriber: MockSubscriber) {
  return {
    // Typed destructuring replaces the previous `(where as any)` cast while
    // matching the same `where.confirmationToken.equals` query shape.
    find: vi
      .fn()
      .mockImplementation(
        async ({ where }: { where?: { confirmationToken?: { equals?: string } } }) => {
          const token = where?.confirmationToken?.equals
          if (token && subscriber.confirmationToken === token) {
            return { docs: [{ ...subscriber }] }
          }
          return { docs: [] }
        },
      ),
    update: vi.fn().mockImplementation(async ({ data }: { data: Partial<MockSubscriber> }) => {
      Object.assign(subscriber, data)
      return { ...subscriber }
    }),
    findByID: vi.fn().mockResolvedValue({
      id: subscriber.tenant,
      name: 'Demo Tenant',
      domains: [],
    }),
  }
}
describe('Newsletter unsubscribe security', () => {
  it('rotates token after confirm and again after unsubscribe, blocking replay', async () => {
    const sub: MockSubscriber = {
      id: 10,
      email: 'user@example.com',
      status: 'pending',
      confirmationToken: 'confirm-token',
      subscribedAt: new Date().toISOString(),
      tenant: 1,
    }
    const service = new NewsletterService(createMockPayload(sub) as any)

    // Confirmation must succeed and burn the original confirmation token.
    const confirmation = await service.confirmSubscription('confirm-token')
    expect(confirmation.success).toBe(true)
    expect(sub.status).toBe('confirmed')
    expect(sub.confirmationToken).not.toBe('confirm-token')

    // Unsubscribing with the rotated token must succeed and rotate once more.
    const rotatedToken = sub.confirmationToken
    const unsubscribed = await service.unsubscribe(rotatedToken)
    expect(unsubscribed.success).toBe(true)
    expect(sub.status).toBe('unsubscribed')
    expect(sub.confirmationToken).not.toBe(rotatedToken)

    // Replaying the already-consumed token must be rejected.
    const replayed = await service.unsubscribe(rotatedToken)
    expect(replayed.success).toBe(false)
  })

  it('does not support predictable ID fallback tokens', async () => {
    const sub: MockSubscriber = {
      id: 55,
      email: 'id-test@example.com',
      status: 'confirmed',
      confirmationToken: 'random-token',
      confirmedAt: new Date().toISOString(),
      tenant: 1,
    }
    const service = new NewsletterService(createMockPayload(sub) as any)

    // Passing the numeric subscriber ID as a token must never unsubscribe anyone.
    const attempt = await service.unsubscribe('55')
    expect(attempt.success).toBe(false)
  })
})

View file

@ -0,0 +1,70 @@
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'
// Pin DNS resolution to a fixed public IPv4 address so hostname-based URLs
// resolve deterministically without performing real lookups in tests.
vi.mock('dns/promises', () => ({
  lookup: vi.fn().mockResolvedValue([{ address: '93.184.216.34', family: 4 }]),
}))
describe('PDF source URL validation', () => {
  beforeEach(() => {
    vi.resetModules()
    vi.stubEnv('REDIS_ENABLED', 'false')
  })

  afterEach(() => {
    vi.unstubAllEnvs()
  })

  // Import the service fresh AFTER env stubbing so each test sees its own config.
  const loadValidator = async () => {
    const { validatePdfSourceUrl } = await import('@/lib/pdf/pdf-service')
    return validatePdfSourceUrl
  }

  it('blocks plain http URLs by default', async () => {
    vi.stubEnv('NODE_ENV', 'production')
    vi.stubEnv('PDF_ALLOW_HTTP_URLS', 'false')
    const validate = await loadValidator()

    const verdict = await validate('http://example.com/invoice')

    expect(verdict.valid).toBe(false)
    expect(verdict.reason).toContain('Only HTTPS URLs are allowed')
  })

  it('allows http URLs only in non-production when explicitly enabled', async () => {
    vi.stubEnv('NODE_ENV', 'development')
    vi.stubEnv('PDF_ALLOW_HTTP_URLS', 'true')
    const validate = await loadValidator()

    const verdict = await validate('http://8.8.8.8/test')

    expect(verdict.valid).toBe(true)
  })

  it('blocks localhost targets', async () => {
    vi.stubEnv('NODE_ENV', 'production')
    const validate = await loadValidator()

    const verdict = await validate('https://localhost/internal')

    expect(verdict.valid).toBe(false)
    expect(verdict.reason).toContain('Localhost is not allowed')
  })

  it('blocks private IP targets', async () => {
    vi.stubEnv('NODE_ENV', 'production')
    const validate = await loadValidator()

    const verdict = await validate('https://10.0.0.5/admin')

    expect(verdict.valid).toBe(false)
    expect(verdict.reason).toContain('Private or loopback')
  })

  it('enforces PDF_ALLOWED_HOSTS allowlist', async () => {
    vi.stubEnv('NODE_ENV', 'production')
    vi.stubEnv('PDF_ALLOWED_HOSTS', 'example.com,.trusted.example')
    const validate = await loadValidator()

    const allowed = await validate('https://example.com/a')
    const denied = await validate('https://not-allowed.example/a')

    expect(allowed.valid).toBe(true)
    expect(denied.valid).toBe(false)
    expect(denied.reason).toContain('allowlist')
  })
})