mirror of https://github.com/complexcaresolutions/cms.c2sgmbh.git
synced 2026-03-17 22:04:10 +00:00
301 lines · 8.6 KiB · JavaScript
#!/usr/bin/env node
|
|
import fs from 'node:fs'
|
|
import path from 'node:path'
|
|
|
|
const ROOT = process.cwd()
|
|
const SRC_DIR = path.join(ROOT, 'src')
|
|
const TEST_DIR = path.join(ROOT, 'tests')
|
|
const BASELINE_FILE = path.join(ROOT, 'tests', 'test-system-baseline.json')
|
|
const REPORT_FILE = path.join(ROOT, 'docs', 'reports', 'test-system-status.md')
|
|
|
|
const args = new Set(process.argv.slice(2))
|
|
const updateBaseline = args.has('--update-baseline')
|
|
const reportOnly = args.has('--report-only')
|
|
|
|
const SOURCE_EXTENSIONS = new Set(['.ts', '.tsx'])
|
|
const TEST_EXTENSIONS = new Set(['.ts', '.tsx'])
|
|
|
|
const EXCLUDE_SOURCE_PATTERNS = [
|
|
/(^|\/)migrations\//,
|
|
/(^|\/)payload-types\.ts$/,
|
|
/(^|\/)payload-generated-schema\.ts$/,
|
|
/\.d\.ts$/,
|
|
]
|
|
|
|
function toPosix(p) {
|
|
return p.split(path.sep).join('/')
|
|
}
|
|
|
|
function fileExists(p) {
|
|
try {
|
|
fs.accessSync(p, fs.constants.F_OK)
|
|
return true
|
|
} catch {
|
|
return false
|
|
}
|
|
}
|
|
|
|
function walkFiles(dir) {
|
|
if (!fileExists(dir)) return []
|
|
const entries = fs.readdirSync(dir, { withFileTypes: true })
|
|
const files = []
|
|
|
|
for (const entry of entries) {
|
|
const abs = path.join(dir, entry.name)
|
|
if (entry.isDirectory()) {
|
|
files.push(...walkFiles(abs))
|
|
} else if (entry.isFile()) {
|
|
files.push(abs)
|
|
}
|
|
}
|
|
|
|
return files
|
|
}
|
|
|
|
function isExcludedSource(relPath) {
|
|
return EXCLUDE_SOURCE_PATTERNS.some((pattern) => pattern.test(relPath))
|
|
}
|
|
|
|
function stripExtension(relPath) {
|
|
return relPath.replace(/\.[^.]+$/, '')
|
|
}
|
|
|
|
function isRouteFile(relPath) {
|
|
return /\/api\/.+\/route\.ts$/.test(relPath)
|
|
}
|
|
|
|
function routeEndpointFromSource(relPath) {
|
|
const noGroups = relPath.replace(/\([^)]*\)\//g, '')
|
|
const apiIndex = noGroups.indexOf('/api/')
|
|
if (apiIndex === -1) return null
|
|
|
|
let endpoint = noGroups.slice(apiIndex).replace(/\/route\.ts$/, '')
|
|
endpoint = endpoint.replace(/\[\.\.\.[^/]+\]/g, '')
|
|
endpoint = endpoint.replace(/\[[^/]+\]/g, '')
|
|
endpoint = endpoint.replace(/\/+/g, '/')
|
|
if (!endpoint.startsWith('/api/')) return null
|
|
return endpoint.endsWith('/') && endpoint.length > 1 ? endpoint.slice(0, -1) : endpoint
|
|
}
|
|
|
|
function buildSourceRecords() {
|
|
const files = walkFiles(SRC_DIR)
|
|
.map((abs) => toPosix(path.relative(ROOT, abs)))
|
|
.filter((rel) => SOURCE_EXTENSIONS.has(path.extname(rel)))
|
|
.filter((rel) => !isExcludedSource(rel))
|
|
.sort()
|
|
|
|
return files.map((rel) => {
|
|
const noExt = stripExtension(rel)
|
|
const base = path.basename(noExt)
|
|
const dir = path.dirname(noExt)
|
|
|
|
const importNeedles = new Set([
|
|
`@/${noExt.replace(/^src\//, '')}`,
|
|
noExt,
|
|
])
|
|
|
|
if (base === 'index') {
|
|
importNeedles.add(`@/${dir.replace(/^src\//, '')}`)
|
|
importNeedles.add(dir)
|
|
}
|
|
|
|
return {
|
|
source: rel,
|
|
noExt,
|
|
base,
|
|
endpoint: isRouteFile(rel) ? routeEndpointFromSource(rel) : null,
|
|
importNeedles: [...importNeedles],
|
|
}
|
|
})
|
|
}
|
|
|
|
function buildTestRecords() {
|
|
const files = walkFiles(TEST_DIR)
|
|
.map((abs) => toPosix(path.relative(ROOT, abs)))
|
|
.filter((rel) => TEST_EXTENSIONS.has(path.extname(rel)))
|
|
.sort()
|
|
|
|
return files.map((rel) => {
|
|
const content = fs.readFileSync(path.join(ROOT, rel), 'utf8')
|
|
return { testFile: rel, content }
|
|
})
|
|
}
|
|
|
|
function hasImportEvidence(test, sourceRecord) {
|
|
for (const needle of sourceRecord.importNeedles) {
|
|
if (!needle) continue
|
|
if (test.content.includes(needle)) return true
|
|
}
|
|
return false
|
|
}
|
|
|
|
function hasEndpointEvidence(test, sourceRecord) {
|
|
if (!sourceRecord.endpoint) return false
|
|
return test.content.includes(sourceRecord.endpoint)
|
|
}
|
|
|
|
function hasFilenameHeuristic(test, sourceRecord) {
|
|
if (sourceRecord.base.length < 5) return false
|
|
const testName = path.basename(test.testFile).toLowerCase()
|
|
return testName.includes(sourceRecord.base.toLowerCase())
|
|
}
|
|
|
|
function findCoverageForSource(sourceRecord, tests) {
|
|
const matched = []
|
|
|
|
for (const test of tests) {
|
|
if (hasImportEvidence(test, sourceRecord)) {
|
|
matched.push({ testFile: test.testFile, reason: 'import' })
|
|
continue
|
|
}
|
|
|
|
if (hasEndpointEvidence(test, sourceRecord)) {
|
|
matched.push({ testFile: test.testFile, reason: 'endpoint' })
|
|
continue
|
|
}
|
|
|
|
if (hasFilenameHeuristic(test, sourceRecord)) {
|
|
matched.push({ testFile: test.testFile, reason: 'filename' })
|
|
}
|
|
}
|
|
|
|
return matched
|
|
}
|
|
|
|
function ensureDirFor(filePath) {
|
|
fs.mkdirSync(path.dirname(filePath), { recursive: true })
|
|
}
|
|
|
|
function loadBaseline() {
|
|
if (!fileExists(BASELINE_FILE)) return null
|
|
try {
|
|
const parsed = JSON.parse(fs.readFileSync(BASELINE_FILE, 'utf8'))
|
|
const untested = Array.isArray(parsed?.untestedSources)
|
|
? parsed.untestedSources.filter((v) => typeof v === 'string')
|
|
: []
|
|
return { ...parsed, untestedSources: untested }
|
|
} catch {
|
|
return null
|
|
}
|
|
}
|
|
|
|
function writeBaseline(payload) {
|
|
ensureDirFor(BASELINE_FILE)
|
|
fs.writeFileSync(BASELINE_FILE, `${JSON.stringify(payload, null, 2)}\n`, 'utf8')
|
|
}
|
|
|
|
function writeReport(payload) {
|
|
const lines = []
|
|
lines.push('# Test System Status')
|
|
lines.push('')
|
|
lines.push(`- Generated: ${payload.generatedAt}`)
|
|
lines.push(`- Total source files (monitored): ${payload.totalSources}`)
|
|
lines.push(`- Covered source files: ${payload.coveredSources}`)
|
|
lines.push(`- Untested source files: ${payload.untestedSources.length}`)
|
|
lines.push(`- Baseline size: ${payload.baselineSize}`)
|
|
lines.push(`- New gaps vs baseline: ${payload.newGaps.length}`)
|
|
lines.push(`- Fixed since baseline: ${payload.fixedSinceBaseline.length}`)
|
|
lines.push('')
|
|
|
|
lines.push('## New Gaps (Failing)')
|
|
lines.push('')
|
|
if (payload.newGaps.length === 0) {
|
|
lines.push('- None')
|
|
} else {
|
|
for (const item of payload.newGaps) lines.push(`- \`${item}\``)
|
|
}
|
|
lines.push('')
|
|
|
|
lines.push('## Fixed Since Baseline')
|
|
lines.push('')
|
|
if (payload.fixedSinceBaseline.length === 0) {
|
|
lines.push('- None')
|
|
} else {
|
|
for (const item of payload.fixedSinceBaseline) lines.push(`- \`${item}\``)
|
|
}
|
|
lines.push('')
|
|
|
|
lines.push('## Current Untested Inventory')
|
|
lines.push('')
|
|
for (const item of payload.untestedSources) lines.push(`- \`${item}\``)
|
|
|
|
ensureDirFor(REPORT_FILE)
|
|
fs.writeFileSync(REPORT_FILE, `${lines.join('\n')}\n`, 'utf8')
|
|
}
|
|
|
|
function main() {
|
|
const sourceRecords = buildSourceRecords()
|
|
const tests = buildTestRecords()
|
|
|
|
const coverageEntries = sourceRecords.map((source) => ({
|
|
source: source.source,
|
|
matches: findCoverageForSource(source, tests),
|
|
}))
|
|
|
|
const coveredSources = coverageEntries.filter((e) => e.matches.length > 0).map((e) => e.source)
|
|
const untestedSources = coverageEntries.filter((e) => e.matches.length === 0).map((e) => e.source)
|
|
|
|
const baseline = loadBaseline()
|
|
const baselineUntested = new Set(baseline?.untestedSources || [])
|
|
const currentUntested = new Set(untestedSources)
|
|
|
|
const newGaps = [...currentUntested].filter((s) => !baselineUntested.has(s)).sort()
|
|
const fixedSinceBaseline = [...baselineUntested].filter((s) => !currentUntested.has(s)).sort()
|
|
|
|
const reportPayload = {
|
|
generatedAt: new Date().toISOString(),
|
|
totalSources: sourceRecords.length,
|
|
coveredSources: coveredSources.length,
|
|
untestedSources: [...currentUntested].sort(),
|
|
baselineSize: baselineUntested.size,
|
|
newGaps,
|
|
fixedSinceBaseline,
|
|
}
|
|
|
|
writeReport(reportPayload)
|
|
|
|
if (updateBaseline) {
|
|
const baselinePayload = {
|
|
version: 1,
|
|
generatedAt: reportPayload.generatedAt,
|
|
totalSources: reportPayload.totalSources,
|
|
coveredSources: reportPayload.coveredSources,
|
|
untestedSources: reportPayload.untestedSources,
|
|
notes: [
|
|
'Baseline blocks only NEW untested files.',
|
|
'Update baseline intentionally after planned test-debt cleanup.',
|
|
],
|
|
}
|
|
|
|
writeBaseline(baselinePayload)
|
|
console.log(`Baseline updated: ${toPosix(path.relative(ROOT, BASELINE_FILE))}`)
|
|
console.log(`Report generated: ${toPosix(path.relative(ROOT, REPORT_FILE))}`)
|
|
return
|
|
}
|
|
|
|
if (!baseline) {
|
|
console.error('Missing baseline file.')
|
|
console.error('Run: pnpm test:system:update')
|
|
process.exit(reportOnly ? 0 : 1)
|
|
}
|
|
|
|
console.log(`Report generated: ${toPosix(path.relative(ROOT, REPORT_FILE))}`)
|
|
console.log(`Monitored sources: ${reportPayload.totalSources}`)
|
|
console.log(`Covered sources: ${reportPayload.coveredSources}`)
|
|
console.log(`Untested sources: ${reportPayload.untestedSources.length}`)
|
|
console.log(`New gaps vs baseline: ${newGaps.length}`)
|
|
|
|
if (fixedSinceBaseline.length > 0) {
|
|
console.log(`Fixed since baseline: ${fixedSinceBaseline.length} (run pnpm test:system:update to shrink baseline)`)
|
|
}
|
|
|
|
if (!reportOnly && newGaps.length > 0) {
|
|
console.error('New untested source files detected:')
|
|
for (const gap of newGaps) {
|
|
console.error(`- ${gap}`)
|
|
}
|
|
process.exit(1)
|
|
}
|
|
}
|
|
|
|
main()
|