diff --git a/harness/adf-inspector.ts b/harness/adf-inspector.ts index d898111..ea37784 100644 --- a/harness/adf-inspector.ts +++ b/harness/adf-inspector.ts @@ -120,6 +120,9 @@ export function printSnapshot(snapshot: AdfSnapshot, previous?: AdfSnapshot): vo export function detectAccumulationIssues(snapshots: AdfSnapshot[]): string[] { const issues: string[] = []; if (snapshots.length < 2) return issues; + const MIN_ABSOLUTE_GROWTH = 10; + const MIN_BASELINE_ITEMS = 3; + const MAX_SECTION_ITEMS = 20; const first = snapshots[0]; const last = snapshots[snapshots.length - 1]; @@ -131,13 +134,13 @@ export function detectAccumulationIssues(snapshots: AdfSnapshot[]): string[] { const growth = mod.totalItems - start.totalItems; const growthRate = start.totalItems > 0 ? growth / start.totalItems : growth; - if (growthRate > 2) { + if (growth >= MIN_ABSOLUTE_GROWTH && start.totalItems >= MIN_BASELINE_ITEMS && growthRate > 2) { issues.push(`${mod.module}: grew ${growth} items (${(growthRate * 100).toFixed(0)}% increase) — possible accumulation`); } // Check any single section that got very large for (const sec of mod.sections) { - if (sec.itemCount > 15) { + if (sec.itemCount > MAX_SECTION_ITEMS) { issues.push(`${mod.module} > ${sec.key}: ${sec.itemCount} items — section may need pruning`); } } diff --git a/harness/corpus/sdlc.ts b/harness/corpus/sdlc.ts new file mode 100644 index 0000000..e5ed949 --- /dev/null +++ b/harness/corpus/sdlc.ts @@ -0,0 +1,138 @@ +/** + * SDLC-focused scenarios — validate that ADF modules stay updated and portable + * as project guidance evolves from requirements through release. 
+ */ + +import type { Scenario } from '../types'; + +export const sdlcScenarios: Scenario[] = [ + { + id: 'fullstack-sdlc-handoff-portability', + archetype: 'fullstack', + description: 'Rules evolve across SDLC phases while remaining portable through ADF modules', + manifest: { + onDemand: [ + { path: 'frontend.adf', triggers: ['react', 'component', 'ui', 'css', 'vite', 'tsx'] }, + { path: 'backend.adf', triggers: ['api', 'endpoint', 'route', 'handler', 'database', 'auth', 'zod', 'request', 'response'] }, + { path: 'infra.adf', triggers: ['deploy', 'release', 'rollback', 'ci', 'pipeline', 'docker', 'env', 'artifact'] }, + { path: 'qa.adf', triggers: ['test', 'testing', 'playwright', 'contract', 'smoke', 'verification', 'evidence', 'auditability'] }, + ], + }, + sessions: [ + { + label: 'session-1: requirements', + inject: ` +## API Requirements + +- Every API endpoint must publish request and response schemas +- Auth is required for all write endpoints +- Route handlers must return structured error codes +- Database migrations must be reviewed before merge +`, + expected: { 'backend.adf': 4 }, + }, + { + label: 'session-2: design', + inject: ` +## System Design + +- React UI components must map one-to-one to approved design tokens +- API handlers must validate all payloads with Zod +- Route naming must stay stable across versions +- Frontend component props must be typed in TSX files +`, + expected: { 'frontend.adf': 2, 'backend.adf': 2 }, + }, + { + label: 'session-3: implementation', + inject: ` +## Implementation Rules + +- API route files live under \`app/api/\` and use one handler per endpoint +- Database writes must run inside transactions +- Auth checks execute before any handler business logic +- Build artifacts are generated only in CI pipeline jobs +`, + expected: { 'backend.adf': 3, 'infra.adf': 1 }, + }, + { + label: 'session-4: verification', + inject: ` +## Verification + +- CI pipeline must run unit, integration, and Playwright suites on every PR +- 
API contract tests validate request and response schema compatibility +- Deploy preview environments must run smoke checks before approval +- Test artifacts are uploaded from CI for auditability +`, + expected: { 'qa.adf': 4 }, + }, + { + label: 'session-5: release and portability handoff', + inject: ` +## Release Handoff + +- Deploy jobs must consume versioned artifacts from the pipeline only +- Rollback instructions must be validated in staging before production release +- Environment configuration uses env keys defined in the deployment checklist +- Release evidence includes CI run ID, artifact hash, and deployment timestamp +`, + expected: { 'infra.adf': 4 }, + }, + ], + }, + { + id: 'fullstack-sdlc-generic-checklist-routing', + archetype: 'fullstack', + description: 'Generic SDLC handoff headings still separate verification evidence from release operations', + manifest: { + onDemand: [ + { path: 'frontend.adf', triggers: ['react', 'component', 'ui', 'css', 'vite', 'tsx'] }, + { path: 'backend.adf', triggers: ['api', 'endpoint', 'route', 'handler', 'database', 'auth', 'zod', 'request', 'response'] }, + { path: 'infra.adf', triggers: ['deploy', 'release', 'rollback', 'ci', 'pipeline', 'docker', 'env', 'artifact'] }, + { path: 'qa.adf', triggers: ['test', 'testing', 'playwright', 'contract', 'smoke', 'verification', 'evidence', 'auditability'] }, + ], + }, + sessions: [ + { + label: 'session-1: generic checklist handoff', + inject: ` +## Checklist + +- Playwright smoke tests must pass before release approval +- Contract test evidence is attached to the deployment record for auditability +- Release artifact hashes are recorded before deploy starts +- Rollback drills must use the staged deploy artifact from the pipeline +`, + expected: { 'qa.adf': 2, 'infra.adf': 2 }, + }, + ], + }, + { + id: 'fullstack-sdlc-mixed-qa-backend-signals', + archetype: 'fullstack', + description: 'Mixed backend and QA wording in a generic checklist should still route by dominant 
verification vs API intent', + manifest: { + onDemand: [ + { path: 'frontend.adf', triggers: ['react', 'component', 'ui', 'css', 'vite', 'tsx'] }, + { path: 'backend.adf', triggers: ['api', 'endpoint', 'route', 'handler', 'database', 'auth', 'zod', 'request', 'response'] }, + { path: 'infra.adf', triggers: ['deploy', 'release', 'rollback', 'ci', 'pipeline', 'docker', 'env', 'artifact'] }, + { path: 'qa.adf', triggers: ['test', 'testing', 'playwright', 'contract', 'smoke', 'verification', 'evidence', 'auditability'] }, + ], + }, + sessions: [ + { + label: 'session-1: mixed checklist bullets', + inject: ` +## Checklist + +- API contract test evidence must be attached to the release review for auditability +- Request and response schema contract tests must pass before merging backend changes +- Endpoint smoke tests run in CI before deploy approval +- API handler error responses are verified against contract fixtures +`, + expected: { 'qa.adf': 3, 'backend.adf': 1 }, + }, + ], + }, +]; diff --git a/harness/runner.ts b/harness/runner.ts index 675a4e3..24866b0 100644 --- a/harness/runner.ts +++ b/harness/runner.ts @@ -21,7 +21,8 @@ import * as os from 'node:os'; import * as path from 'node:path'; import { execFileSync } from 'node:child_process'; -import type { Scenario, TidyOutput, ScenarioResult, HarnessReport } from './types'; +import { buildMigrationPlan, parseMarkdownSections, type TriggerMap } from '../packages/adf/src'; +import type { Scenario, TidyOutput, ScenarioResult, HarnessReport, StaticSessionAudit, StaticItemRoute } from './types'; import { evaluateSession, printSessionResult } from './evaluator'; import { generateScenarios, getArchetypeManifest } from './ollama'; import { REAL_REPOS } from './corpus/real-repos'; @@ -31,6 +32,7 @@ import { workerScenarios } from './corpus/worker'; import { backendScenarios } from './corpus/backend'; import { fullstackScenarios } from './corpus/fullstack'; import { edgeCaseScenarios } from './corpus/edge-cases'; +import { 
sdlcScenarios } from './corpus/sdlc'; // ============================================================================ // Config @@ -44,6 +46,7 @@ const ALL_STATIC: Scenario[] = [ ...backendScenarios, ...fullstackScenarios, ...edgeCaseScenarios, + ...sdlcScenarios, ]; const OLLAMA_ARCHETYPES = ['worker', 'backend', 'fullstack']; @@ -158,7 +161,12 @@ function runTidy(repoDir: string, dryRun = true): TidyOutput { function runStaticScenario(scenario: Scenario): ScenarioResult { const tmp = makeTempRepo(scenario); const sessionResults = []; + const sessionAudits: StaticSessionAudit[] = []; + const snapshots: AdfSnapshot[] = []; + let prevSnapshot: AdfSnapshot | undefined; let scenarioPass = true; + const baseClaude = THIN_POINTER.trim(); + const aiDir = path.join(tmp, '.ai'); for (const session of scenario.sessions) { // Each session: inject onto thin pointer, dry-run to evaluate, then apply @@ -173,7 +181,43 @@ function runStaticScenario(scenario: Scenario): ScenarioResult { // Apply tidy (non-dry-run) to route content into ADF modules, restoring // CLAUDE.md to thin pointer so the next session sees a clean baseline. 
- runTidy(tmp, false); + const applyOutput = runTidy(tmp, false); + + const postClaude = fs.readFileSync(path.join(tmp, 'CLAUDE.md'), 'utf-8').trim(); + const claudeRestored = postClaude === baseClaude; + if (!claudeRestored) { + scenarioPass = false; + console.log(' portability warning: CLAUDE.md was not restored to thin pointer state'); + } + + const snapshot = inspectAdfModules(aiDir, session.label, prevSnapshot); + snapshots.push(snapshot); + prevSnapshot = snapshot; + const itemRoutes = previewItemRoutes(session.inject, scenario); + + sessionAudits.push({ + sessionLabel: session.label, + dryRunExtracted: tidyOutput.totalExtracted, + appliedModulesModified: applyOutput.modulesModified, + claudeRestored, + adfTotalItems: snapshot.totalItemsAcrossAllModules, + modulesGrew: snapshot.grew, + itemRoutes, + }); + + if (!sessionResult.pass) { + console.log(' item routing preview:'); + for (const item of itemRoutes) { + const matches = item.matchedTriggers.length > 0 ? ` | matches=${item.matchedTriggers.join(', ')} score=${item.matchScore}` : ''; + console.log(` [${item.heading || 'preamble'} -> ${item.headingModule}] ${item.targetModule} (${item.targetSection}) :: ${item.content}${matches}`); + } + } + } + + const accumulationIssues = detectAccumulationIssues(snapshots); + if (accumulationIssues.length > 0) { + console.log(' accumulation warnings:'); + for (const issue of accumulationIssues) console.log(` - ${issue}`); } return { @@ -181,10 +225,79 @@ function runStaticScenario(scenario: Scenario): ScenarioResult { archetype: scenario.archetype, description: scenario.description, sessions: sessionResults, + staticAudit: { + sessions: sessionAudits, + accumulationIssues, + }, pass: scenarioPass, }; } +function previewItemRoutes(inject: string, scenario: Scenario): StaticItemRoute[] { + const triggerMap: TriggerMap = {}; + for (const entry of scenario.manifest.onDemand) { + if (entry.triggers.length > 0) { + triggerMap[entry.path] = entry.triggers.map(trigger => 
trigger.toLowerCase()); + } + } + + const sections = parseMarkdownSections(inject); + const plan = buildMigrationPlan(sections, undefined, triggerMap); + + return plan.items.map(item => ({ + heading: item.sourceHeading, + content: item.element.content, + headingModule: previewHeadingModule(item.sourceHeading), + targetModule: item.classification.targetModule, + targetSection: item.classification.targetSection, + decision: item.classification.decision, + reason: item.classification.reason, + ...scoreItemAgainstTriggers(item.element.content, triggerMap), + })); +} + +function previewHeadingModule(heading: string): string { + const lower = heading.toLowerCase(); + if (/\b(design.system|ui|frontend|css|component|react|vue|svelte|next|nextjs|tailwind|shadcn|radix|storybook|vite|vitest|playwright|remix|nuxt|astro)\b/.test(lower)) { + return 'frontend.adf'; + } + if (/\b(qa|quality|test|testing|verification|validate|validation|contract|smoke|evidence|audit)\b/.test(lower)) { + return 'qa.adf'; + } + if (/\b(auth|authentication|authorization|security|secret|token|permission|cors|rate.limit|jwt|oauth|clerk|nextauth|lucia|session|cookie|csrf|xss|password|bcrypt)\b/.test(lower)) { + return 'security.adf'; + } + if (/\b(deploy|deployment|infrastructure|infra|ci|cd|pipeline|config|configuration|environment|env|docker|wrangler|cloudflare|vercel|netlify|railway|fly|render|github.actions|kv|d1|r2|queue|durable.object)\b/.test(lower)) { + return 'infra.adf'; + } + if (/\b(api|backend|server|database|db|endpoint|query|migration|handler|prisma|drizzle|mongoose|postgres|postgresql|mysql|sqlite|express|fastify|hono|trpc|zod|graphql)\b/.test(lower)) { + return 'backend.adf'; + } + return 'core.adf'; +} + +function scoreItemAgainstTriggers(text: string, triggerMap: TriggerMap): Pick { + const lower = text.toLowerCase(); + let matchedTriggers: string[] = []; + let matchScore = 0; + + for (const triggers of Object.values(triggerMap)) { + const currentMatches = triggers.filter(trigger => + 
new RegExp(`\\b${escapeRegex(trigger)}(?:s|ed|ing|ment|tion|ity|ication)?\\b`, 'i').test(lower), + ); + if (currentMatches.length > matchScore) { + matchedTriggers = currentMatches; + matchScore = currentMatches.length; + } + } + + return { matchedTriggers, matchScore }; +} + +function escapeRegex(str: string): string { + return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); +} + // ============================================================================ // Ollama Scenario Runner (exploratory — no expected routing) // ============================================================================ diff --git a/harness/types.ts b/harness/types.ts index f46c1ad..3eb2cdc 100644 --- a/harness/types.ts +++ b/harness/types.ts @@ -87,9 +87,37 @@ export interface ScenarioResult { archetype: string; description: string; sessions: SessionResult[]; + staticAudit?: StaticScenarioAudit; pass: boolean; } +export interface StaticSessionAudit { + sessionLabel: string; + dryRunExtracted: number; + appliedModulesModified: string[]; + claudeRestored: boolean; + adfTotalItems: number; + modulesGrew: string[]; + itemRoutes: StaticItemRoute[]; +} + +export interface StaticScenarioAudit { + sessions: StaticSessionAudit[]; + accumulationIssues: string[]; +} + +export interface StaticItemRoute { + heading: string; + content: string; + headingModule: string; + targetModule: string; + targetSection: string; + decision: 'STAY' | 'MIGRATE'; + reason: string; + matchedTriggers: string[]; + matchScore: number; +} + // ============================================================================ // Run Report // ============================================================================ diff --git a/packages/adf/src/__tests__/content-classifier.test.ts b/packages/adf/src/__tests__/content-classifier.test.ts index 5946173..164b122 100644 --- a/packages/adf/src/__tests__/content-classifier.test.ts +++ b/packages/adf/src/__tests__/content-classifier.test.ts @@ -28,6 +28,11 @@ 
describe('classifyElement', () => { expect(result.targetModule).toBe('backend.adf'); }); + it('routes verification headings to qa.adf', () => { + const result = classifyElement(rule('Run contract tests before release'), 'Verification'); + expect(result.targetModule).toBe('qa.adf'); + }); + it('routes to core.adf for generic headings', () => { const result = classifyElement(rule('Use conventional commits'), 'Conventions'); expect(result.targetModule).toBe('core.adf'); @@ -55,6 +60,19 @@ describe('classifyElement', () => { expect(result.targetModule).toBe('frontend.adf'); }); + it('chooses the module with the strongest trigger match instead of first match', () => { + const qaTriggerMap: TriggerMap = { + 'infra.adf': ['ci', 'pipeline', 'artifact'], + 'qa.adf': ['test', 'playwright', 'evidence', 'auditability'], + }; + const result = classifyElement( + rule('Playwright test evidence is uploaded from the CI pipeline for auditability'), + 'Checklist', + qaTriggerMap, + ); + expect(result.targetModule).toBe('qa.adf'); + }); + it('stays on core.adf when no trigger keyword matches', () => { const result = classifyElement(rule('Use conventional commits'), 'Conventions', triggerMap); expect(result.targetModule).toBe('core.adf'); diff --git a/packages/adf/src/content-classifier.ts b/packages/adf/src/content-classifier.ts index 423f44e..0500eac 100644 --- a/packages/adf/src/content-classifier.ts +++ b/packages/adf/src/content-classifier.ts @@ -95,6 +95,9 @@ function headingToModule(heading: string, routes?: ClassifierConfig['headingRout if (/\b(design.system|ui|frontend|css|component|react|vue|svelte|next|nextjs|tailwind|shadcn|radix|storybook|vite|vitest|playwright|remix|nuxt|astro)\b/.test(lower)) { return 'frontend.adf'; } + if (/\b(qa|quality|test|testing|verification|validate|validation|contract|smoke|evidence|audit)\b/.test(lower)) { + return 'qa.adf'; + } if 
(/\b(auth|authentication|authorization|security|secret|token|permission|cors|rate.limit|jwt|oauth|clerk|nextauth|lucia|session|cookie|csrf|xss|password|bcrypt)\b/.test(lower)) { return 'security.adf'; } @@ -117,17 +120,31 @@ function escapeRegex(str: string): string { */ function contentToModule(text: string, triggerMap: TriggerMap): string { const lower = text.toLowerCase(); + let bestModule = 'core.adf'; + let bestScore = 0; + let bestSpecificity = 0; + for (const [module, triggers] of Object.entries(triggerMap)) { + let score = 0; + let specificity = 0; + for (const trigger of triggers) { // Match whole words or common suffixes (s, ed, ing, ment, tion, ity, ication). // This allows "token" → "tokens", "deploy" → "deploying"/"deployment", // "auth" → "authentication" — while blocking "author", "apiary", "authority". if (new RegExp(`\\b${escapeRegex(trigger)}(?:s|ed|ing|ment|tion|ity|ication)?\\b`, 'i').test(lower)) { - return module; + score++; + specificity = Math.max(specificity, trigger.length); } } + + if (score > bestScore || (score === bestScore && specificity > bestSpecificity)) { + bestModule = module; + bestScore = score; + bestSpecificity = specificity; + } } - return 'core.adf'; + return bestModule; } // ============================================================================ diff --git a/packages/adf/src/patcher.ts b/packages/adf/src/patcher.ts index 66a1396..5245b38 100644 --- a/packages/adf/src/patcher.ts +++ b/packages/adf/src/patcher.ts @@ -37,8 +37,16 @@ const handlers: Record Ad }; function applyOne(doc: AdfDocument, op: PatchOperation): AdfDocument { + const handler = handlers[op.op]; + if (!handler) { + const valid = Object.keys(handlers).join(', '); + throw new AdfPatchError( + `Unknown patch op: '${op.op}'. 
Valid ops: ${valid}`, + String(op.op) + ); + } // eslint-disable-next-line @typescript-eslint/no-explicit-any - return (handlers[op.op] as any)(doc, op); + return (handler as any)(doc, op); } // ============================================================================ diff --git a/packages/cli/src/commands/adf-populate.ts b/packages/cli/src/commands/adf-populate.ts new file mode 100644 index 0000000..960f2ec --- /dev/null +++ b/packages/cli/src/commands/adf-populate.ts @@ -0,0 +1,390 @@ +/** + * charter adf populate + * + * Auto-fills ADF context files from codebase signals: + * package.json, README.md, and stack detection. + * + * Replaces scaffold placeholder content with project-specific context. + * Skips sections that have already been customized (unless --force). + */ + +import * as fs from 'node:fs'; +import * as path from 'node:path'; +import { parseAdf, formatAdf, applyPatches } from '@stackbilt/adf'; +import type { AdfSection, PatchOperation } from '@stackbilt/adf'; +import type { CLIOptions } from '../index'; +import { CLIError, EXIT_CODE } from '../index'; +import { getFlag } from '../flags'; +import { + loadPackageContexts, + detectStack, + inferProjectName, + type PackageContext, + type DetectionResult, +} from './setup'; + +// ============================================================================ +// Scaffold markers — used to detect un-authored placeholder content +// ============================================================================ + +const SCAFFOLD_MARKERS = [ + 'Frontend module scaffold', + 'Backend module scaffold', + 'Module scaffold', + 'Add framework-specific constraints', + 'Add service/API/database constraints', + 'Add project-specific rules', + "run 'charter adf populate'", + 'Project context (run', + 'Repository initialized with ADF context system', + 'Configure on-demand modules for your stack', +]; + +function hasScaffoldContent(section: AdfSection): boolean { + const text = sectionText(section); + return 
SCAFFOLD_MARKERS.some(m => text.includes(m)); +} + +function sectionText(section: AdfSection): string { + switch (section.content.type) { + case 'list': return section.content.items.join('\n'); + case 'text': return section.content.value; + case 'map': return section.content.entries.map(e => `${e.key}: ${e.value}`).join('\n'); + default: return ''; + } +} + +// ============================================================================ +// Command Entry +// ============================================================================ + +export async function adfPopulateCommand(options: CLIOptions, args: string[]): Promise { + const dryRun = args.includes('--dry-run'); + const force = options.yes || args.includes('--force'); + const aiDir = getFlag(args, '--ai-dir') || '.ai'; + const manifestPath = path.join(aiDir, 'manifest.adf'); + + if (!fs.existsSync(manifestPath)) { + throw new CLIError(`manifest.adf not found at ${manifestPath}. Run 'charter adf init' first.`); + } + + // Gather codebase signals + const contexts = loadPackageContexts(); + const detection = detectStack(contexts); + const projectName = inferProjectName(contexts) || path.basename(process.cwd()); + const rootPkg = readRootPackageJson(); + const description = rootPkg?.description; + const readmeSummary = readReadmeSummary(); + + const results: Array<{ file: string; ops: number; status: 'populated' | 'skipped' | 'missing' }> = []; + + const fileTasks: Array<{ file: string; build: () => PatchOperation[] | null }> = [ + { + file: path.join(aiDir, 'core.adf'), + build: () => buildCoreOps(aiDir, projectName, description, readmeSummary, detection, contexts, force), + }, + { + file: path.join(aiDir, 'state.adf'), + build: () => buildStateOps(aiDir, detection, force), + }, + { + file: path.join(aiDir, 'backend.adf'), + build: () => buildBackendOps(aiDir, detection, force), + }, + { + file: path.join(aiDir, 'frontend.adf'), + build: () => buildFrontendOps(aiDir, detection, force), + }, + ]; + + for (const 
task of fileTasks) { + if (!fs.existsSync(task.file)) { + results.push({ file: task.file, ops: 0, status: 'missing' }); + continue; + } + + const ops = task.build(); + if (!ops || ops.length === 0) { + results.push({ file: task.file, ops: 0, status: 'skipped' }); + continue; + } + + if (!dryRun) { + const input = fs.readFileSync(task.file, 'utf-8'); + const doc = parseAdf(input); + const patched = applyPatches(doc, ops); + fs.writeFileSync(task.file, formatAdf(patched)); + } + + results.push({ file: task.file, ops: ops.length, status: 'populated' }); + } + + if (options.format === 'json') { + console.log(JSON.stringify({ + dryRun, + projectName, + detection: { + preset: detection.suggestedPreset, + confidence: detection.confidence, + runtime: detection.runtime, + frameworks: detection.frameworks, + }, + results, + }, null, 2)); + } else { + const prefix = dryRun ? '[dry-run] ' : ''; + console.log(` ${prefix}ADF context populated from codebase signals:`); + console.log(` Project: ${projectName}${description ? ' — ' + description : ''}`); + console.log(` Stack: ${detection.suggestedPreset} (${detection.confidence} confidence)`); + if (detection.frameworks.length > 0) { + console.log(` Frameworks: ${detection.frameworks.join(', ')}`); + } + console.log(''); + for (const r of results) { + if (r.status === 'missing') continue; + const icon = r.status === 'populated' ? '[ok]' : '[skip]'; + const detail = r.status === 'populated' + ? `${r.ops} op${r.ops === 1 ? 
'' : 's'} applied` + : 'already customized — use --force to overwrite'; + console.log(` ${icon} ${r.file} (${detail})`); + } + if (dryRun) { + console.log(''); + console.log(' Run without --dry-run to apply.'); + } + } + + return EXIT_CODE.SUCCESS; +} + +// ============================================================================ +// core.adf ops +// ============================================================================ + +function buildCoreOps( + aiDir: string, + projectName: string, + description: string | undefined, + readmeSummary: string | undefined, + detection: DetectionResult, + contexts: PackageContext[], + force: boolean +): PatchOperation[] | null { + const filePath = path.join(aiDir, 'core.adf'); + const input = fs.readFileSync(filePath, 'utf-8'); + const doc = parseAdf(input); + const ops: PatchOperation[] = []; + + // Build CONTEXT items from signals + const contextItems: string[] = [ + `project: ${projectName}${description ? ' — ' + description : ''}`, + ]; + if (readmeSummary) contextItems.push(readmeSummary); + if (detection.runtime.length > 0) contextItems.push(`runtime: ${detection.runtime.join(', ')}`); + if (detection.frameworks.length > 0) contextItems.push(`stack: ${detection.frameworks.join(', ')}`); + if (detection.monorepo) contextItems.push('monorepo: true'); + + const contextSection = doc.sections.find(s => s.key === 'CONTEXT'); + if (!contextSection) { + ops.push({ + op: 'ADD_SECTION', + key: 'CONTEXT', + decoration: '\u{1F4CB}', + content: { type: 'list', items: contextItems }, + }); + } else if (force || hasScaffoldContent(contextSection)) { + ops.push({ + op: 'REPLACE_SECTION', + key: 'CONTEXT', + content: { type: 'list', items: contextItems }, + }); + } + + // Add stack-specific constraints (additive, never overwrite existing) + const constraintsSection = doc.sections.find(s => s.key === 'CONSTRAINTS'); + if (constraintsSection && constraintsSection.content.type === 'list') { + const existingItems = 
constraintsSection.content.items; + + const addConstraint = (value: string, matchFn: (item: string) => boolean) => { + if (!existingItems.some(matchFn)) { + ops.push({ op: 'ADD_BULLET', section: 'CONSTRAINTS', value }); + } + }; + + // ESM: detect type: "module" in any package.json + const isEsm = contexts.some(ctx => { + try { + const pkg = JSON.parse(fs.readFileSync(ctx.source, 'utf-8')); + return pkg.type === 'module'; + } catch { return false; } + }); + if (isEsm) { + addConstraint( + 'Use .js extensions for all ESM imports (never .ts in import paths)', + item => item.includes('.js extensions') || (item.includes('ESM') && item.includes('import')) + ); + } + + if (detection.signals.hasCloudflare) { + addConstraint( + 'No Node.js-specific APIs in Worker handlers; use CF-native APIs (fetch, KV, D1, R2)', + item => item.includes('Worker handler') || (item.includes('Node') && item.includes('CF')) + ); + } + + if (detection.signals.hasPnpm && detection.monorepo) { + addConstraint( + 'Internal packages use pnpm workspace:^ protocol, never relative paths', + item => item.includes('workspace') || (item.includes('pnpm') && item.includes('package')) + ); + } + } + + return ops.length > 0 ? 
ops : null; +} + +// ============================================================================ +// state.adf ops +// ============================================================================ + +function buildStateOps( + aiDir: string, + detection: DetectionResult, + force: boolean +): PatchOperation[] | null { + const filePath = path.join(aiDir, 'state.adf'); + const input = fs.readFileSync(filePath, 'utf-8'); + const doc = parseAdf(input); + + const stateSection = doc.sections.find(s => s.key === 'STATE'); + if (!stateSection) return null; + if (!force && !hasScaffoldContent(stateSection)) return null; + + const stackSummary = [ + ...detection.runtime, + ...detection.frameworks, + ].join(', ') || detection.suggestedPreset; + + return [{ + op: 'REPLACE_SECTION', + key: 'STATE', + content: { + type: 'map', + entries: [ + { key: 'CURRENT', value: `Charter initialized — ${stackSummary} project` }, + { key: 'NEXT', value: 'Author project-specific constraints in core.adf' }, + ], + }, + }]; +} + +// ============================================================================ +// backend.adf ops +// ============================================================================ + +function buildBackendOps( + aiDir: string, + detection: DetectionResult, + force: boolean +): PatchOperation[] | null { + const filePath = path.join(aiDir, 'backend.adf'); + if (!fs.existsSync(filePath)) return null; + + const input = fs.readFileSync(filePath, 'utf-8'); + const doc = parseAdf(input); + + const contextSection = doc.sections.find(s => s.key === 'CONTEXT'); + if (contextSection && !force && !hasScaffoldContent(contextSection)) return null; + + const items: string[] = []; + if (detection.signals.hasWorker || detection.signals.hasCloudflare) { + items.push('Cloudflare Workers edge runtime (wrangler deploy)'); + } + if (detection.signals.hasHono) { + items.push('Hono for route composition — typed, lightweight, edge-compatible'); + } + if (!detection.signals.hasWorker && 
detection.signals.hasBackend) { + items.push('Node.js backend service with typed request boundaries'); + } + if (items.length === 0) { + items.push('Backend module — add service/API/database constraints and rules'); + } + + const op: PatchOperation = contextSection + ? { op: 'REPLACE_SECTION', key: 'CONTEXT', content: { type: 'list', items } } + : { op: 'ADD_SECTION', key: 'CONTEXT', decoration: '\u{1F4CB}', content: { type: 'list', items } }; + + return [op]; +} + +// ============================================================================ +// frontend.adf ops +// ============================================================================ + +function buildFrontendOps( + aiDir: string, + detection: DetectionResult, + force: boolean +): PatchOperation[] | null { + const filePath = path.join(aiDir, 'frontend.adf'); + if (!fs.existsSync(filePath)) return null; + + const input = fs.readFileSync(filePath, 'utf-8'); + const doc = parseAdf(input); + + const contextSection = doc.sections.find(s => s.key === 'CONTEXT'); + if (contextSection && !force && !hasScaffoldContent(contextSection)) return null; + + const items: string[] = []; + if (detection.signals.hasReact) items.push('React component model (hooks-based, no class components)'); + if (detection.signals.hasVite) items.push('Vite for build tooling and dev server'); + if (items.length === 0) { + items.push('Frontend module — add framework-specific constraints and rules'); + } + + const op: PatchOperation = contextSection + ? 
{ op: 'REPLACE_SECTION', key: 'CONTEXT', content: { type: 'list', items } } + : { op: 'ADD_SECTION', key: 'CONTEXT', decoration: '\u{1F4CB}', content: { type: 'list', items } }; + + return [op]; +} + +// ============================================================================ +// Helpers +// ============================================================================ + +function readRootPackageJson(): { name?: string; description?: string; type?: string } | null { + try { + return JSON.parse(fs.readFileSync(path.resolve('package.json'), 'utf-8')); + } catch { + return null; + } +} + +function readReadmeSummary(): string | undefined { + for (const name of ['README.md', 'readme.md', 'Readme.md']) { + try { + const content = fs.readFileSync(path.resolve(name), 'utf-8'); + const lines = content.split('\n'); + let inParagraph = false; + const paragraphLines: string[] = []; + + for (const line of lines) { + if (line.startsWith('#')) continue; + if (line.trim() === '') { + if (inParagraph) break; + continue; + } + inParagraph = true; + paragraphLines.push(line.trim()); + if (paragraphLines.length >= 2) break; + } + + if (paragraphLines.length > 0) { + const summary = paragraphLines.join(' '); + return summary.length > 120 ? summary.slice(0, 117) + '...' 
: summary; + } + } catch { /* file not found */ } + } + return undefined; +} diff --git a/packages/cli/src/commands/adf.ts b/packages/cli/src/commands/adf.ts index c9dad52..ed398df 100644 --- a/packages/cli/src/commands/adf.ts +++ b/packages/cli/src/commands/adf.ts @@ -22,6 +22,7 @@ import { adfSync } from './adf-sync'; import { adfEvidence } from './adf-evidence'; import { adfMetricsCommand } from './adf-metrics'; import { adfTidyCommand } from './adf-tidy'; +import { adfPopulateCommand } from './adf-populate'; // ============================================================================ // Scaffold Content @@ -45,6 +46,9 @@ export const MANIFEST_SCAFFOLD = `ADF: 0.1 export const CORE_SCAFFOLD = `ADF: 0.1 +\u{1F4CB} CONTEXT: + - Project context (run 'charter adf populate' to auto-fill from codebase) + \u{1F4D6} GUIDE [advisory]: - Pure runtime/environment? (OS, line endings) \u2192 CLAUDE.md, not ADF - Universal architecture constraint? \u2192 core.adf CONSTRAINTS [load-bearing] @@ -160,8 +164,10 @@ export async function adfCommand(options: CLIOptions, args: string[]): Promise"', ], @@ -319,8 +325,8 @@ function adfInit(options: CLIOptions, args: string[]): number { } console.log(''); console.log(' Next steps:'); - console.log(' 1. Edit core.adf with your universal repo rules'); - console.log(' 2. Edit frontend.adf/backend.adf stubs or replace with domain modules'); + console.log(' 1. Run: charter adf populate # auto-fill ADF files from codebase signals'); + console.log(' 2. Edit core.adf to add project-specific constraints and rules'); console.log(' 3. Run: charter adf fmt .ai/core.adf --check'); console.log(' 4. Run: charter adf bundle --task "" to compile context for an agent session'); console.log(' (The verify:adf script runs this automatically in CI)'); @@ -480,7 +486,19 @@ function adfCreate(options: CLIOptions, args: string[]): number { const modulePath = moduleArg.endsWith('.adf') ? 
moduleArg : `${moduleArg}.adf`; const moduleRelPath = modulePath.replace(/\\/g, '/'); + + // Prevent directory traversal: reject paths that escape the .ai/ directory + if (moduleRelPath.includes('..') || path.isAbsolute(moduleRelPath)) { + throw new CLIError(`Invalid module path: "${moduleRelPath}". Path must not contain ".." or be absolute.`); + } + const moduleAbsPath = path.join(aiDir, moduleRelPath); + const resolvedAiDir = path.resolve(aiDir); + const resolvedModulePath = path.resolve(moduleAbsPath); + if (!resolvedModulePath.startsWith(resolvedAiDir + path.sep)) { + throw new CLIError(`Invalid module path: "${moduleRelPath}". Path must stay within ${aiDir}/.`); + } + fs.mkdirSync(path.dirname(moduleAbsPath), { recursive: true }); let fileCreated = false; @@ -573,7 +591,17 @@ function printHelp(): void { console.log(''); console.log(' charter adf patch --ops <json> | --ops-file <path>'); console.log(' Apply ADF_PATCH operations to a file.'); + console.log(' Valid ops: ADD_BULLET, REPLACE_BULLET, REMOVE_BULLET,'); + console.log(' ADD_SECTION, REPLACE_SECTION, REMOVE_SECTION, UPDATE_METRIC'); + console.log(' Examples:'); + console.log(' ADD_BULLET: {"op":"ADD_BULLET","section":"CONSTRAINTS","value":"..."}'); + console.log(' ADD_SECTION: {"op":"ADD_SECTION","key":"CONTEXT","decoration":"📋","content":{"type":"list","items":["..."]}}'); + console.log(' REPLACE_SECTION: {"op":"REPLACE_SECTION","key":"STATE","content":{"type":"map","entries":[{"key":"CURRENT","value":"..."}]}}'); console.log(''); + console.log(' charter adf populate [--ai-dir <dir>] [--dry-run] [--force]'); + console.log(' Auto-fill ADF files from codebase signals (package.json, README, stack detection).'); + console.log(' Populates CONTEXT in core/backend/frontend.adf and STATE in state.adf.'); + console.log(' Skips files with existing custom content unless --force.'); console.log(' charter adf create <module> [--ai-dir <dir>] [--triggers "a,b,c"] [--load default|on-demand] [--force]'); console.log(' Create a module file and 
register it in manifest DEFAULT_LOAD or ON_DEMAND.'); console.log(' --triggers: comma-separated trigger keywords (for ON_DEMAND entries).'); diff --git a/packages/cli/src/commands/bootstrap.ts b/packages/cli/src/commands/bootstrap.ts index e4bbb16..6691932 100644 --- a/packages/cli/src/commands/bootstrap.ts +++ b/packages/cli/src/commands/bootstrap.ts @@ -237,9 +237,9 @@ export async function bootstrapCommand(options: CLIOptions, args: string[]): Pro reason: 'Customize blessed stack patterns', }); result.nextSteps.push({ - cmd: 'Add project-specific rules to .ai/core.adf', + cmd: 'charter adf populate # auto-fill ADF files from codebase signals', required: false, - reason: 'Add project-specific ADF rules', + reason: 'Populate ADF context from package.json, README, and stack detection', }); result.nextSteps.push({ cmd: 'git add .charter .ai CLAUDE.md .cursorrules agents.md && git commit -m "chore: bootstrap charter governance"', diff --git a/packages/cli/src/git-helpers.ts b/packages/cli/src/git-helpers.ts index 54b059d..455eed9 100644 --- a/packages/cli/src/git-helpers.ts +++ b/packages/cli/src/git-helpers.ts @@ -1,9 +1,9 @@ /** * Shared git invocation helpers. * - * Centralizes all child-process git calls behind a single `runGit()` that - * uses `shell: true` for cross-platform PATH resolution (fixes WSL, CMD, - * PowerShell parity — see ADX-005 F2). + * Centralizes all child-process git calls behind a single `runGit()`. + * All args are hardcoded call-site strings, never user input — but we + * still avoid `shell: true` to eliminate any shell-injection surface. */ import { execFileSync } from 'node:child_process'; @@ -16,17 +16,16 @@ import type { GitCommit } from '@stackbilt/types'; /** * Run a git command and return its stdout. * - * Uses `shell: true` so that the OS shell resolves the `git` binary via - * PATH. 
This is the key cross-platform fix: `execFileSync` *without* a - * shell can fail on WSL/Windows when git lives in a PATH entry the Node - * process doesn't see directly. + * Does NOT use `shell: true` — Node resolves `git` via PATH directly, which + * works on WSL, Linux, macOS, and Windows (Git for Windows adds git to PATH + * at install time). Using shell: true is unnecessary here and would allow + * shell metacharacters in args to be interpreted as shell syntax. */ export function runGit(args: string[]): string { return execFileSync('git', args, { encoding: 'utf-8', stdio: ['ignore', 'pipe', 'pipe'], maxBuffer: 10 * 1024 * 1024, - shell: true, }); }