diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b73ab97b..95da17ec 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -46,7 +46,19 @@ jobs: - name: Build run: pnpm build + - name: Test with coverage + if: matrix.node-version == 22 + run: pnpm test:coverage + - name: Test + if: matrix.node-version != 22 run: pnpm test + - name: Upload coverage + if: matrix.node-version == 22 + uses: actions/upload-artifact@v4 + with: + name: coverage-report + path: coverage/ + # dist/ is not tracked in git — built fresh during CI and publish workflows. diff --git a/.gitignore b/.gitignore index 2e986584..e405ce38 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ Thumbs.db # Test output directories docs/test-decider/ docs/test-progressive/ +coverage/ # Generated CLAUDE.md layers .claude-layers/ diff --git a/delivery-process.config.ts b/delivery-process.config.ts index 60259b84..bea50465 100644 --- a/delivery-process.config.ts +++ b/delivery-process.config.ts @@ -26,6 +26,22 @@ const indexNavigationPreamble = loadPreambleFromMarkdown( 'docs-sources/index-navigation.md' ); +const processGuardPreamble = loadPreambleFromMarkdown( + 'docs-sources/process-guard.md' +); + +const configurationGuidePreamble = loadPreambleFromMarkdown( + 'docs-sources/configuration-guide.md' +); + +const validationToolsGuidePreamble = loadPreambleFromMarkdown( + 'docs-sources/validation-tools-guide.md' +); + +const gherkinPatternsPreamble = loadPreambleFromMarkdown( + 'docs-sources/gherkin-patterns.md' +); + // DD-2: Document entries configured statically, not via filesystem discovery. // All paths are relative to docs-live/ (where INDEX.md is generated). 
const INDEX_DOCUMENT_ENTRIES: readonly DocumentEntry[] = [ @@ -46,6 +62,9 @@ const INDEX_DOCUMENT_ENTRIES: readonly DocumentEntry[] = [ { title: 'Process Guard Reference', path: 'reference/PROCESS-GUARD-REFERENCE.md', description: 'Pre-commit hooks, error codes, programmatic API', audience: 'Team Leads', topic: 'Reference Guides' }, { title: 'Architecture Codecs', path: 'reference/ARCHITECTURE-CODECS.md', description: 'All codecs with factory patterns and options', audience: 'Developers', topic: 'Reference Guides' }, { title: 'Architecture Types', path: 'reference/ARCHITECTURE-TYPES.md', description: 'MasterDataset interface and type shapes', audience: 'Developers', topic: 'Reference Guides' }, + { title: 'Configuration Guide', path: 'reference/CONFIGURATION-GUIDE.md', description: 'Presets, config files, sources, output, and monorepo setup', audience: 'Users', topic: 'Reference Guides' }, + { title: 'Validation Tools Guide', path: 'reference/VALIDATION-TOOLS-GUIDE.md', description: 'lint-patterns, lint-steps, lint-process, validate-patterns reference', audience: 'CI/CD', topic: 'Reference Guides' }, + { title: 'Gherkin Authoring Guide', path: 'reference/GHERKIN-AUTHORING-GUIDE.md', description: 'Roadmap specs, Rule blocks, DataTables, tag conventions', audience: 'Developers', topic: 'Reference Guides' }, // --- Product Area Details --- { title: 'Annotation', path: 'product-areas/ANNOTATION.md', description: 'Annotation product area patterns and statistics', audience: 'Developers', topic: 'Product Area Details' }, { title: 'Configuration', path: 'product-areas/CONFIGURATION.md', description: 'Configuration product area patterns and statistics', audience: 'Users', topic: 'Product Area Details' }, @@ -84,121 +103,7 @@ export default defineConfig({ claudeMdSection: 'validation', docsFilename: 'PROCESS-GUARD-REFERENCE.md', claudeMdFilename: 'process-guard.md', - preamble: [ - // --- Pre-commit Setup --- - { - type: 'heading' as const, - level: 2, - text: 'Pre-commit 
Setup', - }, - { - type: 'paragraph' as const, - text: 'Configure Process Guard as a pre-commit hook using Husky.', - }, - { - type: 'code' as const, - language: 'bash', - content: - '#!/usr/bin/env sh\n. "$(dirname -- "$0")/_/husky.sh"\n\nnpx lint-process --staged', - }, - { - type: 'heading' as const, - level: 3, - text: 'package.json Scripts', - }, - { - type: 'code' as const, - language: 'json', - content: JSON.stringify( - { - scripts: { - 'lint:process': 'lint-process --staged', - 'lint:process:ci': 'lint-process --all --strict', - }, - }, - null, - 2 - ), - }, - // --- Programmatic API --- - { - type: 'heading' as const, - level: 2, - text: 'Programmatic API', - }, - { - type: 'paragraph' as const, - text: 'Use Process Guard programmatically for custom validation workflows.', - }, - { - type: 'code' as const, - language: 'typescript', - content: [ - "import {", - " deriveProcessState,", - " detectStagedChanges,", - " validateChanges,", - " hasErrors,", - " summarizeResult,", - "} from '@libar-dev/delivery-process/lint';", - "", - "// 1. Derive state from annotations", - "const state = (await deriveProcessState({ baseDir: '.' })).value;", - "", - "// 2. Detect changes", - "const changes = detectStagedChanges('.').value;", - "", - "// 3. Validate", - "const { result } = validateChanges({", - " state,", - " changes,", - " options: { strict: false, ignoreSession: false },", - "});", - "", - "// 4. 
Handle results", - "if (hasErrors(result)) {", - " console.log(summarizeResult(result));", - " process.exit(1);", - "}", - ].join('\n'), - }, - { - type: 'heading' as const, - level: 3, - text: 'API Functions', - }, - { - type: 'table' as const, - columns: ['Category', 'Function', 'Description'], - rows: [ - ['State', 'deriveProcessState(cfg)', 'Build state from file annotations'], - ['Changes', 'detectStagedChanges(dir)', 'Parse staged git diff'], - ['Changes', 'detectBranchChanges(dir)', 'Parse all changes vs main'], - ['Validate', 'validateChanges(input)', 'Run all validation rules'], - ['Results', 'hasErrors(result)', 'Check for blocking errors'], - ['Results', 'summarizeResult(result)', 'Human-readable summary'], - ], - }, - // --- Architecture --- - { - type: 'heading' as const, - level: 2, - text: 'Architecture', - }, - { - type: 'paragraph' as const, - text: 'Process Guard uses the Decider pattern: pure functions with no I/O.', - }, - { - type: 'mermaid' as const, - content: [ - 'graph LR', - ' A[deriveProcessState] --> C[validateChanges]', - ' B[detectChanges] --> C', - ' C --> D[ValidationResult]', - ].join('\n'), - }, - ], + preamble: [...processGuardPreamble], }, { title: 'Available Codecs Reference', @@ -292,6 +197,36 @@ export default defineConfig({ claudeMdFilename: 'annotation-reference.md', preamble: [...annotationGuidePreamble], }, + { + title: 'Configuration Guide', + conventionTags: [], + shapeSources: [], + behaviorCategories: [], + claudeMdSection: 'configuration', + docsFilename: 'CONFIGURATION-GUIDE.md', + claudeMdFilename: 'configuration-guide.md', + preamble: [...configurationGuidePreamble], + }, + { + title: 'Validation Tools Guide', + conventionTags: [], + shapeSources: [], + behaviorCategories: [], + claudeMdSection: 'validation', + docsFilename: 'VALIDATION-TOOLS-GUIDE.md', + claudeMdFilename: 'validation-tools-guide.md', + preamble: [...validationToolsGuidePreamble], + }, + { + title: 'Gherkin Authoring Guide', + conventionTags: [], + 
shapeSources: [], + behaviorCategories: [], + claudeMdSection: 'authoring', + docsFilename: 'GHERKIN-AUTHORING-GUIDE.md', + claudeMdFilename: 'gherkin-authoring-guide.md', + preamble: [...gherkinPatternsPreamble], + }, ], generatorOverrides: { 'business-rules': { diff --git a/docs-live/ARCHITECTURE.md b/docs-live/ARCHITECTURE.md index bc4e6cb6..94987235 100644 --- a/docs-live/ARCHITECTURE.md +++ b/docs-live/ARCHITECTURE.md @@ -7,13 +7,14 @@ ## Overview -This diagram was auto-generated from 160 annotated source files across 11 bounded contexts. +This diagram shows 59 key components with explicit architectural roles across 10 bounded contexts. -| Metric | Count | -| ---------------- | ----- | -| Total Components | 160 | -| Bounded Contexts | 11 | -| Component Roles | 5 | +| Metric | Count | +| ------------------ | ----- | +| Diagram Components | 59 | +| Bounded Contexts | 10 | +| Component Roles | 5 | +| Total Annotated | 163 | --- @@ -25,14 +26,9 @@ Component architecture with bounded context isolation: graph TB subgraph api["Api BC"] MasterDataset["MasterDataset[read-model]"] - ProcessStateTypes["ProcessStateTypes"] PatternSummarizerImpl["PatternSummarizerImpl[service]"] - StubResolverImpl["StubResolverImpl"] ScopeValidatorImpl["ScopeValidatorImpl[service]"] - RulesQueryModule["RulesQueryModule"] ProcessStateAPI["ProcessStateAPI[service]"] - PatternHelpers["PatternHelpers"] - APIModule["APIModule"] HandoffGeneratorImpl["HandoffGeneratorImpl[service]"] FuzzyMatcherImpl["FuzzyMatcherImpl[service]"] CoverageAnalyzerImpl["CoverageAnalyzerImpl[service]"] @@ -41,104 +37,53 @@ graph TB ArchQueriesImpl["ArchQueriesImpl[service]"] end subgraph cli["Cli BC"] - CLIVersionHelper["CLIVersionHelper"] - ValidatePatternsCLI["ValidatePatternsCLI"] ReplMode["ReplMode[service]"] ProcessAPICLIImpl["ProcessAPICLIImpl[service]"] OutputPipelineImpl["OutputPipelineImpl[service]"] - LintProcessCLI["LintProcessCLI"] - LintPatternsCLI["LintPatternsCLI"] - TagTaxonomyCLI["TagTaxonomyCLI"] - 
Documentation_Generator_CLI["Documentation Generator CLI"] - CLIErrorHandler["CLIErrorHandler"] DatasetCache["DatasetCache[infrastructure]"] - CLISchema["CLISchema"] end subgraph config["Config BC"] WorkflowLoader["WorkflowLoader[infrastructure]"] - ConfigurationTypes["ConfigurationTypes"] ConfigResolver["ConfigResolver[service]"] RegexBuilders["RegexBuilders[infrastructure]"] - ProjectConfigTypes["ProjectConfigTypes"] ProjectConfigSchema["ProjectConfigSchema[infrastructure]"] - ConfigurationPresets["ConfigurationPresets"] SourceMerger["SourceMerger[service]"] DeliveryProcessFactory["DeliveryProcessFactory[service]"] DefineConfig["DefineConfig[infrastructure]"] - ConfigurationDefaults["ConfigurationDefaults"] ConfigLoader["ConfigLoader[infrastructure]"] end subgraph extractor["Extractor BC"] - ShapeExtractor["ShapeExtractor"] - LayerInference["LayerInference"] GherkinExtractor["GherkinExtractor[service]"] DualSourceExtractor["DualSourceExtractor[service]"] Document_Extractor["Document Extractor[service]"] end subgraph generator["Generator BC"] - WarningCollector["WarningCollector"] - GeneratorTypes["GeneratorTypes"] - SourceMappingValidator["SourceMappingValidator"] SourceMapper["SourceMapper[infrastructure]"] - GeneratorRegistry["GeneratorRegistry"] Documentation_Generation_Orchestrator["Documentation Generation Orchestrator[service]"] ContentDeduplicator["ContentDeduplicator[infrastructure]"] CodecBasedGenerator["CodecBasedGenerator[service]"] FileCache["FileCache[infrastructure]"] TransformDataset["TransformDataset[service]"] SequenceTransformUtils["SequenceTransformUtils[service]"] - MergePatterns["MergePatterns"] - PipelineModule["PipelineModule"] - PipelineFactory["PipelineFactory"] - ReferenceGeneratorRegistration["ReferenceGeneratorRegistration"] - ProcessApiReferenceGenerator["ProcessApiReferenceGenerator"] - BuiltInGenerators["BuiltInGenerators"] + RelationshipResolver["RelationshipResolver[service]"] 
DesignReviewGenerator["DesignReviewGenerator[service]"] DecisionDocGenerator["DecisionDocGenerator[service]"] - CodecGeneratorRegistration["CodecGeneratorRegistration"] - CliRecipeGenerator["CliRecipeGenerator"] end subgraph lint["Lint BC"] LintRules["LintRules[service]"] - LintModule["LintModule"] LintEngine["LintEngine[service]"] - ProcessGuardTypes["ProcessGuardTypes"] - ProcessGuardModule["ProcessGuardModule"] - DetectChanges["DetectChanges"] - DeriveProcessState["DeriveProcessState"] ProcessGuardDecider["ProcessGuardDecider[decider]"] end subgraph renderer["Renderer BC"] - RenderableUtils["RenderableUtils"] RenderableDocument["RenderableDocument[read-model]"] - SectionBlock["SectionBlock"] UniversalRenderer["UniversalRenderer[service]"] - loadPreambleFromMarkdown___Shared_Markdown_to_SectionBlock_Parser["loadPreambleFromMarkdown — Shared Markdown-to-SectionBlock Parser"] - RenderableDocumentModel_RDM_["RenderableDocumentModel(RDM)"] DocumentGenerator["DocumentGenerator[service]"] - ValidationRulesCodec["ValidationRulesCodec"] - TimelineCodec["TimelineCodec"] - TaxonomyCodec["TaxonomyCodec"] - SharedCodecSchema["SharedCodecSchema"] SessionCodec["SessionCodec[projection]"] - RequirementsCodec["RequirementsCodec"] - ReportingCodecs["ReportingCodecs"] - ReferenceDocumentCodec["ReferenceDocumentCodec"] - PrChangesCodec["PrChangesCodec"] - PlanningCodecs["PlanningCodecs"] PatternsCodec["PatternsCodec[projection]"] - DocumentCodecs["DocumentCodecs"] - IndexCodec["IndexCodec"] - RichContentHelpers["RichContentHelpers"] - MermaidDiagramUtils["MermaidDiagramUtils"] DesignReviewCodec["DesignReviewCodec[projection]"] DecisionDocCodec["DecisionDocCodec[projection]"] CompositeCodec["CompositeCodec[projection]"] - ClaudeModuleCodec["ClaudeModuleCodec"] - BusinessRulesCodec["BusinessRulesCodec"] ArchitectureCodec["ArchitectureCodec[projection]"] - AdrDocumentCodec["AdrDocumentCodec"] - CodecBaseOptions["CodecBaseOptions"] end subgraph scanner["Scanner BC"] 
Pattern_Scanner["Pattern Scanner[infrastructure]"] @@ -147,183 +92,42 @@ graph TB TypeScript_AST_Parser["TypeScript AST Parser[infrastructure]"] end subgraph taxonomy["Taxonomy BC"] - StatusValues["StatusValues"] - RiskLevels["RiskLevels"] TagRegistryBuilder["TagRegistryBuilder[service]"] - NormalizedStatus["NormalizedStatus"] - LayerTypes["LayerTypes"] - HierarchyLevels["HierarchyLevels"] - FormatTypes["FormatTypes"] - DeliverableStatusTaxonomy["DeliverableStatusTaxonomy"] CategoryDefinitions["CategoryDefinitions[read-model]"] - CategoryDefinition["CategoryDefinition"] - end - subgraph types["Types BC"] - ResultMonadTypes["ResultMonadTypes"] - ErrorFactoryTypes["ErrorFactoryTypes"] end subgraph validation["Validation BC"] - WorkflowConfigSchema["WorkflowConfigSchema"] - Tag_Registry_Configuration["Tag Registry Configuration"] - OutputSchemas["OutputSchemas"] - ExtractedShapeSchema["ExtractedShapeSchema"] - ExtractedPatternSchema["ExtractedPatternSchema"] - DualSourceSchemas["DualSourceSchemas"] - DocDirectiveSchema["DocDirectiveSchema"] - CodecUtils["CodecUtils"] - DoDValidationTypes["DoDValidationTypes"] - ValidationModule["ValidationModule"] DoDValidator["DoDValidator[service]"] AntiPatternDetector["AntiPatternDetector[service]"] FSMValidator["FSMValidator[decider]"] FSMTransitions["FSMTransitions[read-model]"] FSMStates["FSMStates[read-model]"] - FSMModule["FSMModule"] end subgraph shared["Shared Infrastructure"] - WorkflowConfigSchema["WorkflowConfigSchema"] - Tag_Registry_Configuration["Tag Registry Configuration"] - OutputSchemas["OutputSchemas"] - ExtractedShapeSchema["ExtractedShapeSchema"] - ExtractedPatternSchema["ExtractedPatternSchema"] - DualSourceSchemas["DualSourceSchemas"] - DocDirectiveSchema["DocDirectiveSchema"] - CodecUtils["CodecUtils"] - ResultMonadTypes["ResultMonadTypes"] - ErrorFactoryTypes["ErrorFactoryTypes"] - DoDValidationTypes["DoDValidationTypes"] - ValidationModule["ValidationModule"] - StatusValues["StatusValues"] - 
RiskLevels["RiskLevels"] - NormalizedStatus["NormalizedStatus"] - LayerTypes["LayerTypes"] - HierarchyLevels["HierarchyLevels"] - FormatTypes["FormatTypes"] - DeliverableStatusTaxonomy["DeliverableStatusTaxonomy"] - CategoryDefinition["CategoryDefinition"] - LintModule["LintModule"] - WarningCollector["WarningCollector"] - GeneratorTypes["GeneratorTypes"] - SourceMappingValidator["SourceMappingValidator"] - GeneratorRegistry["GeneratorRegistry"] - RenderableUtils["RenderableUtils"] - SectionBlock["SectionBlock"] - RenderableDocumentModel_RDM_["RenderableDocumentModel(RDM)"] - ShapeExtractor["ShapeExtractor"] - LayerInference["LayerInference"] - ProcessStateTypes["ProcessStateTypes"] - StubResolverImpl["StubResolverImpl"] - RulesQueryModule["RulesQueryModule"] - APIModule["APIModule"] - CLIVersionHelper["CLIVersionHelper"] - ValidatePatternsCLI["ValidatePatternsCLI"] - LintProcessCLI["LintProcessCLI"] - LintPatternsCLI["LintPatternsCLI"] - TagTaxonomyCLI["TagTaxonomyCLI"] - Documentation_Generator_CLI["Documentation Generator CLI"] - CLIErrorHandler["CLIErrorHandler"] Convention_Annotation_Example___DD_3_Decision["Convention Annotation Example — DD-3 Decision[decider]"] - FSMModule["FSMModule"] - ProcessGuardTypes["ProcessGuardTypes"] - ProcessGuardModule["ProcessGuardModule"] - DetectChanges["DetectChanges"] - DeriveProcessState["DeriveProcessState"] - MergePatterns["MergePatterns"] - PipelineModule["PipelineModule"] - PipelineFactory["PipelineFactory"] - ReferenceGeneratorRegistration["ReferenceGeneratorRegistration"] - BuiltInGenerators["BuiltInGenerators"] - CodecGeneratorRegistration["CodecGeneratorRegistration"] - ValidationRulesCodec["ValidationRulesCodec"] - TimelineCodec["TimelineCodec"] - TaxonomyCodec["TaxonomyCodec"] - SharedCodecSchema["SharedCodecSchema"] - RequirementsCodec["RequirementsCodec"] - ReportingCodecs["ReportingCodecs"] - ReferenceDocumentCodec["ReferenceDocumentCodec"] - PrChangesCodec["PrChangesCodec"] - PlanningCodecs["PlanningCodecs"] - 
DocumentCodecs["DocumentCodecs"] - IndexCodec["IndexCodec"] - RichContentHelpers["RichContentHelpers"] - ClaudeModuleCodec["ClaudeModuleCodec"] - BusinessRulesCodec["BusinessRulesCodec"] - AdrDocumentCodec["AdrDocumentCodec"] - CodecBaseOptions["CodecBaseOptions"] - ADR006SingleReadModelArchitecture["ADR006SingleReadModelArchitecture"] - ADR005CodecBasedMarkdownRendering["ADR005CodecBasedMarkdownRendering"] - ADR003SourceFirstPatternArchitecture["ADR003SourceFirstPatternArchitecture"] - ADR002GherkinOnlyTesting["ADR002GherkinOnlyTesting"] - ADR001TaxonomyCanonicalValues["ADR001TaxonomyCanonicalValues"] - ValidatorReadModelConsolidation["ValidatorReadModelConsolidation"] - StepDefinitionCompletion["StepDefinitionCompletion"] - SessionGuidesModuleSource["SessionGuidesModuleSource"] - SessionFileCleanup["SessionFileCleanup"] - ProcessAPILayeredExtraction["ProcessAPILayeredExtraction"] - OrchestratorPipelineFactoryMigration["OrchestratorPipelineFactoryMigration"] - MvpWorkflowImplementation["MvpWorkflowImplementation"] - LivingRoadmapCLI["LivingRoadmapCLI"] - EffortVarianceTracking["EffortVarianceTracking"] - ConfigBasedWorkflowDefinition["ConfigBasedWorkflowDefinition"] - CliBehaviorTesting["CliBehaviorTesting"] - ProcessGuardTesting["ProcessGuardTesting"] - ResultMonad["ResultMonad"] - ErrorFactories["ErrorFactories"] - StringUtils["StringUtils"] - SessionHandoffs["SessionHandoffs"] - SessionFileLifecycle["SessionFileLifecycle"] - KebabCaseSlugs["KebabCaseSlugs"] - ErrorHandlingUnification["ErrorHandlingUnification"] end - ExtractedPatternSchema --> DocDirectiveSchema - DualSourceSchemas ..-> MvpWorkflowImplementation - DocDirectiveSchema ..-> MvpWorkflowImplementation - ResultMonadTypes ..-> ResultMonad - ErrorFactoryTypes ..-> ErrorFactories - DoDValidator --> DoDValidationTypes DoDValidator --> DualSourceExtractor - AntiPatternDetector --> DoDValidationTypes - CategoryDefinition ..-> CategoryDefinitions - LintModule --> LintRules - LintModule --> LintEngine - 
LintEngine --> LintRules - LintEngine --> CodecUtils GherkinScanner --> GherkinASTParser - TypeScript_AST_Parser --> DocDirectiveSchema + LintEngine --> LintRules SourceMapper -.-> DecisionDocCodec - SourceMapper -.-> ShapeExtractor SourceMapper -.-> GherkinASTParser - GeneratorRegistry --> GeneratorTypes Documentation_Generation_Orchestrator --> Pattern_Scanner - SectionBlock ..-> RenderableDocument - WorkflowLoader --> WorkflowConfigSchema - WorkflowLoader --> CodecUtils - ConfigResolver --> ProjectConfigTypes - ConfigResolver --> DeliveryProcessFactory - ConfigResolver --> ConfigurationDefaults - RegexBuilders --> ConfigurationTypes - ProjectConfigTypes --> ConfigurationTypes - ProjectConfigTypes --> ConfigurationPresets - ProjectConfigSchema --> ProjectConfigTypes - ConfigurationPresets --> ConfigurationTypes - SourceMerger --> ProjectConfigTypes - DeliveryProcessFactory --> ConfigurationTypes - DeliveryProcessFactory --> ConfigurationPresets - DeliveryProcessFactory --> RegexBuilders - DefineConfig --> ProjectConfigTypes - ConfigLoader --> DeliveryProcessFactory - ConfigLoader --> ConfigurationTypes GherkinExtractor --> GherkinASTParser DualSourceExtractor --> GherkinExtractor DualSourceExtractor --> GherkinScanner Document_Extractor --> Pattern_Scanner + ReplMode --> ProcessStateAPI + ProcessAPICLIImpl --> ProcessStateAPI + ProcessAPICLIImpl --> MasterDataset + ProcessAPICLIImpl --> PatternSummarizerImpl + ProcessAPICLIImpl --> FuzzyMatcherImpl + ProcessAPICLIImpl --> OutputPipelineImpl + OutputPipelineImpl --> PatternSummarizerImpl + ConfigResolver --> DeliveryProcessFactory + DeliveryProcessFactory --> RegexBuilders + ConfigLoader --> DeliveryProcessFactory PatternSummarizerImpl --> ProcessStateAPI - StubResolverImpl --> ProcessStateAPI ScopeValidatorImpl --> ProcessStateAPI ScopeValidatorImpl --> MasterDataset - ScopeValidatorImpl --> StubResolverImpl - RulesQueryModule --> BusinessRulesCodec - RulesQueryModule ..-> ProcessAPILayeredExtraction 
ProcessStateAPI --> MasterDataset ProcessStateAPI --> FSMValidator HandoffGeneratorImpl --> ProcessStateAPI @@ -336,74 +140,19 @@ graph TB ContextAssemblerImpl --> MasterDataset ContextAssemblerImpl --> PatternSummarizerImpl ContextAssemblerImpl --> FuzzyMatcherImpl - ContextAssemblerImpl --> StubResolverImpl ArchQueriesImpl --> ProcessStateAPI ArchQueriesImpl --> MasterDataset - ValidatePatternsCLI --> GherkinScanner - ValidatePatternsCLI --> GherkinExtractor - ValidatePatternsCLI --> MasterDataset - ValidatePatternsCLI --> CodecUtils - ReplMode --> PipelineFactory - ReplMode --> ProcessStateAPI - ProcessAPICLIImpl --> ProcessStateAPI - ProcessAPICLIImpl --> MasterDataset - ProcessAPICLIImpl --> PipelineFactory - ProcessAPICLIImpl --> RulesQueryModule - ProcessAPICLIImpl --> PatternSummarizerImpl - ProcessAPICLIImpl --> FuzzyMatcherImpl - ProcessAPICLIImpl --> OutputPipelineImpl - OutputPipelineImpl --> PatternSummarizerImpl - LintProcessCLI --> ProcessGuardModule - LintPatternsCLI --> LintEngine - LintPatternsCLI --> LintRules - TagTaxonomyCLI --> ConfigLoader - DatasetCache --> PipelineFactory - DatasetCache --> WorkflowConfigSchema FSMValidator --> FSMTransitions FSMValidator --> FSMStates - DetectChanges --> DeriveProcessState - DeriveProcessState --> GherkinScanner - DeriveProcessState --> FSMValidator + DesignReviewCodec --> MasterDataset + ArchitectureCodec --> MasterDataset ProcessGuardDecider --> FSMValidator - ProcessGuardDecider --> DeriveProcessState - ProcessGuardDecider --> DetectChanges TransformDataset --> MasterDataset SequenceTransformUtils --> MasterDataset - MergePatterns --> PatternHelpers - MergePatterns ..-> OrchestratorPipelineFactoryMigration - PipelineModule --> TransformDataset - PipelineFactory --> GherkinScanner - PipelineFactory --> GherkinExtractor - PipelineFactory --> MasterDataset - PipelineFactory ..-> ProcessAPILayeredExtraction - BuiltInGenerators --> GeneratorRegistry - BuiltInGenerators --> CodecBasedGenerator 
DesignReviewGenerator --> DesignReviewCodec DesignReviewGenerator --> MasterDataset DecisionDocGenerator -.-> DecisionDocCodec DecisionDocGenerator -.-> SourceMapper - CodecGeneratorRegistration --> DesignReviewGenerator - CodecGeneratorRegistration --> DecisionDocGenerator - CodecGeneratorRegistration --> ProcessApiReferenceGenerator - CodecGeneratorRegistration --> CliRecipeGenerator - DesignReviewCodec --> MasterDataset - DesignReviewCodec --> MermaidDiagramUtils - ArchitectureCodec --> MasterDataset - ADR006SingleReadModelArchitecture -.-> ADR005CodecBasedMarkdownRendering - ADR003SourceFirstPatternArchitecture -.-> ADR001TaxonomyCanonicalValues - ValidatorReadModelConsolidation -.-> ADR006SingleReadModelArchitecture - StepDefinitionCompletion -.-> ADR002GherkinOnlyTesting - SessionFileCleanup -.-> SessionFileLifecycle - ProcessAPILayeredExtraction -.-> ValidatorReadModelConsolidation - OrchestratorPipelineFactoryMigration -.-> ProcessAPILayeredExtraction - LivingRoadmapCLI -.-> MvpWorkflowImplementation - EffortVarianceTracking -.-> MvpWorkflowImplementation - ConfigBasedWorkflowDefinition -.-> MvpWorkflowImplementation - CliBehaviorTesting -.-> ADR002GherkinOnlyTesting - ProcessGuardTesting -.-> AntiPatternDetector - KebabCaseSlugs -.-> StringUtils - ErrorHandlingUnification -.-> ResultMonad - ErrorHandlingUnification -.-> ErrorFactories ``` --- @@ -457,7 +206,13 @@ All components with architecture annotations: | ✅ Dual Source Extractor | extractor | service | application | src/extractor/dual-source-extractor.ts | | ✅ Gherkin Extractor | extractor | service | application | src/extractor/gherkin-extractor.ts | | Cli Recipe Generator | generator | - | application | src/generators/built-in/cli-recipe-generator.ts | +| ✅ Context Inference Impl | generator | - | application | src/generators/pipeline/context-inference.ts | +| 🚧 Git Branch Diff | generator | - | infrastructure | src/git/branch-diff.ts | +| 🚧 Git Helpers | generator | - | infrastructure | 
src/git/helpers.ts | +| 🚧 Git Module | generator | - | infrastructure | src/git/index.ts | +| 🚧 Git Name Status Parser | generator | - | infrastructure | src/git/name-status.ts | | ✅ Process Api Reference Generator | generator | - | application | src/generators/built-in/process-api-reference-generator.ts | +| 🚧 Transform Types | generator | - | application | src/generators/pipeline/transform-types.ts | | ✅ Content Deduplicator | generator | infrastructure | infrastructure | src/generators/content-deduplicator.ts | | 🚧 File Cache | generator | infrastructure | infrastructure | src/cache/file-cache.ts | | ✅ Source Mapper | generator | infrastructure | infrastructure | src/generators/source-mapper.ts | @@ -465,6 +220,7 @@ All components with architecture annotations: | ✅ Decision Doc Generator | generator | service | application | src/generators/built-in/decision-doc-generator.ts | | 🚧 Design Review Generator | generator | service | application | src/generators/built-in/design-review-generator.ts | | ✅ Documentation Generation Orchestrator | generator | service | application | src/generators/orchestrator.ts | +| 🚧 Relationship Resolver | generator | service | application | src/generators/pipeline/relationship-resolver.ts | | 🚧 Sequence Transform Utils | generator | service | application | src/generators/pipeline/sequence-utils.ts | | ✅ Transform Dataset | generator | service | application | src/generators/pipeline/transform-dataset.ts | | 🚧 Process Guard Decider | lint | decider | application | src/lint/process-guard/decider.ts | @@ -508,22 +264,19 @@ All components with architecture annotations: | ✅ CLI Version Helper | - | - | - | src/cli/version.ts | | ✅ Codec Base Options | - | - | - | src/renderable/codecs/types/base.ts | | ✅ Codec Generator Registration | - | - | - | src/generators/built-in/codec-generators.ts | -| ✅ Codec Utils | - | - | - | src/validation-schemas/codec-utils.ts | | ✅ Config Based Workflow Definition | - | - | - | 
delivery-process/specs/config-based-workflow-definition.feature | | 🚧 Deliverable Status Taxonomy | - | - | - | src/taxonomy/deliverable-status.ts | +| 🚧 Deliverable Status Taxonomy Testing | - | - | - | tests/features/types/deliverable-status.feature | | 🚧 Derive Process State | - | - | - | src/lint/process-guard/derive-state.ts | | 🚧 Detect Changes | - | - | - | src/lint/process-guard/detect-changes.ts | -| ✅ Doc Directive Schema | - | - | - | src/validation-schemas/doc-directive.ts | | ✅ Documentation Generator CLI | - | - | - | src/cli/generate-docs.ts | | ✅ Document Codecs | - | - | - | src/renderable/codecs/index.ts | | ✅ DoD Validation Types | - | - | - | src/validation/types.ts | -| ✅ Dual Source Schemas | - | - | - | src/validation-schemas/dual-source.ts | | 📋 Effort Variance Tracking | - | - | - | delivery-process/specs/effort-variance-tracking.feature | | ✅ Error Factories | - | - | - | tests/features/types/error-factories.feature | | ✅ Error Factory Types | - | - | - | src/types/errors.ts | | ✅ Error Handling Unification | - | - | - | tests/features/behavior/error-handling.feature | -| ✅ Extracted Pattern Schema | - | - | - | src/validation-schemas/extracted-pattern.ts | -| ✅ Extracted Shape Schema | - | - | - | src/validation-schemas/extracted-shape.ts | +| 🚧 File Cache Testing | - | - | - | tests/features/utils/file-cache.feature | | ✅ Format Types | - | - | - | src/taxonomy/format-types.ts | | 🚧 FSM Module | - | - | - | src/validation/fsm/index.ts | | ✅ Generator Registry | - | - | - | src/generators/registry.ts | @@ -540,8 +293,8 @@ All components with architecture annotations: | ✅ Merge Patterns | - | - | - | src/generators/pipeline/merge-patterns.ts | | ✅ Mvp Workflow Implementation | - | - | - | delivery-process/specs/mvp-workflow-implementation.feature | | ✅ Normalized Status | - | - | - | src/taxonomy/normalized-status.ts | +| 🚧 Normalized Status Testing | - | - | - | tests/features/types/normalized-status.feature | | ✅ Orchestrator Pipeline 
Factory Migration | - | - | - | delivery-process/specs/orchestrator-pipeline-factory-migration.feature | -| ✅ Output Schemas | - | - | - | src/validation-schemas/output-schemas.ts | | ✅ Pipeline Factory | - | - | - | src/generators/pipeline/build-pipeline.ts | | ✅ Pipeline Module | - | - | - | src/generators/pipeline/index.ts | | ✅ Planning Codecs | - | - | - | src/renderable/codecs/planning.ts | @@ -574,7 +327,7 @@ All components with architecture annotations: | 📋 Step Definition Completion | - | - | - | delivery-process/specs/step-definition-completion.feature | | ✅ String Utils | - | - | - | tests/features/utils/string-utils.feature | | 🚧 Stub Resolver Impl | - | - | - | src/api/stub-resolver.ts | -| ✅ Tag Registry Configuration | - | - | - | src/validation-schemas/tag-registry.ts | +| 🚧 Tag Registry Builder Testing | - | - | - | tests/features/types/tag-registry-builder.feature | | ⏸️ Tag Taxonomy CLI | - | - | - | src/cli/generate-tag-taxonomy.ts | | ✅ Taxonomy Codec | - | - | - | src/renderable/codecs/taxonomy.ts | | ✅ Timeline Codec | - | - | - | src/renderable/codecs/timeline.ts | @@ -583,5 +336,4 @@ All components with architecture annotations: | ✅ Validation Rules Codec | - | - | - | src/renderable/codecs/validation-rules.ts | | ✅ Validator Read Model Consolidation | - | - | - | delivery-process/specs/validator-read-model-consolidation.feature | | ✅ Warning Collector | - | - | - | src/generators/warning-collector.ts | -| ✅ Workflow Config Schema | - | - | - | src/validation-schemas/workflow-config.ts | | 📋 Convention Annotation Example — DD-3 Decision | - | decider | - | delivery-process/stubs/error-guide-codec/convention-annotation-example.ts | diff --git a/docs-live/BUSINESS-RULES.md b/docs-live/BUSINESS-RULES.md index aff4128d..b6ca9dcf 100644 --- a/docs-live/BUSINESS-RULES.md +++ b/docs-live/BUSINESS-RULES.md @@ -5,7 +5,7 @@ --- -**Domain constraints and invariants extracted from feature specifications. 
598 rules from 131 features across 7 product areas.** +**Domain constraints and invariants extracted from feature specifications. 620 rules from 139 features across 7 product areas.** --- @@ -15,10 +15,10 @@ | ------------------------------------------------ | -------- | ----- | --------------- | | [Annotation](business-rules/annotation.md) | 20 | 88 | 88 | | [Configuration](business-rules/configuration.md) | 7 | 32 | 32 | -| [Core Types](business-rules/core-types.md) | 5 | 22 | 22 | +| [Core Types](business-rules/core-types.md) | 9 | 34 | 34 | | [Data API](business-rules/data-api.md) | 26 | 95 | 95 | -| [Generation](business-rules/generation.md) | 60 | 300 | 300 | +| [Generation](business-rules/generation.md) | 61 | 303 | 303 | | [Process](business-rules/process.md) | 2 | 7 | 7 | -| [Validation](business-rules/validation.md) | 11 | 54 | 54 | +| [Validation](business-rules/validation.md) | 14 | 61 | 61 | --- diff --git a/docs-live/CHANGELOG-GENERATED.md b/docs-live/CHANGELOG-GENERATED.md index f698f57a..56e79ccb 100644 --- a/docs-live/CHANGELOG-GENERATED.md +++ b/docs-live/CHANGELOG-GENERATED.md @@ -14,17 +14,20 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ### Added -- **Deliverable Status Taxonomy**: Canonical status values for deliverables in Gherkin Background tables. -- **Repl Mode**: Loads the pipeline once and accepts multiple queries on stdin. -- **Process API CLI Impl**: Exposes ProcessStateAPI methods as CLI subcommands with JSON output. -- **Output Pipeline Impl**: Post-processing pipeline that transforms raw API results into shaped CLI output. -- **Lint Process CLI**: Validates git changes against delivery process rules. -- **Dataset Cache**: Caches the full PipelineResult (MasterDataset + ValidationSummary + warnings) to a JSON file. +- **Git Name Status Parser**: Parses NUL-delimited git name-status output into categorized file lists. +- **Git Module**: Shared git utilities used by both generators and lint layers. 
+- **Git Helpers**: Low-level helpers for safe git command execution and input sanitization. +- **Git Branch Diff**: Provides lightweight git diff operations for determining which files changed relative to a base branch. - **Config Resolver**: Resolves a raw `DeliveryProcessProjectConfig` into a fully-resolved `ResolvedConfig` with all defaults applied, stubs... - **Project Config Types**: Unified project configuration for the delivery-process package. - **Project Config Schema**: Zod validation schema for `DeliveryProcessProjectConfig`. - **Source Merger**: Computes effective sources for a specific generator by applying per-generator overrides to the base resolved sources. - **Define Config**: Identity function for type-safe project configuration. +- **Repl Mode**: Loads the pipeline once and accepts multiple queries on stdin. +- **Process API CLI Impl**: Exposes ProcessStateAPI methods as CLI subcommands with JSON output. +- **Output Pipeline Impl**: Post-processing pipeline that transforms raw API results into shaped CLI output. +- **Lint Process CLI**: Validates git changes against delivery process rules. +- **Dataset Cache**: Caches the full PipelineResult (MasterDataset + ValidationSummary + warnings) to a JSON file. - **File Cache**: Simple Map-based cache for file contents during a single generation run. - **Process State Types**: :MasterDataset Type definitions for the ProcessStateAPI query interface. - **Pattern Summarizer Impl**: Projects the full ExtractedPattern (~3.5KB per pattern) down to a PatternSummary (~100 bytes) for list queries. @@ -37,6 +40,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - **Context Formatter Impl**: First plain-text formatter in the codebase. - **Context Assembler Impl**: Pure function composition over MasterDataset. - **Arch Queries Impl**: Pure functions over MasterDataset for deep architecture exploration. 
+- **Deliverable Status Taxonomy**: Canonical status values for deliverables in Gherkin Background tables. - **FSM Validator**: :PDR005MvpWorkflow Pure validation functions following the Decider pattern: - No I/O, no side effects - Return... - **FSM Transitions**: :PDR005MvpWorkflow Defines valid transitions between FSM states per PDR-005: ``` roadmap ──→ active ──→ completed │ ... - **FSM States**: :PDR005MvpWorkflow Defines the 4-state FSM from PDR-005 MVP Workflow: - roadmap: Planned work (fully editable) -... @@ -45,15 +49,25 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - **Design Review Codec**: :Generation Transforms MasterDataset into a RenderableDocument containing design review artifacts: sequence diagrams,... - **Composite Codec**: :Generation Assembles reference documents from multiple codec outputs by concatenating RenderableDocument sections. - **Claude Module Codec**: :Generation Transforms MasterDataset into RenderableDocuments for CLAUDE.md module generation. -- **Sequence Transform Utils**: :Generation Builds pre-computed SequenceIndexEntry objects from patterns that have sequence diagram annotations. -- **Reference Generator Registration**: Registers all reference document generators. -- **Design Review Generator**: :Generation Generates design review documents for patterns with sequence annotations. - **Process Guard Types**: :FSMValidator Defines types for the process guard linter including: - Process state derived from file annotations -... - **Process Guard Module**: :FSMValidator,DeriveProcessState,DetectChanges,ProcessGuardDecider Enforces delivery process rules by validating... - **Detect Changes**: Detects changes from git diff including: - Modified, added, deleted files - Status transitions (@libar-docs-status... - **Derive Process State**: :GherkinScanner,FSMValidator Derives process state from @libar-docs-\* annotations in files. 
- **Process Guard Decider**: :FSMValidator,DeriveProcessState,DetectChanges Pure function that validates changes against process rules. +- **Reference Generator Registration**: Registers all reference document generators. +- **Design Review Generator**: :Generation Generates design review documents for patterns with sequence annotations. +- **Transform Types**: Type definitions for the dataset transformation pipeline. +- **Sequence Transform Utils**: :Generation Builds pre-computed SequenceIndexEntry objects from patterns that have sequence diagram annotations. +- **Relationship Resolver**: Computes reverse relationship lookups (implementedBy, extendedBy, enables, usedBy) and detects dangling references in... - **Design Review Generation**: Design reviews require manual creation of sequence and component diagrams that duplicate information already captured... +- **Workflow Config Schemas Validation**: The workflow configuration module defines Zod schemas for validating delivery workflow definitions with statuses,... +- **Tag Registry Schemas Validation**: The tag registry configuration module provides schema-validated taxonomy definitions for organizing patterns by... +- **Codec Utils Validation**: The codec utilities provide factory functions for creating type-safe JSON parsing and serialization pipelines using... +- **Git Branch Diff Testing**: The branch diff utility returns changed files relative to a base branch for PR-scoped generation. +- **File Cache Testing**: The file cache provides request-scoped content caching for generation runs. +- **Tag Registry Builder Testing**: The tag registry builder constructs a complete TagRegistry from TypeScript constants. +- **Normalized Status Testing**: The normalized status module maps any status input — raw FSM states (roadmap, active, completed, deferred),... +- **Deliverable Status Taxonomy Testing**: The deliverable status module defines the 6 canonical status values for deliverables in Gherkin Background tables:... 
- **Load Preamble Parser**: The parseMarkdownToBlocks function converts raw markdown content into a readonly SectionBlock[] array using a 5-state... - **Design Review Generation Tests**: Tests the full design review generation pipeline: sequence annotations are extracted from patterns with business... - **Design Review Generator Lifecycle Tests**: The design review generator cleans up stale markdown files when annotated patterns are renamed or removed from the... @@ -65,13 +79,13 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - **Process Api Cli Cache**: MasterDataset caching between CLI invocations: cache hits, mtime invalidation, and --no-cache bypass. - **Stub Taxonomy Tag Tests**: Stub metadata (target path, design session) was stored as plain text in JSDoc descriptions, invisible to structured... - **Stub Resolver Tests**: Design session stubs need structured discovery and resolution to determine which stubs have been implemented and... -- **Context Formatter Tests**: Tests for formatContextBundle(), formatDepTree(), formatFileReadingList(), and formatOverview() plain text rendering... -- **Context Assembler Tests**: Tests for assembleContext(), buildDepTree(), buildFileReadingList(), and buildOverview() pure functions that operate... +- **Arch Queries Test** - **Pattern Summarize Tests**: Validates that summarizePattern() projects ExtractedPattern (~3.5KB) to PatternSummary (~100 bytes) with the correct... - **Pattern Helpers Tests** - **Output Pipeline Tests**: Validates the output pipeline transforms: summarization, modifiers, list filters, empty stripping, and format output. - **Fuzzy Match Tests**: Validates tiered fuzzy matching: exact > prefix > substring > Levenshtein. -- **Arch Queries Test** +- **Context Formatter Tests**: Tests for formatContextBundle(), formatDepTree(), formatFileReadingList(), and formatOverview() plain text rendering... 
+- **Context Assembler Tests**: Tests for assembleContext(), buildDepTree(), buildFileReadingList(), and buildOverview() pure functions that operate... - **Uses Tag Testing**: Tests extraction and processing of @libar-docs-uses and @libar-docs-used-by relationship tags from TypeScript files. - **Depends On Tag Testing**: Tests extraction of @libar-docs-depends-on and @libar-docs-enables relationship tags from Gherkin files. @@ -95,20 +109,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ### Added - **Public API**: Main entry point for the @libar-dev/delivery-process package. -- **DoD Validation Types**: Types and schemas for Definition of Done (DoD) validation and anti-pattern detection. -- **Validation Module**: Barrel export for validation module providing: - Definition of Done (DoD) validation for completed phases -... -- **DoD Validator**: Validates that completed phases meet Definition of Done criteria: 1. -- **Anti Pattern Detector**: Detects violations of the dual-source documentation architecture and process hygiene issues that lead to... -- **String Utilities**: Provides shared utilities for string manipulation used across the delivery-process package, including slugification... -- **Utils Module**: Common helper functions used across the delivery-process package. -- **Pattern Id Generator**: Generates unique, deterministic pattern IDs based on file path and line number. -- **Collection Utilities**: Provides shared utilities for working with arrays and collections, such as grouping items by a key function. -- **Result Monad Types**: Explicit error handling via discriminated union. -- **Error Factory Types**: Structured, discriminated error types with factory functions. -- **Pattern Scanner**: Discovers TypeScript files matching glob patterns and filters to only those with `@libar-docs` opt-in. -- **Gherkin Scanner**: Scans .feature files for pattern metadata encoded in Gherkin tags. 
-- **Gherkin AST Parser**: Parses Gherkin feature files using @cucumber/gherkin and extracts structured data including feature metadata, tags,... -- **TypeScript AST Parser**: Parses TypeScript source files using @typescript-eslint/typescript-estree to extract @libar-docs-\* directives with... +- **Index Preamble Configuration — DD-3, DD-4 Decisions**: Decision DD-3 (Audience paths: preamble vs annotation-derived): Use full preamble for audience reading paths. +- **IndexCodec Factory — DD-1 Implementation Stub**: Creates the IndexCodec as a Zod codec (MasterDataset -> RenderableDocument). +- **IndexCodecOptions — DD-1, DD-5 Decisions**: Decision DD-1 (New IndexCodec vs extend existing): Create a new IndexCodec registered in CodecRegistry, NOT a... - **Workflow Config Schema**: Zod schemas for validating workflow configuration files that define status models, phase definitions, and artifact... - **Tag Registry Configuration**: Defines the structure and validation for tag taxonomy configuration. - **Output Schemas**: Zod schemas for JSON output formats used by CLI tools. @@ -118,24 +121,28 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - **Dual Source Schemas**: Zod schemas for dual-source extraction types. - **Doc Directive Schema**: Zod schemas for validating parsed @libar-docs-\* directives from JSDoc comments. - **Codec Utils**: Provides factory functions for creating type-safe JSON parsing and serialization pipelines using Zod schemas. +- **Result Monad Types**: Explicit error handling via discriminated union. +- **Error Factory Types**: Structured, discriminated error types with factory functions. +- **String Utilities**: Provides shared utilities for string manipulation used across the delivery-process package, including slugification... +- **Utils Module**: Common helper functions used across the delivery-process package. 
+- **Pattern Id Generator**: Generates unique, deterministic pattern IDs based on file path and line number. +- **Collection Utilities**: Provides shared utilities for working with arrays and collections, such as grouping items by a key function. +- **DoD Validation Types**: Types and schemas for Definition of Done (DoD) validation and anti-pattern detection. +- **Validation Module**: Barrel export for validation module providing: - Definition of Done (DoD) validation for completed phases -... +- **DoD Validator**: Validates that completed phases meet Definition of Done criteria: 1. +- **Anti Pattern Detector**: Detects violations of the dual-source documentation architecture and process hygiene issues that lead to... +- **Pattern Scanner**: Discovers TypeScript files matching glob patterns and filters to only those with `@libar-docs` opt-in. +- **Gherkin Scanner**: Scans .feature files for pattern metadata encoded in Gherkin tags. +- **Gherkin AST Parser**: Parses Gherkin feature files using @cucumber/gherkin and extracts structured data including feature metadata, tags,... +- **TypeScript AST Parser**: Parses TypeScript source files using @typescript-eslint/typescript-estree to extract @libar-docs-\* directives with... - **Renderable Utils**: Utility functions for document codecs. - **Renderable Document**: Universal intermediate format for all generated documentation. - **Universal Renderer**: Converts RenderableDocument to output strings. - **Renderable Document Model(RDM)**: Unified document generation using codecs and a universal renderer. - **Document Generator**: Simplified document generation using codecs. -- **Status Values**: THE single source of truth for FSM state values in the monorepo (per PDR-005 FSM). -- **Risk Levels**: Three-tier risk classification for roadmap planning. -- **Tag Registry Builder**: Constructs a complete TagRegistry from TypeScript constants. 
-- **Normalized Status**: The delivery-process system uses a two-level status taxonomy: 1. -- **Layer Types**: Inferred from feature file directory paths: - timeline: Process/workflow features (delivery-process) - domain:... -- **Hierarchy Levels**: Three-level hierarchy for organizing work: - epic: Multi-quarter strategic initiatives - phase: Standard work units... -- **Format Types**: Defines how tag values are parsed and validated. -- **Category Definitions**: Categories are used to classify patterns and organize documentation. -- **Shape Extractor**: Extracts TypeScript type definitions (interfaces, type aliases, enums, function signatures) from source files for... -- **Layer Inference**: Infers feature file layer (timeline, domain, integration, e2e, component) from directory path patterns. -- **Gherkin Extractor**: Transforms scanned Gherkin feature files into ExtractedPattern objects for inclusion in generated documentation. -- **Dual Source Extractor**: Extracts pattern metadata from both TypeScript code stubs (@libar-docs-_) and Gherkin feature files (@libar-docs-_),... -- **Document Extractor**: Converts scanned file data into complete ExtractedPattern objects with unique IDs, inferred names, categories, and... +- **Lint Rules**: Defines lint rules that check @libar-docs-\* directives for completeness and quality. +- **Lint Module**: Provides lint rules and engine for pattern annotation quality checking. +- **Lint Engine**: Orchestrates lint rule execution against parsed directives. - **Warning Collector**: Provides a unified system for capturing, categorizing, and reporting non-fatal issues during document generation. - **Generator Types**: Minimal interface for pluggable generators that produce documentation from patterns. - **Source Mapping Validator**: Performs pre-flight checks on source mapping tables before extraction begins. @@ -144,15 +151,11 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). 
- **Documentation Generation Orchestrator**: Invariant: The orchestrator is the integration boundary for full docs generation: it delegates dataset construction... - **Content Deduplicator**: Identifies and merges duplicate sections extracted from multiple sources. - **Codec Based Generator**: Adapts the new RenderableDocument Model (RDM) codec system to the existing DocumentGenerator interface. -- **CLI Version Helper**: Reads package version from package.json for CLI --version flag. -- **Validate Patterns CLI**: Cross-validates TypeScript patterns vs Gherkin feature files. -- **Lint Patterns CLI**: Validates pattern annotations for quality and completeness. -- **Documentation Generator CLI**: Replaces multiple specialized CLIs with one unified interface that supports multiple generators in a single run. -- **CLI Error Handler**: Provides type-safe error handling for all CLI commands using the DocError discriminated union pattern. -- **CLI Schema**: :DataAPI Declarative schema defining all CLI options for the process-api command. -- **Lint Rules**: Defines lint rules that check @libar-docs-\* directives for completeness and quality. -- **Lint Module**: Provides lint rules and engine for pattern annotation quality checking. -- **Lint Engine**: Orchestrates lint rule execution against parsed directives. +- **Shape Extractor**: Extracts TypeScript type definitions (interfaces, type aliases, enums, function signatures) from source files for... +- **Layer Inference**: Infers feature file layer (timeline, domain, integration, e2e, component) from directory path patterns. +- **Gherkin Extractor**: Transforms scanned Gherkin feature files into ExtractedPattern objects for inclusion in generated documentation. +- **Dual Source Extractor**: Extracts pattern metadata from both TypeScript code stubs (@libar-docs-_) and Gherkin feature files (@libar-docs-_),... 
+- **Document Extractor**: Converts scanned file data into complete ExtractedPattern objects with unique IDs, inferred names, categories, and... - **Workflow Loader**: Provides the default 6-phase workflow as an inline constant and loads custom workflow overrides from JSON files via... - **Configuration Types**: Type definitions for the delivery process configuration system. - **Regex Builders**: Type-safe regex factory functions for tag detection and normalization. @@ -160,12 +163,23 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - **Delivery Process Factory**: Main factory function for creating configured delivery process instances. - **Configuration Defaults**: Centralized default constants for the delivery-process package. - **Config Loader**: Discovers and loads `delivery-process.config.ts` files for hierarchical configuration. +- **CLI Version Helper**: Reads package version from package.json for CLI --version flag. +- **Validate Patterns CLI**: Cross-validates TypeScript patterns vs Gherkin feature files. +- **Lint Patterns CLI**: Validates pattern annotations for quality and completeness. +- **Documentation Generator CLI**: Replaces multiple specialized CLIs with one unified interface that supports multiple generators in a single run. +- **CLI Error Handler**: Provides type-safe error handling for all CLI commands using the DocError discriminated union pattern. +- **CLI Schema**: :DataAPI Declarative schema defining all CLI options for the process-api command. - **Scope Validator Impl**: Pure function composition over ProcessStateAPI and MasterDataset. - **Rules Query Module**: Pure query function for business rules extracted from Gherkin Rule: blocks. - **Handoff Generator Impl**: Pure function that assembles a handoff document from ProcessStateAPI and MasterDataset. 
-- **Index Preamble Configuration — DD-3, DD-4 Decisions**: Decision DD-3 (Audience paths: preamble vs annotation-derived): Use full preamble for audience reading paths. -- **IndexCodec Factory — DD-1 Implementation Stub**: Creates the IndexCodec as a Zod codec (MasterDataset -> RenderableDocument). -- **IndexCodecOptions — DD-1, DD-5 Decisions**: Decision DD-1 (New IndexCodec vs extend existing): Create a new IndexCodec registered in CodecRegistry, NOT a... +- **Status Values**: THE single source of truth for FSM state values in the monorepo (per PDR-005 FSM). +- **Risk Levels**: Three-tier risk classification for roadmap planning. +- **Tag Registry Builder**: Constructs a complete TagRegistry from TypeScript constants. +- **Normalized Status**: The delivery-process system uses a two-level status taxonomy: 1. +- **Layer Types**: Inferred from feature file directory paths: - timeline: Process/workflow features (delivery-process) - domain:... +- **Hierarchy Levels**: Three-level hierarchy for organizing work: - epic: Multi-quarter strategic initiatives - phase: Standard work units... +- **Format Types**: Defines how tag values are parsed and validated. +- **Category Definitions**: Categories are used to classify patterns and organize documentation. - **Validation Rules Codec**: :Generation Transforms MasterDataset into a RenderableDocument for Process Guard validation rules reference. - **Timeline Codec**: :Generation Purpose: Development roadmap organized by phase with progress tracking. - **Taxonomy Codec**: :Generation Transforms MasterDataset into a RenderableDocument for taxonomy reference output. @@ -184,14 +198,15 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - **Business Rules Codec**: :Generation Transforms MasterDataset into a RenderableDocument for business rules output. - **Architecture Codec**: :Generation Transforms MasterDataset into a RenderableDocument containing architecture diagrams (Mermaid) generated... 
- **Adr Document Codec**: :Generation Transforms MasterDataset into RenderableDocument for Architecture Decision Records. -- **Transform Dataset**: Transforms raw extracted patterns into a MasterDataset with all pre-computed views. -- **Merge Patterns**: Merges patterns from TypeScript and Gherkin sources with conflict detection. -- **Pipeline Module**: Barrel export for the unified transformation pipeline components. -- **Pipeline Factory**: Invariant: `buildMasterDataset()` is the shared factory for Steps 1-8 of the architecture pipeline and returns... - **Process Api Reference Generator**: :Generation Generates `PROCESS-API-REFERENCE.md` from the declarative CLI schema. - **Built In Generators**: Registers all codec-based generators on import using the RDM (RenderableDocument Model) architecture. - **Decision Doc Generator**: Orchestrates the full pipeline for generating documentation from decision documents (ADR/PDR in .feature format): 1. - **Codec Generator Registration**: Registers codec-based generators for the RenderableDocument Model (RDM) system. +- **Transform Dataset**: Transforms raw extracted patterns into a MasterDataset with all pre-computed views. +- **Merge Patterns**: Merges patterns from TypeScript and Gherkin sources with conflict detection. +- **Pipeline Module**: Barrel export for the unified transformation pipeline components. +- **Context Inference Impl**: Auto-infers bounded context from file paths using configurable rules. +- **Pipeline Factory**: Invariant: `buildMasterDataset()` is the shared factory for Steps 1-8 of the architecture pipeline and returns... - **Codec Base Options**: Shared types, interfaces, and utilities for all document codecs. - **ADR 006 Single Read Model Architecture**: The delivery-process package applies event sourcing to itself: git is the event store, annotated source files are... 
- **ADR 005 Codec Based Markdown Rendering**: The documentation generator needs to transform structured pattern data (MasterDataset) into markdown files. @@ -242,9 +257,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - **Detect Changes Testing**: Tests for the detectDeliverableChanges function that parses git diff output. - **Config Schema Validation**: Configuration schemas validate scanner and generator inputs with security constraints to prevent path traversal... - **Anti Pattern Detector Testing**: Detects violations of the dual-source documentation architecture and process hygiene issues that lead to... +- **String Utils**: String utilities provide consistent text transformations across the codebase. - **Result Monad**: The Result type provides explicit error handling via a discriminated union. - **Error Factories**: Error factories create structured, discriminated error types with consistent message formatting. -- **String Utils**: String utilities provide consistent text transformations across the codebase. - **Gherkin Ast Parser**: The Gherkin AST parser extracts feature metadata, scenarios, and steps from .feature files for timeline generation... - **File Discovery**: The file discovery system uses glob patterns to find TypeScript files for documentation extraction. - **Doc String Media Type**: DocString language hints (mediaType) should be preserved through the parsing pipeline from feature files to rendered... @@ -284,6 +299,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - **Configuration API**: The createDeliveryProcess factory provides a type-safe way to configure the delivery process with custom tag prefixes... - **Config Resolution**: resolveProjectConfig transforms a raw DeliveryProcessProjectConfig into a fully resolved ResolvedConfig with all... 
- **Config Loader Testing**: The config loader discovers and loads `delivery-process.config.ts` files for hierarchical configuration, enabling... +- **Process State API Testing**: Programmatic interface for querying delivery process state. - **Validate Patterns Cli**: Command-line interface for cross-validating TypeScript patterns vs Gherkin feature files. - **Process Api Cli Subcommands**: Discovery subcommands: list, search, context assembly, tags/sources, extended arch, unannotated. - **Process Api Cli Modifiers And Rules**: Output modifiers, arch health, and rules subcommand. @@ -292,7 +308,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - **Lint Patterns Cli**: Command-line interface for validating pattern annotation quality. - **Generate Tag Taxonomy Cli**: Command-line interface for generating TAG_TAXONOMY.md from tag registry configuration. - **Generate Docs Cli**: Command-line interface for generating documentation from annotated TypeScript. -- **Process State API Testing**: Programmatic interface for querying delivery process state. - **Transform Dataset Testing**: The transformToMasterDataset function transforms raw extracted patterns into a MasterDataset with all pre-computed... - **Session Handoffs**: The delivery process supports mid-phase handoffs between sessions and coordination across multiple developers through... - **Session File Lifecycle**: Orphaned session files are automatically cleaned up during generation, maintaining a clean docs-living/sessions/... 
diff --git a/docs-live/INDEX.md b/docs-live/INDEX.md index 70f0c34e..de1c65de 100644 --- a/docs-live/INDEX.md +++ b/docs-live/INDEX.md @@ -10,7 +10,7 @@ | ----------------- | ----------------------------------------------------- | | **Package** | @libar-dev/delivery-process | | **Purpose** | Code-first documentation and delivery process toolkit | -| **Patterns** | 371 tracked (257 completed, 60 active, 54 planned) | +| **Patterns** | 386 tracked (258 completed, 74 active, 54 planned) | | **Product Areas** | 7 | | **License** | MIT | @@ -118,15 +118,18 @@ ### Reference Guides -| Document | Description | Audience | -| --------------------------------------------------------------- | ----------------------------------------------------- | ---------- | -| [Annotation Reference](reference/ANNOTATION-REFERENCE.md) | Annotation mechanics, shape extraction, tag reference | Developers | -| [Session Workflow Guide](reference/SESSION-WORKFLOW-GUIDE.md) | Planning, Design, Implementation session workflows | AI/Devs | -| [Process API Reference](reference/PROCESS-API-REFERENCE.md) | CLI command reference with flags and examples | AI/Devs | -| [Process API Recipes](reference/PROCESS-API-RECIPES.md) | CLI workflow recipes and session guides | AI/Devs | -| [Process Guard Reference](reference/PROCESS-GUARD-REFERENCE.md) | Pre-commit hooks, error codes, programmatic API | Team Leads | -| [Architecture Codecs](reference/ARCHITECTURE-CODECS.md) | All codecs with factory patterns and options | Developers | -| [Architecture Types](reference/ARCHITECTURE-TYPES.md) | MasterDataset interface and type shapes | Developers | +| Document | Description | Audience | +| --------------------------------------------------------------- | -------------------------------------------------------------------- | ---------- | +| [Annotation Reference](reference/ANNOTATION-REFERENCE.md) | Annotation mechanics, shape extraction, tag reference | Developers | +| [Session Workflow 
Guide](reference/SESSION-WORKFLOW-GUIDE.md) | Planning, Design, Implementation session workflows | AI/Devs | +| [Process API Reference](reference/PROCESS-API-REFERENCE.md) | CLI command reference with flags and examples | AI/Devs | +| [Process API Recipes](reference/PROCESS-API-RECIPES.md) | CLI workflow recipes and session guides | AI/Devs | +| [Process Guard Reference](reference/PROCESS-GUARD-REFERENCE.md) | Pre-commit hooks, error codes, programmatic API | Team Leads | +| [Architecture Codecs](reference/ARCHITECTURE-CODECS.md) | All codecs with factory patterns and options | Developers | +| [Architecture Types](reference/ARCHITECTURE-TYPES.md) | MasterDataset interface and type shapes | Developers | +| [Configuration Guide](reference/CONFIGURATION-GUIDE.md) | Presets, config files, sources, output, and monorepo setup | Users | +| [Validation Tools Guide](reference/VALIDATION-TOOLS-GUIDE.md) | lint-patterns, lint-steps, lint-process, validate-patterns reference | CI/CD | +| [Gherkin Authoring Guide](reference/GHERKIN-AUTHORING-GUIDE.md) | Roadmap specs, Rule blocks, DataTables, tag conventions | Developers | ### Product Area Details @@ -148,24 +151,24 @@ | ------------- | -------- | --------- | ------ | ------- | -------------------------- | | Annotation | 26 | 23 | 2 | 1 | [███████░] 23/26 88% | | Configuration | 11 | 8 | 0 | 3 | [██████░░] 8/11 73% | -| CoreTypes | 7 | 7 | 0 | 0 | [████████] 7/7 100% | +| CoreTypes | 11 | 7 | 4 | 0 | [█████░░░] 7/11 64% | | DataAPI | 41 | 24 | 14 | 3 | [█████░░░] 24/41 59% | -| Generation | 94 | 81 | 5 | 8 | [███████░] 81/94 86% | +| Generation | 95 | 81 | 6 | 8 | [███████░] 81/95 85% | | Process | 11 | 4 | 0 | 7 | [███░░░░░] 4/11 36% | -| Validation | 22 | 16 | 0 | 6 | [██████░░] 16/22 73% | -| **Total** | **212** | **163** | **21** | **28** | **[██████░░] 163/212 77%** | +| Validation | 25 | 16 | 3 | 6 | [█████░░░] 16/25 64% | +| **Total** | **220** | **163** | **29** | **28** | **[██████░░] 163/220 74%** | --- ## Phase 
Progress -**371** patterns total: **257** completed (69%), **60** active, **54** planned. [██████████████░░░░░░] 257/371 +**386** patterns total: **258** completed (67%), **74** active, **54** planned. [█████████████░░░░░░░] 258/386 | Status | Count | Percentage | | --------- | ----- | ---------- | -| Completed | 257 | 69% | -| Active | 60 | 16% | -| Planned | 54 | 15% | +| Completed | 258 | 67% | +| Active | 74 | 19% | +| Planned | 54 | 14% | ### By Phase diff --git a/docs-live/PRODUCT-AREAS.md b/docs-live/PRODUCT-AREAS.md index a8cbd8b0..a27ebd37 100644 --- a/docs-live/PRODUCT-AREAS.md +++ b/docs-live/PRODUCT-AREAS.md @@ -31,7 +31,7 @@ Configuration is the entry boundary — it transforms a user-authored `delivery- The generation pipeline transforms annotated source code into markdown documents through a four-stage architecture: Scanner discovers files, Extractor produces `ExtractedPattern` objects, Transformer builds MasterDataset with pre-computed views, and Codecs render to markdown via RenderableDocument IR. Nine specialized codecs handle reference docs, planning, session, reporting, timeline, ADRs, business rules, taxonomy, and composite output — each supporting three detail levels (detailed, standard, summary). The Orchestrator runs generators in registration order, producing both detailed `docs-live/` references and compact `_claude-md/` summaries. 
-**94 patterns** — 81 completed, 5 active, 8 planned +**95 patterns** — 81 completed, 6 active, 8 planned **Key patterns:** ADR005CodecBasedMarkdownRendering, CodecDrivenReferenceGeneration, CrossCuttingDocumentInclusion, ArchitectureDiagramGeneration, ScopedArchitecturalView, CompositeCodec, RenderableDocument, ProductAreaOverview @@ -41,7 +41,7 @@ The generation pipeline transforms annotated source code into markdown documents Validation is the enforcement boundary — it ensures that every change to annotated source files respects the delivery lifecycle rules defined by the FSM, protection levels, and scope constraints. The system operates in three layers: the FSM validator checks status transitions against a 4-state directed graph, the Process Guard orchestrates commit-time validation using a Decider pattern (state derived from annotations, not stored separately), and the lint engine provides pluggable rule execution with pretty and JSON output. Anti-pattern detection enforces dual-source ownership boundaries — `@libar-docs-uses` belongs on TypeScript, `@libar-docs-depends-on` belongs on Gherkin — preventing cross-domain tag confusion that causes documentation drift. Definition of Done validation ensures completed patterns have all deliverables marked done and at least one acceptance-criteria scenario. -**22 patterns** — 16 completed, 0 active, 6 planned +**25 patterns** — 16 completed, 3 active, 6 planned **Key patterns:** ProcessGuardLinter, PhaseStateMachineValidation, DoDValidation, StepLintVitestCucumber, ProgressiveGovernance @@ -61,7 +61,7 @@ The Data API provides direct terminal access to delivery process state. It repla CoreTypes provides the foundational type system used across all other areas. Three pillars enforce discipline at compile time: the Result monad replaces try/catch with explicit error handling — functions return `Result.ok(value)` or `Result.err(error)` instead of throwing. 
The DocError discriminated union provides structured error context with type, file, line, and reason fields, enabling exhaustive pattern matching in error handlers. Branded types create nominal typing from structural TypeScript — `PatternId`, `CategoryName`, and `SourceFilePath` are compile-time distinct despite all being strings. String utilities handle slugification and case conversion with acronym-aware title casing. -**7 patterns** — 7 completed, 0 active, 0 planned +**11 patterns** — 7 completed, 4 active, 0 planned **Key patterns:** ResultMonad, ErrorHandlingUnification, ErrorFactories, StringUtils, KebabCaseSlugs @@ -83,12 +83,12 @@ Process defines the USDP-inspired session workflow that governs how work moves t | ----------------------------------------------- | -------- | --------- | ------ | ------- | | [Annotation](product-areas/ANNOTATION.md) | 26 | 23 | 2 | 1 | | [Configuration](product-areas/CONFIGURATION.md) | 11 | 8 | 0 | 3 | -| [Generation](product-areas/GENERATION.md) | 94 | 81 | 5 | 8 | -| [Validation](product-areas/VALIDATION.md) | 22 | 16 | 0 | 6 | +| [Generation](product-areas/GENERATION.md) | 95 | 81 | 6 | 8 | +| [Validation](product-areas/VALIDATION.md) | 25 | 16 | 3 | 6 | | [DataAPI](product-areas/DATA-API.md) | 41 | 24 | 14 | 3 | -| [CoreTypes](product-areas/CORE-TYPES.md) | 7 | 7 | 0 | 0 | +| [CoreTypes](product-areas/CORE-TYPES.md) | 11 | 7 | 4 | 0 | | [Process](product-areas/PROCESS.md) | 11 | 4 | 0 | 7 | -| **Total** | **212** | **163** | **21** | **28** | +| **Total** | **220** | **163** | **29** | **28** | --- diff --git a/docs-live/_claude-md/annotation/annotation-reference.md b/docs-live/_claude-md/annotation/annotation-reference.md index 405eccaf..511d25c1 100644 --- a/docs-live/_claude-md/annotation/annotation-reference.md +++ b/docs-live/_claude-md/annotation/annotation-reference.md @@ -98,6 +98,89 @@ For Zod files, extract the **schema constant** (with `Schema` suffix), not the i | `@extract-shapes MasterDataset` | 
`@extract-shapes MasterDatasetSchema` | | Shows: `z.infer` (unhelpful) | Shows: `z.object({...})` (full structure) | +#### Annotation Patterns by File Type + +##### Zod Schema Files + +```typescript +/** + * @libar-docs + * @libar-docs-pattern MasterDataset + * @libar-docs-status completed + * @libar-docs-extract-shapes MasterDatasetSchema, StatusGroupsSchema, PhaseGroupSchema + */ +``` + +##### Interface / Type Files + +```typescript +/** + * @libar-docs + * @libar-docs-pattern DocumentGenerator + * @libar-docs-status completed + * @libar-docs-extract-shapes DocumentGenerator, GeneratorContext, GeneratorOutput + */ +``` + +##### Function / Service Files + +```typescript +/** + * @libar-docs + * @libar-docs-pattern TransformDataset + * @libar-docs-status completed + * @libar-docs-arch-context generator + * @libar-docs-arch-layer application + * @libar-docs-extract-shapes transformToMasterDataset, RuntimeMasterDataset + */ +``` + +##### Gherkin Feature Files + +```gherkin +@libar-docs +@libar-docs-pattern:ProcessGuardLinter +@libar-docs-status:roadmap +@libar-docs-phase:99 +@libar-docs-depends-on:StateMachine,ValidationRules +Feature: Process Guard Linter + + Background: Deliverables + Given the following deliverables: + | Deliverable | Status | Location | + | State derivation | Pending | src/lint/derive.ts | + + Rule: Completed specs require unlock reason + + **Invariant:** A completed spec cannot be modified without explicit unlock. + **Rationale:** Prevents accidental regression of validated work. + + @acceptance-criteria @happy-path + Scenario: Reject modification without unlock + Given a spec with status "completed" + When I modify a deliverable + Then validation fails with "completed-protection" +``` + +#### Tag Groups Quick Reference + +Tags are organized into 12 functional groups. For the complete reference with all values, see the generated [Taxonomy Reference](../docs-live/TAXONOMY.md). 
+ +| Group | Tags (representative) | Format Types | +| ---------------- | ---------------------------------------------------- | ------------------------- | +| **Core** | `pattern`, `status`, `core`, `brief` | value, enum, flag | +| **Relationship** | `uses`, `used-by`, `implements`, `depends-on` | csv, value | +| **Process** | `phase`, `quarter`, `effort`, `team`, `priority` | number, value, enum | +| **PRD** | `product-area`, `user-role`, `business-value` | value | +| **ADR** | `adr`, `adr-status`, `adr-category`, `adr-theme` | value, enum | +| **Hierarchy** | `level`, `parent`, `title` | enum, value, quoted-value | +| **Traceability** | `executable-specs`, `roadmap-spec`, `behavior-file` | csv, value | +| **Discovery** | `discovered-gap`, `discovered-improvement` | value (repeatable) | +| **Architecture** | `arch-role`, `arch-context`, `arch-layer`, `include` | enum, value, csv | +| **Extraction** | `extract-shapes`, `shape` | csv, value | +| **Stub** | `target`, `since` | value | +| **Convention** | `convention` | csv (enum values) | + #### Verification ##### CLI Commands diff --git a/docs-live/_claude-md/architecture/architecture-codecs.md b/docs-live/_claude-md/architecture/architecture-codecs.md index 812899d8..ec646d5a 100644 --- a/docs-live/_claude-md/architecture/architecture-codecs.md +++ b/docs-live/_claude-md/architecture/architecture-codecs.md @@ -151,11 +151,12 @@ #### ArchitectureDocumentCodec -| Option | Type | Default | Description | -| ---------------- | ------------------------ | ----------- | ----------------------------------------- | -| diagramType | "component" \| "layered" | "component" | Type of diagram to generate | -| includeInventory | boolean | true | Include component inventory table | -| includeLegend | boolean | true | Include legend for arrow styles | -| filterContexts | string[] | [] | Filter to specific contexts (empty = all) | +| Option | Type | Default | Description | +| ------------------------ | ------------------------ | 
----------- | ---------------------------------------------- | +| diagramType | "component" \| "layered" | "component" | Type of diagram to generate | +| includeInventory | boolean | true | Include component inventory table | +| includeLegend | boolean | true | Include legend for arrow styles | +| filterContexts | string[] | [] | Filter to specific contexts (empty = all) | +| diagramKeyComponentsOnly | boolean | true | Only show components with archRole in diagrams | #### AdrDocumentCodec diff --git a/docs-live/_claude-md/architecture/reference-sample.md b/docs-live/_claude-md/architecture/reference-sample.md index 74126574..dfad372c 100644 --- a/docs-live/_claude-md/architecture/reference-sample.md +++ b/docs-live/_claude-md/architecture/reference-sample.md @@ -106,10 +106,10 @@ | Type | Kind | | ------------------------- | --------- | +| SectionBlock | type | | normalizeStatus | function | | DELIVERABLE_STATUS_VALUES | const | | CategoryDefinition | interface | -| SectionBlock | type | #### Behavior Specifications diff --git a/docs-live/_claude-md/authoring/gherkin-authoring-guide.md b/docs-live/_claude-md/authoring/gherkin-authoring-guide.md new file mode 100644 index 00000000..762b1ed8 --- /dev/null +++ b/docs-live/_claude-md/authoring/gherkin-authoring-guide.md @@ -0,0 +1,245 @@ +### Gherkin Authoring Guide + +#### Essential Patterns + +##### Roadmap Spec Structure + +Roadmap specs define planned work with Problem/Solution descriptions and a Background deliverables table. + +```gherkin +@libar-docs +@libar-docs-pattern:ProcessGuardLinter +@libar-docs-status:roadmap +@libar-docs-phase:99 +Feature: Process Guard Linter + + **Problem:** + During planning and implementation sessions, accidental modifications occur: + - Specs outside the intended scope get modified in bulk + - Completed/approved work gets inadvertently changed + + **Solution:** + Implement a Decider-based linter that: + 1. Derives process state from existing file annotations + 2. 
Validates proposed changes against derived state + 3. Enforces file protection levels per PDR-005 + + Background: Deliverables + Given the following deliverables: + | Deliverable | Status | Location | + | State derivation | Pending | src/lint/process-guard/derive.ts | + | Git diff change detection | Pending | src/lint/process-guard/detect.ts | + | CLI integration | Pending | src/cli/lint-process.ts | +``` + +**Key elements:** + +- `@libar-docs` -- bare opt-in marker (required) +- `@libar-docs-pattern:Name` -- unique identifier (required) +- `@libar-docs-status:roadmap` -- FSM state +- `**Problem:**` / `**Solution:**` -- extracted by generators +- Background deliverables table -- tracks implementation progress + +##### Rule Blocks for Business Constraints + +Use `Rule:` to group related scenarios under a business constraint. + +```gherkin +Rule: Status transitions must follow PDR-005 FSM + + **Invariant:** Only valid FSM transitions are allowed. + + **Rationale:** The FSM enforces deliberate progression through planning, implementation, and completion. 
+ + **Verified by:** Valid transitions pass, Invalid transitions fail + + @happy-path + Scenario Outline: Valid transitions pass validation + Given a file with status "" + When the status changes to "" + Then validation passes + + Examples: + | from | to | + | roadmap | active | + | roadmap | deferred | + | active | completed | + | deferred | roadmap | +``` + +| Element | Purpose | Extracted By | +| ------------------ | --------------------------------------- | ------------------------------------------- | +| `**Invariant:**` | Business constraint (what must be true) | Business Rules generator | +| `**Rationale:**` | Business justification (why it exists) | Business Rules generator | +| `**Verified by:**` | Comma-separated scenario names | Multiple codecs (Business Rules, Reference) | + +##### Scenario Outline for Variations + +When the same pattern applies with different inputs, use `Scenario Outline` with an `Examples` table: + +```gherkin +Scenario Outline: Protection levels by status + Given a file with status "" + When checking protection level + Then protection is "" + And unlock required is "" + + Examples: + | status | protection | unlock | + | roadmap | none | no | + | active | scope | no | + | completed | hard | yes | + | deferred | none | no | +``` + +##### Executable Test Features + +Test features focus on behavior verification with section dividers for organization. + +```gherkin +@behavior @scanner-core +@libar-docs-pattern:ScannerCore +Feature: Scanner Core Integration + + Background: + Given a scanner integration context with temp directory + + @happy-path + Scenario: Scan files and extract directives + Given a file "src/auth.ts" with valid content + When scanning with pattern "src/**/*.ts" + Then the scan should succeed with 1 file +``` + +Section comments (`# ====`) improve readability in large feature files. 
+ +#### DataTable and DocString Usage + +##### Background DataTable (Reference Data) + +Use for data that applies to all scenarios -- deliverables, definitions, etc. + +```gherkin +Background: Deliverables + Given the following deliverables: + | Deliverable | Status | Location | Tests | + | Category types | Done | src/types.ts | Yes | + | Validation logic | Pending | src/validate.ts | Yes | +``` + +##### Scenario DataTable (Test Data) + +Use for scenario-specific test inputs. + +```gherkin +Scenario: Session file defines modification scope + Given a session file with in-scope specs: + | spec | intent | + | mvp-workflow-implementation | modify | + | short-form-tag-migration | review | + When deriving process state + Then "mvp-workflow-implementation" is modifiable +``` + +##### DocString for Code Examples + +Use `"""typescript` for code blocks. Essential when content contains pipes or special characters. + +```gherkin +Scenario: Extract directive from TypeScript + Given a file with content: + """typescript + /** @libar-docs */ + export function authenticate() {} + """ + When scanning the file + Then directive should have tag "@libar-docs" +``` + +#### Tag Conventions + +##### Semantic Tags (Extracted by Generators) + +| Tag | Purpose | +| ---------------------- | ------------------------------------------------- | +| `@acceptance-criteria` | Required for DoD validation of completed patterns | +| `@happy-path` | Primary success scenario | +| `@validation` | Input validation, constraint checks | +| `@business-rule` | Business invariant verification | +| `@business-failure` | Expected business failure scenario | +| `@edge-case` | Boundary conditions, unusual inputs | +| `@error-handling` | Error recovery, graceful degradation | + +#### Feature Description Patterns + +Choose headers that fit your pattern: + +| Structure | Headers | Best For | +| ---------------- | ------------------------------------------ | ------------------------- | +| Problem/Solution | 
`**Problem:**`, `**Solution:**` | Pain point to fix | +| Value-First | `**Business Value:**`, `**How It Works:**` | TDD-style, Gherkin spirit | +| Context/Approach | `**Context:**`, `**Approach:**` | Technical patterns | + +The **Problem/Solution** pattern is the dominant style in this codebase. + +#### Feature File Rich Content + +Feature files serve dual purposes: **executable specs** and **documentation source**. Content in the Feature description section appears in generated docs. + +##### Code-First Principle + +**Prefer code stubs over DocStrings for complex examples.** Feature files should reference code, not duplicate it. + +| Approach | When to Use | +| ---------------------------- | ------------------------------------------------------------ | +| DocStrings (`"""typescript`) | Brief examples (5-10 lines), current/target state comparison | +| Code stub reference | Complex APIs, interfaces, full implementations | + +Code stubs are annotated TypeScript files with `throw new Error("not yet implemented")`, located in `delivery-process/stubs/{pattern-name}/`. + +##### Valid Rich Content + +| Content Type | Syntax | Appears in Docs | +| ------------- | ----------------------- | ---------------- | +| Plain text | Regular paragraphs | Yes | +| Bold/emphasis | `**bold**`, `*italic*` | Yes | +| Tables | Markdown pipe tables | Yes | +| Lists | `- item` or `1. 
item` | Yes | +| DocStrings | `"""typescript`...`"""` | Yes (code block) | +| Comments | `# comment` | No (ignored) | + +#### Syntax Notes and Gotchas + +##### Forbidden in Feature Descriptions + +| Forbidden | Why | Alternative | +| ----------------------------- | -------------------------------- | ----------------------------------- | +| Code fences (triple backtick) | Not Gherkin syntax | Use DocStrings with lang hint | +| `@prefix` in free text | Interpreted as Gherkin tag | Remove `@` or use `libar-dev` | +| Nested DocStrings | Gherkin parser error | Reference code stub file | +| `#` at line start | Gherkin comment -- kills parsing | Remove, use `//`, or step DocString | + +##### Tag Value Constraints + +**Tag values cannot contain spaces.** Use hyphens: + +| Invalid | Valid | +| -------------------------------- | ------------------------------- | +| `@unlock-reason:Fix for issue` | `@unlock-reason:Fix-for-issue` | +| `@libar-docs-pattern:My Pattern` | `@libar-docs-pattern:MyPattern` | + +For values with spaces, use the `quoted-value` format where supported: + +```gherkin +@libar-docs-usecase "When handling command failures" +``` + +#### Quick Reference + +| Element | Use For | Example | +| -------------------- | -------------------------------------- | ----------------------------------- | +| Background DataTable | Deliverables, shared reference data | Deliverables table in roadmap specs | +| Rule: | Group scenarios by business constraint | Invariant + Rationale + Verified by | +| Scenario Outline | Same pattern with variations | Examples tables with multiple rows | +| DocString `"""` | Code examples, content with pipes | TypeScript/Gherkin code blocks | +| Section comments `#` | Organize large feature files | `# ========= Section ==========` | diff --git a/docs-live/_claude-md/configuration/configuration-guide.md b/docs-live/_claude-md/configuration/configuration-guide.md new file mode 100644 index 00000000..7774bae1 --- /dev/null +++ 
b/docs-live/_claude-md/configuration/configuration-guide.md @@ -0,0 +1,230 @@ +### Configuration Guide + +#### Quick Reference + +| Preset | Tag Prefix | Categories | Use Case | +| ----------------------------- | -------------- | ---------- | ------------------------------------ | +| **`libar-generic`** (default) | `@libar-docs-` | 3 | Simple projects (this package) | +| `generic` | `@docs-` | 3 | Simple projects with `@docs-` prefix | +| `ddd-es-cqrs` | `@libar-docs-` | 21 | DDD/Event Sourcing architectures | + +```typescript +// delivery-process.config.ts +import { defineConfig } from '@libar-dev/delivery-process/config'; + +// Default: libar-generic preset (simple 3-category taxonomy) +export default defineConfig({ + preset: 'libar-generic', + sources: { + typescript: ['src/**/*.ts'], + features: ['specs/*.feature'], + }, + output: { directory: 'docs-generated' }, +}); +``` + +#### Preset Selection + +##### When to Use Each Preset + +| Preset | Use When | Categories | +| --------------- | ------------------------------------------------------------ | ---------------------------------------------------------------------------------------- | +| `libar-generic` | Simple projects, standard `@libar-docs-` prefix | 3 (core, api, infra) | +| `generic` | Prefer shorter `@docs-` prefix | 3 (core, api, infra) | +| `ddd-es-cqrs` | DDD architecture with bounded contexts, event sourcing, CQRS | 21 (domain, ddd, bounded-context, event-sourcing, decider, cqrs, saga, projection, etc.) | + +**Design decision:** Presets **replace** the base taxonomy categories entirely (not merged). If you need DDD categories, use the `ddd-es-cqrs` preset. 
+ +##### Default Preset Selection + +All entry points default to `libar-generic`: + +| Entry Point | Default Preset | Context | +| ------------------------------ | ------------------------------ | -------------------------------- | +| `defineConfig()` | `libar-generic` (3 categories) | Config file | +| `loadProjectConfig()` fallback | `libar-generic` (3 categories) | CLI tools (no config file found) | +| This package's config file | `libar-generic` (3 categories) | Standalone package usage | + +#### Unified Config File + +The `defineConfig()` function centralizes taxonomy, sources, output, and generator overrides in a single `delivery-process.config.ts` file. CLI tools discover this file automatically. + +##### Discovery Order + +1. Current directory: check `delivery-process.config.ts`, then `.js` +2. Walk up to repo root (`.git` folder), checking each directory +3. Fall back to libar-generic preset (3 categories, `@libar-docs-` prefix) + +##### Config File Format + +```typescript +// delivery-process.config.ts +import { defineConfig } from '@libar-dev/delivery-process/config'; + +export default defineConfig({ + preset: 'libar-generic', + sources: { + typescript: ['src/**/*.ts'], + stubs: ['delivery-process/stubs/**/*.ts'], + features: ['delivery-process/specs/*.feature'], + }, + output: { + directory: 'docs-generated', + overwrite: true, + }, +}); +``` + +##### Sources Configuration + +| Field | Type | Description | +| ------------ | ---------- | ---------------------------------------------------- | +| `typescript` | `string[]` | Glob patterns for TypeScript source files (required) | +| `features` | `string[]` | Glob patterns for Gherkin feature files | +| `stubs` | `string[]` | Glob patterns for design stub files | +| `exclude` | `string[]` | Glob patterns to exclude from all scanning | + +Stubs are merged into TypeScript sources at resolution time. No parent directory traversal (`..`) is allowed in globs. 
+ +##### Output Configuration + +| Field | Type | Default | Description | +| ----------- | --------- | --------------------- | ----------------------------------- | +| `directory` | `string` | `'docs/architecture'` | Output directory for generated docs | +| `overwrite` | `boolean` | `false` | Overwrite existing files | + +##### Generator Overrides + +Some generators need different sources than the base config. Use `generatorOverrides` for per-generator customization: + +```typescript +export default defineConfig({ + preset: 'libar-generic', + sources: { + typescript: ['src/**/*.ts'], + features: ['delivery-process/specs/*.feature'], + }, + output: { directory: 'docs-generated', overwrite: true }, + generatorOverrides: { + changelog: { + additionalFeatures: ['delivery-process/decisions/*.feature'], + }, + 'doc-from-decision': { + replaceFeatures: ['delivery-process/decisions/*.feature'], + }, + }, +}); +``` + +| Override Field | Description | +| -------------------- | ---------------------------------------------------- | +| `additionalFeatures` | Feature globs appended to base features | +| `additionalInput` | TypeScript globs appended to base TypeScript sources | +| `replaceFeatures` | Feature globs used INSTEAD of base features | +| `outputDirectory` | Override output directory for this generator | + +**Constraint:** `replaceFeatures` and `additionalFeatures` are mutually exclusive when both are non-empty. + +#### Monorepo Setup + +```text +my-monorepo/ +  delivery-process.config.ts          # Repo-level: ddd-es-cqrs +  packages/ +    my-package/ +      delivery-process.config.ts      # Package-level: generic +``` + +CLI tools use the nearest config file to the working directory. Each package can have its own preset and source globs. 
+ +#### Custom Configuration + +##### Custom Tag Prefix + +Keep a preset's taxonomy but change the prefix: + +```typescript +export default defineConfig({ + preset: 'libar-generic', + tagPrefix: '@team-', + fileOptInTag: '@team', + sources: { typescript: ['src/**/*.ts'] }, +}); + +// Your annotations: +// /** @team */ +// /** @team-pattern DualSourceExtractor */ +// /** @team-core */ +``` + +##### Custom Categories + +Define your own taxonomy: + +```typescript +export default defineConfig({ + tagPrefix: '@docs-', + fileOptInTag: '@docs', + categories: [ + { tag: 'scanner', domain: 'Scanner', priority: 1, description: 'File scanning', aliases: [] }, + { + tag: 'extractor', + domain: 'Extractor', + priority: 2, + description: 'Pattern extraction', + aliases: [], + }, + { + tag: 'generator', + domain: 'Generator', + priority: 3, + description: 'Doc generation', + aliases: [], + }, + ], + sources: { typescript: ['src/**/*.ts'] }, +}); +``` + +#### Programmatic Config Loading + +For tools that need to load configuration files: + +```typescript +import { loadProjectConfig } from '@libar-dev/delivery-process/config'; + +const result = await loadProjectConfig(process.cwd()); + +if (!result.ok) { + console.error(result.error.message); + process.exit(1); +} + +const resolved = result.value; +// resolved.instance - DeliveryProcessInstance (registry + regexBuilders) +// resolved.project - ResolvedProjectConfig (sources, output, generators) +// resolved.isDefault - true if no config file found +// resolved.configPath - config file path (if found) +``` + +For per-generator source resolution: + +```typescript +import { mergeSourcesForGenerator } from '@libar-dev/delivery-process/config'; + +const effectiveSources = mergeSourcesForGenerator( + resolved.project.sources, + 'changelog', + resolved.project.generatorOverrides +); +// effectiveSources.typescript - merged TypeScript globs +// effectiveSources.features - merged or replaced feature globs +``` + +#### Backward Compatibility 
+ +The legacy `createDeliveryProcess()` API is still exported and supported. Config files using the old format are detected automatically by `loadProjectConfig()` and wrapped in a `ResolvedConfig` with default project settings. + +```typescript +// Legacy format (still works) +import { createDeliveryProcess } from '@libar-dev/delivery-process'; +export default createDeliveryProcess({ preset: 'ddd-es-cqrs' }); +``` + +New projects should use `defineConfig()` for the unified configuration experience. diff --git a/docs-live/_claude-md/core-types/core-types-overview.md b/docs-live/_claude-md/core-types/core-types-overview.md index fc647af8..9eeab236 100644 --- a/docs-live/_claude-md/core-types/core-types-overview.md +++ b/docs-live/_claude-md/core-types/core-types-overview.md @@ -9,7 +9,7 @@ - Branded nominal types: `Branded` creates compile-time distinct types from structural TypeScript. Prevents mixing `PatternId` with `CategoryName` even though both are `string` at runtime - String transformation consistency: `slugify` produces URL-safe identifiers, `camelCaseToTitleCase` preserves acronyms (e.g., "APIEndpoint" becomes "API Endpoint"), `toKebabCase` handles consecutive uppercase correctly -**Components:** Other (StringUtils, ResultMonad, ErrorFactories, KebabCaseSlugs, ErrorHandlingUnification) +**Components:** Other (StringUtils, FileCacheTesting, TagRegistryBuilderTesting, ResultMonad, NormalizedStatusTesting, ErrorFactories, DeliverableStatusTaxonomyTesting, KebabCaseSlugs, ErrorHandlingUnification) #### API Types diff --git a/docs-live/_claude-md/validation/process-guard.md b/docs-live/_claude-md/validation/process-guard.md index 171ff143..cec83ae5 100644 --- a/docs-live/_claude-md/validation/process-guard.md +++ b/docs-live/_claude-md/validation/process-guard.md @@ -1,5 +1,76 @@ ### Process Guard Reference +#### Quick Reference + +##### Protection Levels + +| Status | Level | Allowed | Blocked | +| ----------- | ----- | -------------------------- | 
------------------------------------- | +| `roadmap` | none | Full editing | - | +| `deferred` | none | Full editing | - | +| `active` | scope | Edit existing deliverables | Adding new deliverables | +| `completed` | hard | Nothing | Any change without `@*-unlock-reason` | + +##### Valid Transitions + +| From | To | Notes | +| ----------- | ---------------------- | -------------------------------- | +| `roadmap` | `active`, `deferred` | Start work or postpone | +| `active` | `completed`, `roadmap` | Finish or regress if blocked | +| `deferred` | `roadmap` | Resume planning | +| `completed` | _(none)_ | Terminal -- use unlock to modify | + +##### Escape Hatches + +| Situation | Solution | Example | +| ----------------------------- | ---------------------------------- | --------------------------------------------- | +| Fix bug in completed spec | Add `@*-unlock-reason:'reason'` | `@libar-docs-unlock-reason:'Fix typo'` | +| Modify outside session scope | `--ignore-session` flag | `lint-process --staged --ignore-session` | +| CI treats warnings as errors | `--strict` flag | `lint-process --all --strict` | +| Skip workflow (legacy import) | Multiple transitions in one commit | Set `roadmap` then `completed` in same commit | + +#### CLI Usage + +```bash +lint-process [options] +``` + +##### Modes + +| Flag | Description | Use Case | +| ---------- | --------------------------------- | ------------------ | +| `--staged` | Validate staged changes (default) | Pre-commit hooks | +| `--all` | Validate all changes vs main | CI/CD pipelines | +| `--files` | Validate specific files | Development checks | + +##### Options + +| Flag | Description | +| ------------------- | -------------------------------------- | +| `--strict` | Treat warnings as errors (exit 1) | +| `--ignore-session` | Skip session scope rules | +| `--show-state` | Debug: show derived process state | +| `--format json` | Machine-readable output | +| `-f, --file ` | Specific file to validate (repeatable) | +| 
`-b, --base-dir` | Base directory for file resolution | + +##### Exit Codes + +| Code | Meaning | +| ---- | -------------------------------------------- | +| `0` | No errors (warnings allowed unless --strict) | +| `1` | Errors found | + +##### Examples + +```bash +lint-process --staged # Pre-commit hook (recommended) +lint-process --all --strict # CI pipeline with strict mode +lint-process --file specs/my-feature.feature # Validate specific file +lint-process --staged --show-state # Debug: see derived state +lint-process --staged --ignore-session # Override session scope +``` + #### Pre-commit Setup Configure Process Guard as a pre-commit hook using Husky. diff --git a/docs-live/_claude-md/validation/validation-tools-guide.md b/docs-live/_claude-md/validation/validation-tools-guide.md new file mode 100644 index 00000000..0ed8986c --- /dev/null +++ b/docs-live/_claude-md/validation/validation-tools-guide.md @@ -0,0 +1,242 @@ +### Validation Tools Guide + +#### Which Command Do I Run? + +```text +Need to check annotation quality? + Yes -> lint-patterns + +Need to check vitest-cucumber compatibility? + Yes -> lint-steps + +Need FSM workflow validation? + Yes -> lint-process + +Need cross-source or DoD validation? + Yes -> validate-patterns + +Running pre-commit hook? + lint-process --staged (default) +``` + +#### Command Summary + +| Command | Purpose | When to Use | +| ------------------- | --------------------------------- | --------------------------------------------- | +| `lint-patterns` | Annotation quality | Ensure patterns have required tags | +| `lint-steps` | vitest-cucumber compatibility | After writing/modifying feature or step files | +| `lint-process` | FSM workflow enforcement | Pre-commit hooks, CI pipelines | +| `validate-patterns` | Cross-source + DoD + anti-pattern | Release validation, comprehensive | + +#### lint-patterns + +Validates `@-*` annotation quality in TypeScript files. 
+ +```bash +npx lint-patterns -i "src/**/*.ts" +npx lint-patterns -i "src/**/*.ts" --strict # CI +``` + +##### CLI Flags + +| Flag | Short | Description | Default | +| ------------------------ | ----- | ----------------------------------- | -------- | +| `--input ` | `-i` | Glob pattern (required, repeatable) | required | +| `--exclude ` | `-e` | Exclude pattern (repeatable) | - | +| `--base-dir ` | `-b` | Base directory | cwd | +| `--strict` | | Treat warnings as errors | false | +| `--format ` | `-f` | Output: `pretty` or `json` | `pretty` | +| `--quiet` | `-q` | Only show errors | false | +| `--min-severity ` | | `error`, `warning`, `info` | - | + +##### Rules + +| Rule | Severity | What It Checks | +| -------------------------------- | -------- | -------------------------------------------------- | +| `missing-pattern-name` | error | Must have `@-pattern` | +| `invalid-status` | error | Status must be valid FSM value | +| `tautological-description` | error | Description cannot just repeat name | +| `pattern-conflict-in-implements` | error | Pattern cannot implement itself (circular ref) | +| `missing-relationship-target` | warning | Relationship targets must reference known patterns | +| `missing-status` | warning | Should have status tag | +| `missing-when-to-use` | warning | Should have "When to Use" section | +| `missing-relationships` | info | Consider adding uses/used-by | + +#### lint-steps + +Static analyzer for vitest-cucumber feature/step compatibility. Catches mismatches that cause cryptic runtime failures. + +```bash +pnpm lint:steps # Standard check +pnpm lint:steps --strict # CI +``` + +12 rules across 3 categories (9 error, 3 warning). 
+ +##### Feature File Rules + +| Rule ID | Severity | What It Catches | +| ------------------------ | -------- | ------------------------------------------------------------------------- | +| `hash-in-description` | error | `#` at line start inside `"""` block in description -- terminates parsing | +| `keyword-in-description` | error | Description line starting with Given/When/Then/And/But -- breaks parser | +| `duplicate-and-step` | error | Multiple `And` steps with identical text in same scenario | +| `dollar-in-step-text` | warning | `$` in step text (outside quotes) causes matching issues | +| `hash-in-step-text` | warning | Mid-line `#` in step text (outside quotes) silently truncates the step | + +##### Step Definition Rules + +| Rule ID | Severity | What It Catches | +| ------------------------- | -------- | ----------------------------------------------------------- | +| `regex-step-pattern` | error | Regex pattern in step registration -- use string patterns | +| `unsupported-phrase-type` | error | `{phrase}` in step string -- use `{string}` instead | +| `repeated-step-pattern` | error | Same pattern registered twice -- second silently overwrites | + +##### Cross-File Rules + +| Rule ID | Severity | What It Catches | +| ---------------------------------- | -------- | -------------------------------------------------------------------- | +| `scenario-outline-function-params` | error | Function params in ScenarioOutline callback (should use variables) | +| `missing-and-destructuring` | error | Feature has `And` steps but step file does not destructure `And` | +| `missing-rule-wrapper` | error | Feature has `Rule:` blocks but step file does not destructure `Rule` | +| `outline-quoted-values` | warning | Quoted values in Outline steps instead of `` syntax | + +##### CLI Reference + +| Flag | Short | Description | Default | +| ------------------ | ----- | -------------------------- | -------- | +| `--strict` | | Treat warnings as errors | false | +| `--format ` 
| | Output: `pretty` or `json` | `pretty` |
+| `--base-dir <dir>` | `-b` | Base directory for paths | cwd |
+
+#### lint-process
+
+FSM validation for delivery workflow. Enforces status transitions and protection levels.
+
+```bash
+npx lint-process --staged # Pre-commit (default)
+npx lint-process --all --strict # CI pipeline
+```
+
+**What it validates:**
+
+- Status transitions follow FSM (`roadmap` -> `active` -> `completed`)
+- Completed specs require unlock reason to modify
+- Active specs cannot add new deliverables (scope protection)
+- Session scope rules (optional)
+
+For detailed rules, escape hatches, and error fixes, see the [Process Guard Reference](PROCESS-GUARD-REFERENCE.md).
+
+#### validate-patterns
+
+Cross-source validator combining multiple checks.
+
+```bash
+npx validate-patterns \
+  -i "src/**/*.ts" \
+  -F "specs/**/*.feature" \
+  --dod \
+  --anti-patterns
+```
+
+##### CLI Flags
+
+| Flag | Short | Description | Default |
+| ----------------- | ----- | ------------------------------------------------ | -------- |
+| `--input` | `-i` | Glob for TypeScript files (required, repeatable) | required |
+| `--features` | `-F` | Glob for Gherkin files (required, repeatable) | required |
+| `--exclude` | `-e` | Exclude pattern (repeatable) | - |
+| `--base-dir` | `-b` | Base directory | cwd |
+| `--strict` | | Treat warnings as errors (exit 2) | false |
+| `--verbose` | | Show info-level messages | false |
+| `--format` | `-f` | Output: `pretty` or `json` | `pretty` |
+| `--dod` | | Enable Definition of Done validation | false |
+| `--anti-patterns` | | Enable anti-pattern detection | false |
+
+##### Anti-Pattern Detection
+
+Detects process metadata tags that belong in feature files but appear in TypeScript code:
+
+| Tag Suffix (Feature-Only) | What It Tracks |
+| ------------------------- | -------------------- |
+| `@<pattern>-quarter` | Timeline metadata |
+| `@<pattern>-team` | Ownership metadata |
+| `@<pattern>-effort` | Estimation metadata |
+| `@<pattern>-completed` | Completion 
timestamp | + +Additional checks: + +| ID | Severity | What It Detects | +| ----------------- | -------- | ----------------------------------- | +| `process-in-code` | error | Feature-only tags found in TS code | +| `magic-comments` | warning | Generator hints in feature files | +| `scenario-bloat` | warning | Too many scenarios per feature file | +| `mega-feature` | warning | Feature file exceeds line threshold | + +##### DoD Validation + +For patterns with `completed` status, checks: + +- All deliverables are in a terminal state (`complete`, `n/a`, or `superseded`) +- At least one `@acceptance-criteria` scenario exists in the spec + +#### CI/CD Integration + +##### Recommended package.json Scripts + +```json +{ + "scripts": { + "lint:patterns": "lint-patterns -i 'src/**/*.ts'", + "lint:steps": "lint-steps", + "lint:steps:ci": "lint-steps --strict", + "lint:process": "lint-process --staged", + "lint:process:ci": "lint-process --all --strict", + "validate:all": "validate-patterns -i 'src/**/*.ts' -F 'specs/**/*.feature' --dod --anti-patterns" + } +} +``` + +##### Pre-commit Hook + +```bash +npx lint-process --staged +``` + +##### GitHub Actions + +```yaml +- name: Lint annotations + run: npx lint-patterns -i "src/**/*.ts" --strict + +- name: Lint steps + run: npx lint-steps --strict + +- name: Validate patterns + run: npx validate-patterns -i "src/**/*.ts" -F "specs/**/*.feature" --dod --anti-patterns +``` + +#### Exit Codes + +| Code | lint-patterns / lint-steps / lint-process | validate-patterns | +| ---- | -------------------------------------------- | ----------------------------------- | +| `0` | No errors (warnings allowed unless --strict) | No issues found | +| `1` | Errors found (or warnings with --strict) | Errors found | +| `2` | -- | Warnings found (with --strict only) | + +#### Programmatic API + +All validation tools expose programmatic APIs: + +```typescript +// Pattern linting +import { lintFiles, hasFailures } from 
'@libar-dev/delivery-process/lint'; + +// Step linting +import { runStepLint, STEP_LINT_RULES } from '@libar-dev/delivery-process/lint'; + +// Process guard +import { deriveProcessState, validateChanges } from '@libar-dev/delivery-process/lint'; + +// Anti-patterns and DoD +import { detectAntiPatterns, validateDoD } from '@libar-dev/delivery-process/validation'; +``` diff --git a/docs-live/business-rules/core-types.md b/docs-live/business-rules/core-types.md index 64a60555..cff0f955 100644 --- a/docs-live/business-rules/core-types.md +++ b/docs-live/business-rules/core-types.md @@ -4,7 +4,7 @@ --- -**22 rules** from 5 features. 22 rules have explicit invariants. +**34 rules** from 9 features. 34 rules have explicit invariants. --- @@ -70,6 +70,50 @@ _kebab-case-slugs.feature_ ## Uncategorized +### Deliverable Status Taxonomy + +_The deliverable status module defines the 6 canonical status values for_ + +--- + +#### isDeliverableStatusTerminal identifies terminal statuses for DoD validation + +> **Invariant:** Only complete, n/a, and superseded are terminal. Deferred is NOT terminal because it implies unfinished work that should block DoD. +> +> **Rationale:** Marking a pattern as completed when deliverables are merely deferred creates a hard-locked state with incomplete work, violating delivery process integrity. + +**Verified by:** + +- Terminal status classification + +--- + +#### Status predicates classify individual deliverable states + +> **Invariant:** isDeliverableStatusComplete, isDeliverableStatusInProgress, and isDeliverableStatusPending each match exactly one status value. +> +> **Rationale:** Single-value predicates provide type-safe branching for consumers that need to distinguish specific states rather than terminal vs non-terminal groupings. 
+ +**Verified by:** + +- isDeliverableStatusComplete classification +- isDeliverableStatusInProgress classification +- isDeliverableStatusPending classification + +--- + +#### getDeliverableStatusEmoji returns display emoji for all statuses + +> **Invariant:** getDeliverableStatusEmoji returns a non-empty string for all 6 canonical statuses. No status value is unmapped. +> +> **Rationale:** Missing emoji mappings would cause empty display cells in generated documentation tables, breaking visual consistency. + +**Verified by:** + +- Emoji mapping for all statuses + +_deliverable-status.feature_ + ### Error Factories _Error factories create structured, discriminated error types with consistent_ @@ -205,6 +249,98 @@ _- Raw errors lack context (no file path, line number, or pattern name)_ _error-handling.feature_ +### File Cache + +_The file cache provides request-scoped content caching for generation runs._ + +--- + +#### Store and retrieve round-trip preserves content + +> **Invariant:** Content stored via set is returned identically by get. No transformation or encoding occurs. +> +> **Rationale:** File content must survive caching verbatim; any mutation would cause extraction to produce different results on cache hits vs misses. + +**Verified by:** + +- Store and retrieve returns same content +- Non-existent path returns undefined + +--- + +#### has checks membership without affecting stats + +> **Invariant:** has returns true for cached paths and false for uncached paths. It does not increment hit or miss counters. +> +> **Rationale:** has is used for guard checks before get; double-counting would inflate stats and misrepresent actual cache effectiveness. + +**Verified by:** + +- has returns true for cached path +- has returns false for uncached path + +--- + +#### Stats track hits and misses accurately + +> **Invariant:** Every get call increments either hits or misses. 
hitRate is computed as (hits / total) \* 100 with a zero-division guard returning 0 when total is 0. +> +> **Rationale:** Accurate stats enable performance analysis of generation runs; incorrect counts would lead to wrong caching decisions. + +**Verified by:** + +- Stats track hits and misses +- Hit rate starts at zero for empty cache +- Hit rate is 100 when all gets are hits + +--- + +#### Clear resets cache and stats + +> **Invariant:** clear removes all cached entries and resets hit/miss counters to zero. +> +> **Rationale:** Per-run scoping requires a clean slate; stale entries from a previous run would cause the extractor to use outdated content. + +**Verified by:** + +- Clear resets everything + +_file-cache.feature_ + +### Normalized Status + +_The normalized status module maps any status input — raw FSM states (roadmap,_ + +--- + +#### normalizeStatus maps raw FSM states to display buckets + +> **Invariant:** normalizeStatus must map every raw FSM status to exactly one of three display buckets: completed, active, or planned. Unknown or undefined inputs default to planned. +> +> **Rationale:** UI and generated documentation need a simplified status model; the raw 4-state FSM is an implementation detail that should not leak into display logic. + +**Verified by:** + +- Status normalization +- normalizeStatus defaults undefined to planned +- normalizeStatus defaults unknown status to planned + +--- + +#### Pattern status predicates check normalized state + +> **Invariant:** isPatternComplete, isPatternActive, and isPatternPlanned are mutually exclusive for any given status input. Exactly one returns true. +> +> **Rationale:** Consumers branch on these predicates; overlapping true values would cause double-rendering or contradictory UI states. 
+ +**Verified by:** + +- isPatternComplete classification +- isPatternActive classification +- isPatternPlanned classification + +_normalized-status.feature_ + ### Result Monad _The Result type provides explicit error handling via a discriminated union._ @@ -344,6 +480,52 @@ _String utilities provide consistent text transformations across the codebase._ _string-utils.feature_ +### Tag Registry Builder + +_The tag registry builder constructs a complete TagRegistry from TypeScript_ + +--- + +#### buildRegistry returns a well-formed TagRegistry + +> **Invariant:** buildRegistry always returns a TagRegistry with version, categories, metadataTags, aggregationTags, formatOptions, tagPrefix, and fileOptInTag properties. +> +> **Rationale:** All downstream consumers (scanner, extractor, validator) depend on registry structure. A malformed registry would cause silent extraction failures across the entire pipeline. + +**Verified by:** + +- Registry has correct version +- Registry has expected category count +- Registry has required metadata tags + +--- + +#### Metadata tags have correct configuration + +> **Invariant:** The pattern tag is required, the status tag has a default value, and tags with transforms apply them correctly. +> +> **Rationale:** Misconfigured tag metadata would cause the extractor to skip required fields or apply wrong defaults, producing silently corrupt patterns. + +**Verified by:** + +- Pattern tag is marked as required +- Status tag has default value +- Transform functions work correctly + +--- + +#### Registry includes standard prefixes and opt-in tag + +> **Invariant:** tagPrefix is the standard annotation prefix and fileOptInTag is the bare opt-in marker. These are non-empty strings. +> +> **Rationale:** Changing these values without updating all annotated files would break scanner opt-in detection across the entire monorepo. 
+ +**Verified by:** + +- Registry has standard tag prefix and opt-in tag + +_tag-registry-builder.feature_ + --- [← Back to Business Rules](../BUSINESS-RULES.md) diff --git a/docs-live/business-rules/generation.md b/docs-live/business-rules/generation.md index 34cd9fde..e343917a 100644 --- a/docs-live/business-rules/generation.md +++ b/docs-live/business-rules/generation.md @@ -4,7 +4,7 @@ --- -**300 rules** from 60 features. 300 rules have explicit invariants. +**303 rules** from 61 features. 303 rules have explicit invariants. --- @@ -1815,6 +1815,48 @@ _Tests the GeneratorRegistry registration, lookup, and listing capabilities._ _registry.feature_ +### Git Branch Diff + +_The branch diff utility returns changed files relative to a base branch for_ + +--- + +#### getChangedFilesList returns only existing changed files + +> **Invariant:** Modified and added files are returned, while deleted tracked files are excluded from the final list. +> +> **Rationale:** PR-scoped generation only needs files that still exist on the current branch; including deleted paths would force consumers to chase files that cannot be read. + +**Verified by:** + +- Modified and added files are returned while deleted files are excluded + +--- + +#### Paths with spaces are preserved + +> **Invariant:** A filename containing spaces is returned as the exact original path, not split into multiple tokens. +> +> **Rationale:** Whitespace splitting corrupts file paths and breaks PR-scoped generation in repositories with descriptive filenames. + +**Verified by:** + +- File paths with spaces are preserved + +--- + +#### NUL-delimited rename and copy statuses use the new path + +> **Invariant:** Rename and copy statuses with similarity scores must record the current path, not the old/source path. +> +> **Rationale:** Git emits statuses like R100 and C087 in real diffs; parsing the wrong side of the pair causes generators to scope output to stale paths. 
+ +**Verified by:** + +- Similarity status maps to the new path + +_git-branch-diff.feature_ + ### Implementation Link Path Normalization _Links to implementation files in generated pattern documents should have_ diff --git a/docs-live/business-rules/validation.md b/docs-live/business-rules/validation.md index b25d73f0..641eb046 100644 --- a/docs-live/business-rules/validation.md +++ b/docs-live/business-rules/validation.md @@ -4,7 +4,7 @@ --- -**54 rules** from 11 features. 54 rules have explicit invariants. +**61 rules** from 14 features. 61 rules have explicit invariants. --- @@ -94,6 +94,42 @@ _- Dependencies in features (should be code-only) cause drift_ _anti-patterns.feature_ +### Codec Utils Validation + +_The codec utilities provide factory functions for creating type-safe JSON_ + +--- + +#### createJsonInputCodec parses and validates JSON strings + +> **Invariant:** createJsonInputCodec returns an ok Result when the input is valid JSON that conforms to the provided Zod schema, and an err Result with a descriptive CodecError otherwise. +> +> **Rationale:** Combining JSON parsing and schema validation into a single operation eliminates the class of bugs where parsed-but-invalid data leaks into the application. + +**Verified by:** + +- Input codec parses valid JSON matching schema +- Input codec rejects invalid JSON syntax +- Input codec rejects valid JSON that fails schema validation +- Input codec includes source in error when provided +- Input codec safeParse returns value for valid input +- Input codec safeParse returns undefined for invalid input + +--- + +#### formatCodecError formats errors for display + +> **Invariant:** formatCodecError always returns a non-empty string that includes the operation type and message, and appends validation errors when present. +> +> **Rationale:** Consistent error formatting across all codec consumers avoids duplicated formatting logic and ensures error messages always contain enough context for debugging. 
+ +**Verified by:** + +- formatCodecError formats error without validation details +- formatCodecError formats error with validation details + +_codec-utils.feature_ + ### Config Schema Validation _Configuration schemas validate scanner and generator inputs with security_ @@ -854,6 +890,94 @@ _Tests for the detectStatusTransitions function that parses git diff output._ _status-transition-detection.feature_ +### Tag Registry Schemas Validation + +_The tag registry configuration module provides schema-validated taxonomy_ + +--- + +#### createDefaultTagRegistry produces a valid registry from taxonomy source + +> **Invariant:** createDefaultTagRegistry always returns a TagRegistry that passes TagRegistrySchema validation, with non-empty categories, metadataTags, and aggregationTags arrays. +> +> **Rationale:** The default registry is the foundation for all pattern extraction. An invalid or empty default registry would silently break extraction for every consumer. + +**Verified by:** + +- Default registry passes schema validation +- Default registry has non-empty categories +- Default registry has non-empty metadata tags +- Default registry has expected tag prefix + +--- + +#### mergeTagRegistries deep-merges registries by tag + +> **Invariant:** mergeTagRegistries merges categories, metadataTags, and aggregationTags by their tag field, with override entries replacing base entries of the same tag and new entries being appended. Scalar fields (version, tagPrefix, fileOptInTag, formatOptions) are fully replaced when provided. +> +> **Rationale:** Consumers need to customize the taxonomy without losing default definitions. Tag-based merging prevents accidental duplication while allowing targeted overrides. 
+ +**Verified by:** + +- Merge overrides a category by tag +- Merge adds new categories from override +- Merge replaces scalar fields when provided +- Merge preserves base when override is empty + +_tag-registry-schemas.feature_ + +### Workflow Config Schemas Validation + +_The workflow configuration module defines Zod schemas for validating_ + +--- + +#### WorkflowConfigSchema validates workflow configurations + +> **Invariant:** WorkflowConfigSchema accepts objects with a name, semver version, at least one status, and at least one phase, and rejects objects missing any required field or with invalid semver format. +> +> **Rationale:** Workflow configurations drive FSM validation and phase-based document routing. Malformed configs would cause silent downstream failures in process guard and documentation generation. + +**Verified by:** + +- Valid workflow config passes schema validation +- Config without name is rejected +- Config with invalid semver version is rejected +- Config without statuses is rejected +- Config without phases is rejected + +--- + +#### createLoadedWorkflow builds efficient lookup maps + +> **Invariant:** createLoadedWorkflow produces a LoadedWorkflow whose statusMap and phaseMap contain all statuses and phases from the config, keyed by lowercase name for case-insensitive lookup. +> +> **Rationale:** O(1) status and phase lookup eliminates repeated linear scans during validation and rendering, where each pattern may reference multiple statuses. + +**Verified by:** + +- Loaded workflow has status lookup map +- Status lookup is case-insensitive +- Loaded workflow has phase lookup map +- Phase lookup is case-insensitive + +--- + +#### isWorkflowConfig type guard validates at runtime + +> **Invariant:** isWorkflowConfig returns true only for values that conform to WorkflowConfigSchema and false for all other values including null, undefined, primitives, and partial objects. 
+> +> **Rationale:** Runtime type guards enable safe narrowing in dynamic contexts (config loading, API responses) where TypeScript compile-time types are unavailable. + +**Verified by:** + +- Type guard accepts valid workflow config +- Type guard rejects null +- Type guard rejects partial config +- Type guard rejects non-object + +_workflow-config-schemas.feature_ + --- [← Back to Business Rules](../BUSINESS-RULES.md) diff --git a/docs-live/product-areas/CORE-TYPES.md b/docs-live/product-areas/CORE-TYPES.md index 7ac1d6cb..999f7864 100644 --- a/docs-live/product-areas/CORE-TYPES.md +++ b/docs-live/product-areas/CORE-TYPES.md @@ -137,7 +137,15 @@ type DocError = ## Business Rules -5 patterns, 22 rules with invariants (22 total) +9 patterns, 34 rules with invariants (34 total) + +### Deliverable Status Taxonomy Testing + +| Rule | Invariant | Rationale | +| --------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| isDeliverableStatusTerminal identifies terminal statuses for DoD validation | Only complete, n/a, and superseded are terminal. Deferred is NOT terminal because it implies unfinished work that should block DoD. | Marking a pattern as completed when deliverables are merely deferred creates a hard-locked state with incomplete work, violating delivery process integrity. | +| Status predicates classify individual deliverable states | isDeliverableStatusComplete, isDeliverableStatusInProgress, and isDeliverableStatusPending each match exactly one status value. | Single-value predicates provide type-safe branching for consumers that need to distinguish specific states rather than terminal vs non-terminal groupings. 
| +| getDeliverableStatusEmoji returns display emoji for all statuses | getDeliverableStatusEmoji returns a non-empty string for all 6 canonical statuses. No status value is unmapped. | Missing emoji mappings would cause empty display cells in generated documentation tables, breaking visual consistency. | ### Error Factories @@ -158,6 +166,15 @@ type DocError = | Gherkin extractor collects errors without console side effects | Extraction errors must include structured context (file path, pattern name, validation errors) and must never use console.warn to report warnings. | console.warn bypasses error collection, making warnings invisible to callers and untestable. Structured error objects enable programmatic handling across all consumers. | | CLI error handler formats unknown errors gracefully | Unknown error values (non-DocError, non-Error) must be formatted as "Error: {value}" strings for safe display without crashing. | CLI commands can receive arbitrary thrown values (strings, numbers, objects); coercing them to a safe string prevents the error handler itself from crashing on unexpected types. | +### File Cache Testing + +| Rule | Invariant | Rationale | +| ----------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- | +| Store and retrieve round-trip preserves content | Content stored via set is returned identically by get. No transformation or encoding occurs. | File content must survive caching verbatim; any mutation would cause extraction to produce different results on cache hits vs misses. | +| has checks membership without affecting stats | has returns true for cached paths and false for uncached paths. It does not increment hit or miss counters. 
| has is used for guard checks before get; double-counting would inflate stats and misrepresent actual cache effectiveness. | +| Stats track hits and misses accurately | Every get call increments either hits or misses. hitRate is computed as (hits / total) \* 100 with a zero-division guard returning 0 when total is 0. | Accurate stats enable performance analysis of generation runs; incorrect counts would lead to wrong caching decisions. | +| Clear resets cache and stats | clear removes all cached entries and resets hit/miss counters to zero. | Per-run scoping requires a clean slate; stale entries from a previous run would cause the extractor to use outdated content. | + ### Kebab Case Slugs | Rule | Invariant | Rationale | @@ -167,6 +184,13 @@ type DocError = | Requirements include phase prefix | Requirement slugs must be prefixed with "phase-NN-" where NN is the zero-padded phase number, defaulting to "00" when no phase is assigned. | Phase prefixes enable lexicographic sorting of requirement files by delivery order, so directory listings naturally reflect the roadmap sequence. | | Phase slugs use kebab-case for names | Phase slugs must combine a zero-padded phase number with the kebab-case name in the format "phase-NN-name", defaulting to "unnamed" when no name is provided. | A consistent "phase-NN-name" format ensures phase files sort numerically and remain identifiable even when the phase number alone would be ambiguous across roadmap versions. 
| +### Normalized Status Testing + +| Rule | Invariant | Rationale | +| ------------------------------------------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | +| normalizeStatus maps raw FSM states to display buckets | normalizeStatus must map every raw FSM status to exactly one of three display buckets: completed, active, or planned. Unknown or undefined inputs default to planned. | UI and generated documentation need a simplified status model; the raw 4-state FSM is an implementation detail that should not leak into display logic. | +| Pattern status predicates check normalized state | isPatternComplete, isPatternActive, and isPatternPlanned are mutually exclusive for any given status input. Exactly one returns true. | Consumers branch on these predicates; overlapping true values would cause double-rendering or contradictory UI states. | + ### Result Monad | Rule | Invariant | Rationale | @@ -186,4 +210,12 @@ type DocError = | slugify generates URL-safe slugs | slugify must produce lowercase, alphanumeric, hyphen-only strings with no leading/trailing hyphens. | URL slugs appear in file paths and links across all generated documentation; inconsistent slugification would break cross-references. | | camelCaseToTitleCase generates readable titles | camelCaseToTitleCase must insert spaces at camelCase boundaries and preserve known acronyms (HTTP, XML, API, DoD, AST, GraphQL). | Pattern names stored as PascalCase identifiers appear as human-readable titles in generated documentation; incorrect splitting would produce unreadable headings. 
| +### Tag Registry Builder Testing + +| Rule | Invariant | Rationale | +| -------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| buildRegistry returns a well-formed TagRegistry | buildRegistry always returns a TagRegistry with version, categories, metadataTags, aggregationTags, formatOptions, tagPrefix, and fileOptInTag properties. | All downstream consumers (scanner, extractor, validator) depend on registry structure. A malformed registry would cause silent extraction failures across the entire pipeline. | +| Metadata tags have correct configuration | The pattern tag is required, the status tag has a default value, and tags with transforms apply them correctly. | Misconfigured tag metadata would cause the extractor to skip required fields or apply wrong defaults, producing silently corrupt patterns. | +| Registry includes standard prefixes and opt-in tag | tagPrefix is the standard annotation prefix and fileOptInTag is the bare opt-in marker. These are non-empty strings. | Changing these values without updating all annotated files would break scanner opt-in detection across the entire monorepo. 
| + --- diff --git a/docs-live/product-areas/DATA-API.md b/docs-live/product-areas/DATA-API.md index 29debd55..2838dd79 100644 --- a/docs-live/product-areas/DATA-API.md +++ b/docs-live/product-areas/DATA-API.md @@ -100,23 +100,6 @@ graph TB DataAPICLIErgonomics["DataAPICLIErgonomics"]:::neighbor DataAPIArchitectureQueries["DataAPIArchitectureQueries"]:::neighbor end - ReplMode -->|uses| PipelineFactory - ReplMode -->|uses| ProcessStateAPI - ReplMode ..->|implements| DataAPICLIErgonomics - ProcessAPICLIImpl -->|uses| ProcessStateAPI - ProcessAPICLIImpl -->|uses| MasterDataset - ProcessAPICLIImpl -->|uses| PipelineFactory - ProcessAPICLIImpl -->|uses| RulesQueryModule - ProcessAPICLIImpl -->|uses| PatternSummarizerImpl - ProcessAPICLIImpl -->|uses| FuzzyMatcherImpl - ProcessAPICLIImpl -->|uses| OutputPipelineImpl - ProcessAPICLIImpl ..->|implements| ProcessStateAPICLI - OutputPipelineImpl -->|uses| PatternSummarizerImpl - OutputPipelineImpl ..->|implements| DataAPIOutputShaping - DatasetCache -->|uses| PipelineFactory - DatasetCache -->|uses| WorkflowConfigSchema - DatasetCache ..->|implements| DataAPICLIErgonomics - CLISchema ..->|implements| ProcessApiHybridGeneration PatternSummarizerImpl -->|uses| ProcessStateAPI PatternSummarizerImpl ..->|implements| DataAPIOutputShaping ScopeValidatorImpl -->|uses| ProcessStateAPI @@ -146,6 +129,23 @@ graph TB ArchQueriesImpl -->|uses| ProcessStateAPI ArchQueriesImpl -->|uses| MasterDataset ArchQueriesImpl ..->|implements| DataAPIArchitectureQueries + ReplMode -->|uses| PipelineFactory + ReplMode -->|uses| ProcessStateAPI + ReplMode ..->|implements| DataAPICLIErgonomics + ProcessAPICLIImpl -->|uses| ProcessStateAPI + ProcessAPICLIImpl -->|uses| MasterDataset + ProcessAPICLIImpl -->|uses| PipelineFactory + ProcessAPICLIImpl -->|uses| RulesQueryModule + ProcessAPICLIImpl -->|uses| PatternSummarizerImpl + ProcessAPICLIImpl -->|uses| FuzzyMatcherImpl + ProcessAPICLIImpl -->|uses| OutputPipelineImpl + ProcessAPICLIImpl 
..->|implements| ProcessStateAPICLI + OutputPipelineImpl -->|uses| PatternSummarizerImpl + OutputPipelineImpl ..->|implements| DataAPIOutputShaping + DatasetCache -->|uses| PipelineFactory + DatasetCache -->|uses| WorkflowConfigSchema + DatasetCache ..->|implements| DataAPICLIErgonomics + CLISchema ..->|implements| ProcessApiHybridGeneration StubResolverImpl -->|uses| ProcessStateAPI FSMValidator ..->|implements| PhaseStateMachineValidation PipelineFactory -->|uses| MasterDataset diff --git a/docs-live/product-areas/GENERATION.md b/docs-live/product-areas/GENERATION.md index 025433ed..0911e859 100644 --- a/docs-live/product-areas/GENERATION.md +++ b/docs-live/product-areas/GENERATION.md @@ -59,14 +59,20 @@ Scoped architecture diagram showing component relationships: ```mermaid graph TB subgraph generator["Generator"] + GitModule["GitModule"] + GitHelpers["GitHelpers"] + GitBranchDiff["GitBranchDiff"] SourceMapper[/"SourceMapper"/] Documentation_Generation_Orchestrator("Documentation Generation Orchestrator") - TransformDataset("TransformDataset") - SequenceTransformUtils("SequenceTransformUtils") ProcessApiReferenceGenerator["ProcessApiReferenceGenerator"] DesignReviewGenerator("DesignReviewGenerator") DecisionDocGenerator("DecisionDocGenerator") CliRecipeGenerator["CliRecipeGenerator"] + TransformTypes["TransformTypes"] + TransformDataset("TransformDataset") + SequenceTransformUtils("SequenceTransformUtils") + RelationshipResolver("RelationshipResolver") + ContextInferenceImpl["ContextInferenceImpl"] end subgraph renderer["Renderer"] loadPreambleFromMarkdown___Shared_Markdown_to_SectionBlock_Parser["loadPreambleFromMarkdown — Shared Markdown-to-SectionBlock Parser"] @@ -81,6 +87,7 @@ graph TB MasterDataset["MasterDataset"]:::neighbor Pattern_Scanner["Pattern Scanner"]:::neighbor GherkinASTParser["GherkinASTParser"]:::neighbor + PatternHelpers["PatternHelpers"]:::neighbor ShapeExtractor["ShapeExtractor"]:::neighbor 
ReferenceDocShowcase["ReferenceDocShowcase"]:::neighbor ProcessApiHybridGeneration["ProcessApiHybridGeneration"]:::neighbor @@ -88,8 +95,11 @@ graph TB PatternRelationshipModel["PatternRelationshipModel"]:::neighbor DesignReviewGeneration["DesignReviewGeneration"]:::neighbor CliRecipeCodec["CliRecipeCodec"]:::neighbor + ContextInference["ContextInference"]:::neighbor end loadPreambleFromMarkdown___Shared_Markdown_to_SectionBlock_Parser ..->|implements| ProceduralGuideCodec + GitModule -->|uses| GitBranchDiff + GitModule -->|uses| GitHelpers SourceMapper -.->|depends on| DecisionDocCodec SourceMapper -.->|depends on| ShapeExtractor SourceMapper -.->|depends on| GherkinASTParser @@ -100,10 +110,6 @@ graph TB DesignReviewCodec ..->|implements| DesignReviewGeneration CompositeCodec ..->|implements| ReferenceDocShowcase ArchitectureCodec -->|uses| MasterDataset - TransformDataset -->|uses| MasterDataset - TransformDataset ..->|implements| PatternRelationshipModel - SequenceTransformUtils -->|uses| MasterDataset - SequenceTransformUtils ..->|implements| DesignReviewGeneration ProcessApiReferenceGenerator ..->|implements| ProcessApiHybridGeneration DesignReviewGenerator -->|uses| DesignReviewCodec DesignReviewGenerator -->|uses| MasterDataset @@ -111,6 +117,13 @@ graph TB DecisionDocGenerator -.->|depends on| DecisionDocCodec DecisionDocGenerator -.->|depends on| SourceMapper CliRecipeGenerator ..->|implements| CliRecipeCodec + TransformTypes -->|uses| MasterDataset + TransformDataset -->|uses| MasterDataset + TransformDataset ..->|implements| PatternRelationshipModel + SequenceTransformUtils -->|uses| MasterDataset + SequenceTransformUtils ..->|implements| DesignReviewGeneration + RelationshipResolver -->|uses| PatternHelpers + ContextInferenceImpl ..->|implements| ContextInference DesignReviewGeneration -.->|depends on| MermaidDiagramUtils CliRecipeCodec -.->|depends on| ProcessApiHybridGeneration classDef neighbor stroke-dasharray: 5 5 @@ -245,7 +258,7 @@ type 
CollapsibleBlock = { ### transformToMasterDataset (function) -````typescript +```typescript /** * Transform raw extracted data into a MasterDataset with all pre-computed views. * @@ -263,22 +276,8 @@ type CollapsibleBlock = { * * @param raw - Raw dataset with patterns, registry, and optional workflow * @returns MasterDataset with all pre-computed views - * - * @example - * ```typescript - * const masterDataset = transformToMasterDataset({ - * patterns: mergedPatterns, - * tagRegistry: registry, - * workflow, - * }); - * - * // Access pre-computed views - * const completed = masterDataset.byStatus.completed; - * const phase3Patterns = masterDataset.byPhase.find(p => p.phaseNumber === 3); - * const q42024 = masterDataset.byQuarter["Q4-2024"]; - * ``` */ -```` +``` ```typescript function transformToMasterDataset(raw: RawDataset): RuntimeMasterDataset; @@ -294,7 +293,7 @@ function transformToMasterDataset(raw: RawDataset): RuntimeMasterDataset; ## Business Rules -91 patterns, 439 rules with invariants (440 total) +92 patterns, 442 rules with invariants (443 total) ### ADR 005 Codec Based Markdown Rendering @@ -698,6 +697,14 @@ function transformToMasterDataset(raw: RawDataset): RuntimeMasterDataset; | GHERKIN-PATTERNS.md remains the authoring guide | GHERKIN-PATTERNS.md covers only Gherkin writing patterns, not tooling reference. | The writing guide is useful during spec authoring. Quality tool reference is useful during CI setup and debugging. Mixing them forces authors to scroll past 148 lines of tooling reference they do not need during writing, and forces CI engineers to look in the wrong file for lint rule documentation. | | INDEX.md reflects current document structure | INDEX.md section tables and line counts must be updated when content moves between docs. | INDEX.md serves as the navigation hub for all documentation. Stale line counts and missing section entries cause developers to land in the wrong part of a document or miss content entirely. 
Both GHERKIN-PATTERNS.md and VALIDATION.md entries must reflect the restructure. | +### Git Branch Diff Testing + +| Rule | Invariant | Rationale | +| ------------------------------------------------------- | ------------------------------------------------------------------------------------------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| getChangedFilesList returns only existing changed files | Modified and added files are returned, while deleted tracked files are excluded from the final list. | PR-scoped generation only needs files that still exist on the current branch; including deleted paths would force consumers to chase files that cannot be read. | +| Paths with spaces are preserved | A filename containing spaces is returned as the exact original path, not split into multiple tokens. | Whitespace splitting corrupts file paths and breaks PR-scoped generation in repositories with descriptive filenames. | +| NUL-delimited rename and copy statuses use the new path | Rename and copy statuses with similarity scores must record the current path, not the old/source path. | Git emits statuses like R100 and C087 in real diffs; parsing the wrong side of the pair causes generators to scope output to stale paths. 
| + ### Implementation Link Path Normalization | Rule | Invariant | Rationale | diff --git a/docs-live/product-areas/VALIDATION.md b/docs-live/product-areas/VALIDATION.md index 51226c6d..21e18c61 100644 --- a/docs-live/product-areas/VALIDATION.md +++ b/docs-live/product-areas/VALIDATION.md @@ -912,7 +912,7 @@ const missingStatus: LintRule; ## Business Rules -20 patterns, 95 rules with invariants (95 total) +23 patterns, 102 rules with invariants (102 total) ### Anti Pattern Detector Testing @@ -925,6 +925,13 @@ const missingStatus: LintRule; | All anti-patterns can be detected in one pass | The anti-pattern detector must evaluate all registered rules in a single scan pass over the source files. | Single-pass detection ensures consistent results and avoids O(n\*m) performance degradation with multiple file traversals. | | Violations can be formatted for console output | Anti-pattern violations must be renderable as grouped, human-readable console output. | Developers need actionable feedback at commit time — ungrouped or unformatted violations are hard to triage and fix. | +### Codec Utils Validation + +| Rule | Invariant | Rationale | +| ------------------------------------------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| createJsonInputCodec parses and validates JSON strings | createJsonInputCodec returns an ok Result when the input is valid JSON that conforms to the provided Zod schema, and an err Result with a descriptive CodecError otherwise. | Combining JSON parsing and schema validation into a single operation eliminates the class of bugs where parsed-but-invalid data leaks into the application. 
| +| formatCodecError formats errors for display | formatCodecError always returns a non-empty string that includes the operation type and message, and appends validation errors when present. | Consistent error formatting across all codec consumers avoids duplicated formatting logic and ensures error messages always contain enough context for debugging. | + ### Config Schema Validation | Rule | Invariant | Rationale | @@ -1101,6 +1108,13 @@ const missingStatus: LintRule; | Diff content is parsed as it streams | Status transitions and deliverable changes must be extracted incrementally as each file section completes, not after the entire diff is collected. | Batch-processing the full diff reintroduces the memory bottleneck that streaming is designed to eliminate. | | Streaming errors are handled gracefully | Stream failures and malformed diff lines must return Result errors or be skipped without throwing exceptions. | Unhandled stream errors crash the CLI process, preventing any validation output from reaching the user. | +### Tag Registry Schemas Validation + +| Rule | Invariant | Rationale | +| ----------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ----------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| createDefaultTagRegistry produces a valid registry from taxonomy source | createDefaultTagRegistry always returns a TagRegistry that passes TagRegistrySchema validation, with non-empty categories, metadataTags, and aggregationTags arrays. | The default registry is the foundation for all pattern extraction. 
An invalid or empty default registry would silently break extraction for every consumer. | +| mergeTagRegistries deep-merges registries by tag | mergeTagRegistries merges categories, metadataTags, and aggregationTags by their tag field, with override entries replacing base entries of the same tag and new entries being appended. Scalar fields (version, tagPrefix, fileOptInTag, formatOptions) are fully replaced when provided. | Consumers need to customize the taxonomy without losing default definitions. Tag-based merging prevents accidental duplication while allowing targeted overrides. | + ### Validator Read Model Consolidation | Rule | Invariant | Rationale | @@ -1109,4 +1123,12 @@ const missingStatus: LintRule; | No lossy local types in the validator | The validator operates on `ExtractedPattern` from the MasterDataset, not a consumer-local DTO that discards fields. | GherkinPatternInfo keeps only name, phase, status, file, and deliverables — discarding uses, dependsOn, implementsPatterns, include, productArea, rules, and 20+ other fields. When the validator needs relationship data, it cannot access it through the lossy type. | | Utility patterns without specs are not false positives | Internal utility patterns that have a `@libar-docs-phase` but will never have a Gherkin spec should not carry phase metadata. Phase tags signal roadmap participation. | Five utility patterns (ContentDeduplicator, FileCache, WarningCollector, SourceMappingValidator, SourceMapper) have phase tags from the phase when they were built. They are infrastructure, not roadmap features. The validator correctly reports missing Gherkin for patterns with phases — the fix is removing the phase tag, not suppressing the warning. 
| +### Workflow Config Schemas Validation + +| Rule | Invariant | Rationale | +| ------------------------------------------------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| WorkflowConfigSchema validates workflow configurations | WorkflowConfigSchema accepts objects with a name, semver version, at least one status, and at least one phase, and rejects objects missing any required field or with invalid semver format. | Workflow configurations drive FSM validation and phase-based document routing. Malformed configs would cause silent downstream failures in process guard and documentation generation. | +| createLoadedWorkflow builds efficient lookup maps | createLoadedWorkflow produces a LoadedWorkflow whose statusMap and phaseMap contain all statuses and phases from the config, keyed by lowercase name for case-insensitive lookup. | O(1) status and phase lookup eliminates repeated linear scans during validation and rendering, where each pattern may reference multiple statuses. | +| isWorkflowConfig type guard validates at runtime | isWorkflowConfig returns true only for values that conform to WorkflowConfigSchema and false for all other values including null, undefined, primitives, and partial objects. | Runtime type guards enable safe narrowing in dynamic contexts (config loading, API responses) where TypeScript compile-time types are unavailable. 
| + --- diff --git a/docs-live/reference/ANNOTATION-REFERENCE.md b/docs-live/reference/ANNOTATION-REFERENCE.md index 19e6f164..8d63029e 100644 --- a/docs-live/reference/ANNOTATION-REFERENCE.md +++ b/docs-live/reference/ANNOTATION-REFERENCE.md @@ -109,6 +109,93 @@ For Zod files, extract the **schema constant** (with `Schema` suffix), not the i --- +## Annotation Patterns by File Type + +### Zod Schema Files + +```typescript +/** + * @libar-docs + * @libar-docs-pattern MasterDataset + * @libar-docs-status completed + * @libar-docs-extract-shapes MasterDatasetSchema, StatusGroupsSchema, PhaseGroupSchema + */ +``` + +### Interface / Type Files + +```typescript +/** + * @libar-docs + * @libar-docs-pattern DocumentGenerator + * @libar-docs-status completed + * @libar-docs-extract-shapes DocumentGenerator, GeneratorContext, GeneratorOutput + */ +``` + +### Function / Service Files + +```typescript +/** + * @libar-docs + * @libar-docs-pattern TransformDataset + * @libar-docs-status completed + * @libar-docs-arch-context generator + * @libar-docs-arch-layer application + * @libar-docs-extract-shapes transformToMasterDataset, RuntimeMasterDataset + */ +``` + +### Gherkin Feature Files + +```gherkin +@libar-docs +@libar-docs-pattern:ProcessGuardLinter +@libar-docs-status:roadmap +@libar-docs-phase:99 +@libar-docs-depends-on:StateMachine,ValidationRules +Feature: Process Guard Linter + + Background: Deliverables + Given the following deliverables: + | Deliverable | Status | Location | + | State derivation | Pending | src/lint/derive.ts | + + Rule: Completed specs require unlock reason + + **Invariant:** A completed spec cannot be modified without explicit unlock. + **Rationale:** Prevents accidental regression of validated work. 
+ + @acceptance-criteria @happy-path + Scenario: Reject modification without unlock + Given a spec with status "completed" + When I modify a deliverable + Then validation fails with "completed-protection" +``` + +--- + +## Tag Groups Quick Reference + +Tags are organized into 12 functional groups. For the complete reference with all values, see the generated [Taxonomy Reference](../docs-live/TAXONOMY.md). + +| Group | Tags (representative) | Format Types | +| ---------------- | ---------------------------------------------------- | ------------------------- | +| **Core** | `pattern`, `status`, `core`, `brief` | value, enum, flag | +| **Relationship** | `uses`, `used-by`, `implements`, `depends-on` | csv, value | +| **Process** | `phase`, `quarter`, `effort`, `team`, `priority` | number, value, enum | +| **PRD** | `product-area`, `user-role`, `business-value` | value | +| **ADR** | `adr`, `adr-status`, `adr-category`, `adr-theme` | value, enum | +| **Hierarchy** | `level`, `parent`, `title` | enum, value, quoted-value | +| **Traceability** | `executable-specs`, `roadmap-spec`, `behavior-file` | csv, value | +| **Discovery** | `discovered-gap`, `discovered-improvement` | value (repeatable) | +| **Architecture** | `arch-role`, `arch-context`, `arch-layer`, `include` | enum, value, csv | +| **Extraction** | `extract-shapes`, `shape` | csv, value | +| **Stub** | `target`, `since` | value | +| **Convention** | `convention` | csv (enum values) | + +--- + ## Verification ### CLI Commands diff --git a/docs-live/reference/ARCHITECTURE-CODECS.md b/docs-live/reference/ARCHITECTURE-CODECS.md index 6fec55f5..64427915 100644 --- a/docs-live/reference/ARCHITECTURE-CODECS.md +++ b/docs-live/reference/ARCHITECTURE-CODECS.md @@ -618,12 +618,13 @@ Or use the default export for standard behavior: - **component**: System overview with bounded context subgraphs - **layered**: Components organized by architectural layer -| Option | Type | Default | Description | -| ---------------- | 
------------------------ | ----------- | ----------------------------------------- | -| diagramType | "component" \| "layered" | "component" | Type of diagram to generate | -| includeInventory | boolean | true | Include component inventory table | -| includeLegend | boolean | true | Include legend for arrow styles | -| filterContexts | string[] | [] | Filter to specific contexts (empty = all) | +| Option | Type | Default | Description | +| ------------------------ | ------------------------ | ----------- | ---------------------------------------------- | +| diagramType | "component" \| "layered" | "component" | Type of diagram to generate | +| includeInventory | boolean | true | Include component inventory table | +| includeLegend | boolean | true | Include legend for arrow styles | +| filterContexts | string[] | [] | Filter to specific contexts (empty = all) | +| diagramKeyComponentsOnly | boolean | true | Only show components with archRole in diagrams | ```typescript const codec = createArchitectureCodec({ diagramType: 'component' }); diff --git a/docs-live/reference/CONFIGURATION-GUIDE.md b/docs-live/reference/CONFIGURATION-GUIDE.md new file mode 100644 index 00000000..c533fae5 --- /dev/null +++ b/docs-live/reference/CONFIGURATION-GUIDE.md @@ -0,0 +1,249 @@ +# Configuration Guide + +**Purpose:** Reference document: Configuration Guide +**Detail Level:** Full reference + +--- + +## Quick Reference + +| Preset | Tag Prefix | Categories | Use Case | +| ----------------------------- | -------------- | ---------- | ------------------------------------ | +| **`libar-generic`** (default) | `@libar-docs-` | 3 | Simple projects (this package) | +| `generic` | `@docs-` | 3 | Simple projects with `@docs-` prefix | +| `ddd-es-cqrs` | `@libar-docs-` | 21 | DDD/Event Sourcing architectures | + +```typescript +// delivery-process.config.ts +import { defineConfig } from '@libar-dev/delivery-process/config'; + +// Default: libar-generic preset (simple 3-category taxonomy) 
+export default defineConfig({ + preset: 'libar-generic', + sources: { + typescript: ['src/**/*.ts'], + features: ['specs/*.feature'], + }, + output: { directory: 'docs-generated' }, +}); +``` + +--- + +## Preset Selection + +### When to Use Each Preset + +| Preset | Use When | Categories | +| --------------- | ------------------------------------------------------------ | ---------------------------------------------------------------------------------------- | +| `libar-generic` | Simple projects, standard `@libar-docs-` prefix | 3 (core, api, infra) | +| `generic` | Prefer shorter `@docs-` prefix | 3 (core, api, infra) | +| `ddd-es-cqrs` | DDD architecture with bounded contexts, event sourcing, CQRS | 21 (domain, ddd, bounded-context, event-sourcing, decider, cqrs, saga, projection, etc.) | + +**Design decision:** Presets **replace** the base taxonomy categories entirely (not merged). If you need DDD categories, use the `ddd-es-cqrs` preset. + +### Default Preset Selection + +All entry points default to `libar-generic`: + +| Entry Point | Default Preset | Context | +| ------------------------------ | ------------------------------ | -------------------------------- | +| `defineConfig()` | `libar-generic` (3 categories) | Config file | +| `loadProjectConfig()` fallback | `libar-generic` (3 categories) | CLI tools (no config file found) | +| This package's config file | `libar-generic` (3 categories) | Standalone package usage | + +--- + +## Unified Config File + +The `defineConfig()` function centralizes taxonomy, sources, output, and generator overrides in a single `delivery-process.config.ts` file. CLI tools discover this file automatically. + +### Discovery Order + +1. Current directory: check `delivery-process.config.ts`, then `.js` +2. Walk up to repo root (`.git` folder), checking each directory +3. 
Fall back to libar-generic preset (3 categories, `@libar-docs-` prefix) + +### Config File Format + +```typescript +// delivery-process.config.ts +import { defineConfig } from '@libar-dev/delivery-process/config'; + +export default defineConfig({ + preset: 'libar-generic', + sources: { + typescript: ['src/**/*.ts'], + stubs: ['delivery-process/stubs/**/*.ts'], + features: ['delivery-process/specs/*.feature'], + }, + output: { + directory: 'docs-generated', + overwrite: true, + }, +}); +``` + +### Sources Configuration + +| Field | Type | Description | +| ------------ | ---------- | ---------------------------------------------------- | +| `typescript` | `string[]` | Glob patterns for TypeScript source files (required) | +| `features` | `string[]` | Glob patterns for Gherkin feature files | +| `stubs` | `string[]` | Glob patterns for design stub files | +| `exclude` | `string[]` | Glob patterns to exclude from all scanning | + +Stubs are merged into TypeScript sources at resolution time. No parent directory traversal (`..`) is allowed in globs. + +### Output Configuration + +| Field | Type | Default | Description | +| ----------- | --------- | --------------------- | ----------------------------------- | +| `directory` | `string` | `'docs/architecture'` | Output directory for generated docs | +| `overwrite` | `boolean` | `false` | Overwrite existing files | + +### Generator Overrides + +Some generators need different sources than the base config. 
Use `generatorOverrides` for per-generator customization: + +```typescript +export default defineConfig({ + preset: 'libar-generic', + sources: { + typescript: ['src/**/*.ts'], + features: ['delivery-process/specs/*.feature'], + }, + output: { directory: 'docs-generated', overwrite: true }, + generatorOverrides: { + changelog: { + additionalFeatures: ['delivery-process/decisions/*.feature'], + }, + 'doc-from-decision': { + replaceFeatures: ['delivery-process/decisions/*.feature'], + }, + }, +}); +``` + +| Override Field | Description | +| -------------------- | ---------------------------------------------------- | +| `additionalFeatures` | Feature globs appended to base features | +| `additionalInput` | TypeScript globs appended to base TypeScript sources | +| `replaceFeatures` | Feature globs used INSTEAD of base features | +| `outputDirectory` | Override output directory for this generator | + +**Constraint:** `replaceFeatures` and `additionalFeatures` are mutually exclusive when both are non-empty. + +--- + +## Monorepo Setup + +```my-monorepo/ delivery-process.config.ts # Repo-level: ddd-es-cqrs packages/ my-package/ delivery-process.config.ts # Package-level: generic + +``` + +CLI tools use the nearest config file to the working directory. Each package can have its own preset and source globs. 
+ +--- + +## Custom Configuration + +### Custom Tag Prefix + +Keep a preset's taxonomy but change the prefix: + +```typescript +export default defineConfig({ + preset: 'libar-generic', + tagPrefix: '@team-', + fileOptInTag: '@team', + sources: { typescript: ['src/**/*.ts'] }, +}); + +// Your annotations: +// /** @team */ +// /** @team-pattern DualSourceExtractor */ +// /** @team-core */ +``` + +### Custom Categories + +Define your own taxonomy: + +```typescript +export default defineConfig({ + tagPrefix: '@docs-', + fileOptInTag: '@docs', + categories: [ + { tag: 'scanner', domain: 'Scanner', priority: 1, description: 'File scanning', aliases: [] }, + { + tag: 'extractor', + domain: 'Extractor', + priority: 2, + description: 'Pattern extraction', + aliases: [], + }, + { + tag: 'generator', + domain: 'Generator', + priority: 3, + description: 'Doc generation', + aliases: [], + }, + ], + sources: { typescript: ['src/**/*.ts'] }, +}); +``` + +--- + +## Programmatic Config Loading + +For tools that need to load configuration files: + +```typescript +import { loadProjectConfig } from '@libar-dev/delivery-process/config'; + +const result = await loadProjectConfig(process.cwd()); + +if (!result.ok) { + console.error(result.error.message); + process.exit(1); +} + +const resolved = result.value; +// resolved.instance - DeliveryProcessInstance (registry + regexBuilders) +// resolved.project - ResolvedProjectConfig (sources, output, generators) +// resolved.isDefault - true if no config file found +// resolved.configPath - config file path (if found) +``` + +For per-generator source resolution: + +```typescript +import { mergeSourcesForGenerator } from '@libar-dev/delivery-process/config'; + +const effectiveSources = mergeSourcesForGenerator( + resolved.project.sources, + 'changelog', + resolved.project.generatorOverrides +); +// effectiveSources.typescript - merged TypeScript globs +// effectiveSources.features - merged or replaced feature globs +``` + +--- + +## Backward 
Compatibility + +The legacy `createDeliveryProcess()` API is still exported and supported. Config files using the old format are detected automatically by `loadProjectConfig()` and wrapped in a `ResolvedConfig` with default project settings. + +```typescript +// Legacy format (still works) +import { createDeliveryProcess } from '@libar-dev/delivery-process'; +export default createDeliveryProcess({ preset: 'ddd-es-cqrs' }); +``` + +New projects should use `defineConfig()` for the unified configuration experience. + +--- diff --git a/docs-live/reference/GHERKIN-AUTHORING-GUIDE.md b/docs-live/reference/GHERKIN-AUTHORING-GUIDE.md new file mode 100644 index 00000000..75777ec1 --- /dev/null +++ b/docs-live/reference/GHERKIN-AUTHORING-GUIDE.md @@ -0,0 +1,270 @@ +# Gherkin Authoring Guide + +**Purpose:** Reference document: Gherkin Authoring Guide +**Detail Level:** Full reference + +--- + +## Essential Patterns + +### Roadmap Spec Structure + +Roadmap specs define planned work with Problem/Solution descriptions and a Background deliverables table. + +```gherkin +@libar-docs +@libar-docs-pattern:ProcessGuardLinter +@libar-docs-status:roadmap +@libar-docs-phase:99 +Feature: Process Guard Linter + + **Problem:** + During planning and implementation sessions, accidental modifications occur: + - Specs outside the intended scope get modified in bulk + - Completed/approved work gets inadvertently changed + + **Solution:** + Implement a Decider-based linter that: + 1. Derives process state from existing file annotations + 2. Validates proposed changes against derived state + 3. 
Enforces file protection levels per PDR-005 + + Background: Deliverables + Given the following deliverables: + | Deliverable | Status | Location | + | State derivation | Pending | src/lint/process-guard/derive.ts | + | Git diff change detection | Pending | src/lint/process-guard/detect.ts | + | CLI integration | Pending | src/cli/lint-process.ts | +``` + +**Key elements:** + +- `@libar-docs` -- bare opt-in marker (required) +- `@libar-docs-pattern:Name` -- unique identifier (required) +- `@libar-docs-status:roadmap` -- FSM state +- `**Problem:**` / `**Solution:**` -- extracted by generators +- Background deliverables table -- tracks implementation progress + +--- + +### Rule Blocks for Business Constraints + +Use `Rule:` to group related scenarios under a business constraint. + +```gherkin +Rule: Status transitions must follow PDR-005 FSM + + **Invariant:** Only valid FSM transitions are allowed. + + **Rationale:** The FSM enforces deliberate progression through planning, implementation, and completion. 
+ + **Verified by:** Valid transitions pass, Invalid transitions fail + + @happy-path + Scenario Outline: Valid transitions pass validation + Given a file with status "" + When the status changes to "" + Then validation passes + + Examples: + | from | to | + | roadmap | active | + | roadmap | deferred | + | active | completed | + | deferred | roadmap | +``` + +| Element | Purpose | Extracted By | +| ------------------ | --------------------------------------- | ------------------------------------------- | +| `**Invariant:**` | Business constraint (what must be true) | Business Rules generator | +| `**Rationale:**` | Business justification (why it exists) | Business Rules generator | +| `**Verified by:**` | Comma-separated scenario names | Multiple codecs (Business Rules, Reference) | + +--- + +### Scenario Outline for Variations + +When the same pattern applies with different inputs, use `Scenario Outline` with an `Examples` table: + +```gherkin +Scenario Outline: Protection levels by status + Given a file with status "" + When checking protection level + Then protection is "" + And unlock required is "" + + Examples: + | status | protection | unlock | + | roadmap | none | no | + | active | scope | no | + | completed | hard | yes | + | deferred | none | no | +``` + +--- + +### Executable Test Features + +Test features focus on behavior verification with section dividers for organization. + +```gherkin +@behavior @scanner-core +@libar-docs-pattern:ScannerCore +Feature: Scanner Core Integration + + Background: + Given a scanner integration context with temp directory + + @happy-path + Scenario: Scan files and extract directives + Given a file "src/auth.ts" with valid content + When scanning with pattern "src/**/*.ts" + Then the scan should succeed with 1 file +``` + +Section comments (`# ====`) improve readability in large feature files. 
+ +--- + +## DataTable and DocString Usage + +### Background DataTable (Reference Data) + +Use for data that applies to all scenarios -- deliverables, definitions, etc. + +```gherkin +Background: Deliverables + Given the following deliverables: + | Deliverable | Status | Location | Tests | + | Category types | Done | src/types.ts | Yes | + | Validation logic | Pending | src/validate.ts | Yes | +``` + +### Scenario DataTable (Test Data) + +Use for scenario-specific test inputs. + +```gherkin +Scenario: Session file defines modification scope + Given a session file with in-scope specs: + | spec | intent | + | mvp-workflow-implementation | modify | + | short-form-tag-migration | review | + When deriving process state + Then "mvp-workflow-implementation" is modifiable +``` + +### DocString for Code Examples + +Use `"""typescript` for code blocks. Essential when content contains pipes or special characters. + +```gherkin +Scenario: Extract directive from TypeScript + Given a file with content: + """typescript + /** @libar-docs */ + export function authenticate() {} + """ + When scanning the file + Then directive should have tag "@libar-docs-core" +``` + +--- + +## Tag Conventions + +### Semantic Tags (Extracted by Generators) + +| Tag | Purpose | +| ---------------------- | ------------------------------------------------- | +| `@acceptance-criteria` | Required for DoD validation of completed patterns | +| `@happy-path` | Primary success scenario | +| `@validation` | Input validation, constraint checks | +| `@business-rule` | Business invariant verification | +| `@business-failure` | Expected business failure scenario | +| `@edge-case` | Boundary conditions, unusual inputs | +| `@error-handling` | Error recovery, graceful degradation | + +--- + +## Feature Description Patterns + +Choose headers that fit your pattern: + +| Structure | Headers | Best For | +| ---------------- | ------------------------------------------ | ------------------------- | +| Problem/Solution | 
`**Problem:**`, `**Solution:**` | Pain point to fix | +| Value-First | `**Business Value:**`, `**How It Works:**` | TDD-style, Gherkin spirit | +| Context/Approach | `**Context:**`, `**Approach:**` | Technical patterns | + +The **Problem/Solution** pattern is the dominant style in this codebase. + +--- + +## Feature File Rich Content + +Feature files serve dual purposes: **executable specs** and **documentation source**. Content in the Feature description section appears in generated docs. + +### Code-First Principle + +**Prefer code stubs over DocStrings for complex examples.** Feature files should reference code, not duplicate it. + +| Approach | When to Use | +| ---------------------------- | ------------------------------------------------------------ | +| DocStrings (`"""typescript`) | Brief examples (5-10 lines), current/target state comparison | +| Code stub reference | Complex APIs, interfaces, full implementations | + +Code stubs are annotated TypeScript files with `throw new Error("not yet implemented")`, located in `delivery-process/stubs/{pattern-name}/`. + +### Valid Rich Content + +| Content Type | Syntax | Appears in Docs | +| ------------- | ----------------------- | ---------------- | +| Plain text | Regular paragraphs | Yes | +| Bold/emphasis | `**bold**`, `*italic*` | Yes | +| Tables | Markdown pipe tables | Yes | +| Lists | `- item` or `1. 
item` | Yes | +| DocStrings | `"""typescript`...`"""` | Yes (code block) | +| Comments | `# comment` | No (ignored) | + +--- + +## Syntax Notes and Gotchas + +### Forbidden in Feature Descriptions + +| Forbidden | Why | Alternative | +| ----------------------------- | -------------------------------- | ----------------------------------- | +| Code fences (triple backtick) | Not Gherkin syntax | Use DocStrings with lang hint | +| `@prefix` in free text | Interpreted as Gherkin tag | Remove `@` or use `libar-dev` | +| Nested DocStrings | Gherkin parser error | Reference code stub file | +| `#` at line start | Gherkin comment -- kills parsing | Remove, use `//`, or step DocString | + +### Tag Value Constraints + +**Tag values cannot contain spaces.** Use hyphens: + +| Invalid | Valid | +| -------------------------------- | ------------------------------- | +| `@unlock-reason:Fix for issue` | `@unlock-reason:Fix-for-issue` | +| `@libar-docs-pattern:My Pattern` | `@libar-docs-pattern:MyPattern` | + +For values with spaces, use the `quoted-value` format where supported: + +```gherkin +@libar-docs-usecase "When handling command failures" +``` + +--- + +## Quick Reference + +| Element | Use For | Example | +| -------------------- | -------------------------------------- | ----------------------------------- | +| Background DataTable | Deliverables, shared reference data | Deliverables table in roadmap specs | +| Rule: | Group scenarios by business constraint | Invariant + Rationale + Verified by | +| Scenario Outline | Same pattern with variations | Examples tables with multiple rows | +| DocString `"""` | Code examples, content with pipes | TypeScript/Gherkin code blocks | +| Section comments `#` | Organize large feature files | `# ========= Section ==========` | + +--- diff --git a/docs-live/reference/PROCESS-GUARD-REFERENCE.md b/docs-live/reference/PROCESS-GUARD-REFERENCE.md index eabc5ade..a46a4925 100644 --- a/docs-live/reference/PROCESS-GUARD-REFERENCE.md +++ 
b/docs-live/reference/PROCESS-GUARD-REFERENCE.md @@ -5,6 +5,81 @@ --- +## Quick Reference + +### Protection Levels + +| Status | Level | Allowed | Blocked | +| ----------- | ----- | -------------------------- | ------------------------------------- | +| `roadmap` | none | Full editing | - | +| `deferred` | none | Full editing | - | +| `active` | scope | Edit existing deliverables | Adding new deliverables | +| `completed` | hard | Nothing | Any change without `@*-unlock-reason` | + +### Valid Transitions + +| From | To | Notes | +| ----------- | ---------------------- | -------------------------------- | +| `roadmap` | `active`, `deferred` | Start work or postpone | +| `active` | `completed`, `roadmap` | Finish or regress if blocked | +| `deferred` | `roadmap` | Resume planning | +| `completed` | _(none)_ | Terminal -- use unlock to modify | + +### Escape Hatches + +| Situation | Solution | Example | +| ----------------------------- | ---------------------------------- | --------------------------------------------- | +| Fix bug in completed spec | Add `@*-unlock-reason:'reason'` | `@libar-docs-unlock-reason:'Fix typo'` | +| Modify outside session scope | `--ignore-session` flag | `lint-process --staged --ignore-session` | +| CI treats warnings as errors | `--strict` flag | `lint-process --all --strict` | +| Skip workflow (legacy import) | Multiple transitions in one commit | Set `roadmap` then `completed` in same commit | + +--- + +## CLI Usage + +```bash +lint-process [options] +``` + +### Modes + +| Flag | Description | Use Case | +| ---------- | --------------------------------- | ------------------ | +| `--staged` | Validate staged changes (default) | Pre-commit hooks | +| `--all` | Validate all changes vs main | CI/CD pipelines | +| `--files` | Validate specific files | Development checks | + +### Options + +| Flag | Description | +| ------------------- | -------------------------------------- | +| `--strict` | Treat warnings as errors (exit 1) | +| 
`--ignore-session` | Skip session scope rules | +| `--show-state` | Debug: show derived process state | +| `--format json` | Machine-readable output | +| `-f, --file <path>` | Specific file to validate (repeatable) | +| `-b, --base-dir` | Base directory for file resolution | + +### Exit Codes + +| Code | Meaning | +| ---- | -------------------------------------------- | +| `0` | No errors (warnings allowed unless --strict) | +| `1` | Errors found | + +### Examples + +```bash +lint-process --staged # Pre-commit hook (recommended) +lint-process --all --strict # CI pipeline with strict mode +lint-process --file specs/my-feature.feature # Validate specific file +lint-process --staged --show-state # Debug: see derived state +lint-process --staged --ignore-session # Override session scope +``` + +--- + ## Pre-commit Setup Configure Process Guard as a pre-commit hook using Husky. @@ -27,6 +102,8 @@ npx lint-process --staged } ``` + +--- + ## Programmatic API Use Process Guard programmatically for custom validation workflows. @@ -71,6 +148,8 @@ if (hasErrors(result)) { | Results | hasErrors(result) | Check for blocking errors | | Results | summarizeResult(result) | Human-readable summary | + +--- + ## Architecture Process Guard uses the Decider pattern: pure functions with no I/O.
diff --git a/docs-live/reference/REFERENCE-SAMPLE.md b/docs-live/reference/REFERENCE-SAMPLE.md index 9a477e7b..29f24c16 100644 --- a/docs-live/reference/REFERENCE-SAMPLE.md +++ b/docs-live/reference/REFERENCE-SAMPLE.md @@ -242,18 +242,33 @@ Scoped architecture diagram showing component relationships: ```mermaid classDiagram + class GitModule { + +getChangedFilesList const + } + class GitHelpers { + } + class GitBranchDiff { + } class SourceMapper { <> } class Documentation_Generation_Orchestrator { <> } + class TransformTypes { + } class TransformDataset { <> } class SequenceTransformUtils { <> } + class RelationshipResolver { + <> + } + class ContextInferenceImpl { + +ContextInferenceRule interface + } class ProcessApiReferenceGenerator { } class DesignReviewGenerator { @@ -268,20 +283,27 @@ classDiagram class Pattern_Scanner class GherkinASTParser class ShapeExtractor + class PatternHelpers class DesignReviewCodec class DecisionDocCodec class ProcessApiHybridGeneration class PatternRelationshipModel class DesignReviewGeneration class CliRecipeCodec + class ContextInference + GitModule ..> GitBranchDiff : uses + GitModule ..> GitHelpers : uses SourceMapper ..> DecisionDocCodec : depends on SourceMapper ..> ShapeExtractor : depends on SourceMapper ..> GherkinASTParser : depends on Documentation_Generation_Orchestrator ..> Pattern_Scanner : uses + TransformTypes ..> MasterDataset : uses TransformDataset ..> MasterDataset : uses TransformDataset ..|> PatternRelationshipModel : implements SequenceTransformUtils ..> MasterDataset : uses SequenceTransformUtils ..|> DesignReviewGeneration : implements + RelationshipResolver ..> PatternHelpers : uses + ContextInferenceImpl ..|> ContextInference : implements ProcessApiReferenceGenerator ..|> ProcessApiHybridGeneration : implements DesignReviewGenerator ..> DesignReviewCodec : uses DesignReviewGenerator ..> MasterDataset : uses @@ -397,7 +419,6 @@ graph LR DataAPIOutputShaping["DataAPIOutputShaping"]:::neighbor 
DataAPIArchitectureQueries["DataAPIArchitectureQueries"]:::neighbor end - TagRegistryBuilder ..->|implements| TypeScriptTaxonomyImplementation loadPreambleFromMarkdown___Shared_Markdown_to_SectionBlock_Parser ..->|implements| ProceduralGuideCodec ProjectConfigTypes -->|uses| ConfigurationTypes ProjectConfigTypes -->|uses| ConfigurationPresets @@ -407,6 +428,7 @@ graph LR ArchQueriesImpl -->|uses| ProcessStateAPI ArchQueriesImpl -->|uses| MasterDataset ArchQueriesImpl ..->|implements| DataAPIArchitectureQueries + TagRegistryBuilder ..->|implements| TypeScriptTaxonomyImplementation FSMTransitions ..->|implements| PhaseStateMachineValidation FSMStates ..->|implements| PhaseStateMachineValidation ProcessStateAPI -->|uses| MasterDataset @@ -419,6 +441,21 @@ graph LR ## API Types +### SectionBlock (type) + +```typescript +type SectionBlock = + | HeadingBlock + | ParagraphBlock + | SeparatorBlock + | TableBlock + | ListBlock + | CodeBlock + | MermaidBlock + | CollapsibleBlock + | LinkOutBlock; +``` + ### normalizeStatus (function) ````typescript @@ -512,21 +549,6 @@ interface CategoryDefinition { | description | Brief description of the category's purpose and typical patterns | | aliases | Alternative tag names that map to this category (e.g., "es" for "event-sourcing") | -### SectionBlock (type) - -```typescript -type SectionBlock = - | HeadingBlock - | ParagraphBlock - | SeparatorBlock - | TableBlock - | ListBlock - | CodeBlock - | MermaidBlock - | CollapsibleBlock - | LinkOutBlock; -``` - --- ## Behavior Specifications diff --git a/docs-live/reference/VALIDATION-TOOLS-GUIDE.md b/docs-live/reference/VALIDATION-TOOLS-GUIDE.md new file mode 100644 index 00000000..3156b7cf --- /dev/null +++ b/docs-live/reference/VALIDATION-TOOLS-GUIDE.md @@ -0,0 +1,263 @@ +# Validation Tools Guide + +**Purpose:** Reference document: Validation Tools Guide +**Detail Level:** Full reference + +--- + +## Which Command Do I Run? + +```text +Need to check annotation quality? 
+ Yes -> lint-patterns + +Need to check vitest-cucumber compatibility? + Yes -> lint-steps + +Need FSM workflow validation? + Yes -> lint-process + +Need cross-source or DoD validation? + Yes -> validate-patterns + +Running pre-commit hook? + lint-process --staged (default) +``` + +## Command Summary + +| Command | Purpose | When to Use | +| ------------------- | --------------------------------- | --------------------------------------------- | +| `lint-patterns` | Annotation quality | Ensure patterns have required tags | +| `lint-steps` | vitest-cucumber compatibility | After writing/modifying feature or step files | +| `lint-process` | FSM workflow enforcement | Pre-commit hooks, CI pipelines | +| `validate-patterns` | Cross-source + DoD + anti-pattern | Release validation, comprehensive | + +--- + +## lint-patterns + +Validates `@-*` annotation quality in TypeScript files. + +```bash +npx lint-patterns -i "src/**/*.ts" +npx lint-patterns -i "src/**/*.ts" --strict # CI +``` + +### CLI Flags + +| Flag | Short | Description | Default | +| ------------------------ | ----- | ----------------------------------- | -------- | +| `--input ` | `-i` | Glob pattern (required, repeatable) | required | +| `--exclude ` | `-e` | Exclude pattern (repeatable) | - | +| `--base-dir ` | `-b` | Base directory | cwd | +| `--strict` | | Treat warnings as errors | false | +| `--format ` | `-f` | Output: `pretty` or `json` | `pretty` | +| `--quiet` | `-q` | Only show errors | false | +| `--min-severity ` | | `error`, `warning`, `info` | - | + +### Rules + +| Rule | Severity | What It Checks | +| -------------------------------- | -------- | -------------------------------------------------- | +| `missing-pattern-name` | error | Must have `@-pattern` | +| `invalid-status` | error | Status must be valid FSM value | +| `tautological-description` | error | Description cannot just repeat name | +| `pattern-conflict-in-implements` | error | Pattern cannot implement itself (circular ref) | +| 
`missing-relationship-target` | warning | Relationship targets must reference known patterns | +| `missing-status` | warning | Should have status tag | +| `missing-when-to-use` | warning | Should have "When to Use" section | +| `missing-relationships` | info | Consider adding uses/used-by | + +--- + +## lint-steps + +Static analyzer for vitest-cucumber feature/step compatibility. Catches mismatches that cause cryptic runtime failures. + +```bash +pnpm lint:steps # Standard check +pnpm lint:steps --strict # CI +``` + +12 rules across 3 categories (9 error, 3 warning). + +### Feature File Rules + +| Rule ID | Severity | What It Catches | +| ------------------------ | -------- | ------------------------------------------------------------------------- | +| `hash-in-description` | error | `#` at line start inside `"""` block in description -- terminates parsing | +| `keyword-in-description` | error | Description line starting with Given/When/Then/And/But -- breaks parser | +| `duplicate-and-step` | error | Multiple `And` steps with identical text in same scenario | +| `dollar-in-step-text` | warning | `$` in step text (outside quotes) causes matching issues | +| `hash-in-step-text` | warning | Mid-line `#` in step text (outside quotes) silently truncates the step | + +### Step Definition Rules + +| Rule ID | Severity | What It Catches | +| ------------------------- | -------- | ----------------------------------------------------------- | +| `regex-step-pattern` | error | Regex pattern in step registration -- use string patterns | +| `unsupported-phrase-type` | error | `{phrase}` in step string -- use `{string}` instead | +| `repeated-step-pattern` | error | Same pattern registered twice -- second silently overwrites | + +### Cross-File Rules + +| Rule ID | Severity | What It Catches | +| ---------------------------------- | -------- | -------------------------------------------------------------------- | +| `scenario-outline-function-params` | error | Function params 
in ScenarioOutline callback (should use variables) | +| `missing-and-destructuring` | error | Feature has `And` steps but step file does not destructure `And` | +| `missing-rule-wrapper` | error | Feature has `Rule:` blocks but step file does not destructure `Rule` | +| `outline-quoted-values` | warning | Quoted values in Outline steps instead of `<param>` syntax | + +### CLI Reference + +| Flag | Short | Description | Default | +| ------------------ | ----- | -------------------------- | -------- | +| `--strict` | | Treat warnings as errors | false | +| `--format <format>` | | Output: `pretty` or `json` | `pretty` | +| `--base-dir <dir>` | `-b` | Base directory for paths | cwd | + +--- + +## lint-process + +FSM validation for delivery workflow. Enforces status transitions and protection levels. + +```bash +npx lint-process --staged # Pre-commit (default) +npx lint-process --all --strict # CI pipeline +``` + +**What it validates:** + +- Status transitions follow FSM (`roadmap` -> `active` -> `completed`) +- Completed specs require unlock reason to modify +- Active specs cannot add new deliverables (scope protection) +- Session scope rules (optional) + +For detailed rules, escape hatches, and error fixes, see the [Process Guard Reference](PROCESS-GUARD-REFERENCE.md). + +--- + +## validate-patterns + +Cross-source validator combining multiple checks.
+ +```bash +npx validate-patterns \ + -i "src/**/*.ts" \ + -F "specs/**/*.feature" \ + --dod \ + --anti-patterns +``` + +### CLI Flags + +| Flag | Short | Description | Default | +| ----------------- | ----- | ------------------------------------------------ | -------- | +| `--input` | `-i` | Glob for TypeScript files (required, repeatable) | required | +| `--features` | `-F` | Glob for Gherkin files (required, repeatable) | required | +| `--exclude` | `-e` | Exclude pattern (repeatable) | - | +| `--base-dir` | `-b` | Base directory | cwd | +| `--strict` | | Treat warnings as errors (exit 2) | false | +| `--verbose` | | Show info-level messages | false | +| `--format` | `-f` | Output: `pretty` or `json` | `pretty` | +| `--dod` | | Enable Definition of Done validation | false | +| `--anti-patterns` | | Enable anti-pattern detection | false | + +### Anti-Pattern Detection + +Detects process metadata tags that belong in feature files but appear in TypeScript code: + +| Tag Suffix (Feature-Only) | What It Tracks | +| ------------------------- | -------------------- | +| `@-quarter` | Timeline metadata | +| `@-team` | Ownership metadata | +| `@-effort` | Estimation metadata | +| `@-completed` | Completion timestamp | + +Additional checks: + +| ID | Severity | What It Detects | +| ----------------- | -------- | ----------------------------------- | +| `process-in-code` | error | Feature-only tags found in TS code | +| `magic-comments` | warning | Generator hints in feature files | +| `scenario-bloat` | warning | Too many scenarios per feature file | +| `mega-feature` | warning | Feature file exceeds line threshold | + +### DoD Validation + +For patterns with `completed` status, checks: + +- All deliverables are in a terminal state (`complete`, `n/a`, or `superseded`) +- At least one `@acceptance-criteria` scenario exists in the spec + +--- + +## CI/CD Integration + +### Recommended package.json Scripts + +```json +{ + "scripts": { + "lint:patterns": "lint-patterns -i 
'src/**/*.ts'", + "lint:steps": "lint-steps", + "lint:steps:ci": "lint-steps --strict", + "lint:process": "lint-process --staged", + "lint:process:ci": "lint-process --all --strict", + "validate:all": "validate-patterns -i 'src/**/*.ts' -F 'specs/**/*.feature' --dod --anti-patterns" + } +} +``` + +### Pre-commit Hook + +```bash +npx lint-process --staged +``` + +### GitHub Actions + +```yaml +- name: Lint annotations + run: npx lint-patterns -i "src/**/*.ts" --strict + +- name: Lint steps + run: npx lint-steps --strict + +- name: Validate patterns + run: npx validate-patterns -i "src/**/*.ts" -F "specs/**/*.feature" --dod --anti-patterns +``` + +--- + +## Exit Codes + +| Code | lint-patterns / lint-steps / lint-process | validate-patterns | +| ---- | -------------------------------------------- | ----------------------------------- | +| `0` | No errors (warnings allowed unless --strict) | No issues found | +| `1` | Errors found (or warnings with --strict) | Errors found | +| `2` | -- | Warnings found (with --strict only) | + +--- + +## Programmatic API + +All validation tools expose programmatic APIs: + +```typescript +// Pattern linting +import { lintFiles, hasFailures } from '@libar-dev/delivery-process/lint'; + +// Step linting +import { runStepLint, STEP_LINT_RULES } from '@libar-dev/delivery-process/lint'; + +// Process guard +import { deriveProcessState, validateChanges } from '@libar-dev/delivery-process/lint'; + +// Anti-patterns and DoD +import { detectAntiPatterns, validateDoD } from '@libar-dev/delivery-process/validation'; +``` + +--- diff --git a/docs-sources/annotation-guide.md b/docs-sources/annotation-guide.md index 7d0898ce..31b5d270 100644 --- a/docs-sources/annotation-guide.md +++ b/docs-sources/annotation-guide.md @@ -102,6 +102,93 @@ For Zod files, extract the **schema constant** (with `Schema` suffix), not the i --- +## Annotation Patterns by File Type + +### Zod Schema Files + +```typescript +/** + * @libar-docs + * @libar-docs-pattern 
MasterDataset + * @libar-docs-status completed + * @libar-docs-extract-shapes MasterDatasetSchema, StatusGroupsSchema, PhaseGroupSchema + */ +``` + +### Interface / Type Files + +```typescript +/** + * @libar-docs + * @libar-docs-pattern DocumentGenerator + * @libar-docs-status completed + * @libar-docs-extract-shapes DocumentGenerator, GeneratorContext, GeneratorOutput + */ +``` + +### Function / Service Files + +```typescript +/** + * @libar-docs + * @libar-docs-pattern TransformDataset + * @libar-docs-status completed + * @libar-docs-arch-context generator + * @libar-docs-arch-layer application + * @libar-docs-extract-shapes transformToMasterDataset, RuntimeMasterDataset + */ +``` + +### Gherkin Feature Files + +```gherkin +@libar-docs +@libar-docs-pattern:ProcessGuardLinter +@libar-docs-status:roadmap +@libar-docs-phase:99 +@libar-docs-depends-on:StateMachine,ValidationRules +Feature: Process Guard Linter + + Background: Deliverables + Given the following deliverables: + | Deliverable | Status | Location | + | State derivation | Pending | src/lint/derive.ts | + + Rule: Completed specs require unlock reason + + **Invariant:** A completed spec cannot be modified without explicit unlock. + **Rationale:** Prevents accidental regression of validated work. + + @acceptance-criteria @happy-path + Scenario: Reject modification without unlock + Given a spec with status "completed" + When I modify a deliverable + Then validation fails with "completed-protection" +``` + +--- + +## Tag Groups Quick Reference + +Tags are organized into 12 functional groups. For the complete reference with all values, see the generated [Taxonomy Reference](../docs-live/TAXONOMY.md). 
+ +| Group | Tags (representative) | Format Types | +| ---------------- | ---------------------------------------------------- | ------------------------- | +| **Core** | `pattern`, `status`, `core`, `brief` | value, enum, flag | +| **Relationship** | `uses`, `used-by`, `implements`, `depends-on` | csv, value | +| **Process** | `phase`, `quarter`, `effort`, `team`, `priority` | number, value, enum | +| **PRD** | `product-area`, `user-role`, `business-value` | value | +| **ADR** | `adr`, `adr-status`, `adr-category`, `adr-theme` | value, enum | +| **Hierarchy** | `level`, `parent`, `title` | enum, value, quoted-value | +| **Traceability** | `executable-specs`, `roadmap-spec`, `behavior-file` | csv, value | +| **Discovery** | `discovered-gap`, `discovered-improvement` | value (repeatable) | +| **Architecture** | `arch-role`, `arch-context`, `arch-layer`, `include` | enum, value, csv | +| **Extraction** | `extract-shapes`, `shape` | csv, value | +| **Stub** | `target`, `since` | value | +| **Convention** | `convention` | csv (enum values) | + +--- + ## Verification ### CLI Commands diff --git a/docs-sources/configuration-guide.md b/docs-sources/configuration-guide.md new file mode 100644 index 00000000..7db4889b --- /dev/null +++ b/docs-sources/configuration-guide.md @@ -0,0 +1,244 @@ +## Quick Reference + +| Preset | Tag Prefix | Categories | Use Case | +| ----------------------------- | -------------- | ---------- | ------------------------------------ | +| **`libar-generic`** (default) | `@libar-docs-` | 3 | Simple projects (this package) | +| `generic` | `@docs-` | 3 | Simple projects with `@docs-` prefix | +| `ddd-es-cqrs` | `@libar-docs-` | 21 | DDD/Event Sourcing architectures | + +```typescript +// delivery-process.config.ts +import { defineConfig } from '@libar-dev/delivery-process/config'; + +// Default: libar-generic preset (simple 3-category taxonomy) +export default defineConfig({ + preset: 'libar-generic', + sources: { + typescript: ['src/**/*.ts'], + 
features: ['specs/*.feature'], + }, + output: { directory: 'docs-generated' }, +}); +``` + +--- + +## Preset Selection + +### When to Use Each Preset + +| Preset | Use When | Categories | +| --------------- | ------------------------------------------------------------ | ---------------------------------------------------------------------------------------- | +| `libar-generic` | Simple projects, standard `@libar-docs-` prefix | 3 (core, api, infra) | +| `generic` | Prefer shorter `@docs-` prefix | 3 (core, api, infra) | +| `ddd-es-cqrs` | DDD architecture with bounded contexts, event sourcing, CQRS | 21 (domain, ddd, bounded-context, event-sourcing, decider, cqrs, saga, projection, etc.) | + +**Design decision:** Presets **replace** the base taxonomy categories entirely (not merged). If you need DDD categories, use the `ddd-es-cqrs` preset. + +### Default Preset Selection + +All entry points default to `libar-generic`: + +| Entry Point | Default Preset | Context | +| ------------------------------ | ------------------------------ | -------------------------------- | +| `defineConfig()` | `libar-generic` (3 categories) | Config file | +| `loadProjectConfig()` fallback | `libar-generic` (3 categories) | CLI tools (no config file found) | +| This package's config file | `libar-generic` (3 categories) | Standalone package usage | + +--- + +## Unified Config File + +The `defineConfig()` function centralizes taxonomy, sources, output, and generator overrides in a single `delivery-process.config.ts` file. CLI tools discover this file automatically. + +### Discovery Order + +1. Current directory: check `delivery-process.config.ts`, then `.js` +2. Walk up to repo root (`.git` folder), checking each directory +3. 
Fall back to libar-generic preset (3 categories, `@libar-docs-` prefix) + +### Config File Format + +```typescript +// delivery-process.config.ts +import { defineConfig } from '@libar-dev/delivery-process/config'; + +export default defineConfig({ + preset: 'libar-generic', + sources: { + typescript: ['src/**/*.ts'], + stubs: ['delivery-process/stubs/**/*.ts'], + features: ['delivery-process/specs/*.feature'], + }, + output: { + directory: 'docs-generated', + overwrite: true, + }, +}); +``` + +### Sources Configuration + +| Field | Type | Description | +| ------------ | ---------- | ---------------------------------------------------- | +| `typescript` | `string[]` | Glob patterns for TypeScript source files (required) | +| `features` | `string[]` | Glob patterns for Gherkin feature files | +| `stubs` | `string[]` | Glob patterns for design stub files | +| `exclude` | `string[]` | Glob patterns to exclude from all scanning | + +Stubs are merged into TypeScript sources at resolution time. No parent directory traversal (`..`) is allowed in globs. + +### Output Configuration + +| Field | Type | Default | Description | +| ----------- | --------- | --------------------- | ----------------------------------- | +| `directory` | `string` | `'docs/architecture'` | Output directory for generated docs | +| `overwrite` | `boolean` | `false` | Overwrite existing files | + +### Generator Overrides + +Some generators need different sources than the base config. 
Use `generatorOverrides` for per-generator customization: + +```typescript +export default defineConfig({ + preset: 'libar-generic', + sources: { + typescript: ['src/**/*.ts'], + features: ['delivery-process/specs/*.feature'], + }, + output: { directory: 'docs-generated', overwrite: true }, + generatorOverrides: { + changelog: { + additionalFeatures: ['delivery-process/decisions/*.feature'], + }, + 'doc-from-decision': { + replaceFeatures: ['delivery-process/decisions/*.feature'], + }, + }, +}); +``` + +| Override Field | Description | +| -------------------- | ---------------------------------------------------- | +| `additionalFeatures` | Feature globs appended to base features | +| `additionalInput` | TypeScript globs appended to base TypeScript sources | +| `replaceFeatures` | Feature globs used INSTEAD of base features | +| `outputDirectory` | Override output directory for this generator | + +**Constraint:** `replaceFeatures` and `additionalFeatures` are mutually exclusive when both are non-empty. + +--- + +## Monorepo Setup + +``` +my-monorepo/ + delivery-process.config.ts # Repo-level: ddd-es-cqrs + packages/ + my-package/ + delivery-process.config.ts # Package-level: generic +``` + +CLI tools use the nearest config file to the working directory. Each package can have its own preset and source globs. 
+ +--- + +## Custom Configuration + +### Custom Tag Prefix + +Keep a preset's taxonomy but change the prefix: + +```typescript +export default defineConfig({ + preset: 'libar-generic', + tagPrefix: '@team-', + fileOptInTag: '@team', + sources: { typescript: ['src/**/*.ts'] }, +}); + +// Your annotations: +// /** @team */ +// /** @team-pattern DualSourceExtractor */ +// /** @team-core */ +``` + +### Custom Categories + +Define your own taxonomy: + +```typescript +export default defineConfig({ + tagPrefix: '@docs-', + fileOptInTag: '@docs', + categories: [ + { tag: 'scanner', domain: 'Scanner', priority: 1, description: 'File scanning', aliases: [] }, + { + tag: 'extractor', + domain: 'Extractor', + priority: 2, + description: 'Pattern extraction', + aliases: [], + }, + { + tag: 'generator', + domain: 'Generator', + priority: 3, + description: 'Doc generation', + aliases: [], + }, + ], + sources: { typescript: ['src/**/*.ts'] }, +}); +``` + +--- + +## Programmatic Config Loading + +For tools that need to load configuration files: + +```typescript +import { loadProjectConfig } from '@libar-dev/delivery-process/config'; + +const result = await loadProjectConfig(process.cwd()); + +if (!result.ok) { + console.error(result.error.message); + process.exit(1); +} + +const resolved = result.value; +// resolved.instance - DeliveryProcessInstance (registry + regexBuilders) +// resolved.project - ResolvedProjectConfig (sources, output, generators) +// resolved.isDefault - true if no config file found +// resolved.configPath - config file path (if found) +``` + +For per-generator source resolution: + +```typescript +import { mergeSourcesForGenerator } from '@libar-dev/delivery-process/config'; + +const effectiveSources = mergeSourcesForGenerator( + resolved.project.sources, + 'changelog', + resolved.project.generatorOverrides +); +// effectiveSources.typescript - merged TypeScript globs +// effectiveSources.features - merged or replaced feature globs +``` + +--- + +## Backward 
Compatibility + +The legacy `createDeliveryProcess()` API is still exported and supported. Config files using the old format are detected automatically by `loadProjectConfig()` and wrapped in a `ResolvedConfig` with default project settings. + +```typescript +// Legacy format (still works) +import { createDeliveryProcess } from '@libar-dev/delivery-process'; +export default createDeliveryProcess({ preset: 'ddd-es-cqrs' }); +``` + +New projects should use `defineConfig()` for the unified configuration experience. diff --git a/docs-sources/gherkin-patterns.md b/docs-sources/gherkin-patterns.md new file mode 100644 index 00000000..f10f6e82 --- /dev/null +++ b/docs-sources/gherkin-patterns.md @@ -0,0 +1,261 @@ +## Essential Patterns + +### Roadmap Spec Structure + +Roadmap specs define planned work with Problem/Solution descriptions and a Background deliverables table. + +```gherkin +@libar-docs +@libar-docs-pattern:ProcessGuardLinter +@libar-docs-status:roadmap +@libar-docs-phase:99 +Feature: Process Guard Linter + + **Problem:** + During planning and implementation sessions, accidental modifications occur: + - Specs outside the intended scope get modified in bulk + - Completed/approved work gets inadvertently changed + + **Solution:** + Implement a Decider-based linter that: + 1. Derives process state from existing file annotations + 2. Validates proposed changes against derived state + 3. 
Enforces file protection levels per PDR-005 + + Background: Deliverables + Given the following deliverables: + | Deliverable | Status | Location | + | State derivation | Pending | src/lint/process-guard/derive.ts | + | Git diff change detection | Pending | src/lint/process-guard/detect.ts | + | CLI integration | Pending | src/cli/lint-process.ts | +``` + +**Key elements:** + +- `@libar-docs` -- bare opt-in marker (required) +- `@libar-docs-pattern:Name` -- unique identifier (required) +- `@libar-docs-status:roadmap` -- FSM state +- `**Problem:**` / `**Solution:**` -- extracted by generators +- Background deliverables table -- tracks implementation progress + +--- + +### Rule Blocks for Business Constraints + +Use `Rule:` to group related scenarios under a business constraint. + +```gherkin +Rule: Status transitions must follow PDR-005 FSM + + **Invariant:** Only valid FSM transitions are allowed. + + **Rationale:** The FSM enforces deliberate progression through planning, implementation, and completion. 
+ + **Verified by:** Valid transitions pass, Invalid transitions fail + + @happy-path + Scenario Outline: Valid transitions pass validation + Given a file with status "<from>" + When the status changes to "<to>" + Then validation passes + + Examples: + | from | to | + | roadmap | active | + | roadmap | deferred | + | active | completed | + | deferred | roadmap | +``` + +| Element | Purpose | Extracted By | +| ------------------ | --------------------------------------- | ------------------------------------------- | +| `**Invariant:**` | Business constraint (what must be true) | Business Rules generator | +| `**Rationale:**` | Business justification (why it exists) | Business Rules generator | +| `**Verified by:**` | Comma-separated scenario names | Multiple codecs (Business Rules, Reference) | + +--- + +### Scenario Outline for Variations + +When the same pattern applies with different inputs, use `Scenario Outline` with an `Examples` table: + +```gherkin +Scenario Outline: Protection levels by status + Given a file with status "<status>" + When checking protection level + Then protection is "<protection>" + And unlock required is "<unlock>" + + Examples: + | status | protection | unlock | + | roadmap | none | no | + | active | scope | no | + | completed | hard | yes | + | deferred | none | no | +``` + +--- + +### Executable Test Features + +Test features focus on behavior verification with section dividers for organization. + +```gherkin +@behavior @scanner-core +@libar-docs-pattern:ScannerCore +Feature: Scanner Core Integration + + Background: + Given a scanner integration context with temp directory + + @happy-path + Scenario: Scan files and extract directives + Given a file "src/auth.ts" with valid content + When scanning with pattern "src/**/*.ts" + Then the scan should succeed with 1 file +``` + +Section comments (`# ====`) improve readability in large feature files.
+ +--- + +## DataTable and DocString Usage + +### Background DataTable (Reference Data) + +Use for data that applies to all scenarios -- deliverables, definitions, etc. + +```gherkin +Background: Deliverables + Given the following deliverables: + | Deliverable | Status | Location | Tests | + | Category types | Done | src/types.ts | Yes | + | Validation logic | Pending | src/validate.ts | Yes | +``` + +### Scenario DataTable (Test Data) + +Use for scenario-specific test inputs. + +```gherkin +Scenario: Session file defines modification scope + Given a session file with in-scope specs: + | spec | intent | + | mvp-workflow-implementation | modify | + | short-form-tag-migration | review | + When deriving process state + Then "mvp-workflow-implementation" is modifiable +``` + +### DocString for Code Examples + +Use `"""typescript` for code blocks. Essential when content contains pipes or special characters. + +```gherkin +Scenario: Extract directive from TypeScript + Given a file with content: + """typescript + /** @libar-docs */ + export function authenticate() {} + """ + When scanning the file + Then directive should have tag "@libar-docs-core" +``` + +--- + +## Tag Conventions + +### Semantic Tags (Extracted by Generators) + +| Tag | Purpose | +| ---------------------- | ------------------------------------------------- | +| `@acceptance-criteria` | Required for DoD validation of completed patterns | +| `@happy-path` | Primary success scenario | +| `@validation` | Input validation, constraint checks | +| `@business-rule` | Business invariant verification | +| `@business-failure` | Expected business failure scenario | +| `@edge-case` | Boundary conditions, unusual inputs | +| `@error-handling` | Error recovery, graceful degradation | + +--- + +## Feature Description Patterns + +Choose headers that fit your pattern: + +| Structure | Headers | Best For | +| ---------------- | ------------------------------------------ | ------------------------- | +| Problem/Solution | 
`**Problem:**`, `**Solution:**` | Pain point to fix | +| Value-First | `**Business Value:**`, `**How It Works:**` | TDD-style, Gherkin spirit | +| Context/Approach | `**Context:**`, `**Approach:**` | Technical patterns | + +The **Problem/Solution** pattern is the dominant style in this codebase. + +--- + +## Feature File Rich Content + +Feature files serve dual purposes: **executable specs** and **documentation source**. Content in the Feature description section appears in generated docs. + +### Code-First Principle + +**Prefer code stubs over DocStrings for complex examples.** Feature files should reference code, not duplicate it. + +| Approach | When to Use | +| ---------------------------- | ------------------------------------------------------------ | +| DocStrings (`"""typescript`) | Brief examples (5-10 lines), current/target state comparison | +| Code stub reference | Complex APIs, interfaces, full implementations | + +Code stubs are annotated TypeScript files with `throw new Error("not yet implemented")`, located in `delivery-process/stubs/{pattern-name}/`. + +### Valid Rich Content + +| Content Type | Syntax | Appears in Docs | +| ------------- | ----------------------- | ---------------- | +| Plain text | Regular paragraphs | Yes | +| Bold/emphasis | `**bold**`, `*italic*` | Yes | +| Tables | Markdown pipe tables | Yes | +| Lists | `- item` or `1. 
item` | Yes | +| DocStrings | `"""typescript`...`"""` | Yes (code block) | +| Comments | `# comment` | No (ignored) | + +--- + +## Syntax Notes and Gotchas + +### Forbidden in Feature Descriptions + +| Forbidden | Why | Alternative | +| ----------------------------- | -------------------------------- | ----------------------------------- | +| Code fences (triple backtick) | Not Gherkin syntax | Use DocStrings with lang hint | +| `@prefix` in free text | Interpreted as Gherkin tag | Remove `@` or use `libar-dev` | +| Nested DocStrings | Gherkin parser error | Reference code stub file | +| `#` at line start | Gherkin comment -- kills parsing | Remove, use `//`, or step DocString | + +### Tag Value Constraints + +**Tag values cannot contain spaces.** Use hyphens: + +| Invalid | Valid | +| -------------------------------- | ------------------------------- | +| `@unlock-reason:Fix for issue` | `@unlock-reason:Fix-for-issue` | +| `@libar-docs-pattern:My Pattern` | `@libar-docs-pattern:MyPattern` | + +For values with spaces, use the `quoted-value` format where supported: + +```gherkin +@libar-docs-usecase "When handling command failures" +``` + +--- + +## Quick Reference + +| Element | Use For | Example | +| -------------------- | -------------------------------------- | ----------------------------------- | +| Background DataTable | Deliverables, shared reference data | Deliverables table in roadmap specs | +| Rule: | Group scenarios by business constraint | Invariant + Rationale + Verified by | +| Scenario Outline | Same pattern with variations | Examples tables with multiple rows | +| DocString `"""` | Code examples, content with pipes | TypeScript/Gherkin code blocks | +| Section comments `#` | Organize large feature files | `# ========= Section ==========` | diff --git a/docs-sources/process-guard.md b/docs-sources/process-guard.md new file mode 100644 index 00000000..880ee0fd --- /dev/null +++ b/docs-sources/process-guard.md @@ -0,0 +1,155 @@ +## Quick Reference + 
+### Protection Levels + +| Status | Level | Allowed | Blocked | +| ----------- | ----- | -------------------------- | ------------------------------------- | +| `roadmap` | none | Full editing | - | +| `deferred` | none | Full editing | - | +| `active` | scope | Edit existing deliverables | Adding new deliverables | +| `completed` | hard | Nothing | Any change without `@*-unlock-reason` | + +### Valid Transitions + +| From | To | Notes | +| ----------- | ---------------------- | -------------------------------- | +| `roadmap` | `active`, `deferred` | Start work or postpone | +| `active` | `completed`, `roadmap` | Finish or regress if blocked | +| `deferred` | `roadmap` | Resume planning | +| `completed` | _(none)_ | Terminal -- use unlock to modify | + +### Escape Hatches + +| Situation | Solution | Example | +| ----------------------------- | ---------------------------------- | --------------------------------------------- | +| Fix bug in completed spec | Add `@*-unlock-reason:'reason'` | `@libar-docs-unlock-reason:'Fix typo'` | +| Modify outside session scope | `--ignore-session` flag | `lint-process --staged --ignore-session` | +| CI treats warnings as errors | `--strict` flag | `lint-process --all --strict` | +| Skip workflow (legacy import) | Multiple transitions in one commit | Set `roadmap` then `completed` in same commit | + +--- + +## CLI Usage + +```bash +lint-process [options] +``` + +### Modes + +| Flag | Description | Use Case | +| ---------- | --------------------------------- | ------------------ | +| `--staged` | Validate staged changes (default) | Pre-commit hooks | +| `--all` | Validate all changes vs main | CI/CD pipelines | +| `--files` | Validate specific files | Development checks | + +### Options + +| Flag | Description | +| ------------------- | -------------------------------------- | +| `--strict` | Treat warnings as errors (exit 1) | +| `--ignore-session` | Skip session scope rules | +| `--show-state` | Debug: show derived process state 
| +| `--format json` | Machine-readable output | +| `-f, --file ` | Specific file to validate (repeatable) | +| `-b, --base-dir` | Base directory for file resolution | + +### Exit Codes + +| Code | Meaning | +| ---- | -------------------------------------------- | +| `0` | No errors (warnings allowed unless --strict) | +| `1` | Errors found | + +### Examples + +```bash +lint-process --staged # Pre-commit hook (recommended) +lint-process --all --strict # CI pipeline with strict mode +lint-process --file specs/my-feature.feature # Validate specific file +lint-process --staged --show-state # Debug: see derived state +lint-process --staged --ignore-session # Override session scope +``` + +--- + +## Pre-commit Setup + +Configure Process Guard as a pre-commit hook using Husky. + +```bash +#!/usr/bin/env sh +. "$(dirname -- "$0")/_/husky.sh" + +npx lint-process --staged +``` + +### package.json Scripts + +```json +{ + "scripts": { + "lint:process": "lint-process --staged", + "lint:process:ci": "lint-process --all --strict" + } +} +``` + +--- + +## Programmatic API + +Use Process Guard programmatically for custom validation workflows. + +```typescript +import { + deriveProcessState, + detectStagedChanges, + validateChanges, + hasErrors, + summarizeResult, +} from '@libar-dev/delivery-process/lint'; + +// 1. Derive state from annotations +const state = (await deriveProcessState({ baseDir: '.' })).value; + +// 2. Detect changes +const changes = detectStagedChanges('.').value; + +// 3. Validate +const { result } = validateChanges({ + state, + changes, + options: { strict: false, ignoreSession: false }, +}); + +// 4. 
Handle results +if (hasErrors(result)) { + console.log(summarizeResult(result)); + process.exit(1); +} +``` + +### API Functions + +| Category | Function | Description | +| -------- | ------------------------ | --------------------------------- | +| State | deriveProcessState(cfg) | Build state from file annotations | +| Changes | detectStagedChanges(dir) | Parse staged git diff | +| Changes | detectBranchChanges(dir) | Parse all changes vs main | +| Validate | validateChanges(input) | Run all validation rules | +| Results | hasErrors(result) | Check for blocking errors | +| Results | summarizeResult(result) | Human-readable summary | + +--- + +## Architecture + +Process Guard uses the Decider pattern: pure functions with no I/O. + +```mermaid +graph LR + A[deriveProcessState] --> C[validateChanges] + B[detectChanges] --> C + C --> D[ValidationResult] +``` diff --git a/docs-sources/validation-tools-guide.md b/docs-sources/validation-tools-guide.md new file mode 100644 index 00000000..c0d5da69 --- /dev/null +++ b/docs-sources/validation-tools-guide.md @@ -0,0 +1,254 @@ +## Which Command Do I Run? + +```text +Need to check annotation quality? + Yes -> lint-patterns + +Need to check vitest-cucumber compatibility? + Yes -> lint-steps + +Need FSM workflow validation? + Yes -> lint-process + +Need cross-source or DoD validation? + Yes -> validate-patterns + +Running pre-commit hook? 
+ lint-process --staged (default) +``` + +## Command Summary + +| Command | Purpose | When to Use | +| ------------------- | --------------------------------- | --------------------------------------------- | +| `lint-patterns` | Annotation quality | Ensure patterns have required tags | +| `lint-steps` | vitest-cucumber compatibility | After writing/modifying feature or step files | +| `lint-process` | FSM workflow enforcement | Pre-commit hooks, CI pipelines | +| `validate-patterns` | Cross-source + DoD + anti-pattern | Release validation, comprehensive | + +--- + +## lint-patterns + +Validates `@-*` annotation quality in TypeScript files. + +```bash +npx lint-patterns -i "src/**/*.ts" +npx lint-patterns -i "src/**/*.ts" --strict # CI +``` + +### CLI Flags + +| Flag | Short | Description | Default | +| ------------------------ | ----- | ----------------------------------- | -------- | +| `--input ` | `-i` | Glob pattern (required, repeatable) | required | +| `--exclude ` | `-e` | Exclude pattern (repeatable) | - | +| `--base-dir ` | `-b` | Base directory | cwd | +| `--strict` | | Treat warnings as errors | false | +| `--format ` | `-f` | Output: `pretty` or `json` | `pretty` | +| `--quiet` | `-q` | Only show errors | false | +| `--min-severity ` | | `error`, `warning`, `info` | - | + +### Rules + +| Rule | Severity | What It Checks | +| -------------------------------- | -------- | -------------------------------------------------- | +| `missing-pattern-name` | error | Must have `@-pattern` | +| `invalid-status` | error | Status must be valid FSM value | +| `tautological-description` | error | Description cannot just repeat name | +| `pattern-conflict-in-implements` | error | Pattern cannot implement itself (circular ref) | +| `missing-relationship-target` | warning | Relationship targets must reference known patterns | +| `missing-status` | warning | Should have status tag | +| `missing-when-to-use` | warning | Should have "When to Use" section | +| 
`missing-relationships` | info | Consider adding uses/used-by | + +--- + +## lint-steps + +Static analyzer for vitest-cucumber feature/step compatibility. Catches mismatches that cause cryptic runtime failures. + +```bash +pnpm lint:steps # Standard check +pnpm lint:steps --strict # CI +``` + +12 rules across 3 categories (9 error, 3 warning). + +### Feature File Rules + +| Rule ID | Severity | What It Catches | +| ------------------------ | -------- | ------------------------------------------------------------------------- | +| `hash-in-description` | error | `#` at line start inside `"""` block in description -- terminates parsing | +| `keyword-in-description` | error | Description line starting with Given/When/Then/And/But -- breaks parser | +| `duplicate-and-step` | error | Multiple `And` steps with identical text in same scenario | +| `dollar-in-step-text` | warning | `$` in step text (outside quotes) causes matching issues | +| `hash-in-step-text` | warning | Mid-line `#` in step text (outside quotes) silently truncates the step | + +### Step Definition Rules + +| Rule ID | Severity | What It Catches | +| ------------------------- | -------- | ----------------------------------------------------------- | +| `regex-step-pattern` | error | Regex pattern in step registration -- use string patterns | +| `unsupported-phrase-type` | error | `{phrase}` in step string -- use `{string}` instead | +| `repeated-step-pattern` | error | Same pattern registered twice -- second silently overwrites | + +### Cross-File Rules + +| Rule ID | Severity | What It Catches | +| ---------------------------------- | -------- | -------------------------------------------------------------------- | +| `scenario-outline-function-params` | error | Function params in ScenarioOutline callback (should use variables) | +| `missing-and-destructuring` | error | Feature has `And` steps but step file does not destructure `And` | +| `missing-rule-wrapper` | error | Feature has `Rule:` blocks but 
step file does not destructure `Rule` | +| `outline-quoted-values` | warning | Quoted values in Outline steps instead of `` syntax | + +### CLI Reference + +| Flag | Short | Description | Default | +| ------------------ | ----- | -------------------------- | -------- | +| `--strict` | | Treat warnings as errors | false | +| `--format ` | | Output: `pretty` or `json` | `pretty` | +| `--base-dir ` | `-b` | Base directory for paths | cwd | + +--- + +## lint-process + +FSM validation for delivery workflow. Enforces status transitions and protection levels. + +```bash +npx lint-process --staged # Pre-commit (default) +npx lint-process --all --strict # CI pipeline +``` + +**What it validates:** + +- Status transitions follow FSM (`roadmap` -> `active` -> `completed`) +- Completed specs require unlock reason to modify +- Active specs cannot add new deliverables (scope protection) +- Session scope rules (optional) + +For detailed rules, escape hatches, and error fixes, see the [Process Guard Reference](PROCESS-GUARD-REFERENCE.md). + +--- + +## validate-patterns + +Cross-source validator combining multiple checks. 
+ +```bash +npx validate-patterns \ + -i "src/**/*.ts" \ + -F "specs/**/*.feature" \ + --dod \ + --anti-patterns +``` + +### CLI Flags + +| Flag | Short | Description | Default | +| ----------------- | ----- | ------------------------------------------------ | -------- | +| `--input` | `-i` | Glob for TypeScript files (required, repeatable) | required | +| `--features` | `-F` | Glob for Gherkin files (required, repeatable) | required | +| `--exclude` | `-e` | Exclude pattern (repeatable) | - | +| `--base-dir` | `-b` | Base directory | cwd | +| `--strict` | | Treat warnings as errors (exit 2) | false | +| `--verbose` | | Show info-level messages | false | +| `--format` | `-f` | Output: `pretty` or `json` | `pretty` | +| `--dod` | | Enable Definition of Done validation | false | +| `--anti-patterns` | | Enable anti-pattern detection | false | + +### Anti-Pattern Detection + +Detects process metadata tags that belong in feature files but appear in TypeScript code: + +| Tag Suffix (Feature-Only) | What It Tracks | +| ------------------------- | -------------------- | +| `@-quarter` | Timeline metadata | +| `@-team` | Ownership metadata | +| `@-effort` | Estimation metadata | +| `@-completed` | Completion timestamp | + +Additional checks: + +| ID | Severity | What It Detects | +| ----------------- | -------- | ----------------------------------- | +| `process-in-code` | error | Feature-only tags found in TS code | +| `magic-comments` | warning | Generator hints in feature files | +| `scenario-bloat` | warning | Too many scenarios per feature file | +| `mega-feature` | warning | Feature file exceeds line threshold | + +### DoD Validation + +For patterns with `completed` status, checks: + +- All deliverables are in a terminal state (`complete`, `n/a`, or `superseded`) +- At least one `@acceptance-criteria` scenario exists in the spec + +--- + +## CI/CD Integration + +### Recommended package.json Scripts + +```json +{ + "scripts": { + "lint:patterns": "lint-patterns -i 
'src/**/*.ts'", + "lint:steps": "lint-steps", + "lint:steps:ci": "lint-steps --strict", + "lint:process": "lint-process --staged", + "lint:process:ci": "lint-process --all --strict", + "validate:all": "validate-patterns -i 'src/**/*.ts' -F 'specs/**/*.feature' --dod --anti-patterns" + } +} +``` + +### Pre-commit Hook + +```bash +npx lint-process --staged +``` + +### GitHub Actions + +```yaml +- name: Lint annotations + run: npx lint-patterns -i "src/**/*.ts" --strict + +- name: Lint steps + run: npx lint-steps --strict + +- name: Validate patterns + run: npx validate-patterns -i "src/**/*.ts" -F "specs/**/*.feature" --dod --anti-patterns +``` + +--- + +## Exit Codes + +| Code | lint-patterns / lint-steps / lint-process | validate-patterns | +| ---- | -------------------------------------------- | ----------------------------------- | +| `0` | No errors (warnings allowed unless --strict) | No issues found | +| `1` | Errors found (or warnings with --strict) | Errors found | +| `2` | -- | Warnings found (with --strict only) | + +--- + +## Programmatic API + +All validation tools expose programmatic APIs: + +```typescript +// Pattern linting +import { lintFiles, hasFailures } from '@libar-dev/delivery-process/lint'; + +// Step linting +import { runStepLint, STEP_LINT_RULES } from '@libar-dev/delivery-process/lint'; + +// Process guard +import { deriveProcessState, validateChanges } from '@libar-dev/delivery-process/lint'; + +// Anti-patterns and DoD +import { detectAntiPatterns, validateDoD } from '@libar-dev/delivery-process/validation'; +``` diff --git a/docs/ANNOTATION-GUIDE.md b/docs/ANNOTATION-GUIDE.md index a1609583..23be8da7 100644 --- a/docs/ANNOTATION-GUIDE.md +++ b/docs/ANNOTATION-GUIDE.md @@ -1,8 +1,6 @@ # Annotation Guide -> **Generated Reference Available:** Comprehensive annotation reference with -> tag tables and conventions is generated at `docs-live/reference/ANNOTATION-REFERENCE.md`. -> Run `pnpm docs:all` to regenerate. 
+> **Deprecated:** This document is superseded by the auto-generated [Annotation Reference Guide](../docs-live/reference/ANNOTATION-REFERENCE.md) which includes all content from this guide plus auto-updated tag tables. This file is preserved for reference only. How to annotate TypeScript and Gherkin files for pattern extraction, documentation generation, and architecture diagrams. diff --git a/docs/ARCHITECTURE.md b/docs/ARCHITECTURE.md index 7e035021..dad5be57 100644 --- a/docs/ARCHITECTURE.md +++ b/docs/ARCHITECTURE.md @@ -1,5 +1,7 @@ # Architecture: @libar-dev/delivery-process +> **Deprecated:** Architecture documentation is now auto-generated across multiple reference docs: [Architecture Diagram](../docs-live/ARCHITECTURE.md), [Architecture Codecs](../docs-live/reference/ARCHITECTURE-CODECS.md), and [Architecture Types](../docs-live/reference/ARCHITECTURE-TYPES.md). This file is preserved for reference only. + > **Code-Driven Documentation Generator with Codec-Based Transformation Pipeline** This document describes the architecture of the `@libar-dev/delivery-process` package, a documentation generator that extracts patterns from TypeScript and Gherkin sources, transforms them through a unified pipeline, and renders them as markdown via typed codecs. diff --git a/docs/CONFIGURATION.md b/docs/CONFIGURATION.md index 7286588a..45dd5f22 100644 --- a/docs/CONFIGURATION.md +++ b/docs/CONFIGURATION.md @@ -1,5 +1,7 @@ # Configuration Guide +> **Deprecated:** This document is superseded by the auto-generated [Configuration Guide](../docs-live/reference/CONFIGURATION-GUIDE.md). This file is preserved for reference only. + Configure tag prefixes, presets, sources, output, and custom taxonomies for `@libar-dev/delivery-process`. > **Prerequisites:** See [README.md](../README.md) for installation and basic usage. 
diff --git a/docs/GHERKIN-PATTERNS.md b/docs/GHERKIN-PATTERNS.md index 8dfb8bba..94097a24 100644 --- a/docs/GHERKIN-PATTERNS.md +++ b/docs/GHERKIN-PATTERNS.md @@ -1,8 +1,8 @@ # Gherkin Patterns Guide -Practical patterns for writing Gherkin specs that work with `delivery-process` generators. +> **Deprecated:** This document is superseded by the auto-generated [Gherkin Authoring Guide](../docs-live/reference/GHERKIN-AUTHORING-GUIDE.md). This file is preserved for reference only. Examples below may be stale and should not be used as templates — refer to the auto-generated guide for current patterns. -> **Tag Reference:** Run `npx generate-tag-taxonomy -o TAG_TAXONOMY.md -f` for the complete tag list. See [TAXONOMY.md](./TAXONOMY.md) for concepts. +Practical patterns for writing Gherkin specs that work with `delivery-process` generators. --- diff --git a/docs/INDEX.md b/docs/INDEX.md index 92617102..e18aa9bd 100644 --- a/docs/INDEX.md +++ b/docs/INDEX.md @@ -1,5 +1,7 @@ # Documentation Index +> **Deprecated:** This document is superseded by the auto-generated [Documentation Index](../docs-live/INDEX.md) which includes live statistics, audience-based navigation, and document roles. This file is preserved for reference only. + **Navigate the full documentation set for `@libar-dev/delivery-process`.** Use section links below for targeted reading. ## Package Metadata diff --git a/docs/METHODOLOGY.md b/docs/METHODOLOGY.md index f81d11c9..3a4b6d75 100644 --- a/docs/METHODOLOGY.md +++ b/docs/METHODOLOGY.md @@ -1,5 +1,7 @@ # Delivery Process Methodology +> **Editorial Document:** This document contains design philosophy and rationale that cannot be auto-generated from code annotations. It is maintained manually. + > **Git is the event store. Documentation artifacts are projections. Annotated code is the single source of truth.** This document explains the _why_ behind `@libar-dev/delivery-process`. For _how_, see [README.md](../README.md) and [TAXONOMY.md](./TAXONOMY.md). 
diff --git a/docs/PROCESS-API.md b/docs/PROCESS-API.md index cbd69b6d..e80573c6 100644 --- a/docs/PROCESS-API.md +++ b/docs/PROCESS-API.md @@ -1,5 +1,7 @@ # Data API CLI +> **Deprecated:** The full CLI documentation is now auto-generated. See [CLI Reference Tables](../docs-live/reference/PROCESS-API-REFERENCE.md) and [Recipes & Workflow Guide](../docs-live/reference/PROCESS-API-RECIPES.md). This file retains only quick-start guidance and operational reference (JSON envelope, exit codes). +> > Query delivery process state directly from annotated source code. > **For AI coding agents:** Start every session with these three commands: diff --git a/docs/PROCESS-GUARD.md b/docs/PROCESS-GUARD.md index 600e55d0..3298a0ec 100644 --- a/docs/PROCESS-GUARD.md +++ b/docs/PROCESS-GUARD.md @@ -1,10 +1,6 @@ # Process Guard -> **Generated Reference Available:** Comprehensive error guide with rationale, -> alternatives, and integration recipes is generated at -> `docs-live/reference/PROCESS-GUARD-REFERENCE.md`. Run `pnpm docs:all` to regenerate. - -> **Quick reference for `lint-process` validation rules, error fixes, and escape hatches.** +> **Deprecated:** This document is superseded by the auto-generated [Process Guard Reference](../docs-live/reference/PROCESS-GUARD-REFERENCE.md) which includes quick reference tables, error guides, CLI usage, and programmatic API. This file is preserved for reference only. Process Guard validates delivery workflow changes at commit time. For FSM concepts and state definitions, see [METHODOLOGY.md](./METHODOLOGY.md#fsm-enforced-workflow). 
diff --git a/docs/SESSION-GUIDES.md b/docs/SESSION-GUIDES.md index 0f86904e..27ce05cb 100644 --- a/docs/SESSION-GUIDES.md +++ b/docs/SESSION-GUIDES.md @@ -1,5 +1,7 @@ # Session Workflow Guides +> **Deprecated:** This document is superseded by the auto-generated [Session Workflow Guide](../docs-live/reference/SESSION-WORKFLOW-GUIDE.md) which includes a Mermaid decision tree, session checklists, and FSM protection reference. This file is preserved for reference only. +> > Quick reference for each session type. For concepts (FSM, two-tier architecture), see [METHODOLOGY.md](./METHODOLOGY.md). --- diff --git a/docs/TAXONOMY.md b/docs/TAXONOMY.md index be3b4f71..1c337636 100644 --- a/docs/TAXONOMY.md +++ b/docs/TAXONOMY.md @@ -1,6 +1,6 @@ # Tag Taxonomy -> **Complete Reference:** The auto-generated [Taxonomy Reference](../docs-live/TAXONOMY.md) contains the full 56-tag catalog with all values and examples. This document explains taxonomy concepts; the generated version is the authoritative lookup. +> **Deprecated:** This document is superseded by the auto-generated [Taxonomy Reference](../docs-live/TAXONOMY.md) which contains the full 60-tag catalog with all values, format types, and preset details. This file is preserved for reference only. The taxonomy defines the vocabulary for pattern annotations: what tags exist, their valid values, and how they're parsed. It's 100% TypeScript-defined in `src/taxonomy/`, providing type safety and IDE autocomplete. diff --git a/docs/VALIDATION.md b/docs/VALIDATION.md index 6711ceb2..484225b0 100644 --- a/docs/VALIDATION.md +++ b/docs/VALIDATION.md @@ -1,6 +1,6 @@ # Validation Tools -> **Generated Reference:** See [VALIDATION-RULES.md](../docs-live/VALIDATION-RULES.md) for auto-generated Process Guard rules extracted from annotated source code. +> **Deprecated:** This document is superseded by the auto-generated [Validation Tools Guide](../docs-live/reference/VALIDATION-TOOLS-GUIDE.md). This file is preserved for reference only. 
Quick reference for choosing and running the right validation command. diff --git a/package.json b/package.json index b2a3fa20..0bb2dc42 100644 --- a/package.json +++ b/package.json @@ -74,6 +74,7 @@ "clean": "rm -rf dist *.tsbuildinfo", "typecheck": "tsc --noEmit", "test": "vitest run", + "test:coverage": "vitest run --coverage", "lint": "eslint src tests", "lint:fix": "eslint src tests --fix", "lint:process": "tsx src/cli/lint-process.ts --staged", @@ -186,19 +187,18 @@ }, "devDependencies": { "@amiceli/vitest-cucumber": "^5.2.1", - "@types/node": "20.10.0", + "@libar-dev/modular-claude-md": "github:libar-dev/modular-claude-md#3a37c573ae8611f1e0e92c00f565bb0ab45e1263", + "@types/node": "^20.10.0", + "@vitest/coverage-v8": "^2.1.9", "eslint": "^9.17.0", "eslint-config-prettier": "^10.1.8", "husky": "^9.1.7", "lint-staged": "^16.2.7", - "pixelmatch": "^7.1.0", "prettier": "^3.8.1", - "quickpickle": "^1.10.1", "tsx": "^4.7.0", "typescript": "^5.7.2", "typescript-eslint": "^8.18.2", - "vitest": "^2.1.8", - "@libar-dev/modular-claude-md": "github:libar-dev/modular-claude-md#3a37c573ae8611f1e0e92c00f565bb0ab45e1263" + "vitest": "^2.1.8" }, "files": [ "dist", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 86d81e9c..afe30857 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -31,8 +31,11 @@ importers: specifier: github:libar-dev/modular-claude-md#3a37c573ae8611f1e0e92c00f565bb0ab45e1263 version: https://codeload.github.com/libar-dev/modular-claude-md/tar.gz/3a37c573ae8611f1e0e92c00f565bb0ab45e1263 '@types/node': - specifier: 20.10.0 + specifier: ^20.10.0 version: 20.10.0 + '@vitest/coverage-v8': + specifier: ^2.1.9 + version: 2.1.9(vitest@2.1.9(@types/node@20.10.0)) eslint: specifier: ^9.17.0 version: 9.39.2 @@ -45,15 +48,9 @@ importers: lint-staged: specifier: ^16.2.7 version: 16.2.7 - pixelmatch: - specifier: ^7.1.0 - version: 7.1.0 prettier: specifier: ^3.8.1 version: 3.8.1 - quickpickle: - specifier: ^1.10.1 - version: 1.11.0(vitest@2.1.9(@types/node@20.10.0)) 
tsx: specifier: ^4.7.0 version: 4.21.0 @@ -69,34 +66,42 @@ importers: packages: - '@a11y-tools/aria-roles@1.0.0': - resolution: {integrity: sha512-9rLDOQxgwJ6l9zhikwPx1L3fmsCO1aR19C0mBY5Zfdge9HmpbRNksynEjckqY8uSL/58mRTfSfZ3/uLWGUCwoA==} - engines: {node: '>=18.0.0'} - '@amiceli/vitest-cucumber@5.2.1': resolution: {integrity: sha512-gAs0j2CMGzcxe/11ZLWKUyiII7U4AF5kFBzptyisFCWhcQagZ3rIXLV5cNj5RIhIjxqJTSdaam9389Lq3u6cbA==} hasBin: true peerDependencies: vitest: ^3.1.4 - '@cucumber/cucumber-expressions@18.1.0': - resolution: {integrity: sha512-9yc+wForrn15FaqLWNjYb19iQ/gPXhcq1kc4X1Ex1lR7NcJpa5pGnCow3bc1HERVM5IoYH+gwwrcJogSMsf+Vw==} + '@ampproject/remapping@2.3.0': + resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==} + engines: {node: '>=6.0.0'} + + '@babel/helper-string-parser@7.27.1': + resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-identifier@7.28.5': + resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==} + engines: {node: '>=6.9.0'} + + '@babel/parser@7.29.0': + resolution: {integrity: sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==} + engines: {node: '>=6.0.0'} + hasBin: true + + '@babel/types@7.29.0': + resolution: {integrity: sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==} + engines: {node: '>=6.9.0'} + + '@bcoe/v8-coverage@0.2.3': + resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} '@cucumber/gherkin@29.0.0': resolution: {integrity: sha512-6t3V7fFsLlyhLSj4FS+fPz22pPVcFhFZ3QOP7otFYmkhZ4g1ierj5pf7fxJWvEsI555hGatg+Iql6cqK93RFUg==} - '@cucumber/gherkin@32.2.0': - resolution: {integrity: 
sha512-X8xuVhSIqlUjxSRifRJ7t0TycVWyX58fygJH3wDNmHINLg9sYEkvQT0SO2G5YlRZnYc11TIFr4YPenscvdlBIw==} - '@cucumber/messages@25.0.1': resolution: {integrity: sha512-RjjhmzcauX5eYfcKns5pgenefDJQcfXE3ZDrVWdUDGcoaoyFVDmj+ZzQZWRWqFrfMjP3lKHJss6LtvIP/z+h8g==} - '@cucumber/messages@27.2.0': - resolution: {integrity: sha512-f2o/HqKHgsqzFLdq6fAhfG1FNOQPdBdyMGpKwhb7hZqg0yZtx9BVqkTyuoNk83Fcvk3wjMVfouFXXHNEk4nddA==} - - '@cucumber/tag-expressions@6.2.0': - resolution: {integrity: sha512-KIF0eLcafHbWOuSDWFw0lMmgJOLdDRWjEL1kfXEWrqHmx2119HxVAr35WuEd9z542d3Yyg+XNqSr+81rIKqEdg==} - '@esbuild/aix-ppc64@0.21.5': resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==} engines: {node: '>=12'} @@ -457,9 +462,23 @@ packages: resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} engines: {node: '>=12'} + '@istanbuljs/schema@0.1.3': + resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==} + engines: {node: '>=8'} + + '@jridgewell/gen-mapping@0.3.13': + resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} + + '@jridgewell/resolve-uri@3.1.2': + resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} + engines: {node: '>=6.0.0'} + '@jridgewell/sourcemap-codec@1.5.5': resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==} + '@jridgewell/trace-mapping@0.3.31': + resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + '@libar-dev/modular-claude-md@https://codeload.github.com/libar-dev/modular-claude-md/tar.gz/3a37c573ae8611f1e0e92c00f565bb0ab45e1263': resolution: {tarball: 
https://codeload.github.com/libar-dev/modular-claude-md/tar.gz/3a37c573ae8611f1e0e92c00f565bb0ab45e1263} version: 0.1.0 @@ -618,9 +637,6 @@ packages: '@types/node@20.10.0': resolution: {integrity: sha512-D0WfRmU9TQ8I9PFx9Yc+EBHw+vSpIub4IDvQivcp26PtPrdMGAq5SDcpXEo/epqa/DXotVpekHiLNTg3iaKXBQ==} - '@types/uuid@10.0.0': - resolution: {integrity: sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ==} - '@types/uuid@9.0.8': resolution: {integrity: sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==} @@ -683,6 +699,15 @@ packages: resolution: {integrity: sha512-oy+wV7xDKFPRyNggmXuZQSBzvoLnpmJs+GhzRhPjrxl2b/jIlyjVokzm47CZCDUdXKr2zd7ZLodPfOBpOPyPlg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@vitest/coverage-v8@2.1.9': + resolution: {integrity: sha512-Z2cOr0ksM00MpEfyVE8KXIYPEcBFxdbLSs56L8PO0QQMxt/6bDj45uQfxoc96v05KW3clk7vvgP0qfDit9DmfQ==} + peerDependencies: + '@vitest/browser': 2.1.9 + vitest: 2.1.9 + peerDependenciesMeta: + '@vitest/browser': + optional: true + '@vitest/expect@2.1.9': resolution: {integrity: sha512-UJCIkTBenHeKT1TTlKMJWy1laZewsRIzYighyYiJKZreqtdxSos/S1t+ktRMQWu2CKqaarrkeszJx1cgC5tGZw==} @@ -755,8 +780,9 @@ packages: balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} - base64-js@1.5.1: - resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + balanced-match@4.0.4: + resolution: {integrity: sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==} + engines: {node: 18 || 20 || >=22} brace-expansion@1.1.12: resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} @@ -764,13 +790,14 @@ packages: brace-expansion@2.0.2: resolution: {integrity: 
sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} + brace-expansion@5.0.4: + resolution: {integrity: sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==} + engines: {node: 18 || 20 || >=22} + braces@3.0.3: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} engines: {node: '>=8'} - buffer@6.0.3: - resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} - cac@6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} @@ -1017,14 +1044,14 @@ packages: resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} engines: {node: '>=8'} + html-escaper@2.0.2: + resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} + husky@9.1.7: resolution: {integrity: sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA==} engines: {node: '>=18'} hasBin: true - ieee754@1.2.1: - resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} - ignore@5.3.2: resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} engines: {node: '>= 4'} @@ -1033,9 +1060,6 @@ packages: resolution: {integrity: sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==} engines: {node: '>= 4'} - image-crop-or-pad@1.0.1: - resolution: {integrity: sha512-0Gu+rHoFyKLZ14oaj+CJCElQz/5EOlMHvO9WwsANukerPSGG4MFpC81oDbvsN1wMbSzAhsgTPvgbBICl7ecazg==} - import-fresh@3.3.1: resolution: {integrity: 
sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==} engines: {node: '>=6'} @@ -1067,6 +1091,22 @@ packages: isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + istanbul-lib-coverage@3.2.2: + resolution: {integrity: sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==} + engines: {node: '>=8'} + + istanbul-lib-report@3.0.1: + resolution: {integrity: sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==} + engines: {node: '>=10'} + + istanbul-lib-source-maps@5.0.6: + resolution: {integrity: sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==} + engines: {node: '>=10'} + + istanbul-reports@3.2.0: + resolution: {integrity: sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==} + engines: {node: '>=8'} + jackspeak@3.4.3: resolution: {integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==} @@ -1103,15 +1143,9 @@ packages: resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} engines: {node: '>=10'} - lodash-es@4.17.23: - resolution: {integrity: sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==} - lodash.merge@4.6.2: resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} - lodash@4.17.23: - resolution: {integrity: sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==} - log-update@6.1.0: resolution: {integrity: sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==} engines: {node: '>=18'} @@ -1125,6 +1159,13 @@ packages: magic-string@0.30.21: resolution: {integrity: 
sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} + magicast@0.3.5: + resolution: {integrity: sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ==} + + make-dir@4.0.0: + resolution: {integrity: sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==} + engines: {node: '>=10'} + merge2@1.4.1: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} @@ -1141,6 +1182,10 @@ packages: resolution: {integrity: sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==} engines: {node: 20 || >=22} + minimatch@10.2.4: + resolution: {integrity: sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==} + engines: {node: 18 || 20 || >=22} + minimatch@3.1.2: resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} @@ -1234,14 +1279,6 @@ packages: engines: {node: '>=0.10'} hasBin: true - pixelmatch@7.1.0: - resolution: {integrity: sha512-1wrVzJ2STrpmONHKBy228LM1b84msXDUoAzVEl0R8Mz4Ce6EPr+IVtxm8+yvrqLYMHswREkjYFaMxnyGnaY3Ng==} - hasBin: true - - pngjs@7.0.0: - resolution: {integrity: sha512-LKWqWJRhstyYo9pGvgor/ivk2w94eSjE3RGVuzLGlr3NmD8bf7RcYGze1mNdEHRP6TRP6rMuDHk5t44hnTRyow==} - engines: {node: '>=14.19.0'} - postcss@8.5.6: resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} engines: {node: ^10 || ^12 || >=14} @@ -1262,21 +1299,9 @@ packages: queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} - quickpickle@1.11.0: - resolution: {integrity: sha512-nnkvCjeE41KkjqFqWLJWSj90m8ejiL/xR6nChzUgqqMV27H1FKAzzuyM3rf7SgsEPHUJqQfXZ9ughGSSAmfkDA==} - peerDependencies: - vitest: 
^1.0.0 || >=2.0.0 - reflect-metadata@0.2.2: resolution: {integrity: sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q==} - regexp-match-indices@1.0.2: - resolution: {integrity: sha512-DwZuAkt8NF5mKwGGER1EGh2PRqyvhRhhLviH+R8y8dIuaQROlUfXjt4s9ZTXstIsSkptf06BSvwcEmmfheJJWQ==} - - regexp-tree@0.1.27: - resolution: {integrity: sha512-iETxpjK6YoRWJG5o6hXLwvjYAoW+FEZn9os0PD/b6AP6xQwsa/Y7lCVgIixBbUPMfhu+i2LtdeAqVTgGlQarfA==} - hasBin: true - resolve-from@4.0.0: resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} engines: {node: '>=4'} @@ -1373,6 +1398,10 @@ packages: resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} engines: {node: '>=8'} + test-exclude@7.0.2: + resolution: {integrity: sha512-u9E6A+ZDYdp7a4WnarkXPZOx8Ilz46+kby6p1yZ8zsGTz9gYa6FIS7lj2oezzNKmtdyyJNNmmXDppga5GB7kSw==} + engines: {node: '>=18'} + tinybench@2.9.0: resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==} @@ -1435,10 +1464,6 @@ packages: uri-js@4.4.1: resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} - uuid@11.0.5: - resolution: {integrity: sha512-508e6IcKLrhxKdBbcA2b4KQZlLVp2+J5UwQ6F7Drckkc5N9ZJwFa4TgWtsww9UG8fGHbm6gbV19TdM5pQ4GaIA==} - hasBin: true - uuid@9.0.1: resolution: {integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==} hasBin: true @@ -1544,8 +1569,6 @@ packages: snapshots: - '@a11y-tools/aria-roles@1.0.0': {} - '@amiceli/vitest-cucumber@5.2.1(vitest@2.1.9(@types/node@20.10.0))': dependencies: callsites: 4.2.0 @@ -1554,15 +1577,27 @@ snapshots: ts-morph: 26.0.0 vitest: 2.1.9(@types/node@20.10.0) - '@cucumber/cucumber-expressions@18.1.0': + '@ampproject/remapping@2.3.0': dependencies: - regexp-match-indices: 1.0.2 + 
'@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.31 - '@cucumber/gherkin@29.0.0': + '@babel/helper-string-parser@7.27.1': {} + + '@babel/helper-validator-identifier@7.28.5': {} + + '@babel/parser@7.29.0': dependencies: - '@cucumber/messages': 25.0.1 + '@babel/types': 7.29.0 + + '@babel/types@7.29.0': + dependencies: + '@babel/helper-string-parser': 7.27.1 + '@babel/helper-validator-identifier': 7.28.5 + + '@bcoe/v8-coverage@0.2.3': {} - '@cucumber/gherkin@32.2.0': + '@cucumber/gherkin@29.0.0': dependencies: '@cucumber/messages': 25.0.1 @@ -1573,15 +1608,6 @@ snapshots: reflect-metadata: 0.2.2 uuid: 9.0.1 - '@cucumber/messages@27.2.0': - dependencies: - '@types/uuid': 10.0.0 - class-transformer: 0.5.1 - reflect-metadata: 0.2.2 - uuid: 11.0.5 - - '@cucumber/tag-expressions@6.2.0': {} - '@esbuild/aix-ppc64@0.21.5': optional: true @@ -1801,8 +1827,22 @@ snapshots: wrap-ansi: 8.1.0 wrap-ansi-cjs: wrap-ansi@7.0.0 + '@istanbuljs/schema@0.1.3': {} + + '@jridgewell/gen-mapping@0.3.13': + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + '@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/resolve-uri@3.1.2': {} + '@jridgewell/sourcemap-codec@1.5.5': {} + '@jridgewell/trace-mapping@0.3.31': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.5.5 + '@libar-dev/modular-claude-md@https://codeload.github.com/libar-dev/modular-claude-md/tar.gz/3a37c573ae8611f1e0e92c00f565bb0ab45e1263': {} '@nodelib/fs.scandir@2.1.5': @@ -1909,8 +1949,6 @@ snapshots: dependencies: undici-types: 5.26.5 - '@types/uuid@10.0.0': {} - '@types/uuid@9.0.8': {} '@typescript-eslint/eslint-plugin@8.53.1(@typescript-eslint/parser@8.53.1(eslint@9.39.2)(typescript@5.9.3))(eslint@9.39.2)(typescript@5.9.3)': @@ -2004,6 +2042,24 @@ snapshots: '@typescript-eslint/types': 8.53.1 eslint-visitor-keys: 4.2.1 + '@vitest/coverage-v8@2.1.9(vitest@2.1.9(@types/node@20.10.0))': + dependencies: + '@ampproject/remapping': 2.3.0 + '@bcoe/v8-coverage': 0.2.3 + debug: 
4.4.3 + istanbul-lib-coverage: 3.2.2 + istanbul-lib-report: 3.0.1 + istanbul-lib-source-maps: 5.0.6 + istanbul-reports: 3.2.0 + magic-string: 0.30.21 + magicast: 0.3.5 + std-env: 3.10.0 + test-exclude: 7.0.2 + tinyrainbow: 1.2.0 + vitest: 2.1.9(@types/node@20.10.0) + transitivePeerDependencies: + - supports-color + '@vitest/expect@2.1.9': dependencies: '@vitest/spy': 2.1.9 @@ -2077,7 +2133,7 @@ snapshots: balanced-match@1.0.2: {} - base64-js@1.5.1: {} + balanced-match@4.0.4: {} brace-expansion@1.1.12: dependencies: @@ -2088,14 +2144,13 @@ snapshots: dependencies: balanced-match: 1.0.2 - braces@3.0.3: + brace-expansion@5.0.4: dependencies: - fill-range: 7.1.1 + balanced-match: 4.0.4 - buffer@6.0.3: + braces@3.0.3: dependencies: - base64-js: 1.5.1 - ieee754: 1.2.1 + fill-range: 7.1.1 cac@6.7.14: {} @@ -2381,16 +2436,14 @@ snapshots: has-flag@4.0.0: {} - husky@9.1.7: {} + html-escaper@2.0.2: {} - ieee754@1.2.1: {} + husky@9.1.7: {} ignore@5.3.2: {} ignore@7.0.5: {} - image-crop-or-pad@1.0.1: {} - import-fresh@3.3.1: dependencies: parent-module: 1.0.1 @@ -2414,6 +2467,27 @@ snapshots: isexe@2.0.0: {} + istanbul-lib-coverage@3.2.2: {} + + istanbul-lib-report@3.0.1: + dependencies: + istanbul-lib-coverage: 3.2.2 + make-dir: 4.0.0 + supports-color: 7.2.0 + + istanbul-lib-source-maps@5.0.6: + dependencies: + '@jridgewell/trace-mapping': 0.3.31 + debug: 4.4.3 + istanbul-lib-coverage: 3.2.2 + transitivePeerDependencies: + - supports-color + + istanbul-reports@3.2.0: + dependencies: + html-escaper: 2.0.2 + istanbul-lib-report: 3.0.1 + jackspeak@3.4.3: dependencies: '@isaacs/cliui': 8.0.2 @@ -2462,12 +2536,8 @@ snapshots: dependencies: p-locate: 5.0.0 - lodash-es@4.17.23: {} - lodash.merge@4.6.2: {} - lodash@4.17.23: {} - log-update@6.1.0: dependencies: ansi-escapes: 7.2.0 @@ -2484,6 +2554,16 @@ snapshots: dependencies: '@jridgewell/sourcemap-codec': 1.5.5 + magicast@0.3.5: + dependencies: + '@babel/parser': 7.29.0 + '@babel/types': 7.29.0 + source-map-js: 1.2.1 + + 
make-dir@4.0.0: + dependencies: + semver: 7.7.3 + merge2@1.4.1: {} micromatch@4.0.8: @@ -2497,6 +2577,10 @@ snapshots: dependencies: '@isaacs/brace-expansion': 5.0.0 + minimatch@10.2.4: + dependencies: + brace-expansion: 5.0.4 + minimatch@3.1.2: dependencies: brace-expansion: 1.1.12 @@ -2569,12 +2653,6 @@ snapshots: pidtree@0.6.0: {} - pixelmatch@7.1.0: - dependencies: - pngjs: 7.0.0 - - pngjs@7.0.0: {} - postcss@8.5.6: dependencies: nanoid: 3.3.11 @@ -2589,29 +2667,8 @@ snapshots: queue-microtask@1.2.3: {} - quickpickle@1.11.0(vitest@2.1.9(@types/node@20.10.0)): - dependencies: - '@a11y-tools/aria-roles': 1.0.0 - '@cucumber/cucumber-expressions': 18.1.0 - '@cucumber/gherkin': 32.2.0 - '@cucumber/messages': 27.2.0 - '@cucumber/tag-expressions': 6.2.0 - buffer: 6.0.3 - image-crop-or-pad: 1.0.1 - js-yaml: 4.1.1 - lodash: 4.17.23 - lodash-es: 4.17.23 - pngjs: 7.0.0 - vitest: 2.1.9(@types/node@20.10.0) - reflect-metadata@0.2.2: {} - regexp-match-indices@1.0.2: - dependencies: - regexp-tree: 0.1.27 - - regexp-tree@0.1.27: {} - resolve-from@4.0.0: {} resolve-pkg-maps@1.0.0: {} @@ -2722,6 +2779,12 @@ snapshots: dependencies: has-flag: 4.0.0 + test-exclude@7.0.2: + dependencies: + '@istanbuljs/schema': 0.1.3 + glob: 10.5.0 + minimatch: 10.2.4 + tinybench@2.9.0: {} tinyexec@0.3.2: {} @@ -2780,8 +2843,6 @@ snapshots: dependencies: punycode: 2.3.1 - uuid@11.0.5: {} - uuid@9.0.1: {} vite-node@2.1.9(@types/node@20.10.0): diff --git a/src/cli/generate-docs.ts b/src/cli/generate-docs.ts index 6db3e4b5..2bea6f59 100644 --- a/src/cli/generate-docs.ts +++ b/src/cli/generate-docs.ts @@ -27,6 +27,11 @@ * - **Explicit Registration**: Generators must be registered before use */ +// ─── Error Convention ─────────────────────────────────────────────────── +// CLI modules use throw/catch + process.exit(). Pipeline modules use Result. +// See src/cli/error-handler.ts for the unified handler. 
+// ──────────────────────────────────────────────────────────────────────── + import * as path from 'path'; import { generatorRegistry } from '../generators/registry.js'; import { generateDocumentation, generateFromConfig } from '../generators/orchestrator.js'; diff --git a/src/cli/generate-tag-taxonomy.ts b/src/cli/generate-tag-taxonomy.ts index 2f81b110..c6d148b0 100644 --- a/src/cli/generate-tag-taxonomy.ts +++ b/src/cli/generate-tag-taxonomy.ts @@ -27,6 +27,11 @@ * - Use in documentation regeneration workflows */ +// ─── Error Convention ─────────────────────────────────────────────────── +// CLI modules use throw/catch + process.exit(). Pipeline modules use Result. +// See src/cli/error-handler.ts for the unified handler. +// ──────────────────────────────────────────────────────────────────────── + import * as fs from 'fs/promises'; import * as path from 'path'; import { loadConfig, formatConfigError } from '../config/config-loader.js'; diff --git a/src/cli/lint-patterns.ts b/src/cli/lint-patterns.ts index 4b08c021..41199fe6 100644 --- a/src/cli/lint-patterns.ts +++ b/src/cli/lint-patterns.ts @@ -20,6 +20,11 @@ * - Use with `--strict` flag to treat warnings as errors */ +// ─── Error Convention ─────────────────────────────────────────────────── +// CLI modules use throw/catch + process.exit(). Pipeline modules use Result. +// See src/cli/error-handler.ts for the unified handler. +// ──────────────────────────────────────────────────────────────────────── + import { printVersionAndExit } from './version.js'; import { handleCliError } from './error-handler.js'; import { scanPatterns } from '../scanner/index.js'; diff --git a/src/cli/lint-process.ts b/src/cli/lint-process.ts index 41d011fa..f33fc9e9 100644 --- a/src/cli/lint-process.ts +++ b/src/cli/lint-process.ts @@ -21,6 +21,11 @@ * - Development to check specific files */ +// ─── Error Convention ─────────────────────────────────────────────────── +// CLI modules use throw/catch + process.exit(). 
Pipeline modules use Result. +// See src/cli/error-handler.ts for the unified handler. +// ──────────────────────────────────────────────────────────────────────── + import { printVersionAndExit } from './version.js'; import { handleCliError } from './error-handler.js'; import { diff --git a/src/cli/lint-steps.ts b/src/cli/lint-steps.ts index c5ae509c..0c955589 100644 --- a/src/cli/lint-steps.ts +++ b/src/cli/lint-steps.ts @@ -8,6 +8,11 @@ * cause cryptic runtime failures. */ +// ─── Error Convention ─────────────────────────────────────────────────── +// CLI modules use throw/catch + process.exit(). Pipeline modules use Result. +// See src/cli/error-handler.ts for the unified handler. +// ──────────────────────────────────────────────────────────────────────── + import { printVersionAndExit } from './version.js'; import { handleCliError } from './error-handler.js'; import { runStepLint } from '../lint/steps/index.js'; diff --git a/src/cli/process-api.ts b/src/cli/process-api.ts index 9d5845ed..c7a1eacc 100644 --- a/src/cli/process-api.ts +++ b/src/cli/process-api.ts @@ -34,6 +34,11 @@ * - **Output Shaping**: 594KB -> 4KB via summarization and modifiers */ +// ─── Error Convention ─────────────────────────────────────────────────── +// CLI modules use throw/catch + process.exit(). Pipeline modules use Result. +// See src/cli/error-handler.ts for the unified handler. 
+// ──────────────────────────────────────────────────────────────────────── + import * as path from 'path'; import * as fs from 'fs'; import { applyProjectSourceDefaults } from '../config/config-loader.js'; @@ -159,157 +164,207 @@ interface ProcessAPICLIConfig { // Argument Parsing // ============================================================================= -function parseArgs(argv: string[] = process.argv.slice(2)): ProcessAPICLIConfig { - const config: ProcessAPICLIConfig = { - input: [], - features: [], - baseDir: process.cwd(), - workflowPath: null, - subcommand: null, - subArgs: [], - help: false, - version: false, - modifiers: { ...DEFAULT_OUTPUT_MODIFIERS }, - format: 'json', - sessionType: null, - noCache: false, - dryRun: false, - subcommandHelp: null, - }; +/** Mutable state accumulated during argument parsing. */ +interface ParseState { + readonly config: ProcessAPICLIConfig; + namesOnly: boolean; + count: boolean; + fields: string[] | null; + full: boolean; + parsingFlags: boolean; +} - // Mutable modifiers for parsing - let namesOnly = false; - let count = false; - let fields: string[] | null = null; - let full = false; - let parsingFlags = true; +/** + * Handle position-independent flags (help, version, cache, dry-run, modifiers, format). + * These work regardless of position — before or after the subcommand. + * + * @returns Number of additional args consumed (0 for booleans, 1 for --value flags). + * Returns -1 if the arg is not a position-independent flag. 
+ */ +function handlePositionIndependentFlag( + state: ParseState, + arg: string, + nextArg: string | undefined +): number { + switch (arg) { + case '-h': + case '--help': + if (state.config.subcommand !== null) { + state.config.subcommandHelp = state.config.subcommand; + } else { + state.config.help = true; + } + return 0; - for (let i = 0; i < argv.length; i++) { - const arg = argv[i]; - const nextArg = argv[i + 1]; + case '-v': + case '--version': + state.config.version = true; + return 0; - // pnpm passes '--' as a literal arg separator — skip it - if (arg === '--') { - parsingFlags = false; - continue; - } + case '--no-cache': + state.config.noCache = true; + return 0; - // Handle --help and --version regardless of position - if (arg === '-h' || arg === '--help') { - // If a subcommand was already parsed, this is per-subcommand help - if (config.subcommand !== null) { - config.subcommandHelp = config.subcommand; - } else { - config.help = true; - } - continue; - } - if (arg === '-v' || arg === '--version') { - config.version = true; - continue; - } + case '--dry-run': + state.config.dryRun = true; + return 0; - // Handle cache and diagnostic flags regardless of position - if (arg === '--no-cache') { - config.noCache = true; - continue; - } - if (arg === '--dry-run') { - config.dryRun = true; - continue; - } + case '--names-only': + state.namesOnly = true; + return 0; - // Handle output modifiers regardless of position (before or after subcommand) - if (arg === '--names-only') { - namesOnly = true; - continue; - } - if (arg === '--count') { - count = true; - continue; - } - if (arg === '--fields') { + case '--count': + state.count = true; + return 0; + + case '--fields': if (!nextArg || nextArg.startsWith('-')) { throw new Error(`${arg} requires a value (comma-separated field names)`); } - fields = nextArg.split(',').map((f) => f.trim()); - i++; - continue; - } - if (arg === '--full') { - full = true; - continue; - } - if (arg === '--format') { + state.fields = 
nextArg.split(',').map((f) => f.trim()); + return 1; + + case '--full': + state.full = true; + return 0; + + case '--format': if (nextArg !== 'json' && nextArg !== 'compact') { throw new Error(`${arg} must be "json" or "compact"`); } - config.format = nextArg; - i++; - continue; - } + state.config.format = nextArg; + return 1; - if (parsingFlags && arg?.startsWith('-') === true) { - switch (arg) { - case '-i': - case '--input': - if (!nextArg || nextArg.startsWith('-')) { - throw new Error(`${arg} requires a value`); - } - config.input.push(nextArg); - i++; - break; - - case '-f': - case '--features': - if (!nextArg || nextArg.startsWith('-')) { - throw new Error(`${arg} requires a value`); - } - config.features.push(nextArg); - i++; - break; + default: + return -1; + } +} - case '-b': - case '--base-dir': - if (!nextArg || nextArg.startsWith('-')) { - throw new Error(`${arg} requires a value`); - } - config.baseDir = nextArg; - i++; - break; +/** + * Handle position-dependent global flags (input, features, base-dir, workflow, session). + * These only apply before the subcommand is detected. + * + * @returns Number of additional args consumed (always 1 for these flags). + * @throws On unknown flag. 
+ */ +function handleGlobalFlag(state: ParseState, arg: string, nextArg: string | undefined): number { + switch (arg) { + case '-i': + case '--input': + if (!nextArg || nextArg.startsWith('-')) { + throw new Error(`${arg} requires a value`); + } + state.config.input.push(nextArg); + return 1; - case '-w': - case '--workflow': - if (!nextArg || nextArg.startsWith('-')) { - throw new Error(`${arg} requires a value`); - } - config.workflowPath = nextArg; - i++; - break; + case '-f': + case '--features': + if (!nextArg || nextArg.startsWith('-')) { + throw new Error(`${arg} requires a value`); + } + state.config.features.push(nextArg); + return 1; - case '--session': - if (!nextArg || !isValidSessionType(nextArg)) { - throw new Error(`${arg} must be "planning", "design", or "implement"`); - } - config.sessionType = nextArg; - i++; - break; + case '-b': + case '--base-dir': + if (!nextArg || nextArg.startsWith('-')) { + throw new Error(`${arg} requires a value`); + } + state.config.baseDir = nextArg; + return 1; - default: - throw new Error(`Unknown option: ${arg}`); + case '-w': + case '--workflow': + if (!nextArg || nextArg.startsWith('-')) { + throw new Error(`${arg} requires a value`); } - } else if (arg !== undefined) { - if (config.subcommand === null) { - config.subcommand = arg; - parsingFlags = false; - } else { - config.subArgs.push(arg); + state.config.workflowPath = nextArg; + return 1; + + case '--session': + if (!nextArg || !isValidSessionType(nextArg)) { + throw new Error(`${arg} must be "planning", "design", or "implement"`); } + state.config.sessionType = nextArg; + return 1; + + default: + throw new Error(`Unknown option: ${arg}`); + } +} + +/** + * Handle positional args: first becomes subcommand, rest become subArgs. 
+ */ +function handlePositionalArg(state: ParseState, arg: string): void { + if (state.config.subcommand === null) { + state.config.subcommand = arg; + state.parsingFlags = false; + } else { + state.config.subArgs.push(arg); + } +} + +function parseArgs(argv: string[] = process.argv.slice(2)): ProcessAPICLIConfig { + const state: ParseState = { + config: { + input: [], + features: [], + baseDir: process.cwd(), + workflowPath: null, + subcommand: null, + subArgs: [], + help: false, + version: false, + modifiers: { ...DEFAULT_OUTPUT_MODIFIERS }, + format: 'json', + sessionType: null, + noCache: false, + dryRun: false, + subcommandHelp: null, + }, + namesOnly: false, + count: false, + fields: null, + full: false, + parsingFlags: true, + }; + + for (let i = 0; i < argv.length; i++) { + const arg = argv[i]; + if (arg === undefined) continue; + const nextArg = argv[i + 1]; + + // pnpm passes '--' as a literal arg separator — skip it + if (arg === '--') { + state.parsingFlags = false; + continue; + } + + // Position-independent flags (work before and after subcommand) + const piConsumed = handlePositionIndependentFlag(state, arg, nextArg); + if (piConsumed >= 0) { + i += piConsumed; + continue; } + + // Position-dependent global flags (only before subcommand) + if (state.parsingFlags && arg.startsWith('-')) { + i += handleGlobalFlag(state, arg, nextArg); + continue; + } + + // Positional: subcommand or subArg + handlePositionalArg(state, arg); } - config.modifiers = { namesOnly, count, fields, full }; - return config; + state.config.modifiers = { + namesOnly: state.namesOnly, + count: state.count, + fields: state.fields, + full: state.full, + }; + return state.config; } // ============================================================================= @@ -783,6 +838,47 @@ function coerceArg(arg: string): string | number { return arg; } +/** + * Require a string argument at the given index, throwing INVALID_ARGUMENT if missing. 
+ */ +function requireStringArg( + args: ReadonlyArray, + index: number, + methodName: string +): string { + if (args[index] === undefined) { + throw new QueryApiError( + 'INVALID_ARGUMENT', + `${methodName} requires an argument at position ${index + 1}` + ); + } + return String(args[index]); +} + +/** + * Require a numeric argument at the given index, throwing INVALID_ARGUMENT if missing or NaN. + */ +function requireNumberArg( + args: ReadonlyArray, + index: number, + methodName: string +): number { + if (args[index] === undefined) { + throw new QueryApiError( + 'INVALID_ARGUMENT', + `${methodName} requires a numeric argument at position ${index + 1}` + ); + } + const value = Number(args[index]); + if (isNaN(value)) { + throw new QueryApiError( + 'INVALID_ARGUMENT', + `${methodName} requires a numeric argument, got: "${String(args[index])}"` + ); + } + return value; +} + const API_METHODS = [ 'getPatternsByNormalizedStatus', 'getPatternsByStatus', @@ -813,6 +909,88 @@ const API_METHODS = [ 'getMasterDataset', ] as const satisfies ReadonlyArray; +type ApiMethodName = (typeof API_METHODS)[number]; + +/** + * Typed dispatch map: each entry invokes the API method with correct parameter types. + * The Record type ensures compile-time completeness — adding a + * method to API_METHODS without a dispatch entry causes a type error. 
+ */ +const API_DISPATCH: Record< + ApiMethodName, + (api: ProcessStateAPI, args: ReadonlyArray) => unknown +> = { + // Status queries + getPatternsByNormalizedStatus: (api, args) => + api.getPatternsByNormalizedStatus( + requireStringArg(args, 0, 'getPatternsByNormalizedStatus') as + | 'completed' + | 'active' + | 'planned' + ), + getPatternsByStatus: (api, args) => + api.getPatternsByStatus(requireStringArg(args, 0, 'getPatternsByStatus') as ProcessStatusValue), + getStatusCounts: (api) => api.getStatusCounts(), + getStatusDistribution: (api) => api.getStatusDistribution(), + getCompletionPercentage: (api) => api.getCompletionPercentage(), + + // Phase queries + getPatternsByPhase: (api, args) => + api.getPatternsByPhase(requireNumberArg(args, 0, 'getPatternsByPhase')), + getPhaseProgress: (api, args) => + api.getPhaseProgress(requireNumberArg(args, 0, 'getPhaseProgress')), + getActivePhases: (api) => api.getActivePhases(), + getAllPhases: (api) => api.getAllPhases(), + + // FSM queries + isValidTransition: (api, args) => + api.isValidTransition( + requireStringArg(args, 0, 'isValidTransition') as ProcessStatusValue, + requireStringArg(args, 1, 'isValidTransition') as ProcessStatusValue + ), + checkTransition: (api, args) => + api.checkTransition( + requireStringArg(args, 0, 'checkTransition'), + requireStringArg(args, 1, 'checkTransition') + ), + getValidTransitionsFrom: (api, args) => + api.getValidTransitionsFrom( + requireStringArg(args, 0, 'getValidTransitionsFrom') as ProcessStatusValue + ), + getProtectionInfo: (api, args) => + api.getProtectionInfo(requireStringArg(args, 0, 'getProtectionInfo') as ProcessStatusValue), + + // Pattern queries + getPattern: (api, args) => api.getPattern(requireStringArg(args, 0, 'getPattern')), + getPatternDependencies: (api, args) => + api.getPatternDependencies(requireStringArg(args, 0, 'getPatternDependencies')), + getPatternRelationships: (api, args) => + api.getPatternRelationships(requireStringArg(args, 0, 
'getPatternRelationships')), + getRelatedPatterns: (api, args) => + api.getRelatedPatterns(requireStringArg(args, 0, 'getRelatedPatterns')), + getApiReferences: (api, args) => + api.getApiReferences(requireStringArg(args, 0, 'getApiReferences')), + getPatternDeliverables: (api, args) => + api.getPatternDeliverables(requireStringArg(args, 0, 'getPatternDeliverables')), + getPatternsByCategory: (api, args) => + api.getPatternsByCategory(requireStringArg(args, 0, 'getPatternsByCategory')), + getCategories: (api) => api.getCategories(), + + // Timeline queries + getPatternsByQuarter: (api, args) => + api.getPatternsByQuarter(requireStringArg(args, 0, 'getPatternsByQuarter')), + getQuarters: (api) => api.getQuarters(), + getCurrentWork: (api) => api.getCurrentWork(), + getRoadmapItems: (api) => api.getRoadmapItems(), + getRecentlyCompleted: (api, args) => { + const limit = args[0] !== undefined ? Number(args[0]) : undefined; + return api.getRecentlyCompleted(limit); + }, + + // Raw access + getMasterDataset: (api) => api.getMasterDataset(), +}; + function handleQuery( api: ProcessStateAPI, args: string[] @@ -825,21 +1003,16 @@ function handleQuery( ); } - if (!API_METHODS.includes(methodName as (typeof API_METHODS)[number])) { + if (!API_METHODS.includes(methodName as ApiMethodName)) { throw new QueryApiError( 'UNKNOWN_METHOD', `Unknown API method: ${methodName}\nAvailable: ${API_METHODS.join(', ')}` ); } - // Safe to cast: we validated methodName is in API_METHODS above - const apiRecord = api as unknown as Record unknown>; - const method = apiRecord[methodName]; - if (method === undefined) { - throw new QueryApiError('UNKNOWN_METHOD', `Method not found on API: ${methodName}`); - } + const dispatch = API_DISPATCH[methodName as ApiMethodName]; const coercedArgs = args.slice(1).map(coerceArg); - return { methodName, result: method.apply(api, coercedArgs) }; + return { methodName, result: dispatch(api, coercedArgs) }; } function handlePattern(api: ProcessStateAPI, args: 
string[]): unknown { diff --git a/src/cli/repl.ts b/src/cli/repl.ts index 76f1ee9a..0962b445 100644 --- a/src/cli/repl.ts +++ b/src/cli/repl.ts @@ -22,6 +22,11 @@ * - `help` — list available subcommands */ +// ─── Error Convention ─────────────────────────────────────────────────── +// CLI modules use throw/catch + process.exit(). Pipeline modules use Result. +// See src/cli/error-handler.ts for the unified handler. +// ──────────────────────────────────────────────────────────────────────── + import * as readline from 'node:readline/promises'; import * as path from 'path'; import { diff --git a/src/cli/validate-patterns.ts b/src/cli/validate-patterns.ts index 6ae97e03..cf2ef7a0 100644 --- a/src/cli/validate-patterns.ts +++ b/src/cli/validate-patterns.ts @@ -26,6 +26,11 @@ * - Strict mode (`--strict`) for production readiness checks */ +// ─── Error Convention ─────────────────────────────────────────────────── +// CLI modules use throw/catch + process.exit(). Pipeline modules use Result. +// See src/cli/error-handler.ts for the unified handler. 
+// ──────────────────────────────────────────────────────────────────────── + import { printVersionAndExit } from './version.js'; import { handleCliError } from './error-handler.js'; import { getPatternName } from '../api/pattern-helpers.js'; @@ -843,5 +848,7 @@ async function main(): Promise { } } -// Entry point -void main(); +// Entry point — catch ensures parseArgs errors reach the unified handler +void main().catch((error: unknown) => { + handleCliError(error, 1); +}); diff --git a/src/config/defaults.ts b/src/config/defaults.ts index 788b8fbb..4ef743cf 100644 --- a/src/config/defaults.ts +++ b/src/config/defaults.ts @@ -26,7 +26,7 @@ */ import { createRegexBuilders, type RegexBuilders } from './regex-builders.js'; -import type { ContextInferenceRule } from '../generators/pipeline/transform-dataset.js'; +import type { ContextInferenceRule } from '../generators/pipeline/context-inference.js'; /** * Default tag prefix for @libar-docs-* annotations. diff --git a/src/config/project-config-schema.ts b/src/config/project-config-schema.ts index 398f2e73..ee338d1b 100644 --- a/src/config/project-config-schema.ts +++ b/src/config/project-config-schema.ts @@ -28,6 +28,7 @@ import { z } from 'zod'; import type { DeliveryProcessProjectConfig } from './project-config.js'; import type { DeliveryProcessInstance } from './types.js'; +// Cross-layer: config → renderable (see comment in project-config.ts) import { DIAGRAM_SOURCE_VALUES } from '../renderable/codecs/reference.js'; import { SectionBlockSchema } from '../renderable/schema.js'; diff --git a/src/config/project-config.ts b/src/config/project-config.ts index 1262fd1a..b1c60714 100644 --- a/src/config/project-config.ts +++ b/src/config/project-config.ts @@ -36,7 +36,15 @@ import type { PresetName } from './presets.js'; import type { DeliveryProcessConfig, DeliveryProcessInstance } from './types.js'; -import type { ContextInferenceRule } from '../generators/pipeline/transform-dataset.js'; +import type { 
ContextInferenceRule } from '../generators/pipeline/context-inference.js'; +// ═══ Cross-Layer Imports: config → renderable ═══════════════════════════════ +// Project configuration declares which reference documents to generate, +// requiring knowledge of renderer capability types (ReferenceDocConfig, +// CodecOptions). This is intentional — moving these types to a shared location +// would force renderable to import its own types from config (worse direction). +// See also: src/config/project-config-schema.ts (Zod schema uses +// DIAGRAM_SOURCE_VALUES and SectionBlockSchema from renderable). +// ═════════════════════════════════════════════════════════════════════════════ import type { ReferenceDocConfig } from '../renderable/codecs/reference.js'; import type { CodecOptions } from '../renderable/generate.js'; diff --git a/src/config/resolve-config.ts b/src/config/resolve-config.ts index c7cb2f37..d4d46ca4 100644 --- a/src/config/resolve-config.ts +++ b/src/config/resolve-config.ts @@ -34,7 +34,7 @@ * - `createDefaultResolvedConfig()` provides a fallback when no config file exists */ -import type { ContextInferenceRule } from '../generators/pipeline/transform-dataset.js'; +import type { ContextInferenceRule } from '../generators/pipeline/context-inference.js'; import type { DeliveryProcessProjectConfig, GeneratorSourceOverride, diff --git a/src/config/types.ts b/src/config/types.ts index 4c3bfd65..1aa96dba 100644 --- a/src/config/types.ts +++ b/src/config/types.ts @@ -22,7 +22,7 @@ import type { TagRegistry } from '../validation-schemas/tag-registry.js'; import type { CategoryDefinition } from '../taxonomy/categories.js'; import type { MetadataTagDefinitionForRegistry } from '../taxonomy/registry-builder.js'; -import type { ContextInferenceRule } from '../generators/pipeline/transform-dataset.js'; +import type { ContextInferenceRule } from '../generators/pipeline/context-inference.js'; /** * Configuration for creating a delivery process instance. 
diff --git a/src/generators/orchestrator.ts b/src/generators/orchestrator.ts index 9bab2b91..d5bfd152 100644 --- a/src/generators/orchestrator.ts +++ b/src/generators/orchestrator.ts @@ -61,7 +61,7 @@ import type { GeneratorContext } from './types.js'; import type { Result } from '../types/index.js'; import { Result as R } from '../types/index.js'; import { buildMasterDataset } from './pipeline/index.js'; -import { detectBranchChanges, getAllChangedFiles } from '../lint/process-guard/detect-changes.js'; +import { getChangedFilesList } from '../git/index.js'; import type { CodecOptions } from '../renderable/generate.js'; import { registerReferenceGenerators } from './built-in/reference-generators.js'; @@ -333,10 +333,10 @@ export async function generateDocumentation( let changedFiles = options.changedFiles; if (!changedFiles && options.gitDiffBase) { - const detectionResult = detectBranchChanges(baseDir, options.gitDiffBase); + const detectionResult = getChangedFilesList(baseDir, options.gitDiffBase); if (detectionResult.ok) { // Filter for relevant file types (source, tests, specs, features) - changedFiles = getAllChangedFiles(detectionResult.value).filter( + changedFiles = detectionResult.value.filter( (f) => f.endsWith('.ts') || f.endsWith('.tsx') || diff --git a/src/generators/pipeline/build-pipeline.ts b/src/generators/pipeline/build-pipeline.ts index f6a355f4..8fe5af76 100644 --- a/src/generators/pipeline/build-pipeline.ts +++ b/src/generators/pipeline/build-pipeline.ts @@ -57,11 +57,8 @@ import { } from './transform-dataset.js'; import { Result } from '../../types/result.js'; import type { ExtractedPattern } from '../../validation-schemas/index.js'; -import type { - RuntimeMasterDataset, - ValidationSummary, - ContextInferenceRule, -} from './transform-dataset.js'; +import type { RuntimeMasterDataset, ValidationSummary } from './transform-types.js'; +import type { ContextInferenceRule } from './context-inference.js'; // 
═══════════════════════════════════════════════════════════════════════════ // Types diff --git a/src/generators/pipeline/context-inference.ts b/src/generators/pipeline/context-inference.ts new file mode 100644 index 00000000..1572bc40 --- /dev/null +++ b/src/generators/pipeline/context-inference.ts @@ -0,0 +1,109 @@ +/** + * @libar-docs + * @libar-docs-pattern ContextInferenceImpl + * @libar-docs-status completed + * @libar-docs-implements ContextInference + * @libar-docs-arch-role utility + * @libar-docs-arch-context generator + * @libar-docs-arch-layer application + * @libar-docs-used-by TransformDataset + * + * ## ContextInference - File Path Based Context Resolution + * + * Auto-infers bounded context from file paths using configurable rules. + * Reduces annotation redundancy when directory structure already implies + * the bounded context. + */ + +/** + * Rule for auto-inferring bounded context from file paths. + * + * When a pattern has an architecture layer (`@libar-docs-arch-layer`) but no explicit + * context (`@libar-docs-arch-context`), these rules can infer the context from the + * file path. This reduces annotation redundancy when directory structure already + * implies the bounded context. + * + * @example + * ```typescript + * const rules: ContextInferenceRule[] = [ + * { pattern: 'src/validation/**', context: 'validation' }, + * { pattern: 'src/lint/**', context: 'lint' }, + * ]; + * // File at src/validation/rules.ts will get archContext='validation' if not explicit + * ``` + */ +export interface ContextInferenceRule { + /** Glob pattern to match file paths (e.g., 'src/validation/**') */ + readonly pattern: string; + /** Default context name to assign when pattern matches */ + readonly context: string; +} + +/** + * Infer bounded context from file path using configured rules. + * + * Iterates through rules in order and returns the context from the first + * matching pattern. Returns undefined if no rules match. 
+ * + * Pattern matching supports: + * - Simple prefix matching: `src/validation/` matches files in that directory + * - Glob-style wildcards: `src/validation/**` matches all files recursively + * + * @param filePath - The source file path to check + * @param rules - Ordered list of inference rules + * @returns The inferred context name, or undefined if no match + */ +export function inferContext( + filePath: string, + rules: readonly ContextInferenceRule[] | undefined +): string | undefined { + if (!rules || rules.length === 0) return undefined; + + for (const rule of rules) { + if (matchPattern(filePath, rule.pattern)) { + return rule.context; + } + } + return undefined; +} + +/** + * Simple pattern matching for file paths. + * + * Supports: + * - Exact prefix matching: `src/validation/` matches `src/validation/foo.ts` + * - Glob-style `**` wildcard: `src/validation/**` matches all files recursively + * + * @param filePath - The file path to check + * @param pattern - The pattern to match against + * @returns true if the file path matches the pattern + */ +function matchPattern(filePath: string, pattern: string): boolean { + // Handle `**` wildcard patterns (recursive match) + if (pattern.endsWith('/**')) { + const prefix = pattern.slice(0, -3); // Remove '/**' + return hasPathPrefix(filePath, prefix); + } + + // Handle `/*` wildcard patterns (single level match) + if (pattern.endsWith('/*')) { + const prefix = pattern.slice(0, -2); // Remove '/*' + if (!hasPathPrefix(filePath, prefix)) { + return false; + } + + const afterPrefix = filePath.slice(prefix.length + 1); + return afterPrefix.length > 0 && !afterPrefix.includes('/'); + } + + // Simple prefix matching + if (pattern.endsWith('/')) { + return hasPathPrefix(filePath, pattern.slice(0, -1)); + } + + return filePath === pattern || filePath.startsWith(`${pattern}/`); +} + +function hasPathPrefix(filePath: string, prefix: string): boolean { + return filePath === prefix || filePath.startsWith(`${prefix}/`); +} 
diff --git a/src/generators/pipeline/index.ts b/src/generators/pipeline/index.ts index 74c0fea4..7baa3ff4 100644 --- a/src/generators/pipeline/index.ts +++ b/src/generators/pipeline/index.ts @@ -29,15 +29,19 @@ export { transformToMasterDatasetWithValidation, completionPercentage, isFullyCompleted, - type RawDataset, - type RuntimeMasterDataset, - type ContextInferenceRule, - type ValidationSummary, - type MalformedPattern, - type DanglingReference, - type TransformResult, } from './transform-dataset.js'; +export type { ContextInferenceRule } from './context-inference.js'; + +export type { + RawDataset, + RuntimeMasterDataset, + ValidationSummary, + MalformedPattern, + DanglingReference, + TransformResult, +} from './transform-types.js'; + // ═══════════════════════════════════════════════════════════════════════════ // Merge Patterns // ═══════════════════════════════════════════════════════════════════════════ diff --git a/src/generators/pipeline/relationship-resolver.ts b/src/generators/pipeline/relationship-resolver.ts new file mode 100644 index 00000000..054f3580 --- /dev/null +++ b/src/generators/pipeline/relationship-resolver.ts @@ -0,0 +1,163 @@ +/** + * @libar-docs + * @libar-docs-pattern RelationshipResolver + * @libar-docs-status active + * @libar-docs-arch-role service + * @libar-docs-arch-context generator + * @libar-docs-arch-layer application + * @libar-docs-used-by TransformDataset + * @libar-docs-uses ExtractedPattern, RelationshipEntry, ImplementationRef, PatternHelpers + * + * ## RelationshipResolver - Reverse Lookup and Dangling Reference Detection + * + * Computes reverse relationship lookups (implementedBy, extendedBy, enables, usedBy) + * and detects dangling references in the pattern graph. These are the 2nd and 3rd + * passes of the MasterDataset transformation pipeline. 
+ */ + +import type { ExtractedPattern } from '../../validation-schemas/index.js'; +import type { + RelationshipEntry, + ImplementationRef, +} from '../../validation-schemas/master-dataset.js'; +import { getPatternName } from '../../api/pattern-helpers.js'; +import type { DanglingReference } from './transform-types.js'; + +/** + * Build reverse lookups for relationship index entries. + * + * Iterates over patterns to compute: + * - implementedBy: which patterns implement this pattern (with file + description) + * - extendedBy: which patterns extend this pattern + * - enables: which patterns depend on this pattern (reverse of dependsOn) + * - usedBy: which patterns use this pattern (reverse of uses) + * + * Mutates the `relationshipIndex` entries in place, then sorts reverse-computed + * arrays for consistent output ordering. + * + * @param patterns - All extracted patterns + * @param relationshipIndex - Mutable relationship index (entries are mutated) + */ +export function buildReverseLookups( + patterns: readonly ExtractedPattern[], + relationshipIndex: Record +): void { + for (const pattern of patterns) { + const patternKey = getPatternName(pattern); + const entry = relationshipIndex[patternKey]; + if (!entry) continue; + + // Build implementedBy reverse lookup with full ImplementationRef + for (const implemented of entry.implementsPatterns) { + const target = relationshipIndex[implemented]; + if (target) { + const alreadyAdded = target.implementedBy.some( + (impl: ImplementationRef) => impl.name === patternKey + ); + if (!alreadyAdded) { + const desc = pattern.directive.description; + const firstLine = desc ? desc.split('\n')[0]?.trim() : undefined; + const description = + firstLine && firstLine.length > 0 + ? firstLine.slice(0, 100) + (firstLine.length > 100 ? '...' 
: '') + : undefined; + + target.implementedBy.push({ + name: patternKey, + file: pattern.source.file, + description, + }); + } + } + } + + // Build extendedBy reverse lookup + if (entry.extendsPattern) { + const target = relationshipIndex[entry.extendsPattern]; + if (target && !target.extendedBy.includes(patternKey)) { + target.extendedBy.push(patternKey); + } + } + + // Build enables reverse lookup (dependsOn -> enables) + for (const dep of entry.dependsOn) { + const target = relationshipIndex[dep]; + if (target && !target.enables.includes(patternKey)) { + target.enables.push(patternKey); + } + } + + // Build usedBy reverse lookup (uses -> usedBy) + for (const used of entry.uses) { + const target = relationshipIndex[used]; + if (target && !target.usedBy.includes(patternKey)) { + target.usedBy.push(patternKey); + } + } + } + + // Sort reverse-computed arrays for consistent output + for (const entry of Object.values(relationshipIndex)) { + entry.implementedBy.sort((a: ImplementationRef, b: ImplementationRef) => + a.file.localeCompare(b.file) + ); + entry.extendedBy.sort((a, b) => a.localeCompare(b)); + entry.enables.sort((a, b) => a.localeCompare(b)); + entry.usedBy.sort((a, b) => a.localeCompare(b)); + } +} + +/** + * Detect dangling references in pattern relationship fields. + * + * Checks uses, dependsOn, implementsPatterns, extendsPattern, and seeAlso + * fields for references to patterns that don't exist in the dataset. + * + * @param patterns - All extracted patterns + * @param allPatternNames - Set of all valid pattern names + * @returns Array of dangling references found + */ +export function detectDanglingReferences( + patterns: readonly ExtractedPattern[], + allPatternNames: ReadonlySet +): DanglingReference[] { + const danglingReferences: DanglingReference[] = []; + + for (const pattern of patterns) { + const patternKey = getPatternName(pattern); + + for (const ref of pattern.uses ?? 
[]) { + if (!allPatternNames.has(ref)) { + danglingReferences.push({ pattern: patternKey, field: 'uses', missing: ref }); + } + } + + for (const ref of pattern.dependsOn ?? []) { + if (!allPatternNames.has(ref)) { + danglingReferences.push({ pattern: patternKey, field: 'dependsOn', missing: ref }); + } + } + + for (const ref of pattern.implementsPatterns ?? []) { + if (!allPatternNames.has(ref)) { + danglingReferences.push({ pattern: patternKey, field: 'implementsPatterns', missing: ref }); + } + } + + if (pattern.extendsPattern && !allPatternNames.has(pattern.extendsPattern)) { + danglingReferences.push({ + pattern: patternKey, + field: 'extendsPattern', + missing: pattern.extendsPattern, + }); + } + + for (const ref of pattern.seeAlso ?? []) { + if (!allPatternNames.has(ref)) { + danglingReferences.push({ pattern: patternKey, field: 'seeAlso', missing: ref }); + } + } + } + + return danglingReferences; +} diff --git a/src/generators/pipeline/transform-dataset.ts b/src/generators/pipeline/transform-dataset.ts index 20a9098b..e116dcf2 100644 --- a/src/generators/pipeline/transform-dataset.ts +++ b/src/generators/pipeline/transform-dataset.ts @@ -33,209 +33,33 @@ * - **Workflow integration**: Uses workflow config for phase names */ -import type { ExtractedPattern, TagRegistry } from '../../validation-schemas/index.js'; +import type { ExtractedPattern } from '../../validation-schemas/index.js'; import { ExtractedPatternSchema } from '../../validation-schemas/index.js'; import { getPatternName } from '../../api/pattern-helpers.js'; -import type { LoadedWorkflow } from '../../config/workflow-loader.js'; import type { StatusGroups, StatusCounts, PhaseGroup, SourceViews, RelationshipEntry, - ImplementationRef, ArchIndex, SequenceIndexEntry, } from '../../validation-schemas/master-dataset.js'; -import type { MasterDataset } from '../../validation-schemas/master-dataset.js'; import { normalizeStatus, ACCEPTED_STATUS_VALUES } from '../../taxonomy/index.js'; import { 
buildSequenceIndexEntryWithValidation } from './sequence-utils.js'; - -// ============================================================================= -// Validation Summary Types -// ============================================================================= - -/** - * Information about a malformed pattern that failed schema validation. - */ -export interface MalformedPattern { - /** Pattern ID or name for identification */ - patternId: string; - /** List of validation issues found */ - issues: string[]; -} - -/** - * Information about a dangling reference (reference to non-existent pattern). - */ -export interface DanglingReference { - /** The pattern containing the dangling reference */ - pattern: string; - /** The field containing the dangling reference (e.g., "uses", "dependsOn") */ - field: string; - /** The referenced pattern name that doesn't exist */ - missing: string; -} - -/** - * Summary of validation results from dataset transformation. - * - * Provides structured information about data quality issues encountered - * during transformation, enabling upstream error handling and reporting. - */ -export interface ValidationSummary { - /** Total number of patterns processed */ - totalPatterns: number; - - /** Patterns that failed schema validation */ - malformedPatterns: MalformedPattern[]; - - /** References to patterns that don't exist in the dataset */ - danglingReferences: DanglingReference[]; - - /** Status values that were not recognized (normalized to 'planned') */ - unknownStatuses: string[]; - - /** Total count of all warnings (malformed + dangling + unknown statuses) */ - warningCount: number; -} - -/** - * Result of transformToMasterDataset including both dataset and validation info. 
- */ -export interface TransformResult { - /** The transformed MasterDataset */ - dataset: RuntimeMasterDataset; - - /** Validation summary with any issues found during transformation */ - validation: ValidationSummary; -} - -// ============================================================================= -// Context Inference Types -// ============================================================================= - -/** - * Rule for auto-inferring bounded context from file paths. - * - * When a pattern has an architecture layer (`@libar-docs-arch-layer`) but no explicit - * context (`@libar-docs-arch-context`), these rules can infer the context from the - * file path. This reduces annotation redundancy when directory structure already - * implies the bounded context. - * - * @example - * ```typescript - * const rules: ContextInferenceRule[] = [ - * { pattern: 'src/validation/**', context: 'validation' }, - * { pattern: 'src/lint/**', context: 'lint' }, - * ]; - * // File at src/validation/rules.ts will get archContext='validation' if not explicit - * ``` - */ -export interface ContextInferenceRule { - /** Glob pattern to match file paths (e.g., 'src/validation/**') */ - readonly pattern: string; - /** Default context name to assign when pattern matches */ - readonly context: string; -} - -/** - * Runtime MasterDataset with optional workflow - * - * Extends the Zod-compatible MasterDataset with workflow reference. - * LoadedWorkflow contains Maps which aren't JSON-serializable, - * so it's kept separate from the Zod schema. 
- * - * @libar-docs-shape master-dataset - */ -export interface RuntimeMasterDataset extends MasterDataset { - /** Optional workflow configuration (not serializable) */ - readonly workflow?: LoadedWorkflow; -} - -/** - * Raw input data for transformation - * - * @libar-docs-shape master-dataset - */ -export interface RawDataset { - /** Extracted patterns from TypeScript and/or Gherkin sources */ - readonly patterns: readonly ExtractedPattern[]; - - /** Tag registry for category lookups */ - readonly tagRegistry: TagRegistry; - - /** Optional workflow configuration for phase names (can be undefined) */ - readonly workflow?: LoadedWorkflow | undefined; - - /** Optional rules for inferring bounded context from file paths */ - readonly contextInferenceRules?: readonly ContextInferenceRule[] | undefined; -} - -/** - * Infer bounded context from file path using configured rules. - * - * Iterates through rules in order and returns the context from the first - * matching pattern. Returns undefined if no rules match. - * - * Pattern matching supports: - * - Simple prefix matching: `src/validation/` matches files in that directory - * - Glob-style wildcards: `src/validation/**` matches all files recursively - * - * @param filePath - The source file path to check - * @param rules - Ordered list of inference rules - * @returns The inferred context name, or undefined if no match - */ -function inferContext( - filePath: string, - rules: readonly ContextInferenceRule[] | undefined -): string | undefined { - if (!rules || rules.length === 0) return undefined; - - for (const rule of rules) { - if (matchPattern(filePath, rule.pattern)) { - return rule.context; - } - } - return undefined; -} - -/** - * Simple pattern matching for file paths. 
- * - * Supports: - * - Exact prefix matching: `src/validation/` matches `src/validation/foo.ts` - * - Glob-style `**` wildcard: `src/validation/**` matches all files recursively - * - * @param filePath - The file path to check - * @param pattern - The pattern to match against - * @returns true if the file path matches the pattern - */ -function matchPattern(filePath: string, pattern: string): boolean { - // Handle `**` wildcard patterns (recursive match) - if (pattern.endsWith('/**')) { - const prefix = pattern.slice(0, -3); // Remove '/**' - return filePath.startsWith(prefix); - } - - // Handle `/*` wildcard patterns (single level match) - if (pattern.endsWith('/*')) { - const prefix = pattern.slice(0, -2); // Remove '/*' - const afterPrefix = filePath.slice(prefix.length); - // Must start with prefix and have exactly one path segment after - return filePath.startsWith(prefix) && !afterPrefix.slice(1).includes('/'); - } - - // Simple prefix matching - return filePath.startsWith(pattern); -} +import { inferContext } from './context-inference.js'; +import { buildReverseLookups, detectDanglingReferences } from './relationship-resolver.js'; +import type { + MalformedPattern, + ValidationSummary, + TransformResult, + RuntimeMasterDataset, + RawDataset, +} from './transform-types.js'; /** * Check if a status value is a known/valid status. 
- * - * @param status - Status value to check - * @returns true if status is a known value */ function isKnownStatus(status: string | undefined): boolean { if (!status) return true; // undefined is acceptable (defaults to planned) @@ -259,20 +83,6 @@ function isKnownStatus(status: string | undefined): boolean { * * @param raw - Raw dataset with patterns, registry, and optional workflow * @returns MasterDataset with all pre-computed views - * - * @example - * ```typescript - * const masterDataset = transformToMasterDataset({ - * patterns: mergedPatterns, - * tagRegistry: registry, - * workflow, - * }); - * - * // Access pre-computed views - * const completed = masterDataset.byStatus.completed; - * const phase3Patterns = masterDataset.byPhase.find(p => p.phaseNumber === 3); - * const q42024 = masterDataset.byQuarter["Q4-2024"]; - * ``` */ export function transformToMasterDataset(raw: RawDataset): RuntimeMasterDataset { return transformToMasterDatasetWithValidation(raw).dataset; @@ -294,21 +104,6 @@ export function transformToMasterDataset(raw: RawDataset): RuntimeMasterDataset * * @param raw - Raw dataset with patterns, registry, and optional workflow * @returns TransformResult with dataset and validation summary - * - * @example - * ```typescript - * const result = transformToMasterDatasetWithValidation({ - * patterns: mergedPatterns, - * tagRegistry: registry, - * workflow, - * }); - * - * if (result.validation.warningCount > 0) { - * console.warn(`Found ${result.validation.warningCount} validation issues`); - * } - * - * const dataset = result.dataset; - * ``` */ export function transformToMasterDatasetWithValidation(raw: RawDataset): TransformResult { const { patterns, tagRegistry, workflow, contextInferenceRules } = raw; @@ -319,21 +114,17 @@ export function transformToMasterDatasetWithValidation(raw: RawDataset): Transfo const malformedPatterns: MalformedPattern[] = []; const unknownStatusSet = new Set(); - const danglingReferences: DanglingReference[] = []; // 
───────────────────────────────────────────────────────────────────────── // Pre-loop validation: validate each pattern against schema // ───────────────────────────────────────────────────────────────────────── - // Build a set of all pattern names for reference checking const allPatternNames = new Set(); for (const pattern of patterns) { - const key = getPatternName(pattern); - allPatternNames.add(key); + allPatternNames.add(getPatternName(pattern)); } for (const pattern of patterns) { - // Validate against schema const parseResult = ExtractedPatternSchema.safeParse(pattern); if (!parseResult.success) { const patternId = getPatternName(pattern); @@ -343,7 +134,6 @@ export function transformToMasterDatasetWithValidation(raw: RawDataset): Transfo malformedPatterns.push({ patternId, issues }); } - // Check for unknown status values if (pattern.status && !isKnownStatus(pattern.status)) { unknownStatusSet.add(pattern.status); } @@ -371,13 +161,9 @@ export function transformToMasterDatasetWithValidation(raw: RawDataset): Transfo }; const byProductAreaMap: Record = {}; - const relationshipIndex: Record = {}; - - // Sequence index for design review diagram generation const sequenceIndex: Record = {}; - // Architecture index for diagram generation const archIndex: ArchIndex = { byRole: {}, byContext: {}, @@ -391,52 +177,47 @@ export function transformToMasterDatasetWithValidation(raw: RawDataset): Transfo // ───────────────────────────────────────────────────────────────────────── for (const pattern of patterns) { - // Reference for accumulation - const p = pattern; - // ─── Status grouping ─────────────────────────────────────────────────── const status = normalizeStatus(pattern.status); - byStatus[status].push(p); + byStatus[status].push(pattern); // ─── Phase grouping ──────────────────────────────────────────────────── if (pattern.phase !== undefined) { const existing = byPhaseMap.get(pattern.phase) ?? 
[]; - existing.push(p); + existing.push(pattern); byPhaseMap.set(pattern.phase, existing); - - // Also add to roadmap view (patterns with phase are roadmap items) - bySource.roadmap.push(p); + bySource.roadmap.push(pattern); } // ─── Quarter grouping ────────────────────────────────────────────────── if (pattern.quarter) { const quarter = pattern.quarter; const quarterPatterns = (byQuarter[quarter] ??= []); - quarterPatterns.push(p); + quarterPatterns.push(pattern); } // ─── Category grouping ───────────────────────────────────────────────── const category = pattern.category; const categoryPatterns = byCategoryMap.get(category) ?? []; - categoryPatterns.push(p); + categoryPatterns.push(pattern); byCategoryMap.set(category, categoryPatterns); // ─── Source grouping ─────────────────────────────────────────────────── - if (pattern.source.file.endsWith('.feature')) { - bySource.gherkin.push(p); + if (pattern.source.file.endsWith('.feature') || pattern.source.file.endsWith('.feature.md')) { + bySource.gherkin.push(pattern); } else { - bySource.typescript.push(p); + bySource.typescript.push(pattern); } // ─── PRD grouping (has productArea, userRole, or businessValue) ──────── if (pattern.productArea || pattern.userRole || pattern.businessValue) { - bySource.prd.push(p); + bySource.prd.push(pattern); } // ─── Product area grouping ────────────────────────────────────────── if (pattern.productArea) { const areaPatterns = (byProductAreaMap[pattern.productArea] ??= []); - areaPatterns.push(p); + areaPatterns.push(pattern); } // ─── Relationship index ──────────────────────────────────────────────── @@ -446,18 +227,15 @@ export function transformToMasterDatasetWithValidation(raw: RawDataset): Transfo usedBy: [...(pattern.usedBy ?? [])], dependsOn: [...(pattern.dependsOn ?? [])], enables: [...(pattern.enables ?? [])], - // UML-inspired relationship fields (PatternRelationshipModel) implementsPatterns: [...(pattern.implementsPatterns ?? 
[])], - implementedBy: [], // Computed in second pass + implementedBy: [], // Computed by buildReverseLookups extendsPattern: pattern.extendsPattern, - extendedBy: [], // Computed in second pass - // Cross-reference and API navigation fields (PatternRelationshipModel enhancement) + extendedBy: [], // Computed by buildReverseLookups seeAlso: [...(pattern.seeAlso ?? [])], apiRef: [...(pattern.apiRef ?? [])], }; // ─── Architecture index (for diagram generation) ────────────────────── - // Infer context from file path if not explicitly set const inferredContext = pattern.archContext ?? inferContext(pattern.source.file, contextInferenceRules); @@ -467,33 +245,28 @@ export function transformToMasterDatasetWithValidation(raw: RawDataset): Transfo pattern.archLayer !== undefined || (pattern.include !== undefined && pattern.include.length > 0); if (hasArchMetadata) { - archIndex.all.push(p); + archIndex.all.push(pattern); - // Group by role (bounded-context, projection, saga, etc.) if (pattern.archRole) { const rolePatterns = (archIndex.byRole[pattern.archRole] ??= []); - rolePatterns.push(p); + rolePatterns.push(pattern); } - // Group by context (orders, inventory, etc.) 
for subgraph rendering - // Uses explicit archContext OR inferred context from file path if (inferredContext) { const contextPatterns = (archIndex.byContext[inferredContext] ??= []); - contextPatterns.push(p); + contextPatterns.push(pattern); } - // Group by layer (domain, application, infrastructure) if (pattern.archLayer) { const layerPatterns = (archIndex.byLayer[pattern.archLayer] ??= []); - layerPatterns.push(p); + layerPatterns.push(pattern); } - // Group by view (patterns can appear in multiple named views via include tag) if (pattern.include) { for (const view of pattern.include) { if (view.length === 0) continue; const viewPatterns = (archIndex.byView[view] ??= []); - viewPatterns.push(p); + viewPatterns.push(pattern); } } } @@ -535,116 +308,13 @@ export function transformToMasterDatasetWithValidation(raw: RawDataset): Transfo // Second pass: compute reverse lookups (implementedBy, extendedBy, enables, usedBy) // ───────────────────────────────────────────────────────────────────────── - // We iterate over patterns again to have access to source.file for implementedBy - for (const pattern of patterns) { - const patternKey = getPatternName(pattern); - const entry = relationshipIndex[patternKey]; - if (!entry) continue; - - // Build implementedBy reverse lookup with full ImplementationRef - for (const implemented of entry.implementsPatterns) { - const target = relationshipIndex[implemented]; - if (target) { - // Check if this implementation is already added (by name) - const alreadyAdded = target.implementedBy.some( - (impl: ImplementationRef) => impl.name === patternKey - ); - if (!alreadyAdded) { - // Extract first line of description if available, truncate to 100 chars - const desc = pattern.directive.description; - const firstLine = desc ? desc.split('\n')[0]?.trim() : undefined; - const description = - firstLine && firstLine.length > 0 - ? firstLine.slice(0, 100) + (firstLine.length > 100 ? '...' 
: '') - : undefined; - - target.implementedBy.push({ - name: patternKey, - file: pattern.source.file, - description, - }); - } - } - } - - // Build extendedBy reverse lookup (still uses string names) - if (entry.extendsPattern) { - const target = relationshipIndex[entry.extendsPattern]; - if (target && !target.extendedBy.includes(patternKey)) { - target.extendedBy.push(patternKey); - } - } - - // Build enables reverse lookup (dependsOn -> enables) - for (const dep of entry.dependsOn) { - const target = relationshipIndex[dep]; - if (target && !target.enables.includes(patternKey)) { - target.enables.push(patternKey); - } - } - - // Build usedBy reverse lookup (uses -> usedBy) - for (const used of entry.uses) { - const target = relationshipIndex[used]; - if (target && !target.usedBy.includes(patternKey)) { - target.usedBy.push(patternKey); - } - } - } - - // Sort reverse-computed arrays for consistent output - for (const entry of Object.values(relationshipIndex)) { - entry.implementedBy.sort((a: ImplementationRef, b: ImplementationRef) => - a.file.localeCompare(b.file) - ); - entry.enables.sort((a, b) => a.localeCompare(b)); - entry.usedBy.sort((a, b) => a.localeCompare(b)); - } + buildReverseLookups(patterns, relationshipIndex); // ───────────────────────────────────────────────────────────────────────── // Third pass: detect dangling references in relationship fields // ───────────────────────────────────────────────────────────────────────── - for (const pattern of patterns) { - const patternKey = getPatternName(pattern); - - // Check 'uses' references - for (const ref of pattern.uses ?? []) { - if (!allPatternNames.has(ref)) { - danglingReferences.push({ pattern: patternKey, field: 'uses', missing: ref }); - } - } - - // Check 'dependsOn' references - for (const ref of pattern.dependsOn ?? 
[]) { - if (!allPatternNames.has(ref)) { - danglingReferences.push({ pattern: patternKey, field: 'dependsOn', missing: ref }); - } - } - - // Check 'implementsPatterns' references - for (const ref of pattern.implementsPatterns ?? []) { - if (!allPatternNames.has(ref)) { - danglingReferences.push({ pattern: patternKey, field: 'implementsPatterns', missing: ref }); - } - } - - // Check 'extendsPattern' reference - if (pattern.extendsPattern && !allPatternNames.has(pattern.extendsPattern)) { - danglingReferences.push({ - pattern: patternKey, - field: 'extendsPattern', - missing: pattern.extendsPattern, - }); - } - - // Check 'seeAlso' references - for (const ref of pattern.seeAlso ?? []) { - if (!allPatternNames.has(ref)) { - danglingReferences.push({ pattern: patternKey, field: 'seeAlso', missing: ref }); - } - } - } + const danglingReferences = detectDanglingReferences(patterns, allPatternNames); // ───────────────────────────────────────────────────────────────────────── // Build phase groups with counts (sorted by phase number) @@ -653,9 +323,7 @@ export function transformToMasterDatasetWithValidation(raw: RawDataset): Transfo const byPhase: PhaseGroup[] = Array.from(byPhaseMap.entries()) .sort(([a], [b]) => a - b) .map(([phaseNumber, phasePatterns]) => { - // Try workflow config first, then derive from patterns const workflowPhaseName = workflow?.config.phases.find((p) => p.order === phaseNumber)?.name; - // If no workflow name, use the first pattern's name (often the phase has one primary pattern) const firstPattern = phasePatterns[0]; const derivedName = firstPattern?.name; @@ -668,15 +336,11 @@ export function transformToMasterDatasetWithValidation(raw: RawDataset): Transfo }); // ───────────────────────────────────────────────────────────────────────── - // Convert category map to record + // Assemble final MasterDataset // ───────────────────────────────────────────────────────────────────────── const byCategory = Object.fromEntries(byCategoryMap); - // 
───────────────────────────────────────────────────────────────────────── - // Compute aggregate counts - // ───────────────────────────────────────────────────────────────────────── - const counts: StatusCounts = { completed: byStatus.completed.length, active: byStatus.active.length, @@ -684,10 +348,6 @@ export function transformToMasterDatasetWithValidation(raw: RawDataset): Transfo total: patterns.length, }; - // ───────────────────────────────────────────────────────────────────────── - // Build validation summary - // ───────────────────────────────────────────────────────────────────────── - const unknownStatuses = [...unknownStatusSet]; const validation: ValidationSummary = { totalPatterns: patterns.length, @@ -697,10 +357,6 @@ export function transformToMasterDatasetWithValidation(raw: RawDataset): Transfo warningCount: malformedPatterns.length + danglingReferences.length + unknownStatuses.length, }; - // ───────────────────────────────────────────────────────────────────────── - // Return assembled MasterDataset with validation - // ───────────────────────────────────────────────────────────────────────── - const dataset: RuntimeMasterDataset = { patterns: patterns as ExtractedPattern[], tagRegistry, @@ -714,13 +370,10 @@ export function transformToMasterDatasetWithValidation(raw: RawDataset): Transfo phaseCount: byPhaseMap.size, categoryCount: byCategoryMap.size, relationshipIndex, - // Only include archIndex if it has content ...(archIndex.all.length > 0 && { archIndex }), - // Only include sequenceIndex if it has content ...(Object.keys(sequenceIndex).length > 0 && { sequenceIndex }), }; - // Only include workflow if defined (exactOptionalPropertyTypes compliance) if (workflow !== undefined) { return { dataset: { ...dataset, workflow }, validation }; } @@ -730,9 +383,6 @@ export function transformToMasterDatasetWithValidation(raw: RawDataset): Transfo /** * Compute status counts for a subset of patterns - * - * @param patterns - Patterns to count - * 
@returns Status counts including total */ function computeCounts(patterns: readonly ExtractedPattern[]): StatusCounts { let completed = 0; @@ -756,9 +406,6 @@ function computeCounts(patterns: readonly ExtractedPattern[]): StatusCounts { /** * Compute completion percentage from status counts - * - * @param counts - Status counts - * @returns Percentage (0-100) of completed items */ export function completionPercentage(counts: StatusCounts): number { if (counts.total === 0) return 0; @@ -767,9 +414,6 @@ export function completionPercentage(counts: StatusCounts): number { /** * Check if all items in a phase/group are completed - * - * @param counts - Status counts - * @returns True if all items are completed */ export function isFullyCompleted(counts: StatusCounts): boolean { return counts.total > 0 && counts.completed === counts.total; diff --git a/src/generators/pipeline/transform-types.ts b/src/generators/pipeline/transform-types.ts new file mode 100644 index 00000000..9faad129 --- /dev/null +++ b/src/generators/pipeline/transform-types.ts @@ -0,0 +1,110 @@ +/** + * @libar-docs + * @libar-docs-pattern TransformTypes + * @libar-docs-status active + * @libar-docs-arch-role types + * @libar-docs-arch-context generator + * @libar-docs-arch-layer application + * @libar-docs-used-by TransformDataset, Orchestrator + * @libar-docs-uses MasterDataset, LoadedWorkflow, ExtractedPattern, TagRegistry, ContextInferenceRule + * + * ## TransformTypes - MasterDataset Transformation Types + * + * Type definitions for the dataset transformation pipeline. + * Separated from transform-dataset.ts to allow importing types + * without pulling in the transformation logic. 
+ */ + +import type { MasterDataset } from '../../validation-schemas/master-dataset.js'; +import type { LoadedWorkflow } from '../../config/workflow-loader.js'; +import type { ExtractedPattern, TagRegistry } from '../../validation-schemas/index.js'; +import type { ContextInferenceRule } from './context-inference.js'; + +/** + * Information about a malformed pattern that failed schema validation. + */ +export interface MalformedPattern { + /** Pattern ID or name for identification */ + patternId: string; + /** List of validation issues found */ + issues: string[]; +} + +/** + * Information about a dangling reference (reference to non-existent pattern). + */ +export interface DanglingReference { + /** The pattern containing the dangling reference */ + pattern: string; + /** The field containing the dangling reference (e.g., "uses", "dependsOn") */ + field: string; + /** The referenced pattern name that doesn't exist */ + missing: string; +} + +/** + * Summary of validation results from dataset transformation. + * + * Provides structured information about data quality issues encountered + * during transformation, enabling upstream error handling and reporting. + */ +export interface ValidationSummary { + /** Total number of patterns processed */ + totalPatterns: number; + + /** Patterns that failed schema validation */ + malformedPatterns: MalformedPattern[]; + + /** References to patterns that don't exist in the dataset */ + danglingReferences: DanglingReference[]; + + /** Status values that were not recognized (normalized to 'planned') */ + unknownStatuses: string[]; + + /** Total count of all warnings (malformed + dangling + unknown statuses) */ + warningCount: number; +} + +/** + * Result of transformToMasterDataset including both dataset and validation info. 
+ */ +export interface TransformResult { + /** The transformed MasterDataset */ + dataset: RuntimeMasterDataset; + + /** Validation summary with any issues found during transformation */ + validation: ValidationSummary; +} + +/** + * Runtime MasterDataset with optional workflow + * + * Extends the Zod-compatible MasterDataset with workflow reference. + * LoadedWorkflow contains Maps which aren't JSON-serializable, + * so it's kept separate from the Zod schema. + * + * @libar-docs-shape master-dataset + */ +export interface RuntimeMasterDataset extends MasterDataset { + /** Optional workflow configuration (not serializable) */ + readonly workflow?: LoadedWorkflow; +} + +/** + * Raw input data for transformation + * + * @libar-docs-shape master-dataset + */ +export interface RawDataset { + /** Extracted patterns from TypeScript and/or Gherkin sources */ + readonly patterns: readonly ExtractedPattern[]; + + /** Tag registry for category lookups */ + readonly tagRegistry: TagRegistry; + + /** Optional workflow configuration for phase names (can be undefined) */ + readonly workflow?: LoadedWorkflow | undefined; + + /** Optional rules for inferring bounded context from file paths */ + readonly contextInferenceRules?: readonly ContextInferenceRule[] | undefined; +} diff --git a/src/git/branch-diff.ts b/src/git/branch-diff.ts new file mode 100644 index 00000000..e69cb429 --- /dev/null +++ b/src/git/branch-diff.ts @@ -0,0 +1,61 @@ +/** + * @libar-docs + * @libar-docs-pattern GitBranchDiff + * @libar-docs-status active + * @libar-docs-arch-role utility + * @libar-docs-arch-context generator + * @libar-docs-arch-layer infrastructure + * @libar-docs-used-by Orchestrator + * + * ## GitBranchDiff - Pure Git Change Detection + * + * Provides lightweight git diff operations for determining which files changed + * relative to a base branch. 
This module exists to decouple the generators + * layer from the lint layer — the orchestrator needs file change lists for + * PR-scoped generation, but should not depend on Process Guard's domain-specific + * change detection (status transitions, deliverable changes). + * + * ### When to Use + * + * - When you need a list of changed files relative to a base branch + * - When orchestrating generation for only changed patterns + * + * ### When NOT to Use + * + * - For Process Guard validation — use detectBranchChanges from lint/process-guard + * - For status transition detection — use detectStagedChanges/detectBranchChanges + */ + +import type { Result } from '../types/index.js'; +import { Result as R } from '../types/index.js'; +import { execGitSafe, sanitizeBranchName } from './helpers.js'; +import { parseGitNameStatus } from './name-status.js'; + +/** + * Get all files changed relative to a base branch (excludes deleted files). + * + * This is a lightweight alternative to detectBranchChanges from lint/process-guard + * that returns only the file list without domain-specific parsing (status transitions, + * deliverable changes). Used by the orchestrator for PR-scoped generation. + * + * Deleted files are excluded because the consumer (orchestrator) uses this list + * to scope generation to files that still exist on the current branch. 
+ * + * @param baseDir - Repository base directory + * @param baseBranch - Branch to compare against (default: main) + * @returns Result containing array of changed file paths (modified + added), or error + */ +export function getChangedFilesList( + baseDir: string, + baseBranch = 'main' +): Result { + try { + const safeBranch = sanitizeBranchName(baseBranch); + const mergeBase = execGitSafe('merge-base', [safeBranch, 'HEAD'], baseDir).trim(); + const nameStatus = execGitSafe('diff', ['--name-status', '-z', mergeBase], baseDir); + const { modified, added } = parseGitNameStatus(nameStatus); + return R.ok([...modified, ...added]); + } catch (error) { + return R.err(error instanceof Error ? error : new Error(String(error))); + } +} diff --git a/src/git/helpers.ts b/src/git/helpers.ts new file mode 100644 index 00000000..d58bd5d7 --- /dev/null +++ b/src/git/helpers.ts @@ -0,0 +1,70 @@ +/** + * @libar-docs + * @libar-docs-pattern GitHelpers + * @libar-docs-status active + * @libar-docs-arch-role utility + * @libar-docs-arch-context generator + * @libar-docs-arch-layer infrastructure + * @libar-docs-used-by GitBranchDiff, DetectChanges + * + * ## GitHelpers - Shared Git Command Utilities + * + * Low-level helpers for safe git command execution and input sanitization. + * Used by both the generators layer (branch-diff) and the lint layer + * (detect-changes) to avoid duplicating security-critical code. + */ + +import { execFileSync } from 'child_process'; + +/** + * Maximum buffer size for git command output (50MB). + * Large enough to handle staging entire dist/ folders with source maps. + * Prevents ENOBUFS errors when diff output exceeds Node.js default (~1MB). + */ +export const GIT_MAX_BUFFER = 50 * 1024 * 1024; + +/** + * Execute a git subcommand safely using execFileSync (no shell interpolation). + * + * Uses execFileSync to bypass shell interpretation entirely, preventing + * metacharacter injection vulnerabilities. 
+ * + * @param subcommand - Git subcommand (e.g., 'merge-base', 'diff', 'ls-files') + * @param args - Array of arguments (never interpolated into a shell command) + * @param cwd - Working directory + * @returns Command output as string + */ +export function execGitSafe(subcommand: string, args: readonly string[], cwd: string): string { + return execFileSync('git', [subcommand, ...args], { + cwd, + encoding: 'utf-8', + stdio: ['pipe', 'pipe', 'pipe'], + maxBuffer: GIT_MAX_BUFFER, + }); +} + +/** + * Validate and sanitize a git branch name to prevent command injection. + * + * Allows only alphanumeric characters, dots, hyphens, underscores, and forward slashes. + * This matches the valid git branch name character set per git-check-ref-format. + * Excludes shell metacharacters: ; | & $ ` ( ) { } [ ] < > ! ~ ^ * ? " ' \ + * + * @param branch - Branch name to validate + * @returns The validated branch name (unchanged if valid) + * @throws Error if branch name contains invalid characters or path traversal + */ +export function sanitizeBranchName(branch: string): string { + // Reject leading hyphens to prevent git option injection (e.g., --help, -c) + if (branch.startsWith('-')) { + throw new Error(`Invalid branch name (starts with hyphen): ${branch}`); + } + if (!/^[a-zA-Z0-9._\-/]+$/.test(branch)) { + throw new Error(`Invalid branch name: ${branch}`); + } + // Prevent path traversal attempts in branch names + if (branch.includes('..')) { + throw new Error(`Invalid branch name (contains ..): ${branch}`); + } + return branch; +} diff --git a/src/git/index.ts b/src/git/index.ts new file mode 100644 index 00000000..1849bd9e --- /dev/null +++ b/src/git/index.ts @@ -0,0 +1,18 @@ +/** + * @libar-docs + * @libar-docs-pattern GitModule + * @libar-docs-status active + * @libar-docs-arch-role barrel + * @libar-docs-arch-context generator + * @libar-docs-arch-layer infrastructure + * @libar-docs-uses GitBranchDiff, GitHelpers + * + * ## Git Module - Pure Git Operations + * + * 
Shared git utilities used by both generators and lint layers. + * Decouples orchestrator from Process Guard's domain-specific change detection. + */ + +export { getChangedFilesList } from './branch-diff.js'; +export { parseGitNameStatus, type ParsedGitNameStatus } from './name-status.js'; +export { execGitSafe, sanitizeBranchName, GIT_MAX_BUFFER } from './helpers.js'; diff --git a/src/git/name-status.ts b/src/git/name-status.ts new file mode 100644 index 00000000..1acf4f49 --- /dev/null +++ b/src/git/name-status.ts @@ -0,0 +1,75 @@ +/** + * @libar-docs + * @libar-docs-pattern GitNameStatusParser + * @libar-docs-status active + * @libar-docs-arch-role utility + * @libar-docs-arch-context generator + * @libar-docs-arch-layer infrastructure + * @libar-docs-used-by GitBranchDiff, DetectChanges + * + * ## GitNameStatusParser - Shared Parsing for `git diff --name-status -z` + * + * Parses NUL-delimited git name-status output into categorized file lists. + * Using `-z` preserves filenames with spaces and rename/copy pairs without + * relying on whitespace splitting. + */ + +export interface ParsedGitNameStatus { + readonly modified: string[]; + readonly added: string[]; + readonly deleted: string[]; +} + +/** + * Parse NUL-delimited `git diff --name-status -z` output. 
+ * + * Git emits records as: + * - `M\0path\0` + * - `A\0path\0` + * - `D\0path\0` + * - `R100\0old_path\0new_path\0` + * - `C087\0source_path\0copy_path\0` + */ +export function parseGitNameStatus(output: string): ParsedGitNameStatus { + const modified: string[] = []; + const added: string[] = []; + const deleted: string[] = []; + + const tokens = output.split('\0'); + let index = 0; + + while (index < tokens.length) { + const status = tokens[index++]; + if (!status) continue; + + const kind = status[0]; + if (!kind) continue; + + if (kind === 'R' || kind === 'C') { + const oldPath = tokens[index++]; + const newPath = tokens[index++]; + if (!oldPath || !newPath) continue; + modified.push(newPath); + continue; + } + + const filePath = tokens[index++]; + if (!filePath) continue; + + switch (kind) { + case 'M': + modified.push(filePath); + break; + case 'A': + added.push(filePath); + break; + case 'D': + deleted.push(filePath); + break; + default: + break; + } + } + + return { modified, added, deleted }; +} diff --git a/src/lint/process-guard/detect-changes.ts b/src/lint/process-guard/detect-changes.ts index 986daba6..67d4e745 100644 --- a/src/lint/process-guard/detect-changes.ts +++ b/src/lint/process-guard/detect-changes.ts @@ -30,11 +30,11 @@ * - When detecting scope creep (new deliverables) */ -import { execFileSync } from 'child_process'; import * as path from 'path'; import type { Result } from '../../types/index.js'; import { Result as R } from '../../types/index.js'; import { PROCESS_STATUS_VALUES, type ProcessStatusValue } from '../../taxonomy/index.js'; +import { execGitSafe, sanitizeBranchName, parseGitNameStatus } from '../../git/index.js'; import type { ChangeDetection, StatusTransition, @@ -45,13 +45,6 @@ import { DEFAULT_TAG_PREFIX } from '../../config/defaults.js'; import { DEFAULT_STATUS } from '../../taxonomy/status-values.js'; import type { WithTagRegistry } from '../../validation/types.js'; -/** - * Maximum buffer size for git command output 
(50MB). - * Large enough to handle staging entire dist/ folders with source maps. - * Prevents ENOBUFS errors when diff output exceeds Node.js default (~1MB). - */ -const GIT_MAX_BUFFER = 50 * 1024 * 1024; - /** * Options for change detection functions. * @@ -87,8 +80,8 @@ export function detectStagedChanges( try { // Get list of staged files with status - const nameStatus = execGitSafe('diff', ['--cached', '--name-status'], baseDir); - const { modified, added, deleted } = parseNameStatus(nameStatus); + const nameStatus = execGitSafe('diff', ['--cached', '--name-status', '-z'], baseDir); + const { modified, added, deleted } = parseGitNameStatus(nameStatus); // Get full diff for content analysis const diff = execGitSafe('diff', ['--cached'], baseDir); @@ -134,8 +127,8 @@ export function detectBranchChanges( const mergeBase = execGitSafe('merge-base', [safeBranch, 'HEAD'], baseDir).trim(); // Get list of changed files - const nameStatus = execGitSafe('diff', ['--name-status', mergeBase], baseDir); - const { modified, added, deleted } = parseNameStatus(nameStatus); + const nameStatus = execGitSafe('diff', ['--name-status', '-z', mergeBase], baseDir); + const { modified, added, deleted } = parseGitNameStatus(nameStatus); // Get full diff const diff = execGitSafe('diff', [mergeBase], baseDir); @@ -214,95 +207,6 @@ export function detectFileChanges( } // ============================================================================= -// Git Helpers -// ============================================================================= - -/** - * Execute a git command safely using execFileSync to prevent command injection. - * - * Uses array-based arguments instead of string interpolation to avoid shell - * metacharacter injection vulnerabilities. 
- * - * @param subcommand - Git subcommand (e.g., 'merge-base', 'diff', 'ls-files') - * @param args - Array of arguments (never interpolated into a shell command) - * @param cwd - Working directory - * @returns Command output as string - */ -function execGitSafe(subcommand: string, args: readonly string[], cwd: string): string { - return execFileSync('git', [subcommand, ...args], { - cwd, - encoding: 'utf-8', - stdio: ['pipe', 'pipe', 'pipe'], - maxBuffer: GIT_MAX_BUFFER, - }); -} - -/** - * Validate and sanitize a git branch name to prevent command injection. - * - * Allows only alphanumeric characters, dots, hyphens, underscores, and forward slashes. - * This matches the valid git branch name character set per git-check-ref-format. - * - * @param branch - Branch name to validate - * @returns The validated branch name (unchanged if valid) - * @throws Error if branch name contains invalid characters - */ -function sanitizeBranchName(branch: string): string { - // Git branch names: alphanumeric, dots, hyphens, underscores, forward slashes - // Excludes shell metacharacters: ; | & $ ` ( ) { } [ ] < > ! ~ ^ * ? " ' \ - if (!/^[a-zA-Z0-9._\-/]+$/.test(branch)) { - throw new Error(`Invalid branch name: ${branch}`); - } - // Prevent path traversal attempts in branch names - if (branch.includes('..')) { - throw new Error(`Invalid branch name (contains ..): ${branch}`); - } - return branch; -} - -/** - * Parse git name-status output into file lists. 
- */ -function parseNameStatus(output: string): { - modified: string[]; - added: string[]; - deleted: string[]; -} { - const modified: string[] = []; - const added: string[] = []; - const deleted: string[] = []; - - for (const line of output.split('\n')) { - const trimmed = line.trim(); - if (!trimmed) continue; - - const [status, ...pathParts] = trimmed.split(/\s+/); - const filePath = pathParts.join(' '); - - if (!filePath) continue; - - switch (status) { - case 'M': - modified.push(filePath); - break; - case 'A': - added.push(filePath); - break; - case 'D': - deleted.push(filePath); - break; - case 'R': - case 'C': - // Renamed/Copied: path is "old -> new" - const newPath = filePath.includes('->') ? filePath.split('->')[1]?.trim() : filePath; - if (newPath) modified.push(newPath); - break; - } - } - - return { modified, added, deleted }; -} - // ============================================================================= // Status Transition Detection // ============================================================================= diff --git a/src/renderable/codecs/architecture.ts b/src/renderable/codecs/architecture.ts index 6577871d..934e5496 100644 --- a/src/renderable/codecs/architecture.ts +++ b/src/renderable/codecs/architecture.ts @@ -26,6 +26,7 @@ * | includeInventory | boolean | true | Include component inventory table | * | includeLegend | boolean | true | Include legend for arrow styles | * | filterContexts | string[] | [] | Filter to specific contexts (empty = all) | + * | diagramKeyComponentsOnly | boolean | true | Only show components with archRole in diagrams | * * ### When to Use * @@ -100,6 +101,14 @@ export interface ArchitectureCodecOptions extends BaseCodecOptions { /** Filter to specific contexts (default: all contexts) */ filterContexts?: string[]; + + /** + * Only include patterns with an explicit archRole in diagrams (default: true). 
+ * Patterns without a role (barrel exports, type-only modules, ADRs, test features) + * add noise to diagrams without conveying architectural significance. + * The component inventory table always shows all patterns regardless. + */ + diagramKeyComponentsOnly?: boolean; } /** @@ -111,6 +120,7 @@ export const DEFAULT_ARCHITECTURE_OPTIONS: Required = includeInventory: true, includeLegend: true, filterContexts: [], + diagramKeyComponentsOnly: true, }; // ═══════════════════════════════════════════════════════════════════════════ @@ -194,22 +204,29 @@ function buildArchitectureDocument( // Apply context filter if specified const filteredIndex = applyContextFilter(archIndex, options.filterContexts); + // 2. Filter for diagram: only key components (with archRole) if enabled + const diagramIndex = options.diagramKeyComponentsOnly + ? filterToKeyComponents(filteredIndex) + : filteredIndex; + // 1. Summary section - sections.push(...buildSummarySection(filteredIndex)); + sections.push( + ...buildSummarySection(diagramIndex, filteredIndex.all.length, options.diagramKeyComponentsOnly) + ); - // 2. Main diagram based on type + // 3. Main diagram based on type if (options.diagramType === 'component') { - sections.push(...buildComponentDiagram(filteredIndex, dataset)); + sections.push(...buildComponentDiagram(diagramIndex, dataset)); } else { - sections.push(...buildLayeredDiagram(filteredIndex, dataset)); + sections.push(...buildLayeredDiagram(diagramIndex, dataset)); } - // 3. Legend (if enabled) + // 4. Legend (if enabled) if (options.includeLegend) { sections.push(...buildLegendSection()); } - // 4. Component inventory (if enabled) + // 5. Component inventory (if enabled) — uses full filteredIndex, not diagramIndex if (options.includeInventory) { sections.push(...buildInventorySection(filteredIndex)); } @@ -289,6 +306,59 @@ function applyContextFilter( }; } +/** + * Filter architecture index to only include patterns with an explicit archRole. 
+ * Patterns without a role (barrel exports, type modules, ADRs, test features) + * are excluded from diagrams but remain in the component inventory. + */ +function filterToKeyComponents( + archIndex: NonNullable +): NonNullable { + const hasRole = (p: ExtractedPattern): boolean => p.archRole !== undefined; + + const filteredAll = archIndex.all.filter(hasRole); + + const filteredByContext: Record = {}; + for (const [ctx, patterns] of Object.entries(archIndex.byContext)) { + const filtered = patterns.filter(hasRole); + if (filtered.length > 0) { + filteredByContext[ctx] = filtered; + } + } + + const filteredByRole: Record = {}; + for (const [role, patterns] of Object.entries(archIndex.byRole)) { + const filtered = patterns.filter(hasRole); + if (filtered.length > 0) { + filteredByRole[role] = filtered; + } + } + + const filteredByLayer: Record = {}; + for (const [layer, patterns] of Object.entries(archIndex.byLayer)) { + const filtered = patterns.filter(hasRole); + if (filtered.length > 0) { + filteredByLayer[layer] = filtered; + } + } + + const filteredByView: Record = {}; + for (const [view, patterns] of Object.entries(archIndex.byView)) { + const filtered = patterns.filter(hasRole); + if (filtered.length > 0) { + filteredByView[view] = filtered; + } + } + + return { + byContext: filteredByContext, + byRole: filteredByRole, + byLayer: filteredByLayer, + byView: filteredByView, + all: filteredAll, + }; +} + // ═══════════════════════════════════════════════════════════════════════════ // Section Builders // ═══════════════════════════════════════════════════════════════════════════ @@ -296,25 +366,35 @@ function applyContextFilter( /** * Build summary section with component counts */ -function buildSummarySection(archIndex: NonNullable): SectionBlock[] { - const contextCount = Object.keys(archIndex.byContext).length; - const roleCount = Object.keys(archIndex.byRole).length; - const totalComponents = archIndex.all.length; +function buildSummarySection( + 
diagramIndex: NonNullable, + totalAnnotated: number, + keyComponentsOnly: boolean +): SectionBlock[] { + const contextCount = Object.keys(diagramIndex.byContext).length; + const roleCount = Object.keys(diagramIndex.byRole).length; + const diagramComponents = diagramIndex.all.length; + + const rows: string[][] = [ + ['Diagram Components', String(diagramComponents)], + ['Bounded Contexts', String(contextCount)], + ['Component Roles', String(roleCount)], + ]; + + if (totalAnnotated !== diagramComponents) { + rows.push(['Total Annotated', String(totalAnnotated)]); + } + + const description = keyComponentsOnly + ? `This diagram shows ${diagramComponents} key components with explicit architectural roles ` + + `across ${contextCount} bounded context${contextCount !== 1 ? 's' : ''}.` + : `This diagram shows all ${diagramComponents} annotated components ` + + `across ${contextCount} bounded context${contextCount !== 1 ? 's' : ''}.`; return [ heading(2, 'Overview'), - paragraph( - `This diagram was auto-generated from ${totalComponents} annotated source files ` + - `across ${contextCount} bounded context${contextCount !== 1 ? 
's' : ''}.` - ), - table( - ['Metric', 'Count'], - [ - ['Total Components', String(totalComponents)], - ['Bounded Contexts', String(contextCount)], - ['Component Roles', String(roleCount)], - ] - ), + paragraph(description), + table(['Metric', 'Count'], rows), separator(), ]; } diff --git a/tests/features/behavior/architecture-diagrams/component-diagram.feature b/tests/features/behavior/architecture-diagrams/component-diagram.feature index d643c3c0..1f2487c2 100644 --- a/tests/features/behavior/architecture-diagrams/component-diagram.feature +++ b/tests/features/behavior/architecture-diagrams/component-diagram.feature @@ -133,7 +133,7 @@ Feature: Component Diagram Generation Then the document contains elements: | text | | ## Overview | - | 3 annotated source files | + | 3 key components | | 2 bounded context | Rule: Component diagram includes legend when enabled diff --git a/tests/features/behavior/architecture-diagrams/layered-diagram.feature b/tests/features/behavior/architecture-diagrams/layered-diagram.feature index d21b075c..cadc8ffa 100644 --- a/tests/features/behavior/architecture-diagrams/layered-diagram.feature +++ b/tests/features/behavior/architecture-diagrams/layered-diagram.feature @@ -122,5 +122,5 @@ Feature: Layered Architecture Diagram Generation Then the document contains elements: | text | | ## Overview | - | 2 annotated source files | + | 2 key components | diff --git a/tests/features/behavior/context-inference.feature b/tests/features/behavior/context-inference.feature index 8186cccf..395f5662 100644 --- a/tests/features/behavior/context-inference.feature +++ b/tests/features/behavior/context-inference.feature @@ -37,6 +37,7 @@ Feature: Context Auto-Inference from File Paths | pattern | filePath | expectedContext | | src/validation/** | src/validation/rules.ts | test-context | | src/validation/** | src/validation/deep/nested.ts | test-context | + | src/validation/** | src/validation2/file.ts | none | | src/validation/** | src/other/file.ts | 
none | | src/validation/** | other/validation/rules.ts | none | @@ -61,6 +62,7 @@ Feature: Context Auto-Inference from File Paths | pattern | filePath | expectedContext | | src/validation/* | src/validation/rules.ts | test-context | | src/validation/* | src/validation/deep/nested.ts | none | + | src/validation/* | src/validation2/file.ts | none | # ═══════════════════════════════════════════════════════════════════════════ # Pattern Matching - Prefix Matching diff --git a/tests/features/cli/data-api-cache.feature b/tests/features/cli/data-api-cache.feature index 630f33e8..c1d5dd64 100644 --- a/tests/features/cli/data-api-cache.feature +++ b/tests/features/cli/data-api-cache.feature @@ -25,7 +25,7 @@ Feature: Process API CLI - Dataset Cache When running status and capturing the first result And running status and capturing the second result Then the second result metadata has cache.hit true - And the second result pipelineMs is less than 500 + And the second result pipelineMs is less than the first @happy-path Scenario: Cache invalidated on source file change diff --git a/tests/features/types/deliverable-status.feature b/tests/features/types/deliverable-status.feature new file mode 100644 index 00000000..9d72345c --- /dev/null +++ b/tests/features/types/deliverable-status.feature @@ -0,0 +1,102 @@ +@libar-docs +@libar-docs-pattern:DeliverableStatusTaxonomyTesting +@libar-docs-status:active +@libar-docs-product-area:CoreTypes +@libar-docs-include:core-types +@taxonomy @deliverable +Feature: Deliverable Status Taxonomy + The deliverable status module defines the 6 canonical status values for + deliverables in Gherkin Background tables: complete, in-progress, pending, + deferred, superseded, n/a. It provides predicates for status classification + and terminal status checks for DoD validation. 
+ + Background: + Given a deliverable status test context + + Rule: isDeliverableStatusTerminal identifies terminal statuses for DoD validation + + **Invariant:** Only complete, n/a, and superseded are terminal. Deferred is NOT terminal because it implies unfinished work that should block DoD. + **Rationale:** Marking a pattern as completed when deliverables are merely deferred creates a hard-locked state with incomplete work, violating delivery process integrity. + **Verified by:** Terminal status classification + + @function:isDeliverableStatusTerminal @happy-path + Scenario Outline: Terminal status classification + When checking if "" is terminal + Then the terminal check result is "" + + Examples: + | status | isTerminal | + | complete | true | + | n/a | true | + | superseded | true | + | deferred | false | + | in-progress | false | + | pending | false | + + Rule: Status predicates classify individual deliverable states + + **Invariant:** isDeliverableStatusComplete, isDeliverableStatusInProgress, and isDeliverableStatusPending each match exactly one status value. + **Rationale:** Single-value predicates provide type-safe branching for consumers that need to distinguish specific states rather than terminal vs non-terminal groupings. 
+ **Verified by:** isDeliverableStatusComplete classification, isDeliverableStatusInProgress classification, isDeliverableStatusPending classification + + @function:isDeliverableStatusComplete @happy-path + Scenario Outline: isDeliverableStatusComplete classification + When checking if "" is complete + Then the predicate result is "" + + Examples: + | status | expected | + | complete | true | + | in-progress | false | + | pending | false | + | deferred | false | + | superseded | false | + | n/a | false | + + @function:isDeliverableStatusInProgress @happy-path + Scenario Outline: isDeliverableStatusInProgress classification + When checking if "" is in-progress + Then the predicate result is "" + + Examples: + | status | expected | + | in-progress | true | + | complete | false | + | pending | false | + | deferred | false | + | superseded | false | + | n/a | false | + + @function:isDeliverableStatusPending @happy-path + Scenario Outline: isDeliverableStatusPending classification + When checking if "" is pending + Then the predicate result is "" + + Examples: + | status | expected | + | pending | true | + | complete | false | + | in-progress | false | + | deferred | false | + | superseded | false | + | n/a | false | + + Rule: getDeliverableStatusEmoji returns display emoji for all statuses + + **Invariant:** getDeliverableStatusEmoji returns a non-empty string for all 6 canonical statuses. No status value is unmapped. + **Rationale:** Missing emoji mappings would cause empty display cells in generated documentation tables, breaking visual consistency. 
+ **Verified by:** Emoji mapping for all statuses + + @function:getDeliverableStatusEmoji @happy-path + Scenario Outline: Emoji mapping for all statuses + When getting the emoji for "" + Then the emoji is not empty + + Examples: + | status | + | complete | + | in-progress | + | pending | + | deferred | + | superseded | + | n/a | diff --git a/tests/features/types/normalized-status.feature b/tests/features/types/normalized-status.feature new file mode 100644 index 00000000..afdd1334 --- /dev/null +++ b/tests/features/types/normalized-status.feature @@ -0,0 +1,85 @@ +@libar-docs +@libar-docs-pattern:NormalizedStatusTesting +@libar-docs-status:active +@libar-docs-product-area:CoreTypes +@libar-docs-include:core-types +@taxonomy @status +Feature: Normalized Status Taxonomy + The normalized status module maps any status input — raw FSM states (roadmap, + active, completed, deferred), already-normalized values (planned), undefined, + or unknown strings — to exactly one of three display buckets (completed, + active, planned) for UI presentation and generated documentation output. + + Background: + Given a normalized status test context + + Rule: normalizeStatus maps raw FSM states to display buckets + + **Invariant:** normalizeStatus must map every raw FSM status to exactly one of three display buckets: completed, active, or planned. Unknown or undefined inputs default to planned. + **Rationale:** UI and generated documentation need a simplified status model; the raw 4-state FSM is an implementation detail that should not leak into display logic. 
+ **Verified by:** Status normalization, normalizeStatus defaults undefined to planned, normalizeStatus defaults unknown status to planned + + @function:normalizeStatus @happy-path + Scenario Outline: Status normalization + When normalizing status "" + Then the normalized status is "" + + Examples: + | rawStatus | normalizedStatus | + | completed | completed | + | active | active | + | roadmap | planned | + | deferred | planned | + | planned | planned | + + @function:normalizeStatus + Scenario: normalizeStatus defaults undefined to planned + When normalizing an undefined status + Then the normalized status is "planned" + + @function:normalizeStatus + Scenario: normalizeStatus defaults unknown status to planned + When normalizing status "unknown-value" + Then the normalized status is "planned" + + Rule: Pattern status predicates check normalized state + + **Invariant:** isPatternComplete, isPatternActive, and isPatternPlanned are mutually exclusive for any given status input. Exactly one returns true. + **Rationale:** Consumers branch on these predicates; overlapping true values would cause double-rendering or contradictory UI states. 
+ **Verified by:** isPatternComplete classification, isPatternActive classification, isPatternPlanned classification + + @function:isPatternComplete @happy-path + Scenario Outline: isPatternComplete classification + When checking isPatternComplete for "" + Then the predicate result is "" + + Examples: + | status | expected | + | completed | true | + | active | false | + | roadmap | false | + | deferred | false | + + @function:isPatternActive @happy-path + Scenario Outline: isPatternActive classification + When checking isPatternActive for "" + Then the predicate result is "" + + Examples: + | status | expected | + | active | true | + | completed | false | + | roadmap | false | + | deferred | false | + + @function:isPatternPlanned @happy-path + Scenario Outline: isPatternPlanned classification + When checking isPatternPlanned for "" + Then the predicate result is "" + + Examples: + | status | expected | + | roadmap | true | + | deferred | true | + | completed | false | + | active | false | diff --git a/tests/features/types/tag-registry-builder.feature b/tests/features/types/tag-registry-builder.feature new file mode 100644 index 00000000..75d40f01 --- /dev/null +++ b/tests/features/types/tag-registry-builder.feature @@ -0,0 +1,74 @@ +@libar-docs +@libar-docs-pattern:TagRegistryBuilderTesting +@libar-docs-status:active +@libar-docs-product-area:CoreTypes +@libar-docs-include:core-types +@taxonomy @registry +Feature: Tag Registry Builder + The tag registry builder constructs a complete TagRegistry from TypeScript + constants. It is the single source of truth for the delivery-process + annotation taxonomy, providing tag definitions, categories, and format + options used by scanners and extractors. 
+ + Background: + Given a tag registry test context + + Rule: buildRegistry returns a well-formed TagRegistry + + **Invariant:** buildRegistry always returns a TagRegistry with version, categories, metadataTags, aggregationTags, formatOptions, tagPrefix, and fileOptInTag properties. + **Rationale:** All downstream consumers (scanner, extractor, validator) depend on registry structure. A malformed registry would cause silent extraction failures across the entire pipeline. + **Verified by:** Registry has correct version, Registry has expected category count, Registry has required metadata tags + + @function:buildRegistry @happy-path + Scenario: Registry has correct version + When I build the tag registry + Then the registry version is "2.0.0" + + @function:buildRegistry @happy-path + Scenario: Registry has expected category count + When I build the tag registry + Then the registry has 21 categories + + @function:buildRegistry @happy-path + Scenario: Registry has required metadata tags + When I build the tag registry + Then the registry contains these metadata tags: + | tag | format | + | pattern | value | + | status | enum | + | phase | number | + | core | flag | + + Rule: Metadata tags have correct configuration + + **Invariant:** The pattern tag is required, the status tag has a default value, and tags with transforms apply them correctly. + **Rationale:** Misconfigured tag metadata would cause the extractor to skip required fields or apply wrong defaults, producing silently corrupt patterns. 
+ **Verified by:** Pattern tag is marked as required, Status tag has default value, Transform functions work correctly + + @function:buildRegistry + Scenario: Pattern tag is marked as required + When I build the tag registry + Then the metadata tag "pattern" has required set to true + + @function:buildRegistry + Scenario: Status tag has default value + When I build the tag registry + Then the metadata tag "status" has a default value + + @function:buildRegistry + Scenario: Transform functions work correctly + When I build the tag registry + Then the metadata tag "business-value" has a transform function + And applying the "business-value" transform to "eliminates-event-replay" produces "eliminates event replay" + + Rule: Registry includes standard prefixes and opt-in tag + + **Invariant:** tagPrefix is the standard annotation prefix and fileOptInTag is the bare opt-in marker. These are non-empty strings. + **Rationale:** Changing these values without updating all annotated files would break scanner opt-in detection across the entire monorepo. + **Verified by:** Registry has standard tag prefix and opt-in tag + + @function:buildRegistry + Scenario: Registry has standard tag prefix and opt-in tag + When I build the tag registry + Then the tag prefix is not empty + And the file opt-in tag is not empty diff --git a/tests/features/utils/file-cache.feature b/tests/features/utils/file-cache.feature new file mode 100644 index 00000000..56e95fd2 --- /dev/null +++ b/tests/features/utils/file-cache.feature @@ -0,0 +1,84 @@ +@libar-docs +@libar-docs-pattern:FileCacheTesting +@libar-docs-status:active +@libar-docs-product-area:CoreTypes +@libar-docs-include:core-types +@cache @utils +Feature: File Cache + The file cache provides request-scoped content caching for generation runs. + It avoids repeated disk reads for files accessed multiple times during + extraction and deduplication phases. 
+ + Background: + Given a file cache test context + + Rule: Store and retrieve round-trip preserves content + + **Invariant:** Content stored via set is returned identically by get. No transformation or encoding occurs. + **Rationale:** File content must survive caching verbatim; any mutation would cause extraction to produce different results on cache hits vs misses. + **Verified by:** Store and retrieve returns same content, Non-existent path returns undefined + + @function:createFileCache @happy-path + Scenario: Store and retrieve returns same content + When I store content "hello world" at path "/tmp/test.ts" + Then retrieving path "/tmp/test.ts" returns "hello world" + + @function:createFileCache + Scenario: Non-existent path returns undefined + When I retrieve a non-existent path "/tmp/nonexistent.ts" + Then the retrieved content is undefined + + Rule: has checks membership without affecting stats + + **Invariant:** has returns true for cached paths and false for uncached paths. It does not increment hit or miss counters. + **Rationale:** has is used for guard checks before get; double-counting would inflate stats and misrepresent actual cache effectiveness. + **Verified by:** has returns true for cached path, has returns false for uncached path + + @function:createFileCache + Scenario: has returns true for cached path + When I store content "data" at path "/tmp/cached.ts" + Then has returns true for path "/tmp/cached.ts" + + @function:createFileCache + Scenario: has returns false for uncached path + Then has returns false for path "/tmp/missing.ts" + + Rule: Stats track hits and misses accurately + + **Invariant:** Every get call increments either hits or misses. hitRate is computed as (hits / total) * 100 with a zero-division guard returning 0 when total is 0. + **Rationale:** Accurate stats enable performance analysis of generation runs; incorrect counts would lead to wrong caching decisions. 
+ **Verified by:** Stats track hits and misses, Hit rate starts at zero for empty cache, Hit rate is 100 when all gets are hits + + @function:createFileCache @happy-path + Scenario: Stats track hits and misses + When I store content "data" at path "/tmp/a.ts" + And I perform a get on cached path "/tmp/a.ts" + And I perform a get on uncached path "/tmp/b.ts" + Then the stats show 1 hit and 1 miss + And the stats show size 1 + + @function:createFileCache + Scenario: Hit rate starts at zero for empty cache + Then the hit rate is 0 + + @function:createFileCache + Scenario: Hit rate is 100 when all gets are hits + When I store content "data" at path "/tmp/x.ts" + And I perform a get on path "/tmp/x.ts" + Then the hit rate is 100 + + Rule: Clear resets cache and stats + + **Invariant:** clear removes all cached entries and resets hit/miss counters to zero. + **Rationale:** Per-run scoping requires a clean slate; stale entries from a previous run would cause the extractor to use outdated content. + **Verified by:** Clear resets everything + + @function:createFileCache + Scenario: Clear resets everything + When I store content "data" at path "/tmp/c.ts" + And I perform a get on path "/tmp/c.ts" + And I clear the cache + Then the stats show 0 hits and 0 misses + And the stats show size 0 + When I retrieve a non-existent path "/tmp/c.ts" + Then the retrieved content is undefined diff --git a/tests/features/utils/git-branch-diff.feature b/tests/features/utils/git-branch-diff.feature new file mode 100644 index 00000000..bd9c14d9 --- /dev/null +++ b/tests/features/utils/git-branch-diff.feature @@ -0,0 +1,74 @@ +@libar-docs +@libar-docs-pattern:GitBranchDiffTesting +@libar-docs-status:active +@libar-docs-product-area:Generation +@libar-docs-implements:GitBranchDiff +@git @branch-diff +Feature: Git Branch Diff + The branch diff utility returns changed files relative to a base branch for + PR-scoped generation. 
It must exclude deleted files from the returned list + while preserving filenames exactly, including rename/copy targets and paths + containing spaces. + + Background: + Given a git branch diff test context + + Rule: getChangedFilesList returns only existing changed files + + **Invariant:** Modified and added files are returned, while deleted tracked files are excluded from the final list. + **Rationale:** PR-scoped generation only needs files that still exist on the current branch; including deleted paths would force consumers to chase files that cannot be read. + **Verified by:** Modified and added files are returned while deleted files are excluded + + @happy-path + Scenario: Modified and added files are returned while deleted files are excluded + Given an initialized git repository + And these committed files exist: + | file | content | + | src/keep.ts | export const keep = 1; | + | src/remove.ts | export const remove = 1; | + When I modify file "src/keep.ts" to "export const keep = 2;" + And I add file "src/new.ts" with content "export const created = 1;" + And I delete file "src/remove.ts" + And I list changed files against "main" + Then the changed files should include: + | file | + | src/keep.ts | + | src/new.ts | + And the changed files should not include: + | file | + | src/remove.ts | + + Rule: Paths with spaces are preserved + + **Invariant:** A filename containing spaces is returned as the exact original path, not split into multiple tokens. + **Rationale:** Whitespace splitting corrupts file paths and breaks PR-scoped generation in repositories with descriptive filenames. 
+ **Verified by:** File paths with spaces are preserved + + @edge-case + Scenario: File paths with spaces are preserved + Given an initialized git repository + And a committed file "src/file with spaces.ts" with content "export const spaced = 1;" + When I modify file "src/file with spaces.ts" to "export const spaced = 2;" + And I list changed files against "main" + Then the changed files should include: + | file | + | src/file with spaces.ts | + + Rule: NUL-delimited rename and copy statuses use the new path + + **Invariant:** Rename and copy statuses with similarity scores must record the current path, not the old/source path. + **Rationale:** Git emits statuses like R100 and C087 in real diffs; parsing the wrong side of the pair causes generators to scope output to stale paths. + **Verified by:** Similarity status maps to the new path + + @edge-case + Scenario Outline: Similarity status maps to the new path + Given a git name-status output with status "" from "" to "" + When I parse the git name-status output + Then the parsed modified files should include "" + + Examples: + | status | oldPath | newPath | + | R100 | src/old-name.ts | src/new-name.ts | + | R087 | src/legacy.ts | src/current.ts | + | C100 | src/source.ts | src/copied.ts | + | C087 | src/base name.ts | src/copied name.ts | diff --git a/tests/features/validation/codec-utils.feature b/tests/features/validation/codec-utils.feature new file mode 100644 index 00000000..d0ca78c1 --- /dev/null +++ b/tests/features/validation/codec-utils.feature @@ -0,0 +1,81 @@ +@libar-docs +@libar-docs-pattern:CodecUtilsValidation +@libar-docs-status:active +@libar-docs-product-area:Validation +@validation @codec +Feature: Codec Utils Validation + The codec utilities provide factory functions for creating type-safe JSON + parsing and serialization pipelines using Zod schemas. They replace manual + JSON.parse/stringify with single-step validated operations that return + Result types for explicit error handling. 
+ + Background: + Given a codec utils test context + + Rule: createJsonInputCodec parses and validates JSON strings + + **Invariant:** createJsonInputCodec returns an ok Result when the input is valid JSON that conforms to the provided Zod schema, and an err Result with a descriptive CodecError otherwise. + **Rationale:** Combining JSON parsing and schema validation into a single operation eliminates the class of bugs where parsed-but-invalid data leaks into the application. + **Verified by:** Input codec parses valid JSON matching schema, Input codec rejects invalid JSON syntax, Input codec rejects valid JSON that fails schema validation, Input codec includes source in error when provided, Input codec safeParse returns value for valid input, Input codec safeParse returns undefined for invalid input + + @function:createJsonInputCodec @happy-path + Scenario: Input codec parses valid JSON matching schema + Given a Zod schema for an object with a required name string field + When I parse the JSON string '{"name": "Alice"}' with the input codec + Then the parse result should be ok + And the parsed value name should be "Alice" + + @function:createJsonInputCodec @error-case + Scenario: Input codec rejects invalid JSON syntax + Given a Zod schema for an object with a required name string field + When I parse the JSON string '{not valid json}' with the input codec + Then the parse result should be err + And the codec error operation should be "parse" + And the codec error message should contain "Invalid JSON" + + @function:createJsonInputCodec @error-case + Scenario: Input codec rejects valid JSON that fails schema validation + Given a Zod schema for an object with a required name string field + When I parse the JSON string '{"age": 30}' with the input codec + Then the parse result should be err + And the codec error operation should be "parse" + And the codec error message should contain "Schema validation failed" + And the codec error should have validation errors + + 
@function:createJsonInputCodec + Scenario: Input codec includes source in error when provided + Given a Zod schema for an object with a required name string field + When I parse the JSON string '{"age": 30}' with source "config.json" using the input codec + Then the parse result should be err + And the codec error message should contain "config.json" + + @function:createJsonInputCodec + Scenario: Input codec safeParse returns value for valid input + Given a Zod schema for an object with a required name string field + When I safeParse the JSON string '{"name": "Bob"}' with the input codec + Then the safeParse result should not be undefined + And the safeParse result name should be "Bob" + + @function:createJsonInputCodec + Scenario: Input codec safeParse returns undefined for invalid input + Given a Zod schema for an object with a required name string field + When I safeParse the JSON string '{broken' with the input codec + Then the safeParse result should be undefined + + Rule: formatCodecError formats errors for display + + **Invariant:** formatCodecError always returns a non-empty string that includes the operation type and message, and appends validation errors when present. + **Rationale:** Consistent error formatting across all codec consumers avoids duplicated formatting logic and ensures error messages always contain enough context for debugging. 
+ **Verified by:** formatCodecError formats error without validation details, formatCodecError formats error with validation details + + @function:formatCodecError + Scenario: formatCodecError formats error without validation details + When I format a codec error with operation "parse" and message "Invalid JSON" + Then the formatted error should contain "parse" + And the formatted error should contain "Invalid JSON" + + @function:formatCodecError + Scenario: formatCodecError formats error with validation details + When I format a codec error with operation "parse" and message "Schema validation failed" and validation errors + Then the formatted error should contain "Schema validation failed" + And the formatted error should contain "Validation errors" diff --git a/tests/features/validation/tag-registry-schemas.feature b/tests/features/validation/tag-registry-schemas.feature new file mode 100644 index 00000000..ff2de642 --- /dev/null +++ b/tests/features/validation/tag-registry-schemas.feature @@ -0,0 +1,71 @@ +@libar-docs +@libar-docs-pattern:TagRegistrySchemasValidation +@libar-docs-status:active +@libar-docs-product-area:Validation +@validation @tag-registry +Feature: Tag Registry Schema Validation + The tag registry configuration module provides schema-validated taxonomy + definitions for organizing patterns by category, metadata tags, and + aggregation rules. It supports creating default registries from the + canonical taxonomy source and merging custom overrides. + + Background: + Given a tag registry test context + + Rule: createDefaultTagRegistry produces a valid registry from taxonomy source + + **Invariant:** createDefaultTagRegistry always returns a TagRegistry that passes TagRegistrySchema validation, with non-empty categories, metadataTags, and aggregationTags arrays. + **Rationale:** The default registry is the foundation for all pattern extraction. An invalid or empty default registry would silently break extraction for every consumer. 
+ **Verified by:** Default registry passes schema validation, Default registry has non-empty categories, Default registry has non-empty metadata tags, Default registry has expected tag prefix + + @function:createDefaultTagRegistry @happy-path + Scenario: Default registry passes schema validation + When I create a default tag registry + Then the registry should pass TagRegistrySchema validation + + @function:createDefaultTagRegistry + Scenario: Default registry has non-empty categories + When I create a default tag registry + Then the registry should have at least 1 category + + @function:createDefaultTagRegistry + Scenario: Default registry has non-empty metadata tags + When I create a default tag registry + Then the registry should have at least 1 metadata tag + + @function:createDefaultTagRegistry + Scenario: Default registry has expected tag prefix + When I create a default tag registry + Then the registry tag prefix should be "@libar-docs-" + + Rule: mergeTagRegistries deep-merges registries by tag + + **Invariant:** mergeTagRegistries merges categories, metadataTags, and aggregationTags by their tag field, with override entries replacing base entries of the same tag and new entries being appended. Scalar fields (version, tagPrefix, fileOptInTag, formatOptions) are fully replaced when provided. + **Rationale:** Consumers need to customize the taxonomy without losing default definitions. Tag-based merging prevents accidental duplication while allowing targeted overrides. 
+ **Verified by:** Merge overrides a category by tag, Merge adds new categories from override, Merge replaces scalar fields when provided, Merge preserves base when override is empty + + @function:mergeTagRegistries @happy-path + Scenario: Merge overrides a category by tag + Given a base registry with a category "core" at priority 1 + When I merge with an override that sets category "core" to priority 10 + Then the merged registry should have category "core" at priority 10 + + @function:mergeTagRegistries + Scenario: Merge adds new categories from override + Given a base registry with a category "core" at priority 1 + When I merge with an override that adds category "custom" at priority 5 + Then the merged registry should have 2 categories + And the merged registry should contain category "custom" + + @function:mergeTagRegistries + Scenario: Merge replaces scalar fields when provided + Given a base registry with tag prefix "@libar-docs-" + When I merge with an override that sets tag prefix "@custom-" + Then the merged registry tag prefix should be "@custom-" + + @function:mergeTagRegistries + Scenario: Merge preserves base when override is empty + Given a base registry with a category "core" at priority 1 + When I merge with an empty override + Then the merged registry should have 1 category + And the merged registry should have category "core" at priority 1 diff --git a/tests/features/validation/workflow-config-schemas.feature b/tests/features/validation/workflow-config-schemas.feature new file mode 100644 index 00000000..7963e970 --- /dev/null +++ b/tests/features/validation/workflow-config-schemas.feature @@ -0,0 +1,106 @@ +@libar-docs +@libar-docs-pattern:WorkflowConfigSchemasValidation +@libar-docs-status:active +@libar-docs-product-area:Validation +@validation @workflow +Feature: Workflow Config Schema Validation + The workflow configuration module defines Zod schemas for validating + delivery workflow definitions with statuses, phases, and metadata. 
+ It provides runtime type guards and efficient lookup map construction + for loaded workflows. + + Background: + Given a workflow config test context + + Rule: WorkflowConfigSchema validates workflow configurations + + **Invariant:** WorkflowConfigSchema accepts objects with a name, semver version, at least one status, and at least one phase, and rejects objects missing any required field or with invalid semver format. + **Rationale:** Workflow configurations drive FSM validation and phase-based document routing. Malformed configs would cause silent downstream failures in process guard and documentation generation. + **Verified by:** Valid workflow config passes schema validation, Config without name is rejected, Config with invalid semver version is rejected, Config without statuses is rejected, Config without phases is rejected + + @schema:WorkflowConfigSchema @happy-path + Scenario: Valid workflow config passes schema validation + When I validate a workflow config with name "standard" and version "1.0.0" with 1 status and 1 phase + Then the workflow config should be valid + + @schema:WorkflowConfigSchema @error-case + Scenario: Config without name is rejected + When I validate a workflow config without a name + Then the workflow config should be invalid + + @schema:WorkflowConfigSchema @error-case + Scenario: Config with invalid semver version is rejected + When I validate a workflow config with name "standard" and version "not-semver" + Then the workflow config should be invalid + + @schema:WorkflowConfigSchema @error-case + Scenario: Config without statuses is rejected + When I validate a workflow config with name "standard" and version "1.0.0" with 0 statuses + Then the workflow config should be invalid + + @schema:WorkflowConfigSchema @error-case + Scenario: Config without phases is rejected + When I validate a workflow config with name "standard" and version "1.0.0" with 0 phases + Then the workflow config should be invalid + + Rule: createLoadedWorkflow 
builds efficient lookup maps + + **Invariant:** createLoadedWorkflow produces a LoadedWorkflow whose statusMap and phaseMap contain all statuses and phases from the config, keyed by lowercase name for case-insensitive lookup. + **Rationale:** O(1) status and phase lookup eliminates repeated linear scans during validation and rendering, where each pattern may reference multiple statuses. + **Verified by:** Loaded workflow has status lookup map, Status lookup is case-insensitive, Loaded workflow has phase lookup map, Phase lookup is case-insensitive + + @function:createLoadedWorkflow @happy-path + Scenario: Loaded workflow has status lookup map + Given a valid workflow config with status "roadmap" and status "active" + When I create a loaded workflow + Then the status map should contain "roadmap" + And the status map should contain "active" + And the status map should have 2 entries + + @function:createLoadedWorkflow + Scenario: Status lookup is case-insensitive + Given a valid workflow config with status "Roadmap" and status "Active" + When I create a loaded workflow + Then the status map should contain "roadmap" + And the status map should contain "active" + + @function:createLoadedWorkflow + Scenario: Loaded workflow has phase lookup map + Given a valid workflow config with phase "Inception" and phase "Construction" + When I create a loaded workflow + Then the phase map should contain "inception" + And the phase map should contain "construction" + And the phase map should have 2 entries + + @function:createLoadedWorkflow + Scenario: Phase lookup is case-insensitive + Given a valid workflow config with phase "Inception" and phase "Construction" + When I create a loaded workflow + Then the phase map should contain "inception" + And the phase map should contain "construction" + + Rule: isWorkflowConfig type guard validates at runtime + + **Invariant:** isWorkflowConfig returns true only for values that conform to WorkflowConfigSchema and false for all other values 
including null, undefined, primitives, and partial objects. + **Rationale:** Runtime type guards enable safe narrowing in dynamic contexts (config loading, API responses) where TypeScript compile-time types are unavailable. + **Verified by:** Type guard accepts valid workflow config, Type guard rejects null, Type guard rejects partial config, Type guard rejects non-object + + @function:isWorkflowConfig @happy-path + Scenario: Type guard accepts valid workflow config + When I check isWorkflowConfig with a valid config + Then isWorkflowConfig should return true + + @function:isWorkflowConfig @error-case + Scenario: Type guard rejects null + When I check isWorkflowConfig with null + Then isWorkflowConfig should return false + + @function:isWorkflowConfig + Scenario: Type guard rejects partial config + When I check isWorkflowConfig with a partial config missing statuses + Then isWorkflowConfig should return false + + @function:isWorkflowConfig + Scenario: Type guard rejects non-object + When I check isWorkflowConfig with the string "not a config" + Then isWorkflowConfig should return false diff --git a/tests/fixtures/dataset-factories.ts b/tests/fixtures/dataset-factories.ts index 3dab6dee..da764c1f 100644 --- a/tests/fixtures/dataset-factories.ts +++ b/tests/fixtures/dataset-factories.ts @@ -11,7 +11,7 @@ import type { ExtractedPattern } from '../../src/validation-schemas/index.js'; import type { StatusCounts } from '../../src/validation-schemas/master-dataset.js'; -import type { RuntimeMasterDataset } from '../../src/generators/pipeline/transform-dataset.js'; +import type { RuntimeMasterDataset } from '../../src/generators/pipeline/transform-types.js'; import { transformToMasterDataset } from '../../src/generators/pipeline/transform-dataset.js'; import { createDefaultTagRegistry } from '../../src/validation-schemas/tag-registry.js'; diff --git a/tests/steps/api/architecture-queries/arch-queries.steps.ts b/tests/steps/api/architecture-queries/arch-queries.steps.ts 
index 60cb4db3..8f9e8cf9 100644 --- a/tests/steps/api/architecture-queries/arch-queries.steps.ts +++ b/tests/steps/api/architecture-queries/arch-queries.steps.ts @@ -21,7 +21,7 @@ import { findUnusedTaxonomy, type UnusedTaxonomyReport, } from '../../../../src/api/coverage-analyzer.js'; -import type { RuntimeMasterDataset } from '../../../../src/generators/pipeline/transform-dataset.js'; +import type { RuntimeMasterDataset } from '../../../../src/generators/pipeline/transform-types.js'; import type { ExtractedPattern } from '../../../../src/validation-schemas/index.js'; import { createTestPattern, diff --git a/tests/steps/api/context-assembly/context-assembler.steps.ts b/tests/steps/api/context-assembly/context-assembler.steps.ts index 89e703de..3b8064a1 100644 --- a/tests/steps/api/context-assembly/context-assembler.steps.ts +++ b/tests/steps/api/context-assembly/context-assembler.steps.ts @@ -21,7 +21,7 @@ import { import { QueryApiError } from '../../../../src/api/types.js'; import { createProcessStateAPI } from '../../../../src/api/process-state.js'; import type { ProcessStateAPI } from '../../../../src/api/process-state.js'; -import type { RuntimeMasterDataset } from '../../../../src/generators/pipeline/transform-dataset.js'; +import type { RuntimeMasterDataset } from '../../../../src/generators/pipeline/transform-types.js'; import type { ExtractedPattern } from '../../../../src/validation-schemas/index.js'; import { createTestPattern, diff --git a/tests/steps/architecture/arch-index.steps.ts b/tests/steps/architecture/arch-index.steps.ts index e7abdcd4..883af20c 100644 --- a/tests/steps/architecture/arch-index.steps.ts +++ b/tests/steps/architecture/arch-index.steps.ts @@ -11,10 +11,8 @@ import { expect } from 'vitest'; import { loadFeature, describeFeature } from '@amiceli/vitest-cucumber'; -import { - transformToMasterDataset, - type RuntimeMasterDataset, -} from '../../../src/generators/pipeline/transform-dataset.js'; +import type { RuntimeMasterDataset } 
from '../../../src/generators/pipeline/transform-types.js'; +import { transformToMasterDataset } from '../../../src/generators/pipeline/transform-dataset.js'; import type { ExtractedPattern } from '../../../src/validation-schemas/index.js'; import { createDefaultTagRegistry, createTestPattern } from '../../fixtures/dataset-factories.js'; import type { DataTableRow } from '../../support/world.js'; diff --git a/tests/steps/behavior/context-inference.steps.ts b/tests/steps/behavior/context-inference.steps.ts index 08f1c3c3..3f759270 100644 --- a/tests/steps/behavior/context-inference.steps.ts +++ b/tests/steps/behavior/context-inference.steps.ts @@ -11,11 +11,9 @@ import { expect } from 'vitest'; import { loadFeature, describeFeature } from '@amiceli/vitest-cucumber'; -import { - transformToMasterDataset, - type RuntimeMasterDataset, - type ContextInferenceRule, -} from '../../../src/generators/pipeline/transform-dataset.js'; +import type { RuntimeMasterDataset } from '../../../src/generators/pipeline/transform-types.js'; +import type { ContextInferenceRule } from '../../../src/generators/pipeline/context-inference.js'; +import { transformToMasterDataset } from '../../../src/generators/pipeline/transform-dataset.js'; import { DEFAULT_CONTEXT_INFERENCE_RULES } from '../../../src/config/defaults.js'; import type { ExtractedPattern } from '../../../src/validation-schemas/index.js'; import { createDefaultTagRegistry } from '../../../src/validation-schemas/tag-registry.js'; diff --git a/tests/steps/behavior/implementation-links.steps.ts b/tests/steps/behavior/implementation-links.steps.ts index 04e91e63..3a09450c 100644 --- a/tests/steps/behavior/implementation-links.steps.ts +++ b/tests/steps/behavior/implementation-links.steps.ts @@ -13,7 +13,7 @@ import { createPatternsCodec, normalizeImplPath } from '../../../src/renderable/ import { transformToMasterDataset } from '../../../src/generators/pipeline/transform-dataset.js'; import { createDefaultTagRegistry } from 
'../../../src/validation-schemas/tag-registry.js'; import type { RenderableDocument } from '../../../src/renderable/schema.js'; -import type { RuntimeMasterDataset } from '../../../src/generators/pipeline/transform-dataset.js'; +import type { RuntimeMasterDataset } from '../../../src/generators/pipeline/transform-types.js'; import type { ExtractedPattern } from '../../../src/validation-schemas/index.js'; import { createTestPattern, resetPatternCounter } from '../../fixtures/pattern-factories.js'; import type { DataTableRow } from '../../support/world.js'; diff --git a/tests/steps/behavior/transform-dataset.steps.ts b/tests/steps/behavior/transform-dataset.steps.ts index fedff36f..483bac7b 100644 --- a/tests/steps/behavior/transform-dataset.steps.ts +++ b/tests/steps/behavior/transform-dataset.steps.ts @@ -6,12 +6,14 @@ */ import { loadFeature, describeFeature } from '@amiceli/vitest-cucumber'; import { expect } from 'vitest'; +import type { + RuntimeMasterDataset, + RawDataset, +} from '../../../src/generators/pipeline/transform-types.js'; import { transformToMasterDataset, completionPercentage, isFullyCompleted, - type RuntimeMasterDataset, - type RawDataset, } from '../../../src/generators/pipeline/transform-dataset.js'; import type { StatusCounts } from '../../../src/validation-schemas/master-dataset.js'; import type { ExtractedPattern } from '../../../src/validation-schemas/index.js'; diff --git a/tests/steps/cli/data-api-cache.steps.ts b/tests/steps/cli/data-api-cache.steps.ts index f73eba71..c322c609 100644 --- a/tests/steps/cli/data-api-cache.steps.ts +++ b/tests/steps/cli/data-api-cache.steps.ts @@ -72,6 +72,7 @@ function parseMetadata(result: CLIResult): ParsedMetadata { // ============================================================================= let state: CacheTestState | null = null; +const CACHE_QUERY_TIMEOUT_MS = 120000; // ============================================================================= // Feature Definition @@ -113,14 +114,18 @@ 
describeFeature(feature, ({ Background, Rule, AfterEachScenario }) => { }); When('running status and capturing the first result', async () => { - await runCLICommand(state, "process-api -i 'src/**/*.ts' status"); + await runCLICommand(state, "process-api -i 'src/**/*.ts' status", { + timeout: CACHE_QUERY_TIMEOUT_MS, + }); getCacheState(state).firstResult = getResult(state); }); And('running status and capturing the second result', async () => { // Reset result before the second run getCacheState(state).result = null; - await runCLICommand(state, "process-api -i 'src/**/*.ts' status"); + await runCLICommand(state, "process-api -i 'src/**/*.ts' status", { + timeout: CACHE_QUERY_TIMEOUT_MS, + }); getCacheState(state).secondResult = getResult(state); }); @@ -131,11 +136,13 @@ describeFeature(feature, ({ Background, Rule, AfterEachScenario }) => { expect(metadata.cache!.hit).toBe(true); }); - And('the second result pipelineMs is less than 500', () => { + And('the second result pipelineMs is less than the first', () => { const s = getCacheState(state); - const metadata = parseMetadata(s.secondResult!); - expect(metadata.pipelineMs).toBeDefined(); - expect(metadata.pipelineMs!).toBeLessThan(500); + const firstMetadata = parseMetadata(s.firstResult!); + const secondMetadata = parseMetadata(s.secondResult!); + expect(firstMetadata.pipelineMs).toBeDefined(); + expect(secondMetadata.pipelineMs).toBeDefined(); + expect(secondMetadata.pipelineMs!).toBeLessThan(firstMetadata.pipelineMs!); }); }); @@ -145,7 +152,9 @@ describeFeature(feature, ({ Background, Rule, AfterEachScenario }) => { }); When('running status and capturing the first result', async () => { - await runCLICommand(state, "process-api -i 'src/**/*.ts' status"); + await runCLICommand(state, "process-api -i 'src/**/*.ts' status", { + timeout: CACHE_QUERY_TIMEOUT_MS, + }); getCacheState(state).firstResult = getResult(state); }); @@ -160,7 +169,9 @@ describeFeature(feature, ({ Background, Rule, AfterEachScenario }) => 
{ And('running status and capturing the second result', async () => { getCacheState(state).result = null; - await runCLICommand(state, "process-api -i 'src/**/*.ts' status"); + await runCLICommand(state, "process-api -i 'src/**/*.ts' status", { + timeout: CACHE_QUERY_TIMEOUT_MS, + }); getCacheState(state).secondResult = getResult(state); }); @@ -178,13 +189,17 @@ describeFeature(feature, ({ Background, Rule, AfterEachScenario }) => { }); When('running status and capturing the first result', async () => { - await runCLICommand(state, "process-api -i 'src/**/*.ts' status"); + await runCLICommand(state, "process-api -i 'src/**/*.ts' status", { + timeout: CACHE_QUERY_TIMEOUT_MS, + }); getCacheState(state).firstResult = getResult(state); }); And('running status with --no-cache and capturing the second result', async () => { getCacheState(state).result = null; - await runCLICommand(state, "process-api -i 'src/**/*.ts' --no-cache status"); + await runCLICommand(state, "process-api -i 'src/**/*.ts' --no-cache status", { + timeout: CACHE_QUERY_TIMEOUT_MS, + }); getCacheState(state).secondResult = getResult(state); }); diff --git a/tests/steps/generation/design-review-generator.steps.ts b/tests/steps/generation/design-review-generator.steps.ts index 06c8681e..10d36936 100644 --- a/tests/steps/generation/design-review-generator.steps.ts +++ b/tests/steps/generation/design-review-generator.steps.ts @@ -12,7 +12,7 @@ import { expect } from 'vitest'; import type { GeneratorOutput } from '../../../src/generators/types.js'; import { createDesignReviewGenerator } from '../../../src/generators/built-in/design-review-generator.js'; import { createDefaultTagRegistry } from '../../../src/validation-schemas/tag-registry.js'; -import type { RuntimeMasterDataset } from '../../../src/generators/pipeline/transform-dataset.js'; +import type { RuntimeMasterDataset } from '../../../src/generators/pipeline/transform-types.js'; import { createTempDir, writeTempFile, diff --git 
a/tests/steps/generators/business-rules-generator.steps.ts b/tests/steps/generators/business-rules-generator.steps.ts index 2a956106..74568bb5 100644 --- a/tests/steps/generators/business-rules-generator.steps.ts +++ b/tests/steps/generators/business-rules-generator.steps.ts @@ -11,7 +11,7 @@ import { expect } from 'vitest'; import { createBusinessRulesCodec } from '../../../src/renderable/codecs/business-rules.js'; import { renderToMarkdown } from '../../../src/renderable/render.js'; import type { RenderableDocument, TableBlock } from '../../../src/renderable/schema.js'; -import type { RuntimeMasterDataset } from '../../../src/generators/pipeline/transform-dataset.js'; +import type { RuntimeMasterDataset } from '../../../src/generators/pipeline/transform-types.js'; import { transformToMasterDataset } from '../../../src/generators/pipeline/transform-dataset.js'; import { createDefaultTagRegistry } from '../../../src/validation-schemas/tag-registry.js'; import type { ExtractedPattern } from '../../../src/validation-schemas/index.js'; diff --git a/tests/steps/generators/pr-changes-options.steps.ts b/tests/steps/generators/pr-changes-options.steps.ts index 16f5918c..c0d554ef 100644 --- a/tests/steps/generators/pr-changes-options.steps.ts +++ b/tests/steps/generators/pr-changes-options.steps.ts @@ -17,7 +17,7 @@ import { } from '../../fixtures/dataset-factories.js'; import type { CodecOptions } from '../../../src/renderable/generate.js'; import type { ExtractedPattern } from '../../../src/validation-schemas/index.js'; -import type { RuntimeMasterDataset } from '../../../src/generators/pipeline/transform-dataset.js'; +import type { RuntimeMasterDataset } from '../../../src/generators/pipeline/transform-types.js'; import type { OutputFile } from '../../../src/renderable/render.js'; import type { DataTableRow } from '../../support/world.js'; diff --git a/tests/steps/generators/prd-implementation-section.steps.ts b/tests/steps/generators/prd-implementation-section.steps.ts 
index f020974b..292c0886 100644 --- a/tests/steps/generators/prd-implementation-section.steps.ts +++ b/tests/steps/generators/prd-implementation-section.steps.ts @@ -12,7 +12,7 @@ import { expect } from 'vitest'; import { createPatternsCodec } from '../../../src/renderable/codecs/patterns.js'; import { renderToMarkdown } from '../../../src/renderable/render.js'; import type { RenderableDocument } from '../../../src/renderable/schema.js'; -import type { RuntimeMasterDataset } from '../../../src/generators/pipeline/transform-dataset.js'; +import type { RuntimeMasterDataset } from '../../../src/generators/pipeline/transform-types.js'; import { transformToMasterDataset } from '../../../src/generators/pipeline/transform-dataset.js'; import { createDefaultTagRegistry } from '../../../src/validation-schemas/tag-registry.js'; import type { ExtractedPattern } from '../../../src/validation-schemas/index.js'; diff --git a/tests/steps/generators/table-extraction.steps.ts b/tests/steps/generators/table-extraction.steps.ts index 0e049bea..4fbede46 100644 --- a/tests/steps/generators/table-extraction.steps.ts +++ b/tests/steps/generators/table-extraction.steps.ts @@ -13,7 +13,7 @@ import { createBusinessRulesCodec } from '../../../src/renderable/codecs/busines import { stripMarkdownTables } from '../../../src/renderable/codecs/helpers.js'; import { renderToMarkdown } from '../../../src/renderable/render.js'; import type { RenderableDocument } from '../../../src/renderable/schema.js'; -import type { RuntimeMasterDataset } from '../../../src/generators/pipeline/transform-dataset.js'; +import type { RuntimeMasterDataset } from '../../../src/generators/pipeline/transform-types.js'; import { transformToMasterDataset } from '../../../src/generators/pipeline/transform-dataset.js'; import { createDefaultTagRegistry } from '../../../src/validation-schemas/tag-registry.js'; import type { ExtractedPattern } from '../../../src/validation-schemas/index.js'; diff --git 
a/tests/steps/types/deliverable-status.steps.ts b/tests/steps/types/deliverable-status.steps.ts new file mode 100644 index 00000000..34c4e43e --- /dev/null +++ b/tests/steps/types/deliverable-status.steps.ts @@ -0,0 +1,161 @@ +/** + * Deliverable Status Step Definitions + * + * BDD step definitions for testing the deliverable status taxonomy: + * - isDeliverableStatusTerminal - DoD validation check + * - isDeliverableStatusComplete / InProgress / Pending - individual predicates + * - getDeliverableStatusEmoji - display emoji mapping + */ +import { loadFeature, describeFeature } from '@amiceli/vitest-cucumber'; +import { expect } from 'vitest'; +import { + isDeliverableStatusTerminal, + isDeliverableStatusComplete, + isDeliverableStatusInProgress, + isDeliverableStatusPending, + getDeliverableStatusEmoji, + type DeliverableStatus, +} from '../../../src/taxonomy/deliverable-status.js'; + +// ============================================================================= +// Type Definitions +// ============================================================================= + +interface DeliverableStatusTestState { + terminalResult: boolean; + predicateResult: boolean; + emojiResult: string; +} + +// ============================================================================= +// Module-level state (reset per scenario) +// ============================================================================= + +let state: DeliverableStatusTestState | null = null; + +// ============================================================================= +// Helper Functions +// ============================================================================= + +function initState(): DeliverableStatusTestState { + return { + terminalResult: false, + predicateResult: false, + emojiResult: '', + }; +} + +// ============================================================================= +// Feature: Deliverable Status Taxonomy +// 
============================================================================= + +const feature = await loadFeature('tests/features/types/deliverable-status.feature'); + +describeFeature(feature, ({ Rule, Background, AfterEachScenario }) => { + AfterEachScenario(() => { + state = null; + }); + + Background(({ Given }) => { + Given('a deliverable status test context', () => { + state = initState(); + }); + }); + + // =========================================================================== + // isDeliverableStatusTerminal + // =========================================================================== + + Rule( + 'isDeliverableStatusTerminal identifies terminal statuses for DoD validation', + ({ RuleScenarioOutline }) => { + RuleScenarioOutline( + 'Terminal status classification', + ({ When, Then }, variables: { status: string; isTerminal: string }) => { + When('checking if {string} is terminal', () => { + state!.terminalResult = isDeliverableStatusTerminal( + variables.status as DeliverableStatus + ); + }); + + Then('the terminal check result is {string}', () => { + expect(state!.terminalResult).toBe(variables.isTerminal === 'true'); + }); + } + ); + } + ); + + // =========================================================================== + // Status predicates + // =========================================================================== + + Rule('Status predicates classify individual deliverable states', ({ RuleScenarioOutline }) => { + RuleScenarioOutline( + 'isDeliverableStatusComplete classification', + ({ When, Then }, variables: { status: string; expected: string }) => { + When('checking if {string} is complete', () => { + state!.predicateResult = isDeliverableStatusComplete( + variables.status as DeliverableStatus + ); + }); + + Then('the predicate result is {string}', () => { + expect(state!.predicateResult).toBe(variables.expected === 'true'); + }); + } + ); + + RuleScenarioOutline( + 'isDeliverableStatusInProgress classification', + ({ When, Then }, 
variables: { status: string; expected: string }) => { + When('checking if {string} is in-progress', () => { + state!.predicateResult = isDeliverableStatusInProgress( + variables.status as DeliverableStatus + ); + }); + + Then('the predicate result is {string}', () => { + expect(state!.predicateResult).toBe(variables.expected === 'true'); + }); + } + ); + + RuleScenarioOutline( + 'isDeliverableStatusPending classification', + ({ When, Then }, variables: { status: string; expected: string }) => { + When('checking if {string} is pending', () => { + state!.predicateResult = isDeliverableStatusPending( + variables.status as DeliverableStatus + ); + }); + + Then('the predicate result is {string}', () => { + expect(state!.predicateResult).toBe(variables.expected === 'true'); + }); + } + ); + }); + + // =========================================================================== + // getDeliverableStatusEmoji + // =========================================================================== + + Rule( + 'getDeliverableStatusEmoji returns display emoji for all statuses', + ({ RuleScenarioOutline }) => { + RuleScenarioOutline( + 'Emoji mapping for all statuses', + ({ When, Then }, variables: { status: string }) => { + When('getting the emoji for {string}', () => { + state!.emojiResult = getDeliverableStatusEmoji(variables.status as DeliverableStatus); + }); + + Then('the emoji is not empty', () => { + expect(state!.emojiResult.length).toBeGreaterThan(0); + }); + } + ); + } + ); +}); diff --git a/tests/steps/types/normalized-status.steps.ts b/tests/steps/types/normalized-status.steps.ts new file mode 100644 index 00000000..83448a19 --- /dev/null +++ b/tests/steps/types/normalized-status.steps.ts @@ -0,0 +1,150 @@ +/** + * @libar-docs + * @libar-docs-implements NormalizedStatusTesting + * @libar-docs-uses NormalizedStatus + * + * Normalized Status Step Definitions + * + * BDD step definitions for testing the normalized status taxonomy: + * - normalizeStatus - maps raw FSM states 
to display buckets + * - isPatternComplete / isPatternActive / isPatternPlanned - predicates + */ +import { loadFeature, describeFeature } from '@amiceli/vitest-cucumber'; +import { expect } from 'vitest'; +import { + normalizeStatus, + isPatternComplete, + isPatternActive, + isPatternPlanned, +} from '../../../src/taxonomy/normalized-status.js'; + +// ============================================================================= +// Type Definitions +// ============================================================================= + +interface NormalizedStatusTestState { + normalizedResult: string; + predicateResult: boolean; +} + +// ============================================================================= +// Module-level state (reset per scenario) +// ============================================================================= + +let state: NormalizedStatusTestState | null = null; + +// ============================================================================= +// Helper Functions +// ============================================================================= + +function initState(): NormalizedStatusTestState { + return { + normalizedResult: '', + predicateResult: false, + }; +} + +// ============================================================================= +// Feature: Normalized Status Taxonomy +// ============================================================================= + +const feature = await loadFeature('tests/features/types/normalized-status.feature'); + +describeFeature(feature, ({ Rule, Background, AfterEachScenario }) => { + AfterEachScenario(() => { + state = null; + }); + + Background(({ Given }) => { + Given('a normalized status test context', () => { + state = initState(); + }); + }); + + // =========================================================================== + // normalizeStatus + // =========================================================================== + + Rule( + 'normalizeStatus maps raw FSM states to display 
buckets', + ({ RuleScenario, RuleScenarioOutline }) => { + RuleScenarioOutline( + 'Status normalization', + ({ When, Then }, variables: { rawStatus: string; normalizedStatus: string }) => { + When('normalizing status "<rawStatus>"', () => { + state!.normalizedResult = normalizeStatus(variables.rawStatus); + }); + + Then('the normalized status is "<normalizedStatus>"', () => { + expect(state!.normalizedResult).toBe(variables.normalizedStatus); + }); + } + ); + + RuleScenario('normalizeStatus defaults undefined to planned', ({ When, Then }) => { + When('normalizing an undefined status', () => { + state!.normalizedResult = normalizeStatus(undefined); + }); + + Then('the normalized status is {string}', (_ctx: unknown, expected: string) => { + expect(state!.normalizedResult).toBe(expected); + }); + }); + + RuleScenario('normalizeStatus defaults unknown status to planned', ({ When, Then }) => { + When('normalizing status {string}', (_ctx: unknown, rawStatus: string) => { + state!.normalizedResult = normalizeStatus(rawStatus); + }); + + Then('the normalized status is {string}', (_ctx: unknown, expected: string) => { + expect(state!.normalizedResult).toBe(expected); + }); + }); + } + ); + + // =========================================================================== + // Pattern status predicates + // =========================================================================== + + Rule('Pattern status predicates check normalized state', ({ RuleScenarioOutline }) => { + RuleScenarioOutline( + 'isPatternComplete classification', + ({ When, Then }, variables: { status: string; expected: string }) => { + When('checking isPatternComplete for "<status>"', () => { + state!.predicateResult = isPatternComplete(variables.status); + }); + + Then('the predicate result is "<expected>"', () => { + expect(state!.predicateResult).toBe(variables.expected === 'true'); + }); + } + ); + + RuleScenarioOutline( + 'isPatternActive classification', + ({ When, Then }, variables: { status: string; expected: string }) => { + When('checking 
isPatternActive for "<status>"', () => { + state!.predicateResult = isPatternActive(variables.status); + }); + + Then('the predicate result is "<expected>"', () => { + expect(state!.predicateResult).toBe(variables.expected === 'true'); + }); + } + ); + + RuleScenarioOutline( + 'isPatternPlanned classification', + ({ When, Then }, variables: { status: string; expected: string }) => { + When('checking isPatternPlanned for "<status>"', () => { + state!.predicateResult = isPatternPlanned(variables.status); + }); + + Then('the predicate result is "<expected>"', () => { + expect(state!.predicateResult).toBe(variables.expected === 'true'); + }); + } + ); + }); +}); diff --git a/tests/steps/types/tag-registry-builder.steps.ts b/tests/steps/types/tag-registry-builder.steps.ts new file mode 100644 index 00000000..697d9b33 --- /dev/null +++ b/tests/steps/types/tag-registry-builder.steps.ts @@ -0,0 +1,195 @@ +/** + * @libar-docs + * @libar-docs-implements TagRegistryBuilderTesting + * @libar-docs-uses RegistryBuilder, TagRegistry + * + * Tag Registry Builder Step Definitions + * + * BDD step definitions for testing the tag registry builder: + * - buildRegistry - constructs complete TagRegistry from TypeScript constants + * - Registry structure validation (version, categories, metadata tags) + * - Transform function verification + */ +import { loadFeature, describeFeature } from '@amiceli/vitest-cucumber'; +import { expect } from 'vitest'; +import { + buildRegistry, + type TagRegistry, + type MetadataTagDefinitionForRegistry, +} from '../../../src/taxonomy/registry-builder.js'; +import type { DataTableRow } from '../../support/world.js'; + +// 
============================================================================= +// Module-level state (reset per scenario) +// ============================================================================= + +let state: TagRegistryTestState | null = null; + +// ============================================================================= +// Helper Functions +// ============================================================================= + +function initState(): TagRegistryTestState { + return { + registry: null, + foundTag: null, + transformResult: '', + }; +} + +function findMetadataTag( + registry: TagRegistry, + tagName: string +): MetadataTagDefinitionForRegistry | undefined { + return registry.metadataTags.find((t) => t.tag === tagName); +} + +// ============================================================================= +// Feature: Tag Registry Builder +// ============================================================================= + +const feature = await loadFeature('tests/features/types/tag-registry-builder.feature'); + +describeFeature(feature, ({ Rule, Background, AfterEachScenario }) => { + AfterEachScenario(() => { + state = null; + }); + + Background(({ Given }) => { + Given('a tag registry test context', () => { + state = initState(); + }); + }); + + // =========================================================================== + // buildRegistry - well-formed registry + // =========================================================================== + + Rule('buildRegistry returns a well-formed TagRegistry', ({ RuleScenario }) => { + RuleScenario('Registry has correct version', ({ When, Then }) => { + When('I build the tag registry', () => { + state!.registry = buildRegistry(); + }); + + Then('the registry version is {string}', (_ctx: unknown, expected: string) => { + expect(state!.registry!.version).toBe(expected); + }); + }); + + RuleScenario('Registry has expected category count', ({ When, Then }) => { + When('I build the tag registry', () => { 
+ state!.registry = buildRegistry(); + }); + + Then('the registry has {int} categories', (_ctx: unknown, count: number) => { + expect(state!.registry!.categories).toHaveLength(count); + }); + }); + + RuleScenario('Registry has required metadata tags', ({ When, Then }) => { + When('I build the tag registry', () => { + state!.registry = buildRegistry(); + }); + + Then('the registry contains these metadata tags:', (_ctx: unknown, table: DataTableRow[]) => { + for (const row of table) { + const tagName = row.tag ?? ''; + const tag = findMetadataTag(state!.registry!, tagName); + expect(tag, `metadata tag "${tagName}" should exist`).toBeDefined(); + expect(tag!.format).toBe(row.format); + } + }); + }); + }); + + // =========================================================================== + // Metadata tags configuration + // =========================================================================== + + Rule('Metadata tags have correct configuration', ({ RuleScenario }) => { + RuleScenario('Pattern tag is marked as required', ({ When, Then }) => { + When('I build the tag registry', () => { + state!.registry = buildRegistry(); + }); + + Then( + 'the metadata tag {string} has required set to true', + (_ctx: unknown, tagName: string) => { + const tag = findMetadataTag(state!.registry!, tagName); + expect(tag).toBeDefined(); + expect(tag!.required).toBe(true); + } + ); + }); + + RuleScenario('Status tag has default value', ({ When, Then }) => { + When('I build the tag registry', () => { + state!.registry = buildRegistry(); + }); + + Then('the metadata tag {string} has a default value', (_ctx: unknown, tagName: string) => { + const tag = findMetadataTag(state!.registry!, tagName); + expect(tag).toBeDefined(); + expect(tag!.default).toBeDefined(); + expect(tag!.default!.length).toBeGreaterThan(0); + }); + }); + + RuleScenario('Transform functions work correctly', ({ When, Then, And }) => { + When('I build the tag registry', () => { + state!.registry = buildRegistry(); + }); 
+ + Then( + 'the metadata tag {string} has a transform function', + (_ctx: unknown, tagName: string) => { + const tag = findMetadataTag(state!.registry!, tagName); + expect(tag).toBeDefined(); + expect(tag!.transform).toBeDefined(); + expect(typeof tag!.transform).toBe('function'); + state!.foundTag = tag!; + } + ); + + And( + 'applying the {string} transform to {string} produces {string}', + (_ctx: unknown, _tagName: string, input: string, expected: string) => { + expect(state!.foundTag).toBeDefined(); + expect(state!.foundTag!.transform).toBeDefined(); + state!.transformResult = state!.foundTag!.transform!(input); + expect(state!.transformResult).toBe(expected); + } + ); + }); + }); + + // =========================================================================== + // Standard prefixes and opt-in tag + // =========================================================================== + + Rule('Registry includes standard prefixes and opt-in tag', ({ RuleScenario }) => { + RuleScenario('Registry has standard tag prefix and opt-in tag', ({ When, Then, And }) => { + When('I build the tag registry', () => { + state!.registry = buildRegistry(); + }); + + Then('the tag prefix is not empty', () => { + expect(state!.registry!.tagPrefix.length).toBeGreaterThan(0); + }); + + And('the file opt-in tag is not empty', () => { + expect(state!.registry!.fileOptInTag.length).toBeGreaterThan(0); + }); + }); + }); +}); diff --git a/tests/steps/utils/file-cache.steps.ts b/tests/steps/utils/file-cache.steps.ts new file mode 100644 index 00000000..6094ef6a --- /dev/null +++ b/tests/steps/utils/file-cache.steps.ts @@ -0,0 +1,224 @@ +/** + * File Cache Step Definitions + * + * BDD step definitions for testing the file cache: + * - Store/retrieve round-trip + * - has membership checks + * - Hit/miss stats tracking + * - Hit rate calculation with zero-division guard + * - Clear resets everything + */ +import { loadFeature, describeFeature } from '@amiceli/vitest-cucumber'; +import { expect } 
from 'vitest'; +import { createFileCache, type FileCache } from '../../../src/cache/file-cache.js'; + +// ============================================================================= +// Type Definitions +// ============================================================================= + +interface FileCacheTestState { + cache: FileCache; + retrievedContent: string | undefined; + hasResult: boolean; +} + +// ============================================================================= +// Module-level state (reset per scenario) +// ============================================================================= + +let state: FileCacheTestState | null = null; + +// ============================================================================= +// Helper Functions +// ============================================================================= + +function initState(): FileCacheTestState { + return { + cache: createFileCache(), + retrievedContent: undefined, + hasResult: false, + }; +} + +// ============================================================================= +// Feature: File Cache +// ============================================================================= + +const feature = await loadFeature('tests/features/utils/file-cache.feature'); + +describeFeature(feature, ({ Rule, Background, AfterEachScenario }) => { + AfterEachScenario(() => { + state = null; + }); + + Background(({ Given }) => { + Given('a file cache test context', () => { + state = initState(); + }); + }); + + // =========================================================================== + // Store and retrieve round-trip + // =========================================================================== + + Rule('Store and retrieve round-trip preserves content', ({ RuleScenario }) => { + RuleScenario('Store and retrieve returns same content', ({ When, Then }) => { + When( + 'I store content {string} at path {string}', + (_ctx: unknown, content: string, path: string) => { + state!.cache.set(path, 
content); + } + ); + + Then( + 'retrieving path {string} returns {string}', + (_ctx: unknown, path: string, expected: string) => { + const content = state!.cache.get(path); + expect(content).toBe(expected); + } + ); + }); + + RuleScenario('Non-existent path returns undefined', ({ When, Then }) => { + When('I retrieve a non-existent path {string}', (_ctx: unknown, path: string) => { + state!.retrievedContent = state!.cache.get(path); + }); + + Then('the retrieved content is undefined', () => { + expect(state!.retrievedContent).toBeUndefined(); + }); + }); + }); + + // =========================================================================== + // has checks + // =========================================================================== + + Rule('has checks membership without affecting stats', ({ RuleScenario }) => { + RuleScenario('has returns true for cached path', ({ When, Then }) => { + When( + 'I store content {string} at path {string}', + (_ctx: unknown, content: string, path: string) => { + state!.cache.set(path, content); + } + ); + + Then('has returns true for path {string}', (_ctx: unknown, path: string) => { + expect(state!.cache.has(path)).toBe(true); + }); + }); + + RuleScenario('has returns false for uncached path', ({ Then }) => { + Then('has returns false for path {string}', (_ctx: unknown, path: string) => { + expect(state!.cache.has(path)).toBe(false); + }); + }); + }); + + // =========================================================================== + // Stats tracking + // =========================================================================== + + Rule('Stats track hits and misses accurately', ({ RuleScenario }) => { + RuleScenario('Stats track hits and misses', ({ When, And, Then }) => { + When( + 'I store content {string} at path {string}', + (_ctx: unknown, content: string, path: string) => { + state!.cache.set(path, content); + } + ); + + And('I perform a get on cached path {string}', (_ctx: unknown, path: string) => { + 
state!.retrievedContent = state!.cache.get(path); + }); + + And('I perform a get on uncached path {string}', (_ctx: unknown, path: string) => { + state!.retrievedContent = state!.cache.get(path); + }); + + Then( + 'the stats show {int} hit and {int} miss', + (_ctx: unknown, expectedHits: number, expectedMisses: number) => { + const stats = state!.cache.getStats(); + expect(stats.hits).toBe(expectedHits); + expect(stats.misses).toBe(expectedMisses); + } + ); + + And('the stats show size {int}', (_ctx: unknown, expectedSize: number) => { + const stats = state!.cache.getStats(); + expect(stats.size).toBe(expectedSize); + }); + }); + + RuleScenario('Hit rate starts at zero for empty cache', ({ Then }) => { + Then('the hit rate is {int}', (_ctx: unknown, expected: number) => { + const stats = state!.cache.getStats(); + expect(stats.hitRate).toBe(expected); + }); + }); + + RuleScenario('Hit rate is 100 when all gets are hits', ({ When, And, Then }) => { + When( + 'I store content {string} at path {string}', + (_ctx: unknown, content: string, path: string) => { + state!.cache.set(path, content); + } + ); + + And('I perform a get on path {string}', (_ctx: unknown, path: string) => { + state!.retrievedContent = state!.cache.get(path); + }); + + Then('the hit rate is {int}', (_ctx: unknown, expected: number) => { + const stats = state!.cache.getStats(); + expect(stats.hitRate).toBe(expected); + }); + }); + }); + + // =========================================================================== + // Clear + // =========================================================================== + + Rule('Clear resets cache and stats', ({ RuleScenario }) => { + RuleScenario('Clear resets everything', ({ When, And, Then }) => { + When( + 'I store content {string} at path {string}', + (_ctx: unknown, content: string, path: string) => { + state!.cache.set(path, content); + } + ); + + And('I perform a get on path {string}', (_ctx: unknown, path: string) => { + state!.retrievedContent = 
state!.cache.get(path); + }); + + And('I clear the cache', () => { + state!.cache.clear(); + }); + + Then( + 'the stats show {int} hits and {int} misses', + (_ctx: unknown, expectedHits: number, expectedMisses: number) => { + const stats = state!.cache.getStats(); + expect(stats.hits).toBe(expectedHits); + expect(stats.misses).toBe(expectedMisses); + } + ); + + And('the stats show size {int}', (_ctx: unknown, expectedSize: number) => { + const stats = state!.cache.getStats(); + expect(stats.size).toBe(expectedSize); + }); + + When('I retrieve a non-existent path {string}', (_ctx: unknown, path: string) => { + state!.retrievedContent = state!.cache.get(path); + }); + + Then('the retrieved content is undefined', () => { + expect(state!.retrievedContent).toBeUndefined(); + }); + }); + }); +}); diff --git a/tests/steps/utils/git-branch-diff.steps.ts b/tests/steps/utils/git-branch-diff.steps.ts new file mode 100644 index 00000000..298e5aae --- /dev/null +++ b/tests/steps/utils/git-branch-diff.steps.ts @@ -0,0 +1,214 @@ +/** + * @libar-docs + * @libar-docs-implements GitBranchDiffTesting + * @libar-docs-uses GitBranchDiff, GitHelpers + * + * Git Branch Diff Step Definitions + * + * BDD step definitions for testing branch-scoped git change detection and the + * shared NUL-delimited name-status parser. 
+ */ + +import { execFileSync } from 'node:child_process'; +import * as fs from 'node:fs/promises'; +import * as path from 'node:path'; +import { loadFeature, describeFeature } from '@amiceli/vitest-cucumber'; +import { expect } from 'vitest'; +import { getChangedFilesList, parseGitNameStatus } from '../../../src/git/index.js'; +import { + createTempDir, + writeTempFile, + type TempDirContext, +} from '../../support/helpers/file-system.js'; +import type { DataTableRow } from '../../support/world.js'; + +interface GitBranchDiffState { + tempContext: TempDirContext | null; + changedFiles: readonly string[] | null; + parseOutput: string; + parsedModifiedFiles: string[]; +} + +let state: GitBranchDiffState | null = null; + +function initState(): GitBranchDiffState { + return { + tempContext: null, + changedFiles: null, + parseOutput: '', + parsedModifiedFiles: [], + }; +} + +function getState(): GitBranchDiffState { + if (!state) { + throw new Error('State not initialized'); + } + return state; +} + +function getRepoDir(): string { + const tempDir = getState().tempContext?.tempDir; + if (!tempDir) { + throw new Error('Git repository not initialized'); + } + return tempDir; +} + +function runGit(args: readonly string[], cwd = getRepoDir()): string { + return execFileSync('git', args, { + cwd, + encoding: 'utf-8', + stdio: ['pipe', 'pipe', 'pipe'], + }); +} + +async function writeAndStageFile(relativePath: string, content: string): Promise { + const repoDir = getRepoDir(); + await writeTempFile(repoDir, relativePath, content); + runGit(['add', '--', relativePath], repoDir); +} + +async function commitFile(relativePath: string, content: string): Promise { + await writeAndStageFile(relativePath, content); + runGit(['commit', '-m', `Add ${relativePath}`]); +} + +const feature = await loadFeature('tests/features/utils/git-branch-diff.feature'); + +describeFeature(feature, ({ Background, Rule, AfterEachScenario }) => { + AfterEachScenario(async () => { + if 
(state?.tempContext) { + await state.tempContext.cleanup(); + } + state = null; + }); + + Background(({ Given }) => { + Given('a git branch diff test context', () => { + state = initState(); + }); + }); + + Rule('getChangedFilesList returns only existing changed files', ({ RuleScenario }) => { + RuleScenario( + 'Modified and added files are returned while deleted files are excluded', + ({ Given, And, When, Then }) => { + Given('an initialized git repository', async () => { + state = initState(); + state.tempContext = await createTempDir({ prefix: 'git-branch-diff-test-' }); + runGit(['init', '--initial-branch=main']); + runGit(['config', 'user.email', 'test@example.com']); + runGit(['config', 'user.name', 'Test User']); + }); + + And('these committed files exist:', async (_ctx: unknown, table: DataTableRow[]) => { + for (const row of table) { + await commitFile(row.file ?? '', row.content ?? ''); + } + }); + + When( + 'I modify file {string} to {string}', + async (_ctx: unknown, relativePath: string, content: string) => { + await writeAndStageFile(relativePath, content); + } + ); + + And( + 'I add file {string} with content {string}', + async (_ctx: unknown, relativePath: string, content: string) => { + await writeAndStageFile(relativePath, content); + } + ); + + And('I delete file {string}', async (_ctx: unknown, relativePath: string) => { + await fs.rm(path.join(getRepoDir(), relativePath)); + runGit(['rm', '-f', '--cached', '--', relativePath]); + }); + + And('I list changed files against {string}', (_ctx: unknown, baseBranch: string) => { + const result = getChangedFilesList(getRepoDir(), baseBranch); + expect(result.ok).toBe(true); + state!.changedFiles = result.ok ? result.value : []; + }); + + Then('the changed files should include:', (_ctx: unknown, table: DataTableRow[]) => { + const changedFiles = state!.changedFiles ?? []; + for (const row of table) { + expect(changedFiles).toContain(row.file ?? 
''); + } + }); + + And('the changed files should not include:', (_ctx: unknown, table: DataTableRow[]) => { + const changedFiles = state!.changedFiles ?? []; + for (const row of table) { + expect(changedFiles).not.toContain(row.file ?? ''); + } + }); + } + ); + }); + + Rule('Paths with spaces are preserved', ({ RuleScenario }) => { + RuleScenario('File paths with spaces are preserved', ({ Given, And, When, Then }) => { + Given('an initialized git repository', async () => { + state = initState(); + state.tempContext = await createTempDir({ prefix: 'git-branch-diff-test-' }); + runGit(['init', '--initial-branch=main']); + runGit(['config', 'user.email', 'test@example.com']); + runGit(['config', 'user.name', 'Test User']); + }); + + And( + 'a committed file {string} with content {string}', + async (_ctx: unknown, relativePath: string, content: string) => { + await commitFile(relativePath, content); + } + ); + + When( + 'I modify file {string} to {string}', + async (_ctx: unknown, relativePath: string, content: string) => { + await writeAndStageFile(relativePath, content); + } + ); + + And('I list changed files against {string}', (_ctx: unknown, baseBranch: string) => { + const result = getChangedFilesList(getRepoDir(), baseBranch); + expect(result.ok).toBe(true); + state!.changedFiles = result.ok ? result.value : []; + }); + + Then('the changed files should include:', (_ctx: unknown, table: DataTableRow[]) => { + const changedFiles = state!.changedFiles ?? []; + for (const row of table) { + expect(changedFiles).toContain(row.file ?? 
''); + } + }); + }); + }); + + Rule('NUL-delimited rename and copy statuses use the new path', ({ RuleScenarioOutline }) => { + RuleScenarioOutline( + 'Similarity status maps to the new path', + ({ Given, When, Then }, variables: { status: string; oldPath: string; newPath: string }) => { + Given( + 'a git name-status output with status "" from "" to ""', + () => { + state = initState(); + state.parseOutput = `${variables.status}\0${variables.oldPath}\0${variables.newPath}\0`; + } + ); + + When('I parse the git name-status output', () => { + state!.parsedModifiedFiles = parseGitNameStatus(state!.parseOutput).modified; + }); + + Then('the parsed modified files should include ""', () => { + expect(state!.parsedModifiedFiles).toContain(variables.newPath); + }); + } + ); + }); +}); diff --git a/tests/steps/validation/codec-utils.steps.ts b/tests/steps/validation/codec-utils.steps.ts new file mode 100644 index 00000000..de03dae9 --- /dev/null +++ b/tests/steps/validation/codec-utils.steps.ts @@ -0,0 +1,317 @@ +/** + * @libar-docs + * @libar-docs-implements CodecUtilsTesting + * @libar-docs-uses CodecUtils + * + * Codec Utils Step Definitions + * + * BDD step definitions for testing codec utility functions: + * - createJsonInputCodec - JSON parsing with schema validation + * - formatCodecError - Error formatting for display + */ +import { loadFeature, describeFeature } from '@amiceli/vitest-cucumber'; +import { expect } from 'vitest'; +import { z } from 'zod'; +import { + createJsonInputCodec, + formatCodecError, + type CodecError, + type JsonInputCodec, +} from '../../../src/validation-schemas/codec-utils.js'; +import type { Result } from '../../../src/types/index.js'; + +// ============================================================================= +// Type Definitions +// ============================================================================= + +interface TestObject { + name: string; +} + +interface CodecUtilsTestState { + // Schema used for codec creation + 
schema: z.ZodType | null; + + // Input codec instance + inputCodec: JsonInputCodec | null; + + // Parse result + parseResult: Result | null; + + // SafeParse result + safeParseResult: TestObject | undefined; + + // Formatted error string + formattedError: string; +} + +// ============================================================================= +// Module-level state (reset per scenario) +// ============================================================================= + +let state: CodecUtilsTestState | null = null; + +// ============================================================================= +// Helper Functions +// ============================================================================= + +function initState(): CodecUtilsTestState { + return { + schema: null, + inputCodec: null, + parseResult: null, + safeParseResult: undefined, + formattedError: '', + }; +} + +// ============================================================================= +// Feature: Codec Utils Validation +// ============================================================================= + +const feature = await loadFeature('tests/features/validation/codec-utils.feature'); + +describeFeature(feature, ({ Rule, Background, AfterEachScenario }) => { + AfterEachScenario(() => { + state = null; + }); + + Background(({ Given }) => { + Given('a codec utils test context', () => { + state = initState(); + }); + }); + + // =========================================================================== + // createJsonInputCodec - Parse and Validate JSON + // =========================================================================== + + Rule('createJsonInputCodec parses and validates JSON strings', ({ RuleScenario }) => { + RuleScenario('Input codec parses valid JSON matching schema', ({ Given, When, Then, And }) => { + Given('a Zod schema for an object with a required name string field', () => { + state!.schema = z.object({ name: z.string() }); + state!.inputCodec = 
createJsonInputCodec(state!.schema); + }); + + When('I parse the JSON string \'{"name": "Alice"}\' with the input codec', () => { + state!.parseResult = state!.inputCodec!.parse('{"name": "Alice"}'); + }); + + Then('the parse result should be ok', () => { + expect(state!.parseResult!.ok).toBe(true); + }); + + And('the parsed value name should be "Alice"', () => { + const result = state!.parseResult!; + if (result.ok) { + expect(result.value.name).toBe('Alice'); + } else { + throw new Error('Expected ok result'); + } + }); + }); + + RuleScenario('Input codec rejects invalid JSON syntax', ({ Given, When, Then, And }) => { + Given('a Zod schema for an object with a required name string field', () => { + state!.schema = z.object({ name: z.string() }); + state!.inputCodec = createJsonInputCodec(state!.schema); + }); + + When("I parse the JSON string '{not valid json}' with the input codec", () => { + state!.parseResult = state!.inputCodec!.parse('{not valid json}'); + }); + + Then('the parse result should be err', () => { + expect(state!.parseResult!.ok).toBe(false); + }); + + And('the codec error operation should be "parse"', () => { + const result = state!.parseResult!; + if (!result.ok) { + expect(result.error.operation).toBe('parse'); + } else { + throw new Error('Expected err result'); + } + }); + + And('the codec error message should contain "Invalid JSON"', () => { + const result = state!.parseResult!; + if (!result.ok) { + expect(result.error.message).toContain('Invalid JSON'); + } else { + throw new Error('Expected err result'); + } + }); + }); + + RuleScenario( + 'Input codec rejects valid JSON that fails schema validation', + ({ Given, When, Then, And }) => { + Given('a Zod schema for an object with a required name string field', () => { + state!.schema = z.object({ name: z.string() }); + state!.inputCodec = createJsonInputCodec(state!.schema); + }); + + When('I parse the JSON string \'{"age": 30}\' with the input codec', () => { + state!.parseResult = 
state!.inputCodec!.parse('{"age": 30}'); + }); + + Then('the parse result should be err', () => { + expect(state!.parseResult!.ok).toBe(false); + }); + + And('the codec error operation should be "parse"', () => { + const result = state!.parseResult!; + if (!result.ok) { + expect(result.error.operation).toBe('parse'); + } else { + throw new Error('Expected err result'); + } + }); + + And('the codec error message should contain "Schema validation failed"', () => { + const result = state!.parseResult!; + if (!result.ok) { + expect(result.error.message).toContain('Schema validation failed'); + } else { + throw new Error('Expected err result'); + } + }); + + And('the codec error should have validation errors', () => { + const result = state!.parseResult!; + if (!result.ok) { + expect(result.error.validationErrors).toBeDefined(); + expect(result.error.validationErrors!.length).toBeGreaterThan(0); + } else { + throw new Error('Expected err result'); + } + }); + } + ); + + RuleScenario( + 'Input codec includes source in error when provided', + ({ Given, When, Then, And }) => { + Given('a Zod schema for an object with a required name string field', () => { + state!.schema = z.object({ name: z.string() }); + state!.inputCodec = createJsonInputCodec(state!.schema); + }); + + When( + 'I parse the JSON string \'{"age": 30}\' with source "config.json" using the input codec', + () => { + state!.parseResult = state!.inputCodec!.parse('{"age": 30}', 'config.json'); + } + ); + + Then('the parse result should be err', () => { + expect(state!.parseResult!.ok).toBe(false); + }); + + And('the codec error message should contain "config.json"', () => { + const result = state!.parseResult!; + if (!result.ok) { + expect(result.error.message).toContain('config.json'); + } else { + throw new Error('Expected err result'); + } + }); + } + ); + + RuleScenario( + 'Input codec safeParse returns value for valid input', + ({ Given, When, Then, And }) => { + Given('a Zod schema for an object with a 
required name string field', () => { + state!.schema = z.object({ name: z.string() }); + state!.inputCodec = createJsonInputCodec(state!.schema); + }); + + When('I safeParse the JSON string \'{"name": "Bob"}\' with the input codec', () => { + state!.safeParseResult = state!.inputCodec!.safeParse('{"name": "Bob"}'); + }); + + Then('the safeParse result should not be undefined', () => { + expect(state!.safeParseResult).toBeDefined(); + }); + + And('the safeParse result name should be "Bob"', () => { + expect(state!.safeParseResult!.name).toBe('Bob'); + }); + } + ); + + RuleScenario( + 'Input codec safeParse returns undefined for invalid input', + ({ Given, When, Then }) => { + Given('a Zod schema for an object with a required name string field', () => { + state!.schema = z.object({ name: z.string() }); + state!.inputCodec = createJsonInputCodec(state!.schema); + }); + + When("I safeParse the JSON string '{broken' with the input codec", () => { + state!.safeParseResult = state!.inputCodec!.safeParse('{broken'); + }); + + Then('the safeParse result should be undefined', () => { + expect(state!.safeParseResult).toBeUndefined(); + }); + } + ); + }); + + // =========================================================================== + // formatCodecError - Error Formatting + // =========================================================================== + + Rule('formatCodecError formats errors for display', ({ RuleScenario }) => { + RuleScenario( + 'formatCodecError formats error without validation details', + ({ When, Then, And }) => { + When('I format a codec error with operation "parse" and message "Invalid JSON"', () => { + const error: CodecError = { + type: 'codec-error', + operation: 'parse', + message: 'Invalid JSON', + }; + state!.formattedError = formatCodecError(error); + }); + + Then('the formatted error should contain "parse"', () => { + expect(state!.formattedError).toContain('parse'); + }); + + And('the formatted error should contain "Invalid JSON"', () => { 
+ expect(state!.formattedError).toContain('Invalid JSON'); + }); + } + ); + + RuleScenario( + 'formatCodecError formats error with validation details', + ({ When, Then, And }) => { + When( + 'I format a codec error with operation "parse" and message "Schema validation failed" and validation errors', + () => { + const error: CodecError = { + type: 'codec-error', + operation: 'parse', + message: 'Schema validation failed', + validationErrors: [' - name: Required'], + }; + state!.formattedError = formatCodecError(error); + } + ); + + Then('the formatted error should contain "Schema validation failed"', () => { + expect(state!.formattedError).toContain('Schema validation failed'); + }); + + And('the formatted error should contain "Validation errors"', () => { + expect(state!.formattedError).toContain('Validation errors'); + }); + } + ); + }); +}); diff --git a/tests/steps/validation/tag-registry-schemas.steps.ts b/tests/steps/validation/tag-registry-schemas.steps.ts new file mode 100644 index 00000000..9698b0d0 --- /dev/null +++ b/tests/steps/validation/tag-registry-schemas.steps.ts @@ -0,0 +1,268 @@ +/** + * @libar-docs + * @libar-docs-implements TagRegistrySchemasTesting + * @libar-docs-uses TagRegistrySchema + * + * Tag Registry Schema Step Definitions + * + * BDD step definitions for testing tag registry configuration: + * - createDefaultTagRegistry - Default registry creation from taxonomy + * - mergeTagRegistries - Deep merge of registries by tag + */ +import { loadFeature, describeFeature } from '@amiceli/vitest-cucumber'; +import { expect } from 'vitest'; +import { + TagRegistrySchema, + createDefaultTagRegistry, + mergeTagRegistries, + type TagRegistry, +} from '../../../src/validation-schemas/tag-registry.js'; + +// ============================================================================= +// Type Definitions +// ============================================================================= + +interface TagRegistryTestState { + // Created registry + 
registry: TagRegistry | null; + + // Base registry for merge tests + baseRegistry: TagRegistry | null; + + // Merged registry + mergedRegistry: TagRegistry | null; + + // Schema validation result + validationPassed: boolean; +} + +// ============================================================================= +// Module-level state (reset per scenario) +// ============================================================================= + +let state: TagRegistryTestState | null = null; + +// ============================================================================= +// Helper Functions +// ============================================================================= + +function initState(): TagRegistryTestState { + return { + registry: null, + baseRegistry: null, + mergedRegistry: null, + validationPassed: false, + }; +} + +/** + * Create a minimal valid TagRegistry for testing merge behavior. + */ +function createMinimalRegistry(overrides: Partial = {}): TagRegistry { + return { + version: overrides.version ?? '1.0.0', + categories: overrides.categories ?? [], + metadataTags: overrides.metadataTags ?? [], + aggregationTags: overrides.aggregationTags ?? [], + formatOptions: overrides.formatOptions ?? ['full', 'list', 'summary'], + tagPrefix: overrides.tagPrefix ?? '@libar-docs-', + fileOptInTag: overrides.fileOptInTag ?? 
'@libar-docs', + }; +} + +// ============================================================================= +// Feature: Tag Registry Schema Validation +// ============================================================================= + +const feature = await loadFeature('tests/features/validation/tag-registry-schemas.feature'); + +describeFeature(feature, ({ Rule, Background, AfterEachScenario }) => { + AfterEachScenario(() => { + state = null; + }); + + Background(({ Given }) => { + Given('a tag registry test context', () => { + state = initState(); + }); + }); + + // =========================================================================== + // createDefaultTagRegistry - Default Registry + // =========================================================================== + + Rule( + 'createDefaultTagRegistry produces a valid registry from taxonomy source', + ({ RuleScenario }) => { + RuleScenario('Default registry passes schema validation', ({ When, Then }) => { + When('I create a default tag registry', () => { + state!.registry = createDefaultTagRegistry(); + }); + + Then('the registry should pass TagRegistrySchema validation', () => { + const result = TagRegistrySchema.safeParse(state!.registry); + expect(result.success).toBe(true); + }); + }); + + RuleScenario('Default registry has non-empty categories', ({ When, Then }) => { + When('I create a default tag registry', () => { + state!.registry = createDefaultTagRegistry(); + }); + + Then('the registry should have at least 1 category', () => { + expect(state!.registry!.categories.length).toBeGreaterThanOrEqual(1); + }); + }); + + RuleScenario('Default registry has non-empty metadata tags', ({ When, Then }) => { + When('I create a default tag registry', () => { + state!.registry = createDefaultTagRegistry(); + }); + + Then('the registry should have at least 1 metadata tag', () => { + expect(state!.registry!.metadataTags.length).toBeGreaterThanOrEqual(1); + }); + }); + + RuleScenario('Default registry has expected 
tag prefix', ({ When, Then }) => { + When('I create a default tag registry', () => { + state!.registry = createDefaultTagRegistry(); + }); + + Then('the registry tag prefix should be "@libar-docs-"', () => { + expect(state!.registry!.tagPrefix).toBe('@libar-docs-'); + }); + }); + } + ); + + // =========================================================================== + // mergeTagRegistries - Deep Merge + // =========================================================================== + + Rule('mergeTagRegistries deep-merges registries by tag', ({ RuleScenario }) => { + RuleScenario('Merge overrides a category by tag', ({ Given, When, Then }) => { + Given('a base registry with a category "core" at priority 1', () => { + state!.baseRegistry = createMinimalRegistry({ + categories: [ + { + tag: 'core', + domain: 'Core', + priority: 1, + description: 'Core utilities', + aliases: [], + }, + ], + }); + }); + + When('I merge with an override that sets category "core" to priority 10', () => { + state!.mergedRegistry = mergeTagRegistries(state!.baseRegistry!, { + categories: [ + { + tag: 'core', + domain: 'Core', + priority: 10, + description: 'Core utilities', + aliases: [], + }, + ], + }); + }); + + Then('the merged registry should have category "core" at priority 10', () => { + const coreCategory = state!.mergedRegistry!.categories.find((c) => c.tag === 'core'); + expect(coreCategory).toBeDefined(); + expect(coreCategory!.priority).toBe(10); + }); + }); + + RuleScenario('Merge adds new categories from override', ({ Given, When, Then, And }) => { + Given('a base registry with a category "core" at priority 1', () => { + state!.baseRegistry = createMinimalRegistry({ + categories: [ + { + tag: 'core', + domain: 'Core', + priority: 1, + description: 'Core utilities', + aliases: [], + }, + ], + }); + }); + + When('I merge with an override that adds category "custom" at priority 5', () => { + state!.mergedRegistry = mergeTagRegistries(state!.baseRegistry!, { + categories: [ + { 
+ tag: 'custom', + domain: 'Custom', + priority: 5, + description: 'Custom category', + aliases: [], + }, + ], + }); + }); + + Then('the merged registry should have 2 categories', () => { + expect(state!.mergedRegistry!.categories).toHaveLength(2); + }); + + And('the merged registry should contain category "custom"', () => { + const customCategory = state!.mergedRegistry!.categories.find((c) => c.tag === 'custom'); + expect(customCategory).toBeDefined(); + }); + }); + + RuleScenario('Merge replaces scalar fields when provided', ({ Given, When, Then }) => { + Given('a base registry with tag prefix "@libar-docs-"', () => { + state!.baseRegistry = createMinimalRegistry({ + tagPrefix: '@libar-docs-', + }); + }); + + When('I merge with an override that sets tag prefix "@custom-"', () => { + state!.mergedRegistry = mergeTagRegistries(state!.baseRegistry!, { + tagPrefix: '@custom-', + }); + }); + + Then('the merged registry tag prefix should be "@custom-"', () => { + expect(state!.mergedRegistry!.tagPrefix).toBe('@custom-'); + }); + }); + + RuleScenario('Merge preserves base when override is empty', ({ Given, When, Then, And }) => { + Given('a base registry with a category "core" at priority 1', () => { + state!.baseRegistry = createMinimalRegistry({ + categories: [ + { + tag: 'core', + domain: 'Core', + priority: 1, + description: 'Core utilities', + aliases: [], + }, + ], + }); + }); + + When('I merge with an empty override', () => { + state!.mergedRegistry = mergeTagRegistries(state!.baseRegistry!, {}); + }); + + Then('the merged registry should have 1 category', () => { + expect(state!.mergedRegistry!.categories).toHaveLength(1); + }); + + And('the merged registry should have category "core" at priority 1', () => { + const coreCategory = state!.mergedRegistry!.categories.find((c) => c.tag === 'core'); + expect(coreCategory).toBeDefined(); + expect(coreCategory!.priority).toBe(1); + }); + }); + }); +}); diff --git 
a/tests/steps/validation/workflow-config-schemas.steps.ts b/tests/steps/validation/workflow-config-schemas.steps.ts new file mode 100644 index 00000000..58be5c0a --- /dev/null +++ b/tests/steps/validation/workflow-config-schemas.steps.ts @@ -0,0 +1,339 @@ +/** + * @libar-docs + * @libar-docs-implements WorkflowConfigSchemasTesting + * @libar-docs-uses WorkflowConfigSchema + * + * Workflow Config Schema Step Definitions + * + * BDD step definitions for testing workflow configuration schemas: + * - WorkflowConfigSchema - Zod schema validation + * - createLoadedWorkflow - Lookup map construction + * - isWorkflowConfig - Runtime type guard + */ +import { loadFeature, describeFeature } from '@amiceli/vitest-cucumber'; +import { expect } from 'vitest'; +import { + WorkflowConfigSchema, + createLoadedWorkflow, + isWorkflowConfig, + type WorkflowConfig, + type LoadedWorkflow, +} from '../../../src/validation-schemas/workflow-config.js'; + +// ============================================================================= +// Type Definitions +// ============================================================================= + +interface WorkflowConfigTestState { + // Schema validation + validationResult: + | { success: true; data: WorkflowConfig } + | { success: false; error: unknown } + | null; + + // Loaded workflow + loadedWorkflow: LoadedWorkflow | null; + + // Type guard result + typeGuardResult: boolean; + + // Config used for loaded workflow tests + config: WorkflowConfig | null; +} + +// ============================================================================= +// Module-level state (reset per scenario) +// ============================================================================= + +let state: WorkflowConfigTestState | null = null; + +// ============================================================================= +// Helper Functions +// ============================================================================= + +function initState(): 
WorkflowConfigTestState { + return { + validationResult: null, + loadedWorkflow: null, + typeGuardResult: false, + config: null, + }; +} + +/** + * Create a minimal valid WorkflowConfig for testing. + */ +function createMinimalWorkflowConfig(overrides: Partial = {}): WorkflowConfig { + return { + name: overrides.name ?? 'test-workflow', + version: overrides.version ?? '1.0.0', + statuses: overrides.statuses ?? [{ name: 'roadmap', emoji: '📋' }], + phases: overrides.phases ?? [{ name: 'Inception' }], + ...('description' in overrides ? { description: overrides.description } : {}), + ...('defaultStatus' in overrides ? { defaultStatus: overrides.defaultStatus } : {}), + ...('metadata' in overrides ? { metadata: overrides.metadata } : {}), + }; +} + +// ============================================================================= +// Feature: Workflow Config Schema Validation +// ============================================================================= + +const feature = await loadFeature('tests/features/validation/workflow-config-schemas.feature'); + +describeFeature(feature, ({ Rule, Background, AfterEachScenario }) => { + AfterEachScenario(() => { + state = null; + }); + + Background(({ Given }) => { + Given('a workflow config test context', () => { + state = initState(); + }); + }); + + // =========================================================================== + // WorkflowConfigSchema - Schema Validation + // =========================================================================== + + Rule('WorkflowConfigSchema validates workflow configurations', ({ RuleScenario }) => { + RuleScenario('Valid workflow config passes schema validation', ({ When, Then }) => { + When( + 'I validate a workflow config with name "standard" and version "1.0.0" with 1 status and 1 phase', + () => { + const config = { + name: 'standard', + version: '1.0.0', + statuses: [{ name: 'roadmap', emoji: '📋' }], + phases: [{ name: 'Inception' }], + }; + state!.validationResult = 
WorkflowConfigSchema.safeParse(config); + } + ); + + Then('the workflow config should be valid', () => { + expect(state!.validationResult!.success).toBe(true); + }); + }); + + RuleScenario('Config without name is rejected', ({ When, Then }) => { + When('I validate a workflow config without a name', () => { + const config = { + version: '1.0.0', + statuses: [{ name: 'roadmap', emoji: '📋' }], + phases: [{ name: 'Inception' }], + }; + state!.validationResult = WorkflowConfigSchema.safeParse(config); + }); + + Then('the workflow config should be invalid', () => { + expect(state!.validationResult!.success).toBe(false); + }); + }); + + RuleScenario('Config with invalid semver version is rejected', ({ When, Then }) => { + When('I validate a workflow config with name "standard" and version "not-semver"', () => { + const config = { + name: 'standard', + version: 'not-semver', + statuses: [{ name: 'roadmap', emoji: '📋' }], + phases: [{ name: 'Inception' }], + }; + state!.validationResult = WorkflowConfigSchema.safeParse(config); + }); + + Then('the workflow config should be invalid', () => { + expect(state!.validationResult!.success).toBe(false); + }); + }); + + RuleScenario('Config without statuses is rejected', ({ When, Then }) => { + When( + 'I validate a workflow config with name "standard" and version "1.0.0" with 0 statuses', + () => { + const config = { + name: 'standard', + version: '1.0.0', + statuses: [], + phases: [{ name: 'Inception' }], + }; + state!.validationResult = WorkflowConfigSchema.safeParse(config); + } + ); + + Then('the workflow config should be invalid', () => { + expect(state!.validationResult!.success).toBe(false); + }); + }); + + RuleScenario('Config without phases is rejected', ({ When, Then }) => { + When( + 'I validate a workflow config with name "standard" and version "1.0.0" with 0 phases', + () => { + const config = { + name: 'standard', + version: '1.0.0', + statuses: [{ name: 'roadmap', emoji: '📋' }], + phases: [], + }; + 
state!.validationResult = WorkflowConfigSchema.safeParse(config); + } + ); + + Then('the workflow config should be invalid', () => { + expect(state!.validationResult!.success).toBe(false); + }); + }); + }); + + // =========================================================================== + // createLoadedWorkflow - Lookup Map Construction + // =========================================================================== + + Rule('createLoadedWorkflow builds efficient lookup maps', ({ RuleScenario }) => { + RuleScenario('Loaded workflow has status lookup map', ({ Given, When, Then, And }) => { + Given('a valid workflow config with status "roadmap" and status "active"', () => { + state!.config = createMinimalWorkflowConfig({ + statuses: [ + { name: 'roadmap', emoji: '📋' }, + { name: 'active', emoji: '🔨' }, + ], + }); + }); + + When('I create a loaded workflow', () => { + state!.loadedWorkflow = createLoadedWorkflow(state!.config!); + }); + + Then('the status map should contain "roadmap"', () => { + expect(state!.loadedWorkflow!.statusMap.has('roadmap')).toBe(true); + }); + + And('the status map should contain "active"', () => { + expect(state!.loadedWorkflow!.statusMap.has('active')).toBe(true); + }); + + And('the status map should have 2 entries', () => { + expect(state!.loadedWorkflow!.statusMap.size).toBe(2); + }); + }); + + RuleScenario('Status lookup is case-insensitive', ({ Given, When, Then, And }) => { + Given('a valid workflow config with status "Roadmap" and status "Active"', () => { + state!.config = createMinimalWorkflowConfig({ + statuses: [ + { name: 'Roadmap', emoji: '📋' }, + { name: 'Active', emoji: '🔨' }, + ], + }); + }); + + When('I create a loaded workflow', () => { + state!.loadedWorkflow = createLoadedWorkflow(state!.config!); + }); + + Then('the status map should contain "roadmap"', () => { + expect(state!.loadedWorkflow!.statusMap.has('roadmap')).toBe(true); + }); + + And('the status map should contain "active"', () => { + 
expect(state!.loadedWorkflow!.statusMap.has('active')).toBe(true); + }); + }); + + RuleScenario('Loaded workflow has phase lookup map', ({ Given, When, Then, And }) => { + Given('a valid workflow config with phase "Inception" and phase "Construction"', () => { + state!.config = createMinimalWorkflowConfig({ + phases: [{ name: 'Inception' }, { name: 'Construction' }], + }); + }); + + When('I create a loaded workflow', () => { + state!.loadedWorkflow = createLoadedWorkflow(state!.config!); + }); + + Then('the phase map should contain "inception"', () => { + expect(state!.loadedWorkflow!.phaseMap.has('inception')).toBe(true); + }); + + And('the phase map should contain "construction"', () => { + expect(state!.loadedWorkflow!.phaseMap.has('construction')).toBe(true); + }); + + And('the phase map should have 2 entries', () => { + expect(state!.loadedWorkflow!.phaseMap.size).toBe(2); + }); + }); + + RuleScenario('Phase lookup is case-insensitive', ({ Given, When, Then, And }) => { + Given('a valid workflow config with phase "Inception" and phase "Construction"', () => { + state!.config = createMinimalWorkflowConfig({ + phases: [{ name: 'Inception' }, { name: 'Construction' }], + }); + }); + + When('I create a loaded workflow', () => { + state!.loadedWorkflow = createLoadedWorkflow(state!.config!); + }); + + Then('the phase map should contain "inception"', () => { + expect(state!.loadedWorkflow!.phaseMap.has('inception')).toBe(true); + }); + + And('the phase map should contain "construction"', () => { + expect(state!.loadedWorkflow!.phaseMap.has('construction')).toBe(true); + }); + }); + }); + + // =========================================================================== + // isWorkflowConfig - Type Guard + // =========================================================================== + + Rule('isWorkflowConfig type guard validates at runtime', ({ RuleScenario }) => { + RuleScenario('Type guard accepts valid workflow config', ({ When, Then }) => { + When('I check 
isWorkflowConfig with a valid config', () => { + const config = createMinimalWorkflowConfig(); + state!.typeGuardResult = isWorkflowConfig(config); + }); + + Then('isWorkflowConfig should return true', () => { + expect(state!.typeGuardResult).toBe(true); + }); + }); + + RuleScenario('Type guard rejects null', ({ When, Then }) => { + When('I check isWorkflowConfig with null', () => { + state!.typeGuardResult = isWorkflowConfig(null); + }); + + Then('isWorkflowConfig should return false', () => { + expect(state!.typeGuardResult).toBe(false); + }); + }); + + RuleScenario('Type guard rejects partial config', ({ When, Then }) => { + When('I check isWorkflowConfig with a partial config missing statuses', () => { + state!.typeGuardResult = isWorkflowConfig({ + name: 'test', + version: '1.0.0', + phases: [{ name: 'Inception' }], + }); + }); + + Then('isWorkflowConfig should return false', () => { + expect(state!.typeGuardResult).toBe(false); + }); + }); + + RuleScenario('Type guard rejects non-object', ({ When, Then }) => { + When('I check isWorkflowConfig with the string "not a config"', () => { + state!.typeGuardResult = isWorkflowConfig('not a config'); + }); + + Then('isWorkflowConfig should return false', () => { + expect(state!.typeGuardResult).toBe(false); + }); + }); + }); +}); diff --git a/tests/support/helpers/cli-runner.ts b/tests/support/helpers/cli-runner.ts index 52d398ac..4690131d 100644 --- a/tests/support/helpers/cli-runner.ts +++ b/tests/support/helpers/cli-runner.ts @@ -55,6 +55,12 @@ const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); const PROJECT_ROOT = path.resolve(__dirname, '../../..'); +function createChildEnv(env: NodeJS.ProcessEnv): NodeJS.ProcessEnv { + const childEnv = { ...env, FORCE_COLOR: '0' }; + delete childEnv.NODE_V8_COVERAGE; + return childEnv; +} + // ============================================================================= // CLI Runner // 
============================================================================= @@ -106,7 +112,7 @@ export async function runCLI( return new Promise((resolve, reject) => { const child = spawn('npx', ['tsx', cliPath, ...args], { cwd, - env: { ...env, FORCE_COLOR: '0' }, // Disable color codes for easier assertion + env: createChildEnv(env), shell: true, }); diff --git a/tests/support/helpers/design-review-state.ts b/tests/support/helpers/design-review-state.ts index 21fd6207..d247360b 100644 --- a/tests/support/helpers/design-review-state.ts +++ b/tests/support/helpers/design-review-state.ts @@ -16,10 +16,8 @@ import type { import type { RenderableDocument } from '../../../src/renderable/schema.js'; import { getSequenceEntry } from '../../../src/api/pattern-helpers.js'; import { buildSequenceIndexEntry } from '../../../src/generators/pipeline/sequence-utils.js'; -import { - transformToMasterDatasetWithValidation, - type ValidationSummary, -} from '../../../src/generators/pipeline/transform-dataset.js'; +import type { ValidationSummary } from '../../../src/generators/pipeline/transform-types.js'; +import { transformToMasterDatasetWithValidation } from '../../../src/generators/pipeline/transform-dataset.js'; import { createDesignReviewCodec } from '../../../src/renderable/codecs/design-review.js'; import { renderToMarkdown } from '../../../src/renderable/render.js'; import { createDefaultTagRegistry } from '../../../src/validation-schemas/tag-registry.js'; diff --git a/tests/support/helpers/process-api-state.ts b/tests/support/helpers/process-api-state.ts index 0fd49abe..1b8fe136 100644 --- a/tests/support/helpers/process-api-state.ts +++ b/tests/support/helpers/process-api-state.ts @@ -53,10 +53,14 @@ export function getResult(state: CLITestState | null): CLIResult { export async function runCLICommand( state: CLITestState | null, - commandString: string + commandString: string, + options: { timeout?: number } = {} ): Promise { const s = getState(state); - s.result = 
await runCommand(commandString, { cwd: getTempDir(state) }); + s.result = await runCommand(commandString, { + cwd: getTempDir(state), + ...(options.timeout !== undefined ? { timeout: options.timeout } : {}), + }); } // ============================================================================= diff --git a/vitest.config.ts b/vitest.config.ts index d7cc3e6d..8b4ca8c2 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -25,9 +25,14 @@ export default defineConfig({ ], globals: true, environment: 'node', + coverage: { + provider: 'v8', + reporter: ['text', 'json-summary', 'lcov'], + reportsDirectory: 'coverage', + include: ['src/**/*.ts'], + exclude: ['src/**/*.d.ts'], + }, }, - css: false, root: path.resolve(__dirname), clearScreen: false, - plugins: [], });