This is page 21 of 29. Use http://codebase.md/tosin2013/documcp?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .dockerignore
├── .eslintignore
├── .eslintrc.json
├── .github
│ ├── agents
│ │ ├── documcp-ast.md
│ │ ├── documcp-deploy.md
│ │ ├── documcp-memory.md
│ │ ├── documcp-test.md
│ │ └── documcp-tool.md
│ ├── copilot-instructions.md
│ ├── dependabot.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── automated-changelog.md
│ │ ├── bug_report.md
│ │ ├── bug_report.yml
│ │ ├── documentation_issue.md
│ │ ├── feature_request.md
│ │ ├── feature_request.yml
│ │ ├── npm-publishing-fix.md
│ │ └── release_improvements.md
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── release-drafter.yml
│ └── workflows
│ ├── auto-merge.yml
│ ├── ci.yml
│ ├── codeql.yml
│ ├── dependency-review.yml
│ ├── deploy-docs.yml
│ ├── README.md
│ ├── release-drafter.yml
│ └── release.yml
├── .gitignore
├── .husky
│ ├── commit-msg
│ └── pre-commit
├── .linkcheck.config.json
├── .markdown-link-check.json
├── .nvmrc
├── .pre-commit-config.yaml
├── .versionrc.json
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── commitlint.config.js
├── CONTRIBUTING.md
├── docker-compose.docs.yml
├── Dockerfile.docs
├── docs
│ ├── .docusaurus
│ │ ├── docusaurus-plugin-content-docs
│ │ │ └── default
│ │ │ └── __mdx-loader-dependency.json
│ │ └── docusaurus-plugin-content-pages
│ │ └── default
│ │ └── __plugin.json
│ ├── adrs
│ │ ├── 001-mcp-server-architecture.md
│ │ ├── 002-repository-analysis-engine.md
│ │ ├── 003-static-site-generator-recommendation-engine.md
│ │ ├── 004-diataxis-framework-integration.md
│ │ ├── 005-github-pages-deployment-automation.md
│ │ ├── 006-mcp-tools-api-design.md
│ │ ├── 007-mcp-prompts-and-resources-integration.md
│ │ ├── 008-intelligent-content-population-engine.md
│ │ ├── 009-content-accuracy-validation-framework.md
│ │ ├── 010-mcp-resource-pattern-redesign.md
│ │ └── README.md
│ ├── api
│ │ ├── .nojekyll
│ │ ├── assets
│ │ │ ├── hierarchy.js
│ │ │ ├── highlight.css
│ │ │ ├── icons.js
│ │ │ ├── icons.svg
│ │ │ ├── main.js
│ │ │ ├── navigation.js
│ │ │ ├── search.js
│ │ │ └── style.css
│ │ ├── hierarchy.html
│ │ ├── index.html
│ │ ├── modules.html
│ │ └── variables
│ │ └── TOOLS.html
│ ├── assets
│ │ └── logo.svg
│ ├── development
│ │ └── MCP_INSPECTOR_TESTING.md
│ ├── docusaurus.config.js
│ ├── explanation
│ │ ├── architecture.md
│ │ └── index.md
│ ├── guides
│ │ ├── link-validation.md
│ │ ├── playwright-integration.md
│ │ └── playwright-testing-workflow.md
│ ├── how-to
│ │ ├── analytics-setup.md
│ │ ├── custom-domains.md
│ │ ├── documentation-freshness-tracking.md
│ │ ├── github-pages-deployment.md
│ │ ├── index.md
│ │ ├── local-testing.md
│ │ ├── performance-optimization.md
│ │ ├── prompting-guide.md
│ │ ├── repository-analysis.md
│ │ ├── seo-optimization.md
│ │ ├── site-monitoring.md
│ │ ├── troubleshooting.md
│ │ └── usage-examples.md
│ ├── index.md
│ ├── knowledge-graph.md
│ ├── package-lock.json
│ ├── package.json
│ ├── phase-2-intelligence.md
│ ├── reference
│ │ ├── api-overview.md
│ │ ├── cli.md
│ │ ├── configuration.md
│ │ ├── deploy-pages.md
│ │ ├── index.md
│ │ ├── mcp-tools.md
│ │ └── prompt-templates.md
│ ├── research
│ │ ├── cross-domain-integration
│ │ │ └── README.md
│ │ ├── domain-1-mcp-architecture
│ │ │ ├── index.md
│ │ │ └── mcp-performance-research.md
│ │ ├── domain-2-repository-analysis
│ │ │ └── README.md
│ │ ├── domain-3-ssg-recommendation
│ │ │ ├── index.md
│ │ │ └── ssg-performance-analysis.md
│ │ ├── domain-4-diataxis-integration
│ │ │ └── README.md
│ │ ├── domain-5-github-deployment
│ │ │ ├── github-pages-security-analysis.md
│ │ │ └── index.md
│ │ ├── domain-6-api-design
│ │ │ └── README.md
│ │ ├── README.md
│ │ ├── research-integration-summary-2025-01-14.md
│ │ ├── research-progress-template.md
│ │ └── research-questions-2025-01-14.md
│ ├── robots.txt
│ ├── sidebars.js
│ ├── sitemap.xml
│ ├── src
│ │ └── css
│ │ └── custom.css
│ └── tutorials
│ ├── development-setup.md
│ ├── environment-setup.md
│ ├── first-deployment.md
│ ├── getting-started.md
│ ├── index.md
│ ├── memory-workflows.md
│ └── user-onboarding.md
├── jest.config.js
├── LICENSE
├── Makefile
├── MCP_PHASE2_IMPLEMENTATION.md
├── mcp-config-example.json
├── mcp.json
├── package-lock.json
├── package.json
├── README.md
├── release.sh
├── scripts
│ └── check-package-structure.cjs
├── SECURITY.md
├── setup-precommit.sh
├── src
│ ├── benchmarks
│ │ └── performance.ts
│ ├── index.ts
│ ├── memory
│ │ ├── contextual-retrieval.ts
│ │ ├── deployment-analytics.ts
│ │ ├── enhanced-manager.ts
│ │ ├── export-import.ts
│ │ ├── freshness-kg-integration.ts
│ │ ├── index.ts
│ │ ├── integration.ts
│ │ ├── kg-code-integration.ts
│ │ ├── kg-health.ts
│ │ ├── kg-integration.ts
│ │ ├── kg-link-validator.ts
│ │ ├── kg-storage.ts
│ │ ├── knowledge-graph.ts
│ │ ├── learning.ts
│ │ ├── manager.ts
│ │ ├── multi-agent-sharing.ts
│ │ ├── pruning.ts
│ │ ├── schemas.ts
│ │ ├── storage.ts
│ │ ├── temporal-analysis.ts
│ │ ├── user-preferences.ts
│ │ └── visualization.ts
│ ├── prompts
│ │ └── technical-writer-prompts.ts
│ ├── scripts
│ │ └── benchmark.ts
│ ├── templates
│ │ └── playwright
│ │ ├── accessibility.spec.template.ts
│ │ ├── Dockerfile.template
│ │ ├── docs-e2e.workflow.template.yml
│ │ ├── link-validation.spec.template.ts
│ │ └── playwright.config.template.ts
│ ├── tools
│ │ ├── analyze-deployments.ts
│ │ ├── analyze-readme.ts
│ │ ├── analyze-repository.ts
│ │ ├── check-documentation-links.ts
│ │ ├── deploy-pages.ts
│ │ ├── detect-gaps.ts
│ │ ├── evaluate-readme-health.ts
│ │ ├── generate-config.ts
│ │ ├── generate-contextual-content.ts
│ │ ├── generate-llm-context.ts
│ │ ├── generate-readme-template.ts
│ │ ├── generate-technical-writer-prompts.ts
│ │ ├── kg-health-check.ts
│ │ ├── manage-preferences.ts
│ │ ├── manage-sitemap.ts
│ │ ├── optimize-readme.ts
│ │ ├── populate-content.ts
│ │ ├── readme-best-practices.ts
│ │ ├── recommend-ssg.ts
│ │ ├── setup-playwright-tests.ts
│ │ ├── setup-structure.ts
│ │ ├── sync-code-to-docs.ts
│ │ ├── test-local-deployment.ts
│ │ ├── track-documentation-freshness.ts
│ │ ├── update-existing-documentation.ts
│ │ ├── validate-content.ts
│ │ ├── validate-documentation-freshness.ts
│ │ ├── validate-readme-checklist.ts
│ │ └── verify-deployment.ts
│ ├── types
│ │ └── api.ts
│ ├── utils
│ │ ├── ast-analyzer.ts
│ │ ├── code-scanner.ts
│ │ ├── content-extractor.ts
│ │ ├── drift-detector.ts
│ │ ├── freshness-tracker.ts
│ │ ├── language-parsers-simple.ts
│ │ ├── permission-checker.ts
│ │ └── sitemap-generator.ts
│ └── workflows
│ └── documentation-workflow.ts
├── test-docs-local.sh
├── tests
│ ├── api
│ │ └── mcp-responses.test.ts
│ ├── benchmarks
│ │ └── performance.test.ts
│ ├── edge-cases
│ │ └── error-handling.test.ts
│ ├── functional
│ │ └── tools.test.ts
│ ├── integration
│ │ ├── kg-documentation-workflow.test.ts
│ │ ├── knowledge-graph-workflow.test.ts
│ │ ├── mcp-readme-tools.test.ts
│ │ ├── memory-mcp-tools.test.ts
│ │ ├── readme-technical-writer.test.ts
│ │ └── workflow.test.ts
│ ├── memory
│ │ ├── contextual-retrieval.test.ts
│ │ ├── enhanced-manager.test.ts
│ │ ├── export-import.test.ts
│ │ ├── freshness-kg-integration.test.ts
│ │ ├── kg-code-integration.test.ts
│ │ ├── kg-health.test.ts
│ │ ├── kg-link-validator.test.ts
│ │ ├── kg-storage-validation.test.ts
│ │ ├── kg-storage.test.ts
│ │ ├── knowledge-graph-enhanced.test.ts
│ │ ├── knowledge-graph.test.ts
│ │ ├── learning.test.ts
│ │ ├── manager-advanced.test.ts
│ │ ├── manager.test.ts
│ │ ├── mcp-resource-integration.test.ts
│ │ ├── mcp-tool-persistence.test.ts
│ │ ├── schemas.test.ts
│ │ ├── storage.test.ts
│ │ ├── temporal-analysis.test.ts
│ │ └── user-preferences.test.ts
│ ├── performance
│ │ ├── memory-load-testing.test.ts
│ │ └── memory-stress-testing.test.ts
│ ├── prompts
│ │ ├── guided-workflow-prompts.test.ts
│ │ └── technical-writer-prompts.test.ts
│ ├── server.test.ts
│ ├── setup.ts
│ ├── tools
│ │ ├── all-tools.test.ts
│ │ ├── analyze-coverage.test.ts
│ │ ├── analyze-deployments.test.ts
│ │ ├── analyze-readme.test.ts
│ │ ├── analyze-repository.test.ts
│ │ ├── check-documentation-links.test.ts
│ │ ├── deploy-pages-kg-retrieval.test.ts
│ │ ├── deploy-pages-tracking.test.ts
│ │ ├── deploy-pages.test.ts
│ │ ├── detect-gaps.test.ts
│ │ ├── evaluate-readme-health.test.ts
│ │ ├── generate-contextual-content.test.ts
│ │ ├── generate-llm-context.test.ts
│ │ ├── generate-readme-template.test.ts
│ │ ├── generate-technical-writer-prompts.test.ts
│ │ ├── kg-health-check.test.ts
│ │ ├── manage-sitemap.test.ts
│ │ ├── optimize-readme.test.ts
│ │ ├── readme-best-practices.test.ts
│ │ ├── recommend-ssg-historical.test.ts
│ │ ├── recommend-ssg-preferences.test.ts
│ │ ├── recommend-ssg.test.ts
│ │ ├── simple-coverage.test.ts
│ │ ├── sync-code-to-docs.test.ts
│ │ ├── test-local-deployment.test.ts
│ │ ├── tool-error-handling.test.ts
│ │ ├── track-documentation-freshness.test.ts
│ │ ├── validate-content.test.ts
│ │ ├── validate-documentation-freshness.test.ts
│ │ └── validate-readme-checklist.test.ts
│ ├── types
│ │ └── type-safety.test.ts
│ └── utils
│ ├── ast-analyzer.test.ts
│ ├── content-extractor.test.ts
│ ├── drift-detector.test.ts
│ ├── freshness-tracker.test.ts
│ └── sitemap-generator.test.ts
├── tsconfig.json
└── typedoc.json
```
# Files
--------------------------------------------------------------------------------
/tests/utils/drift-detector.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | /**
2 | * Drift Detector Tests (Phase 3)
3 | */
4 |
5 | import {
6 | DriftDetector,
7 | DriftDetectionResult,
8 | } from "../../src/utils/drift-detector.js";
9 | import { promises as fs } from "fs";
10 | import { tmpdir } from "os";
11 | import { join } from "path";
12 | import { mkdtemp, rm } from "fs/promises";
13 |
14 | describe("DriftDetector", () => {
  // Shared fixtures: one detector instance and one temp workspace reused by
  // every test in this file. Tests accumulate files in the same
  // projectPath/docsPath, so later snapshots also contain files written by
  // earlier tests — tests must filter drift results by filePath.
  let detector: DriftDetector;
  let tempDir: string;
  let projectPath: string;
  let docsPath: string;

  beforeAll(async () => {
    // Isolated workspace: <os-tmp>/drift-test-XXXXXX/{project/src, docs}
    tempDir = await mkdtemp(join(tmpdir(), "drift-test-"));
    projectPath = join(tempDir, "project");
    docsPath = join(tempDir, "docs");

    await fs.mkdir(projectPath, { recursive: true });
    await fs.mkdir(join(projectPath, "src"), { recursive: true });
    await fs.mkdir(docsPath, { recursive: true });

    // The detector persists snapshots under tempDir (the "should store
    // snapshot to disk" test reads <tempDir>/.documcp/snapshots).
    detector = new DriftDetector(tempDir);
    await detector.initialize();
  });

  afterAll(async () => {
    // Remove the whole workspace, including any persisted snapshots.
    await rm(tempDir, { recursive: true, force: true });
  });
36 |
37 | describe("Snapshot Creation", () => {
38 | test("should create snapshot of codebase and documentation", async () => {
39 | // Create sample source file
40 | const sourceCode = `
41 | export function calculateSum(a: number, b: number): number {
42 | return a + b;
43 | }
44 | `.trim();
45 |
46 | await fs.writeFile(join(projectPath, "src", "math.ts"), sourceCode);
47 |
48 | // Create sample documentation
49 | const docContent = `
50 | # Math Module
51 |
52 | ## calculateSum
53 |
54 | Adds two numbers together.
55 |
56 | \`\`\`typescript
57 | calculateSum(a: number, b: number): number
58 | \`\`\`
59 | `.trim();
60 |
61 | await fs.writeFile(join(docsPath, "math.md"), docContent);
62 |
63 | const snapshot = await detector.createSnapshot(projectPath, docsPath);
64 |
65 | expect(snapshot).toBeDefined();
66 | expect(snapshot.projectPath).toBe(projectPath);
67 | expect(snapshot.timestamp).toBeTruthy();
68 | expect(snapshot.files.size).toBeGreaterThan(0);
69 | expect(snapshot.documentation.size).toBeGreaterThan(0);
70 | });
71 |
72 | test("should store snapshot to disk", async () => {
73 | const sourceCode = `export function test(): void {}`;
74 | await fs.writeFile(join(projectPath, "src", "test.ts"), sourceCode);
75 |
76 | const snapshot = await detector.createSnapshot(projectPath, docsPath);
77 |
78 | // Check that snapshot directory was created
79 | const snapshotDir = join(tempDir, ".documcp", "snapshots");
80 | const files = await fs.readdir(snapshotDir);
81 |
82 | expect(files.length).toBeGreaterThan(0);
83 | expect(files.some((f) => f.startsWith("snapshot-"))).toBe(true);
84 | });
85 |
86 | test("should load latest snapshot", async () => {
87 | const sourceCode = `export function loadTest(): void {}`;
88 | await fs.writeFile(join(projectPath, "src", "load-test.ts"), sourceCode);
89 |
90 | await detector.createSnapshot(projectPath, docsPath);
91 |
92 | const loaded = await detector.loadLatestSnapshot();
93 |
94 | expect(loaded).toBeDefined();
95 | expect(loaded?.projectPath).toBe(projectPath);
96 | });
97 | });
98 |
  // Core drift scenarios. Pattern for every test: write an "old" state,
  // snapshot, write a "new" state, snapshot again, then diff the two
  // snapshots and filter results by filePath (the shared projectPath also
  // contains files from earlier tests).
  describe("Drift Detection", () => {
    test("should detect when function signature changes", async () => {
      // Create initial version
      const oldCode = `
export function processData(data: string): void {
  console.log(data);
}
`.trim();

      await fs.writeFile(join(projectPath, "src", "processor.ts"), oldCode);

      const oldDoc = `
# Processor

## processData(data: string): void

Processes string data.
`.trim();

      await fs.writeFile(join(docsPath, "processor.md"), oldDoc);

      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);

      // Modify function signature (extra parameter AND new return type)
      const newCode = `
export function processData(data: string, options: object): Promise<string> {
  console.log(data, options);
  return Promise.resolve("done");
}
`.trim();

      await fs.writeFile(join(projectPath, "src", "processor.ts"), newCode);

      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);

      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);

      expect(drifts.length).toBeGreaterThan(0);

      const processorDrift = drifts.find((d) =>
        d.filePath.includes("processor.ts"),
      );

      expect(processorDrift).toBeDefined();
      expect(processorDrift?.hasDrift).toBe(true);
      expect(processorDrift?.drifts.length).toBeGreaterThan(0);
    });

    test("should detect when functions are removed", async () => {
      // Initial code with two functions
      const oldCode = `
export function keepMe(): void {}
export function removeMe(): void {}
`.trim();

      await fs.writeFile(join(projectPath, "src", "removal.ts"), oldCode);

      const oldDoc = `
# Functions

## keepMe
## removeMe
`.trim();

      await fs.writeFile(join(docsPath, "removal.md"), oldDoc);

      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);

      // Remove one function
      const newCode = `
export function keepMe(): void {}
`.trim();

      await fs.writeFile(join(projectPath, "src", "removal.ts"), newCode);

      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);

      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);

      const removalDrift = drifts.find((d) =>
        d.filePath.includes("removal.ts"),
      );

      expect(removalDrift).toBeDefined();
      // Removing an exported, documented function must surface as "breaking".
      expect(
        removalDrift?.drifts.some((drift) => drift.type === "breaking"),
      ).toBe(true);
    });

    test("should detect when new functions are added", async () => {
      const oldCode = `
export function existing(): void {}
`.trim();

      await fs.writeFile(join(projectPath, "src", "addition.ts"), oldCode);

      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);

      const newCode = `
export function existing(): void {}
export function newFunction(): void {}
`.trim();

      await fs.writeFile(join(projectPath, "src", "addition.ts"), newCode);

      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);

      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);

      const additionDrift = drifts.find((d) =>
        d.filePath.includes("addition.ts"),
      );

      expect(additionDrift).toBeDefined();
      // A new, undocumented function should be reported as "missing" docs.
      expect(
        additionDrift?.drifts.some((drift) => drift.type === "missing"),
      ).toBe(true);
    });

    test("should classify drift severity correctly", async () => {
      // Breaking change
      const oldCode = `
export function criticalFunction(param: string): void {}
`.trim();

      await fs.writeFile(join(projectPath, "src", "severity.ts"), oldCode);

      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);

      // Remove exported function - breaking change. The function body still
      // exists, but dropping the "export" removes it from the public API.
      const newCode = `
function criticalFunction(param: string): void {}
`.trim();

      await fs.writeFile(join(projectPath, "src", "severity.ts"), newCode);

      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);

      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);

      const severityDrift = drifts.find((d) =>
        d.filePath.includes("severity.ts"),
      );

      expect(severityDrift).toBeDefined();
      expect(severityDrift?.severity).toBe("critical"); // Removing export is breaking
    });
  });
247 |
  // Suggestion generation: when drift exists, the detector should attach
  // concrete documentation-update suggestions with content and a confidence
  // score.
  describe("Suggestion Generation", () => {
    test("should generate suggestions for outdated documentation", async () => {
      const oldCode = `
export function calculate(x: number): number {
  return x * 2;
}
`.trim();

      await fs.writeFile(join(projectPath, "src", "calc.ts"), oldCode);

      const oldDoc = `
# Calculator

## calculate(x: number): number

Doubles the input.
`.trim();

      await fs.writeFile(join(docsPath, "calc.md"), oldDoc);

      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);

      // Change function signature
      const newCode = `
export function calculate(x: number, y: number): number {
  return x * y;
}
`.trim();

      await fs.writeFile(join(projectPath, "src", "calc.ts"), newCode);

      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);

      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);

      const calcDrift = drifts.find((d) => d.filePath.includes("calc.ts"));

      expect(calcDrift).toBeDefined();
      expect(calcDrift?.suggestions.length).toBeGreaterThan(0);

      const suggestion = calcDrift?.suggestions[0];
      expect(suggestion).toBeDefined();
      expect(suggestion?.suggestedContent).toBeTruthy();
      // Confidence must be a positive score (exact scale is a DriftDetector
      // implementation detail — not asserted here).
      expect(suggestion?.confidence).toBeGreaterThan(0);
    });

    test("should provide auto-applicable flag for safe changes", async () => {
      const oldCode = `
export function simpleChange(a: number): number {
  return a;
}
`.trim();

      await fs.writeFile(join(projectPath, "src", "simple.ts"), oldCode);

      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);

      // Minor change
      const newCode = `
export function simpleChange(a: number): number {
  return a * 2;
}
`.trim();

      await fs.writeFile(join(projectPath, "src", "simple.ts"), newCode);

      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);

      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);

      // Minor internal changes shouldn't require doc updates if signature is same
      const simpleDrift = drifts.find((d) => d.filePath.includes("simple.ts"));

      // NOTE(review): every assertion below is guarded, so this test passes
      // vacuously when no drift/suggestion is produced for simple.ts —
      // consider asserting the expected drift count explicitly.
      if (simpleDrift && simpleDrift.suggestions.length > 0) {
        const suggestion = simpleDrift.suggestions[0];
        expect(typeof suggestion.autoApplicable).toBe("boolean");
      }
    });
  });
327 |
328 | describe("Impact Analysis", () => {
329 | test("should analyze impact of changes", async () => {
330 | const oldCode = `
331 | export function breaking(): void {}
332 | export function major(): void {}
333 | `.trim();
334 |
335 | await fs.writeFile(join(projectPath, "src", "impact.ts"), oldCode);
336 |
337 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
338 |
339 | // Breaking change - remove function
340 | const newCode = `
341 | export function major(): void {}
342 | `.trim();
343 |
344 | await fs.writeFile(join(projectPath, "src", "impact.ts"), newCode);
345 |
346 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
347 |
348 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
349 |
350 | const impactDrift = drifts.find((d) => d.filePath.includes("impact.ts"));
351 |
352 | expect(impactDrift?.impactAnalysis).toBeDefined();
353 | expect(impactDrift?.impactAnalysis.breakingChanges).toBeGreaterThan(0);
354 | expect(impactDrift?.impactAnalysis.estimatedUpdateEffort).toBeDefined();
355 | });
356 |
357 | test("should identify affected documentation files", async () => {
358 | const code = `
359 | export function documented(): void {}
360 | `.trim();
361 |
362 | await fs.writeFile(join(projectPath, "src", "documented.ts"), code);
363 |
364 | const doc = `
365 | # Documentation
366 |
367 | \`documented()\` is a function.
368 | `.trim();
369 |
370 | await fs.writeFile(join(docsPath, "documented.md"), doc);
371 |
372 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
373 |
374 | // Change the function
375 | const newCode = `
376 | export function documented(param: string): void {}
377 | `.trim();
378 |
379 | await fs.writeFile(join(projectPath, "src", "documented.ts"), newCode);
380 |
381 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
382 |
383 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
384 |
385 | const docDrift = drifts.find((d) => d.filePath.includes("documented.ts"));
386 |
387 | expect(docDrift?.impactAnalysis.affectedDocFiles.length).toBeGreaterThan(
388 | 0,
389 | );
390 | });
391 | });
392 |
393 | describe("Edge Cases", () => {
394 | test("should handle no drift scenario", async () => {
395 | const code = `
396 | export function unchangedFunction(): void {}
397 | `.trim();
398 |
399 | await fs.writeFile(join(projectPath, "src", "unchanged.ts"), code);
400 |
401 | const snapshot1 = await detector.createSnapshot(projectPath, docsPath);
402 | const snapshot2 = await detector.createSnapshot(projectPath, docsPath);
403 |
404 | const drifts = await detector.detectDrift(snapshot1, snapshot2);
405 |
406 | // No changes should mean no drifts
407 | const unchangedDrift = drifts.find((d) =>
408 | d.filePath.includes("unchanged.ts"),
409 | );
410 |
411 | if (unchangedDrift) {
412 | expect(unchangedDrift.hasDrift).toBe(false);
413 | }
414 | });
415 |
416 | test("should handle missing documentation gracefully", async () => {
417 | const code = `
418 | export function undocumentedFunction(): void {}
419 | `.trim();
420 |
421 | await fs.writeFile(join(projectPath, "src", "undocumented.ts"), code);
422 |
423 | // Don't create documentation
424 | const snapshot = await detector.createSnapshot(projectPath, docsPath);
425 |
426 | expect(snapshot).toBeDefined();
427 | expect(snapshot.documentation.size).toBeGreaterThanOrEqual(0);
428 | });
429 |
430 | test("should handle new files correctly", async () => {
431 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
432 |
433 | // Add new file
434 | const newCode = `
435 | export function brandNew(): void {}
436 | `.trim();
437 |
438 | await fs.writeFile(join(projectPath, "src", "brand-new.ts"), newCode);
439 |
440 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
441 |
442 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
443 |
444 | // New files might not show as drift if they have no corresponding docs
445 | expect(Array.isArray(drifts)).toBe(true);
446 | });
447 | });
448 |
  // Markdown parsing performed during snapshot creation: documents are split
  // into sections with titles, code examples, and referenced symbols.
  describe("Documentation Section Extraction", () => {
    test("should extract documentation sections", async () => {
      const doc = `
# Main Title

This is the introduction.

## Section 1

Content for section 1.

\`\`\`typescript
function example(): void {}
\`\`\`

## Section 2

Content for section 2.
`.trim();

      await fs.writeFile(join(docsPath, "sections.md"), doc);

      const snapshot = await detector.createSnapshot(projectPath, docsPath);

      // Documentation snapshots are keyed by the doc file's full path.
      const docSnapshot = snapshot.documentation.get(
        join(docsPath, "sections.md"),
      );

      expect(docSnapshot).toBeDefined();
      expect(docSnapshot?.sections.length).toBeGreaterThan(0);

      // "Section 1" contains a fenced code block, which should be captured
      // as a code example attached to that section.
      const section1 = docSnapshot?.sections.find(
        (s) => s.title === "Section 1",
      );
      expect(section1).toBeDefined();
      expect(section1?.codeExamples.length).toBeGreaterThan(0);
    });

    test("should extract code references from documentation", async () => {
      const doc = `
# API Reference

See \`calculateSum()\` for details.

The function is in \`src/math.ts\`.

Check out the \`MathUtils\` class.
`.trim();

      await fs.writeFile(join(docsPath, "references.md"), doc);

      const snapshot = await detector.createSnapshot(projectPath, docsPath);

      const docSnapshot = snapshot.documentation.get(
        join(docsPath, "references.md"),
      );

      expect(docSnapshot).toBeDefined();

      // Inline-code mentions such as \`calculateSum()\` should be recorded
      // as referenced functions of the section.
      const section = docSnapshot?.sections[0];
      expect(section?.referencedFunctions.length).toBeGreaterThan(0);
    });
  });
512 |
  // Exercises the internal suggestion-builder paths: removal, addition, and
  // modification of functions, plus the autoApplicable flag.
  describe("Suggestion Generation Helper Methods", () => {
    test("should generate removal suggestion with deprecation notice", async () => {
      const oldCode = `
export function deprecatedFunc(x: number): number {
  return x;
}
`.trim();

      await fs.writeFile(join(projectPath, "src", "deprecated.ts"), oldCode);

      const oldDoc = `
# API

## deprecatedFunc(x: number): number

This function does something.
`.trim();

      await fs.writeFile(join(docsPath, "deprecated.md"), oldDoc);

      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);

      // Remove the function
      const newCode = `// Function removed`;

      await fs.writeFile(join(projectPath, "src", "deprecated.ts"), newCode);

      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);

      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
      const deprecatedDrift = drifts.find((d) =>
        d.filePath.includes("deprecated.ts"),
      );

      expect(deprecatedDrift).toBeDefined();
      expect(deprecatedDrift?.suggestions.length).toBeGreaterThan(0);

      // The removal suggestion must mark the symbol as removed and include a
      // "Note" callout in the suggested replacement content.
      const suggestion = deprecatedDrift?.suggestions[0];
      expect(suggestion?.suggestedContent).toContain("removed");
      expect(suggestion?.suggestedContent).toContain("Note");
    });

    test("should generate addition suggestion with code signature", async () => {
      const oldCode = `export function existing(): void {}`;

      await fs.writeFile(join(projectPath, "src", "additions.ts"), oldCode);

      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);

      // Add new function
      const newCode = `
export function existing(): void {}
export function newAddedFunc(a: number, b: string): boolean {
  return true;
}
`.trim();

      await fs.writeFile(join(projectPath, "src", "additions.ts"), newCode);

      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);

      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
      const additionDrift = drifts.find((d) =>
        d.filePath.includes("additions.ts"),
      );

      // NOTE(review): despite the test name, only the drift type is asserted;
      // the suggestion's generated signature text is never inspected.
      expect(additionDrift?.drifts.some((d) => d.type === "missing")).toBe(
        true,
      );
    });

    test("should generate modification suggestion with signature update", async () => {
      const oldCode = `
export function modifyMe(x: number): number {
  return x;
}
`.trim();

      await fs.writeFile(join(projectPath, "src", "modify.ts"), oldCode);

      const oldDoc = `
# API

## modifyMe(x: number): number

Returns the input number.
`.trim();

      await fs.writeFile(join(docsPath, "modify.md"), oldDoc);

      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);

      // Modify the function signature
      const newCode = `
export function modifyMe(x: number, y: number): number {
  return x + y;
}
`.trim();

      await fs.writeFile(join(projectPath, "src", "modify.ts"), newCode);

      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);

      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
      const modifyDrift = drifts.find((d) => d.filePath.includes("modify.ts"));

      expect(modifyDrift).toBeDefined();
      expect(modifyDrift?.suggestions.length).toBeGreaterThan(0);

      const suggestion = modifyDrift?.suggestions[0];
      expect(suggestion?.suggestedContent).toBeTruthy();
    });

    test("should set auto-applicable flag correctly for safe changes", async () => {
      const oldCode = `
export function safeChange(x: number): number {
  return x;
}
`.trim();

      await fs.writeFile(join(projectPath, "src", "safe.ts"), oldCode);

      const oldDoc = `
# API

## safeChange(x: number): number
`.trim();

      await fs.writeFile(join(docsPath, "safe.md"), oldDoc);

      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);

      // Internal implementation change (patch level)
      const newCode = `
export function safeChange(x: number): number {
  return x * 2; // Changed implementation but not signature
}
`.trim();

      await fs.writeFile(join(projectPath, "src", "safe.ts"), newCode);

      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);

      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);

      // If there are any drifts, check their suggestions
      // NOTE(review): all assertions are conditional, so this test passes
      // vacuously when no drift is reported for safe.ts.
      if (drifts.length > 0) {
        const safeDrift = drifts.find((d) => d.filePath.includes("safe.ts"));
        if (safeDrift && safeDrift.suggestions.length > 0) {
          const suggestion = safeDrift.suggestions[0];
          expect(typeof suggestion.autoApplicable).toBe("boolean");
        }
      }
    });
  });
668 |
669 | describe("Comparison Helper Methods", () => {
    // Verifies that a signature change on one function is linked back to the
    // documentation mentioning it, and that impact analysis runs.
    test("should correctly identify affected sections by function name", async () => {
      const code = `
export function targetFunc(): void {}
export function otherFunc(): void {}
`.trim();

      await fs.writeFile(join(projectPath, "src", "target.ts"), code);

      const doc = `
# API

See \`targetFunc()\` for details.

## targetFunc

This documents the target function.

## otherFunc

This documents another function.
`.trim();

      await fs.writeFile(join(docsPath, "target.md"), doc);

      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);

      // Modify only targetFunc
      const newCode = `
export function targetFunc(param: string): void {}
export function otherFunc(): void {}
`.trim();

      await fs.writeFile(join(projectPath, "src", "target.ts"), newCode);

      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);

      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
      const targetDrift = drifts.find((d) => d.filePath.includes("target.ts"));

      expect(targetDrift).toBeDefined();
      // Drift was detected, and impact analysis was performed
      // NOTE(review): ">= 0" on a length is always true; a stricter test
      // would pin the expected number of affected doc files.
      expect(targetDrift?.impactAnalysis).toBeDefined();
      expect(
        targetDrift?.impactAnalysis.affectedDocFiles.length,
      ).toBeGreaterThanOrEqual(0);
    });

    test("should correctly classify drift types", async () => {
      const oldCode = `
export function removedFunc(): void {}
export function modifiedFunc(): void {}
`.trim();

      await fs.writeFile(join(projectPath, "src", "classify.ts"), oldCode);

      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);

      // Remove one function, keep the other unchanged
      const newCode = `
export function modifiedFunc(): void {}
`.trim();

      await fs.writeFile(join(projectPath, "src", "classify.ts"), newCode);

      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);

      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
      const classifyDrift = drifts.find((d) =>
        d.filePath.includes("classify.ts"),
      );

      expect(classifyDrift).toBeDefined();
      expect(classifyDrift?.drifts.length).toBeGreaterThan(0);

      // Verify drift types are correctly classified
      const driftTypes = classifyDrift?.drifts.map((d) => d.type) || [];
      expect(driftTypes.length).toBeGreaterThan(0);

      // Should have breaking or incorrect drift for removed function
      const hasRemovalDrift = classifyDrift?.drifts.some(
        (d) => d.type === "breaking" || d.type === "incorrect",
      );
      expect(hasRemovalDrift).toBe(true);
    });
754 |
755 | test("should map impact levels to severity correctly", async () => {
756 | const oldCode = `
757 | export function critical(): void {}
758 | export function major(): void {}
759 | export function minor(): void {}
760 | `.trim();
761 |
762 | await fs.writeFile(join(projectPath, "src", "severity-map.ts"), oldCode);
763 |
764 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
765 |
766 | // Breaking change
767 | const newCode = `
768 | export function major(): void {}
769 | export function minor(): void {}
770 | `.trim();
771 |
772 | await fs.writeFile(join(projectPath, "src", "severity-map.ts"), newCode);
773 |
774 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
775 |
776 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
777 | const severityDrift = drifts.find((d) =>
778 | d.filePath.includes("severity-map.ts"),
779 | );
780 |
781 | expect(severityDrift).toBeDefined();
782 | expect(severityDrift?.severity).toBe("critical");
783 | });
784 |
785 | test("should estimate update effort based on drift count", async () => {
786 | const oldCode = `
787 | export function func1(): void {}
788 | export function func2(): void {}
789 | export function func3(): void {}
790 | export function func4(): void {}
791 | export function func5(): void {}
792 | `.trim();
793 |
794 | await fs.writeFile(join(projectPath, "src", "effort.ts"), oldCode);
795 |
796 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
797 |
798 | // Remove multiple functions - high effort
799 | const newCode = `
800 | export function func5(): void {}
801 | `.trim();
802 |
803 | await fs.writeFile(join(projectPath, "src", "effort.ts"), newCode);
804 |
805 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
806 |
807 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
808 | const effortDrift = drifts.find((d) => d.filePath.includes("effort.ts"));
809 |
810 | expect(effortDrift).toBeDefined();
811 | expect(effortDrift?.impactAnalysis.estimatedUpdateEffort).toBeDefined();
812 | expect(
813 | ["low", "medium", "high"].includes(
814 | effortDrift!.impactAnalysis.estimatedUpdateEffort,
815 | ),
816 | ).toBe(true);
817 | });
818 |
819 | test("should calculate overall severity from multiple drifts", async () => {
820 | const oldCode = `
821 | export function criticalChange(): void {}
822 | export function minorChange(): void {}
823 | `.trim();
824 |
825 | await fs.writeFile(
826 | join(projectPath, "src", "overall-severity.ts"),
827 | oldCode,
828 | );
829 |
830 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
831 |
832 | // Breaking change dominates
833 | const newCode = `
834 | export function minorChange(x: number): void {}
835 | `.trim();
836 |
837 | await fs.writeFile(
838 | join(projectPath, "src", "overall-severity.ts"),
839 | newCode,
840 | );
841 |
842 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
843 |
844 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
845 | const overallDrift = drifts.find((d) =>
846 | d.filePath.includes("overall-severity.ts"),
847 | );
848 |
849 | expect(overallDrift).toBeDefined();
850 | expect(
851 | ["none", "low", "medium", "high", "critical"].includes(
852 | overallDrift!.severity,
853 | ),
854 | ).toBe(true);
855 | });
856 |
857 | test("should handle multiple documentation files referencing same code", async () => {
858 | const code = `
859 | export function sharedFunc(): void {}
860 | `.trim();
861 |
862 | await fs.writeFile(join(projectPath, "src", "shared.ts"), code);
863 |
864 | const doc1 = `
865 | # Guide 1
866 |
867 | See \`sharedFunc()\` for details.
868 | `.trim();
869 |
870 | const doc2 = `
871 | # Guide 2
872 |
873 | Also uses \`sharedFunc()\`.
874 | `.trim();
875 |
876 | await fs.writeFile(join(docsPath, "guide1.md"), doc1);
877 | await fs.writeFile(join(docsPath, "guide2.md"), doc2);
878 |
879 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
880 |
881 | // Change the shared function
882 | const newCode = `
883 | export function sharedFunc(param: string): void {}
884 | `.trim();
885 |
886 | await fs.writeFile(join(projectPath, "src", "shared.ts"), newCode);
887 |
888 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
889 |
890 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
891 | const sharedDrift = drifts.find((d) => d.filePath.includes("shared.ts"));
892 |
893 | expect(sharedDrift).toBeDefined();
894 | // Should affect both documentation files
895 | expect(
896 | sharedDrift?.impactAnalysis.affectedDocFiles.length,
897 | ).toBeGreaterThanOrEqual(1);
898 | });
899 | });
900 |
901 | describe("Advanced Suggestion Generation", () => { // drift suggestions for added/changed functions, classes, interfaces and type aliases
902 | test("should generate suggestions for added functions with signatures", async () => {
903 | const oldCode = `export function existing(): void {}`;
904 |
905 | await fs.writeFile(
906 | join(projectPath, "src", "added-with-sig.ts"),
907 | oldCode,
908 | );
909 |
910 | const oldDoc = `
911 | # API
912 |
913 | ## existing
914 |
915 | Existing function documentation.
916 | `.trim();
917 |
918 | await fs.writeFile(join(docsPath, "added-with-sig.md"), oldDoc);
919 |
920 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
921 |
922 | // Add new function with signature
923 | const newCode = `
924 | export function existing(): void {}
925 | export async function newFunction(param: string, count: number): Promise<boolean> {
926 | return true;
927 | }
928 | `.trim();
929 |
930 | await fs.writeFile(
931 | join(projectPath, "src", "added-with-sig.ts"),
932 | newCode,
933 | );
934 |
935 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
936 |
937 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
938 | const addedDrift = drifts.find((d) =>
939 | d.filePath.includes("added-with-sig.ts"),
940 | );
941 |
942 | expect(addedDrift).toBeDefined();
943 | expect(addedDrift?.drifts.some((d) => d.type === "missing")).toBe(true);
944 |
945 | // Should detect the added function
946 | const hasAddedFunction = addedDrift?.drifts.some((d) =>
947 | d.codeChanges.some((c) => c.name === "newFunction"),
948 | );
949 | expect(hasAddedFunction).toBe(true);
950 | });
951 |
952 | test("should handle class changes in suggestions", async () => {
953 | const oldCode = `
954 | export class OldClass {
955 | method(): void {}
956 | }
957 | `.trim();
958 |
959 | await fs.writeFile(join(projectPath, "src", "class-change.ts"), oldCode);
960 |
961 | const oldDoc = `
962 | # Classes
963 |
964 | ## OldClass
965 |
966 | Documentation for OldClass.
967 | `.trim();
968 |
969 | await fs.writeFile(join(docsPath, "class-change.md"), oldDoc);
970 |
971 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
972 |
973 | // Modify class
974 | const newCode = `
975 | export class OldClass {
976 | method(): void {}
977 | newMethod(): void {}
978 | }
979 | `.trim();
980 |
981 | await fs.writeFile(join(projectPath, "src", "class-change.ts"), newCode);
982 |
983 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
984 |
985 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
986 |
987 | expect(Array.isArray(drifts)).toBe(true); // fixed: "length >= 0" was vacuous; assert detectDrift resolves to an array
988 | });
989 |
990 | test("should handle interface changes in suggestions", async () => {
991 | const oldCode = `
992 | export interface UserInterface {
993 | id: string;
994 | }
995 | `.trim();
996 |
997 | await fs.writeFile(
998 | join(projectPath, "src", "interface-change.ts"),
999 | oldCode,
1000 | );
1001 |
1002 | const oldDoc = `
1003 | # Interfaces
1004 |
1005 | ## UserInterface
1006 |
1007 | The UserInterface interface.
1008 | `.trim();
1009 |
1010 | await fs.writeFile(join(docsPath, "interface-change.md"), oldDoc);
1011 |
1012 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
1013 |
1014 | // Modify interface
1015 | const newCode = `
1016 | export interface UserInterface {
1017 | id: string;
1018 | name: string;
1019 | }
1020 | `.trim();
1021 |
1022 | await fs.writeFile(
1023 | join(projectPath, "src", "interface-change.ts"),
1024 | newCode,
1025 | );
1026 |
1027 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
1028 |
1029 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
1030 |
1031 | expect(Array.isArray(drifts)).toBe(true); // fixed: "length >= 0" was vacuous; assert detectDrift resolves to an array
1032 | });
1033 |
1034 | test("should handle type alias changes in suggestions", async () => {
1035 | const oldCode = `
1036 | export type Status = "active" | "inactive";
1037 | `.trim();
1038 |
1039 | await fs.writeFile(join(projectPath, "src", "type-change.ts"), oldCode);
1040 |
1041 | const oldDoc = `
1042 | # Types
1043 |
1044 | ## Status
1045 |
1046 | The Status type.
1047 | `.trim();
1048 |
1049 | await fs.writeFile(join(docsPath, "type-change.md"), oldDoc);
1050 |
1051 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
1052 |
1053 | // Modify type
1054 | const newCode = `
1055 | export type Status = "active" | "inactive" | "pending";
1056 | `.trim();
1057 |
1058 | await fs.writeFile(join(projectPath, "src", "type-change.ts"), newCode);
1059 |
1060 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
1061 |
1062 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
1063 |
1064 | expect(Array.isArray(drifts)).toBe(true); // fixed: "length >= 0" was vacuous; assert detectDrift resolves to an array
1065 | });
1066 |
1067 | test("should detect documentation referencing classes", async () => {
1068 | const code = `
1069 | export class DocumentedClass {
1070 | public property: string;
1071 | constructor(prop: string) {
1072 | this.property = prop;
1073 | }
1074 | }
1075 | `.trim();
1076 |
1077 | await fs.writeFile(join(projectPath, "src", "doc-class.ts"), code);
1078 |
1079 | const doc = `
1080 | # Classes
1081 |
1082 | See the \`DocumentedClass\` for details.
1083 |
1084 | ## DocumentedClass
1085 |
1086 | This class does something important.
1087 | `.trim();
1088 |
1089 | await fs.writeFile(join(docsPath, "doc-class.md"), doc);
1090 |
1091 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
1092 |
1093 | // Modify class
1094 | const newCode = `
1095 | export class DocumentedClass {
1096 | public property: string;
1097 | public newProperty: number;
1098 | constructor(prop: string, num: number) {
1099 | this.property = prop;
1100 | this.newProperty = num;
1101 | }
1102 | }
1103 | `.trim();
1104 |
1105 | await fs.writeFile(join(projectPath, "src", "doc-class.ts"), newCode);
1106 |
1107 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
1108 |
1109 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
1110 | const classDrift = drifts.find((d) =>
1111 | d.filePath.includes("doc-class.ts"),
1112 | );
1113 |
1114 | // Check that affected docs were identified
1115 | if (classDrift && classDrift.hasDrift) {
1116 | expect(classDrift.impactAnalysis).toBeDefined();
1117 | }
1118 | });
1119 |
1120 | test("should detect documentation referencing types", async () => {
1121 | const code = `
1122 | export type ConfigType = {
1123 | apiKey: string;
1124 | timeout: number;
1125 | };
1126 | `.trim();
1127 |
1128 | await fs.writeFile(join(projectPath, "src", "doc-type.ts"), code);
1129 |
1130 | const doc = `
1131 | # Configuration
1132 |
1133 | The \`ConfigType\` defines configuration options.
1134 | `.trim();
1135 |
1136 | await fs.writeFile(join(docsPath, "doc-type.md"), doc);
1137 |
1138 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
1139 |
1140 | // Modify type
1141 | const newCode = `
1142 | export type ConfigType = {
1143 | apiKey: string;
1144 | timeout: number;
1145 | retries: number;
1146 | };
1147 | `.trim();
1148 |
1149 | await fs.writeFile(join(projectPath, "src", "doc-type.ts"), newCode);
1150 |
1151 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
1152 |
1153 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
1154 | const typeDrift = drifts.find((d) => d.filePath.includes("doc-type.ts"));
1155 |
1156 | if (typeDrift && typeDrift.hasDrift) {
1157 | expect(typeDrift.impactAnalysis).toBeDefined();
1158 | }
1159 | });
1160 | });
1161 | });
1162 |
```
--------------------------------------------------------------------------------
/tests/tools/evaluate-readme-health.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { evaluateReadmeHealth } from "../../src/tools/evaluate-readme-health.js";
2 | import { writeFile, mkdir, rm } from "fs/promises";
3 | import { join } from "path";
4 |
5 | describe("evaluateReadmeHealth", () => {
6 | const testDir = join(process.cwd(), "test-readme-temp"); // shared scratch directory recreated before every test
7 |
8 | beforeEach(async () => {
9 | // Create test directory
10 | await mkdir(testDir, { recursive: true }); // recursive: true makes this a no-op if it already exists
11 | });
12 |
13 | afterEach(async () => {
14 | // Clean up test directory
15 | try {
16 | await rm(testDir, { recursive: true, force: true });
17 | } catch (error) {
18 | // Ignore cleanup errors
19 | }
20 | });
21 |
22 | describe("Basic Functionality", () => { // happy-path runs of evaluateReadmeHealth against on-disk README fixtures
23 | test("should evaluate README health with default parameters", async () => {
24 | const readmePath = join(testDir, "README.md");
25 | await writeFile(
26 | readmePath,
27 | `# Test Project
28 |
29 | ## Description
30 | This is a test project for evaluating README health.
31 |
32 | ## Installation
33 | \`\`\`bash
34 | npm install test-project
35 | \`\`\`
36 |
37 | ## Usage
38 | \`\`\`javascript
39 | const test = require('test-project');
40 | \`\`\`
41 |
42 | ## Contributing
43 | Please read CONTRIBUTING.md for details.
44 |
45 | ## License
46 | MIT License
47 | `,
48 | );
49 |
50 | const result = await evaluateReadmeHealth({
51 | readme_path: readmePath,
52 | });
53 |
54 | expect(result.content).toBeDefined();
55 | expect(result.content.length).toBeGreaterThan(0);
56 | expect(result.isError).toBe(false);
57 |
58 | // Check that it contains health report data
59 | const healthData = result.content.find((c) => // the report is serialized into one of the MCP content parts
60 | c.text.includes("healthReport"),
61 | );
62 | expect(healthData).toBeDefined();
63 | });
64 |
65 | test("should handle different project types", async () => {
66 | const readmePath = join(testDir, "README.md");
67 | await writeFile(
68 | readmePath,
69 | "# Enterprise Tool\n\nA professional enterprise tool.",
70 | );
71 |
72 | const result = await evaluateReadmeHealth({
73 | readme_path: readmePath,
74 | project_type: "enterprise_tool",
75 | });
76 |
77 | expect(result.content).toBeDefined();
78 | expect(result.isError).toBe(false);
79 | });
80 |
81 | test("should include repository context when provided", async () => {
82 | const readmePath = join(testDir, "README.md");
83 | await writeFile(readmePath, "# Project with Repo Context");
84 |
85 | // Create a simple repository structure
86 | await writeFile(join(testDir, "package.json"), '{"name": "test"}');
87 |
88 | const result = await evaluateReadmeHealth({
89 | readme_path: readmePath,
90 | repository_path: testDir,
91 | });
92 |
93 | expect(result.content).toBeDefined();
94 | expect(result.isError).toBe(false);
95 | });
96 | });
97 |
98 | describe("Error Handling", () => { // failure paths must surface isError: true rather than throw
99 | test("should handle missing README file", async () => {
100 | const result = await evaluateReadmeHealth({
101 | readme_path: join(testDir, "nonexistent.md"),
102 | });
103 |
104 | expect(result.isError).toBe(true);
105 | expect(result.content[0].text).toContain(
106 | "Failed to evaluate README health",
107 | );
108 | });
109 |
110 | test("should handle invalid project type", async () => {
111 | const readmePath = join(testDir, "README.md");
112 | await writeFile(readmePath, "# Test");
113 |
114 | const result = await evaluateReadmeHealth({
115 | readme_path: readmePath,
116 | project_type: "invalid_type" as any, // "as any" deliberately bypasses the enum to exercise runtime validation
117 | });
118 |
119 | expect(result.isError).toBe(true);
120 | });
121 | });
122 |
123 | describe("Health Report Structure", () => { // shape of the JSON healthReport payload: components, score/grade, recommendations
124 | test("should include all required health components", async () => {
125 | const readmePath = join(testDir, "README.md");
126 | await writeFile(
127 | readmePath,
128 | `# Complete Project
129 |
130 | ## Table of Contents
131 | - [Installation](#installation)
132 | - [Usage](#usage)
133 |
134 | ## Description
135 | Comprehensive project description here.
136 |
137 | ## Installation
138 | Installation instructions.
139 |
140 | ## Usage
141 | Usage examples.
142 |
143 | ## Contributing
144 | How to contribute.
145 |
146 | ## License
147 | MIT
148 | `,
149 | );
150 |
151 | const result = await evaluateReadmeHealth({
152 | readme_path: readmePath,
153 | });
154 |
155 | const dataContent = result.content.find((c) =>
156 | c.text.includes("healthReport"),
157 | );
158 | expect(dataContent).toBeDefined();
159 |
160 | const data = JSON.parse(dataContent!.text); // "!" is safe: the expect above fails the test first if not found
161 | expect(data.healthReport).toBeDefined();
162 | expect(data.healthReport.components).toBeDefined();
163 | expect(data.healthReport.components.communityHealth).toBeDefined();
164 | expect(data.healthReport.components.accessibility).toBeDefined();
165 | expect(data.healthReport.components.onboarding).toBeDefined();
166 | expect(data.healthReport.components.contentQuality).toBeDefined();
167 | });
168 |
169 | test("should provide grade and score", async () => {
170 | const readmePath = join(testDir, "README.md");
171 | await writeFile(readmePath, "# Basic Project\n\nMinimal content.");
172 |
173 | const result = await evaluateReadmeHealth({
174 | readme_path: readmePath,
175 | });
176 |
177 | const dataContent = result.content.find((c) =>
178 | c.text.includes("healthReport"),
179 | );
180 | const data = JSON.parse(dataContent!.text);
181 |
182 | expect(data.healthReport.overallScore).toBeGreaterThanOrEqual(0); // score is a 0-100 scale
183 | expect(data.healthReport.overallScore).toBeLessThanOrEqual(100);
184 | expect(["A", "B", "C", "D", "F"]).toContain(data.healthReport.grade);
185 | });
186 |
187 | test("should include recommendations and next steps", async () => {
188 | const readmePath = join(testDir, "README.md");
189 | await writeFile(readmePath, "# Incomplete Project");
190 |
191 | const result = await evaluateReadmeHealth({
192 | readme_path: readmePath,
193 | });
194 |
195 | const dataContent = result.content.find((c) =>
196 | c.text.includes("recommendations"),
197 | );
198 | expect(dataContent).toBeDefined();
199 |
200 | const data = JSON.parse(dataContent!.text);
201 | expect(data.healthReport.recommendations).toBeDefined();
202 | expect(Array.isArray(data.healthReport.recommendations)).toBe(true);
203 | expect(data.nextSteps).toBeDefined();
204 | expect(Array.isArray(data.nextSteps)).toBe(true);
205 | });
206 | });
207 |
208 | describe("Response Format", () => { // MCP envelope: content must be a non-empty array including execution metadata
209 | test("should return properly formatted MCP response", async () => {
210 | const readmePath = join(testDir, "README.md");
211 | await writeFile(readmePath, "# Test Project");
212 |
213 | const result = await evaluateReadmeHealth({
214 | readme_path: readmePath,
215 | });
216 |
217 | expect(result.content).toBeDefined();
218 | expect(Array.isArray(result.content)).toBe(true);
219 | expect(result.content.length).toBeGreaterThan(0);
220 |
221 | // Should include execution metadata
222 | const metadataContent = result.content.find((c) =>
223 | c.text.includes("Execution completed"),
224 | );
225 | expect(metadataContent).toBeDefined();
226 | });
227 | });
228 |
229 | describe("Repository Context Analysis", () => { // optional repository_path enriches the evaluation; failures there must be non-fatal
230 | test("should analyze repository context when path is provided", async () => {
231 | const readmePath = join(testDir, "README.md");
232 | await writeFile(readmePath, "# Project with Context");
233 |
234 | // Create repository files
235 | await writeFile(join(testDir, "CODE_OF_CONDUCT.md"), "# Code of Conduct");
236 | await writeFile(join(testDir, "CONTRIBUTING.md"), "# Contributing");
237 | await writeFile(join(testDir, "SECURITY.md"), "# Security Policy");
238 | await mkdir(join(testDir, ".github"), { recursive: true });
239 | await writeFile(join(testDir, "package.json"), '{"name": "test"}');
240 |
241 | const result = await evaluateReadmeHealth({
242 | readme_path: readmePath,
243 | repository_path: testDir,
244 | });
245 |
246 | expect(result.isError).toBe(false);
247 | const dataContent = result.content.find((c) =>
248 | c.text.includes("healthReport"),
249 | );
250 | expect(dataContent).toBeDefined();
251 | });
252 |
253 | test("should handle repository context analysis errors gracefully", async () => {
254 | const readmePath = join(testDir, "README.md");
255 | await writeFile(readmePath, "# Project");
256 |
257 | const result = await evaluateReadmeHealth({
258 | readme_path: readmePath,
259 | repository_path: "/nonexistent/path", // bad repo path must not fail the whole evaluation
260 | });
261 |
262 | expect(result.isError).toBe(false); // Should not fail, just return null context
263 | });
264 | });
265 |
266 | describe("Community Health Evaluation", () => { // each test writes a README containing one signal and asserts the matching detail check passes
267 | test("should detect code of conduct references", async () => {
268 | const readmePath = join(testDir, "README.md");
269 | await writeFile(
270 | readmePath,
271 | `# Project
272 |
273 | Please read our [Code of Conduct](CODE_OF_CONDUCT.md) before contributing.
274 | `,
275 | );
276 |
277 | const result = await evaluateReadmeHealth({
278 | readme_path: readmePath,
279 | });
280 |
281 | const dataContent = result.content.find((c) =>
282 | c.text.includes("healthReport"),
283 | );
284 | const data = JSON.parse(dataContent!.text);
285 |
286 | const conductCheck =
287 | data.healthReport.components.communityHealth.details.find(
288 | (d: any) => d.check === "Code of Conduct linked",
289 | );
290 | expect(conductCheck.passed).toBe(true);
291 | expect(conductCheck.points).toBe(5); // this check is worth 5 points per the scoring rubric
292 | });
293 |
294 | test("should detect contributing guidelines", async () => {
295 | const readmePath = join(testDir, "README.md");
296 | await writeFile(
297 | readmePath,
298 | `# Project
299 |
300 | See [CONTRIBUTING.md](CONTRIBUTING.md) for contribution guidelines.
301 | `,
302 | );
303 |
304 | const result = await evaluateReadmeHealth({
305 | readme_path: readmePath,
306 | });
307 |
308 | const dataContent = result.content.find((c) =>
309 | c.text.includes("healthReport"),
310 | );
311 | const data = JSON.parse(dataContent!.text);
312 |
313 | const contributingCheck =
314 | data.healthReport.components.communityHealth.details.find(
315 | (d: any) => d.check === "Contributing guidelines visible",
316 | );
317 | expect(contributingCheck.passed).toBe(true);
318 | });
319 |
320 | test("should detect security policy references", async () => {
321 | const readmePath = join(testDir, "README.md");
322 | await writeFile(
323 | readmePath,
324 | `# Project
325 |
326 | Report security issues via our [Security Policy](SECURITY.md).
327 | `,
328 | );
329 |
330 | const result = await evaluateReadmeHealth({
331 | readme_path: readmePath,
332 | });
333 |
334 | const dataContent = result.content.find((c) =>
335 | c.text.includes("healthReport"),
336 | );
337 | const data = JSON.parse(dataContent!.text);
338 |
339 | const securityCheck =
340 | data.healthReport.components.communityHealth.details.find(
341 | (d: any) => d.check === "Security policy linked",
342 | );
343 | expect(securityCheck.passed).toBe(true);
344 | });
345 |
346 | test("should detect support channels", async () => {
347 | const readmePath = join(testDir, "README.md");
348 | await writeFile(
349 | readmePath,
350 | `# Project
351 |
352 | Join our Discord community for support and discussions.
353 | `,
354 | );
355 |
356 | const result = await evaluateReadmeHealth({
357 | readme_path: readmePath,
358 | });
359 |
360 | const dataContent = result.content.find((c) =>
361 | c.text.includes("healthReport"),
362 | );
363 | const data = JSON.parse(dataContent!.text);
364 |
365 | const supportCheck =
366 | data.healthReport.components.communityHealth.details.find(
367 | (d: any) => d.check === "Support channels provided",
368 | );
369 | expect(supportCheck.passed).toBe(true);
370 | });
371 | });
372 |
373 | describe("Accessibility Evaluation", () => { // structure, heading hierarchy, image alt text and inclusive-language checks
374 | test("should detect proper spacing and structure", async () => {
375 | const readmePath = join(testDir, "README.md");
376 | await writeFile(
377 | readmePath,
378 | `# Project
379 |
380 | ## Description
381 |
382 | This is a well-structured README with proper spacing.
383 |
384 | ## Installation
385 |
386 | Instructions here.
387 |
388 | ## Usage
389 |
390 | Usage examples here.
391 |
392 | ## Contributing
393 |
394 | Contributing guidelines.
395 |
396 | ## License
397 |
398 | MIT License
399 | `,
400 | );
401 |
402 | const result = await evaluateReadmeHealth({
403 | readme_path: readmePath,
404 | });
405 |
406 | const dataContent = result.content.find((c) =>
407 | c.text.includes("healthReport"),
408 | );
409 | const data = JSON.parse(dataContent!.text);
410 |
411 | const spacingCheck =
412 | data.healthReport.components.accessibility.details.find(
413 | (d: any) => d.check === "Scannable structure with proper spacing",
414 | );
415 | expect(spacingCheck.passed).toBe(true);
416 | });
417 |
418 | test("should detect heading hierarchy", async () => {
419 | const readmePath = join(testDir, "README.md");
420 | await writeFile(
421 | readmePath,
422 | `# Main Title
423 |
424 | ## Section 1
425 |
426 | ### Subsection 1.1
427 |
428 | ## Section 2
429 |
430 | ### Subsection 2.1
431 | `,
432 | );
433 |
434 | const result = await evaluateReadmeHealth({
435 | readme_path: readmePath,
436 | });
437 |
438 | const dataContent = result.content.find((c) =>
439 | c.text.includes("healthReport"),
440 | );
441 | const data = JSON.parse(dataContent!.text);
442 |
443 | const headingCheck =
444 | data.healthReport.components.accessibility.details.find(
445 | (d: any) => d.check === "Clear heading hierarchy",
446 | );
447 | expect(headingCheck.passed).toBe(true);
448 | });
449 |
450 | test("should detect images with alt text", async () => {
451 | const readmePath = join(testDir, "README.md");
452 | await writeFile(
453 | readmePath,
454 | `# Project
455 |
456 | 
457 | 
458 | `,
459 | );
460 |
461 | const result = await evaluateReadmeHealth({
462 | readme_path: readmePath,
463 | });
464 |
465 | const dataContent = result.content.find((c) =>
466 | c.text.includes("healthReport"),
467 | );
468 | const data = JSON.parse(dataContent!.text);
469 |
470 | const altTextCheck =
471 | data.healthReport.components.accessibility.details.find(
472 | (d: any) => d.check === "Alt text for images",
473 | );
474 | expect(altTextCheck.passed).toBe(true);
475 | });
476 |
477 | test("should detect images without alt text", async () => {
478 | const readmePath = join(testDir, "README.md");
479 | await writeFile(
480 | readmePath,
481 | `# Project
482 |
483 |  // empty alt text should fail the check
484 | `,
485 | );
486 |
487 | const result = await evaluateReadmeHealth({
488 | readme_path: readmePath,
489 | });
490 |
491 | const dataContent = result.content.find((c) =>
492 | c.text.includes("healthReport"),
493 | );
494 | const data = JSON.parse(dataContent!.text);
495 |
496 | const altTextCheck =
497 | data.healthReport.components.accessibility.details.find(
498 | (d: any) => d.check === "Alt text for images",
499 | );
500 | expect(altTextCheck.passed).toBe(false);
501 | });
502 |
503 | test("should detect inclusive language violations", async () => {
504 | const readmePath = join(testDir, "README.md");
505 | await writeFile(
506 | readmePath,
507 | `# Project
508 |
509 | Hey guys, this project uses a master branch and maintains a whitelist of contributors.
510 | `,
511 | );
512 |
513 | const result = await evaluateReadmeHealth({
514 | readme_path: readmePath,
515 | });
516 |
517 | const dataContent = result.content.find((c) =>
518 | c.text.includes("healthReport"),
519 | );
520 | const data = JSON.parse(dataContent!.text);
521 |
522 | const inclusiveCheck =
523 | data.healthReport.components.accessibility.details.find(
524 | (d: any) => d.check === "Inclusive language",
525 | );
526 | expect(inclusiveCheck.passed).toBe(false);
527 | });
528 |
529 | test("should pass inclusive language check with good content", async () => {
530 | const readmePath = join(testDir, "README.md");
531 | await writeFile(
532 | readmePath,
533 | `# Project
534 |
535 | Welcome team! This project uses the main branch and maintains an allowlist of contributors.
536 | `,
537 | );
538 |
539 | const result = await evaluateReadmeHealth({
540 | readme_path: readmePath,
541 | });
542 |
543 | const dataContent = result.content.find((c) =>
544 | c.text.includes("healthReport"),
545 | );
546 | const data = JSON.parse(dataContent!.text);
547 |
548 | const inclusiveCheck =
549 | data.healthReport.components.accessibility.details.find(
550 | (d: any) => d.check === "Inclusive language",
551 | );
552 | expect(inclusiveCheck.passed).toBe(true);
553 | });
554 | });
555 |
556 | describe("Onboarding Evaluation", () => { // signals that help a newcomer: quick start, prerequisites, contribution guidance
557 | test("should detect quick start sections", async () => {
558 | const readmePath = join(testDir, "README.md");
559 | await writeFile(
560 | readmePath,
561 | `# Project
562 |
563 | ## Quick Start
564 |
565 | Get up and running in minutes!
566 | `,
567 | );
568 |
569 | const result = await evaluateReadmeHealth({
570 | readme_path: readmePath,
571 | });
572 |
573 | const dataContent = result.content.find((c) =>
574 | c.text.includes("healthReport"),
575 | );
576 | const data = JSON.parse(dataContent!.text);
577 |
578 | const quickStartCheck =
579 | data.healthReport.components.onboarding.details.find(
580 | (d: any) => d.check === "Quick start section",
581 | );
582 | expect(quickStartCheck.passed).toBe(true);
583 | });
584 |
585 | test("should detect prerequisites", async () => {
586 | const readmePath = join(testDir, "README.md");
587 | await writeFile(
588 | readmePath,
589 | `# Project
590 |
591 | ## Prerequisites
592 |
593 | - Node.js 16+
594 | - npm or yarn
595 | `,
596 | );
597 |
598 | const result = await evaluateReadmeHealth({
599 | readme_path: readmePath,
600 | });
601 |
602 | const dataContent = result.content.find((c) =>
603 | c.text.includes("healthReport"),
604 | );
605 | const data = JSON.parse(dataContent!.text);
606 |
607 | const prereqCheck = data.healthReport.components.onboarding.details.find(
608 | (d: any) => d.check === "Prerequisites clearly listed",
609 | );
610 | expect(prereqCheck.passed).toBe(true);
611 | });
612 |
613 | test("should detect first contribution guidance", async () => {
614 | const readmePath = join(testDir, "README.md");
615 | await writeFile(
616 | readmePath,
617 | `# Project
618 |
619 | ## For New Contributors
620 |
621 | Welcome first-time contributors! Here's how to get started.
622 | `,
623 | );
624 |
625 | const result = await evaluateReadmeHealth({
626 | readme_path: readmePath,
627 | });
628 |
629 | const dataContent = result.content.find((c) =>
630 | c.text.includes("healthReport"),
631 | );
632 | const data = JSON.parse(dataContent!.text);
633 |
634 | const firstContribCheck =
635 | data.healthReport.components.onboarding.details.find(
636 | (d: any) => d.check === "First contribution guide",
637 | );
638 | expect(firstContribCheck.passed).toBe(true);
639 | });
640 |
641 | test("should detect good first issues", async () => {
642 | const readmePath = join(testDir, "README.md");
643 | await writeFile(
644 | readmePath,
645 | `# Project
646 |
647 | Check out our good first issues for beginners!
648 | `,
649 | );
650 |
651 | const result = await evaluateReadmeHealth({
652 | readme_path: readmePath,
653 | });
654 |
655 | const dataContent = result.content.find((c) =>
656 | c.text.includes("healthReport"),
657 | );
658 | const data = JSON.parse(dataContent!.text);
659 |
660 | const goodFirstCheck =
661 | data.healthReport.components.onboarding.details.find(
662 | (d: any) => d.check === "Good first issues mentioned",
663 | );
664 | expect(goodFirstCheck.passed).toBe(true);
665 | });
666 | });
667 |
  // Content quality component: length, code samples, outbound links, and
  // description clarity. Each test inspects one named check in
  // healthReport.components.contentQuality.details.
  describe("Content Quality Evaluation", () => {
    test("should evaluate adequate content length", async () => {
      const readmePath = join(testDir, "README.md");
      // ~20 repeated sentences puts the fixture comfortably over the
      // minimum-length threshold.
      const content =
        "# Project\n\n" +
        "This is a well-sized README with adequate content. ".repeat(20);
      await writeFile(readmePath, content);

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
      });

      const dataContent = result.content.find((c) =>
        c.text.includes("healthReport"),
      );
      const data = JSON.parse(dataContent!.text);

      const lengthCheck =
        data.healthReport.components.contentQuality.details.find(
          (d: any) => d.check === "Adequate content length",
        );
      expect(lengthCheck.passed).toBe(true);
    });

    test("should detect insufficient content length", async () => {
      const readmePath = join(testDir, "README.md");
      await writeFile(readmePath, "# Project\n\nToo short.");

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
      });

      const dataContent = result.content.find((c) =>
        c.text.includes("healthReport"),
      );
      const data = JSON.parse(dataContent!.text);

      const lengthCheck =
        data.healthReport.components.contentQuality.details.find(
          (d: any) => d.check === "Adequate content length",
        );
      expect(lengthCheck.passed).toBe(false);
    });

    test("should detect code examples", async () => {
      const readmePath = join(testDir, "README.md");
      // Backticks are escaped because this fixture itself lives inside a
      // template literal.
      await writeFile(
        readmePath,
        `# Project

## Installation

\`\`\`bash
npm install project
\`\`\`

## Usage

\`\`\`javascript
const project = require('project');
project.run();
\`\`\`
`,
      );

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
      });

      const dataContent = result.content.find((c) =>
        c.text.includes("healthReport"),
      );
      const data = JSON.parse(dataContent!.text);

      const codeCheck =
        data.healthReport.components.contentQuality.details.find(
          (d: any) => d.check === "Code examples provided",
        );
      expect(codeCheck.passed).toBe(true);
    });

    test("should detect external links", async () => {
      const readmePath = join(testDir, "README.md");
      await writeFile(
        readmePath,
        `# Project

Check out our [documentation](https://docs.example.com),
[demo](https://demo.example.com), and [related project](https://github.com/example/related).
`,
      );

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
      });

      const dataContent = result.content.find((c) =>
        c.text.includes("healthReport"),
      );
      const data = JSON.parse(dataContent!.text);

      const linksCheck =
        data.healthReport.components.contentQuality.details.find(
          (d: any) => d.check === "External links present",
        );
      expect(linksCheck.passed).toBe(true);
    });

    test("should evaluate project description clarity", async () => {
      const readmePath = join(testDir, "README.md");
      // The description paragraph is intentionally >500 characters — the
      // clarity check's documented threshold.
      const longContent = `# Project

## Description

This is a comprehensive project description that provides detailed information about what the project does, how it works, and why it's useful. The description is long enough and well-structured to meet the clarity requirements. This content needs to be over 500 characters to pass the clarity check, so I'm adding more detailed information about the project features, installation process, usage examples, and comprehensive documentation that explains all aspects of the project in great detail.

## Features

- Feature 1: Advanced functionality
- Feature 2: Enhanced performance
- Feature 3: User-friendly interface

## Installation

Detailed installation instructions here with step-by-step guidance.

## Usage

Comprehensive usage examples and documentation with code samples.
`;
      await writeFile(readmePath, longContent);

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
      });

      const dataContent = result.content.find((c) =>
        c.text.includes("healthReport"),
      );
      const data = JSON.parse(dataContent!.text);

      const clarityCheck =
        data.healthReport.components.contentQuality.details.find(
          (d: any) => d.check === "Project description clarity",
        );
      expect(clarityCheck.passed).toBe(true);
    });
  });
816 |
  // Letter-grade mapping: A requires an overall score >= 90, F is anything
  // below 60. Tested with one near-perfect README and one minimal one.
  describe("Grade Calculation", () => {
    test("should assign grade A for 90%+ score", async () => {
      const readmePath = join(testDir, "README.md");
      // Create comprehensive README that should score high
      await writeFile(
        readmePath,
        `# Excellent Project

## Table of Contents
- [Description](#description)
- [Installation](#installation)
- [Usage](#usage)

## Description

This is a comprehensive project with excellent documentation. It includes all necessary sections and follows best practices for community health, accessibility, onboarding, and content quality.

## Quick Start

Get started in minutes with our simple installation process.

## Prerequisites

- Node.js 16+
- npm or yarn

## Installation

\`\`\`bash
npm install excellent-project
\`\`\`

## Usage

\`\`\`javascript
const project = require('excellent-project');
project.start();
\`\`\`

## Contributing

Please read [CONTRIBUTING.md](CONTRIBUTING.md) for details on our code of conduct and the process for submitting pull requests.

## First Contribution

New contributors welcome! Check out our good first issues for beginners.

## Support

Join our Discord community for help and discussions.

## Security

Report security issues via our [Security Policy](SECURITY.md).

## Links

- [Documentation](https://docs.example.com)
- [Demo](https://demo.example.com)
- [API Reference](https://api.example.com)
- [GitHub Issues](https://github.com/example/issues)

## License

MIT License
`,
      );

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
      });

      const dataContent = result.content.find((c) =>
        c.text.includes("healthReport"),
      );
      const data = JSON.parse(dataContent!.text);

      expect(data.healthReport.overallScore).toBeGreaterThanOrEqual(90);
      expect(data.healthReport.grade).toBe("A");
    });

    test("should assign grade F for very low scores", async () => {
      const readmePath = join(testDir, "README.md");
      await writeFile(readmePath, "# Bad\n\nMinimal.");

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
      });

      const dataContent = result.content.find((c) =>
        c.text.includes("healthReport"),
      );
      const data = JSON.parse(dataContent!.text);

      expect(data.healthReport.overallScore).toBeLessThan(60);
      expect(data.healthReport.grade).toBe("F");
    });
  });
915 |
  // The report should surface critical issues for weak components, cap its
  // recommendation list, and list strengths for components that score well.
  describe("Recommendations and Critical Issues", () => {
    test("should identify critical issues for low-scoring components", async () => {
      const readmePath = join(testDir, "README.md");
      await writeFile(readmePath, "# Minimal Project\n\nVery basic content.");

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
      });

      const dataContent = result.content.find((c) =>
        c.text.includes("healthReport"),
      );
      const data = JSON.parse(dataContent!.text);

      expect(data.healthReport.criticalIssues.length).toBeGreaterThan(0);
      // Critical entries are prefixed with "Critical:" in the report text.
      expect(
        data.healthReport.criticalIssues.some((issue: string) =>
          issue.includes("Critical:"),
        ),
      ).toBe(true);
    });

    test("should generate appropriate recommendations", async () => {
      const readmePath = join(testDir, "README.md");
      await writeFile(
        readmePath,
        "# Project\n\nBasic project with minimal content that will fail most health checks.",
      );

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
      });

      const dataContent = result.content.find((c) =>
        c.text.includes("healthReport"),
      );
      const data = JSON.parse(dataContent!.text);

      // Should have recommendations since most checks will fail with minimal content
      expect(data.healthReport.recommendations.length).toBeGreaterThan(0);
      expect(data.healthReport.recommendations.length).toBeLessThanOrEqual(10);
    });

    test("should identify strengths in well-structured components", async () => {
      const readmePath = join(testDir, "README.md");
      await writeFile(
        readmePath,
        `# Project

## Description

This project has good content quality with proper structure and adequate length.

## Installation

\`\`\`bash
npm install
\`\`\`

## Usage

\`\`\`javascript
const app = require('./app');
app.start();
\`\`\`

## Links

- [Docs](https://example.com)
- [Demo](https://demo.com)
- [API](https://api.com)
- [Support](https://support.com)
`,
      );

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
      });

      const dataContent = result.content.find((c) =>
        c.text.includes("healthReport"),
      );
      const data = JSON.parse(dataContent!.text);

      expect(data.healthReport.strengths.length).toBeGreaterThan(0);
    });
  });
1003 |
  // estimatedImprovementTime scales with the number of recommendations:
  // few fixes are reported in minutes, many in hours.
  describe("Time Estimation", () => {
    test("should estimate time in minutes for quick fixes", async () => {
      const readmePath = join(testDir, "README.md");
      await writeFile(
        readmePath,
        `# Excellent Project

## Table of Contents
- [Description](#description)
- [Installation](#installation)

## Description
This is a comprehensive project with excellent documentation. It includes all necessary sections and follows best practices for community health, accessibility, onboarding, and content quality.

## Quick Start
Get started in minutes with our simple installation process.

## Prerequisites
- Node.js 16+
- npm or yarn

## Installation
\`\`\`bash
npm install excellent-project
\`\`\`

## Usage
\`\`\`javascript
const project = require('excellent-project');
project.start();
\`\`\`

## Contributing
Please read [CONTRIBUTING.md](CONTRIBUTING.md) for details.

## First Contribution
New contributors welcome! Check out our good first issues for beginners.

## Support
Join our Discord community for help and discussions.

## Security
Report security issues via our [Security Policy](SECURITY.md).

## Links
- [Documentation](https://docs.example.com)
- [Demo](https://demo.example.com)
- [API Reference](https://api.example.com)

## License
MIT License
`,
      );

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
      });

      const dataContent = result.content.find((c) =>
        c.text.includes("healthReport"),
      );
      const data = JSON.parse(dataContent!.text);

      // Should have very few recommendations, resulting in minutes
      expect(data.healthReport.estimatedImprovementTime).toMatch(/\d+ minutes/);
    });

    test("should estimate time in hours for moderate improvements", async () => {
      const readmePath = join(testDir, "README.md");
      await writeFile(
        readmePath,
        "# Project\n\nBasic project needing improvements.",
      );

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
      });

      const dataContent = result.content.find((c) =>
        c.text.includes("healthReport"),
      );
      const data = JSON.parse(dataContent!.text);

      // Should have enough recommendations to warrant hours
      expect(data.healthReport.estimatedImprovementTime).toMatch(/\d+ hours?/);
    });
  });
1091 |
  // The nextSteps content part (separate from the health report payload)
  // should prioritize critical issues, set an 85+ target for weak READMEs,
  // and always end with a re-evaluation step.
  describe("Next Steps Generation", () => {
    test("should prioritize critical issues in next steps", async () => {
      const readmePath = join(testDir, "README.md");
      await writeFile(readmePath, "# Minimal\n\nBad.");

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
      });

      // This suite reads the "nextSteps" part, not "healthReport".
      const dataContent = result.content.find((c) =>
        c.text.includes("nextSteps"),
      );
      const data = JSON.parse(dataContent!.text);

      expect(
        data.nextSteps.some((step: string) => step.includes("critical issues")),
      ).toBe(true);
    });

    test("should suggest targeting 85+ score for low-scoring READMEs", async () => {
      const readmePath = join(testDir, "README.md");
      await writeFile(readmePath, "# Project\n\nNeeds improvement.");

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
      });

      const dataContent = result.content.find((c) =>
        c.text.includes("nextSteps"),
      );
      const data = JSON.parse(dataContent!.text);

      expect(
        data.nextSteps.some((step: string) =>
          step.includes("85+ health score"),
        ),
      ).toBe(true);
    });

    test("should always include re-evaluation step", async () => {
      const readmePath = join(testDir, "README.md");
      await writeFile(readmePath, "# Any Project");

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
      });

      const dataContent = result.content.find((c) =>
        c.text.includes("nextSteps"),
      );
      const data = JSON.parse(dataContent!.text);

      expect(
        data.nextSteps.some((step: string) => step.includes("Re-evaluate")),
      ).toBe(true);
    });
  });
1149 |
  // The project_type input should be accepted and echoed back in the result's
  // projectType field for each supported variant.
  describe("Project Type Variations", () => {
    test("should handle enterprise_tool project type", async () => {
      const readmePath = join(testDir, "README.md");
      await writeFile(
        readmePath,
        "# Enterprise Tool\n\nProfessional enterprise solution.",
      );

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
        project_type: "enterprise_tool",
      });

      expect(result.isError).toBe(false);
      const dataContent = result.content.find((c) =>
        c.text.includes("projectType"),
      );
      const data = JSON.parse(dataContent!.text);
      expect(data.projectType).toBe("enterprise_tool");
    });

    test("should handle personal_project project type", async () => {
      const readmePath = join(testDir, "README.md");
      await writeFile(
        readmePath,
        "# Personal Project\n\nMy personal coding project.",
      );

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
        project_type: "personal_project",
      });

      expect(result.isError).toBe(false);
      const dataContent = result.content.find((c) =>
        c.text.includes("projectType"),
      );
      const data = JSON.parse(dataContent!.text);
      expect(data.projectType).toBe("personal_project");
    });

    test("should handle documentation project type", async () => {
      const readmePath = join(testDir, "README.md");
      await writeFile(
        readmePath,
        "# Documentation Project\n\nComprehensive documentation.",
      );

      const result = await evaluateReadmeHealth({
        readme_path: readmePath,
        project_type: "documentation",
      });

      expect(result.isError).toBe(false);
      const dataContent = result.content.find((c) =>
        c.text.includes("projectType"),
      );
      const data = JSON.parse(dataContent!.text);
      expect(data.projectType).toBe("documentation");
    });
  });
1211 | });
1212 |
```
--------------------------------------------------------------------------------
/src/tools/update-existing-documentation.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { Tool } from "@modelcontextprotocol/sdk/types.js";
2 | import * as fs from "fs/promises";
3 | import * as path from "path";
4 | import {
5 | handleMemoryRecall,
6 | handleMemoryEnhancedRecommendation,
7 | handleMemoryIntelligentAnalysis,
8 | } from "../memory/index.js";
9 |
/** Input options controlling how an existing documentation tree is updated. */
interface UpdateOptions {
  /** ID of a prior analyze_repository run (memory store or .documcp cache). */
  analysisId: string;
  /** Root directory of the existing documentation tree. */
  docsPath: string;
  /** Depth/focus of the code-vs-docs comparison. */
  compareMode: "comprehensive" | "gap-detection" | "accuracy-check";
  /** How aggressively to propose content changes. */
  updateStrategy: "conservative" | "moderate" | "aggressive";
  /** Preserve the existing documents' writing style in suggestions. */
  preserveStyle: boolean;
  /** Optional subset of areas/topics to prioritize. */
  focusAreas?: string[];
}

/** One detected mismatch between the code and its documentation. */
interface DocumentationGap {
  /** Nature of the gap. */
  type: "missing" | "outdated" | "incorrect" | "incomplete";
  /** Document path (and optionally section) where the gap was found. */
  location: string;
  /** Human-readable explanation of the gap. */
  description: string;
  /** Triage priority. */
  severity: "low" | "medium" | "high" | "critical";
  /** Proposed replacement or addition. */
  suggestedUpdate: string;
  /** Supporting evidence pulled from the memory system, if any. */
  memoryEvidence?: any[];
}

/** Full result of diffing code features against documented features. */
interface CodeDocumentationComparison {
  codeFeatures: any[]; // features discovered in the repository analysis
  documentedFeatures: any[]; // features mentioned in the docs tree
  gaps: DocumentationGap[];
  outdatedSections: any[];
  accuracyIssues: any[];
}

/** A single suggested edit to one documentation section. */
interface UpdateRecommendation {
  section: string;
  currentContent: string;
  suggestedContent: string;
  /** Why this change is recommended. */
  reasoning: string;
  /** Memory-system evidence backing the recommendation. */
  memoryEvidence: any[];
  /** 0..1 confidence in the suggestion. */
  confidence: number;
  /** Rough implementation effort. */
  effort: "low" | "medium" | "high";
}

/** Top-level payload returned by the update-existing-documentation tool. */
interface UpdateResult {
  success: boolean;
  analysisPerformed: CodeDocumentationComparison;
  recommendations: UpdateRecommendation[];
  /** Aggregated insights from similar projects in the memory system. */
  memoryInsights: {
    similarProjects: any[];
    successfulUpdatePatterns: any[];
    commonGapTypes: Record<string, number>;
  };
  /** Summary numbers for reporting. */
  updateMetrics: {
    gapsDetected: number;
    recommendationsGenerated: number;
    confidenceScore: number;
    estimatedEffort: string;
  };
  nextSteps: string[];
}
63 |
64 | class DocumentationUpdateEngine {
  // Per-run state populated by updateExistingDocumentation().
  private memoryInsights: any = null; // aggregated memory-system insights
  private codeAnalysis: any = null; // repository analysis loaded via analysisId
  private existingDocs: Map<string, any> = new Map(); // relative doc path -> { content, analysis, lastModified, path }
68 |
69 | async updateExistingDocumentation(
70 | options: UpdateOptions,
71 | ): Promise<UpdateResult> {
72 | // 1. Load repository analysis and memory insights
73 | const analysis = await this.getRepositoryAnalysis(options.analysisId);
74 | this.codeAnalysis = analysis;
75 |
76 | // 2. Load memory insights for intelligent comparison
77 | await this.loadMemoryInsights(analysis, options);
78 |
79 | // 3. Analyze existing documentation structure and content
80 | const existingDocs = await this.analyzeExistingDocumentation(
81 | options.docsPath,
82 | );
83 | this.existingDocs = existingDocs;
84 |
85 | // 4. Perform comprehensive code-documentation comparison
86 | const comparison = await this.performCodeDocumentationComparison(
87 | analysis,
88 | existingDocs,
89 | options,
90 | );
91 |
92 | // 5. Generate memory-informed update recommendations
93 | const recommendations = await this.generateUpdateRecommendations(
94 | comparison,
95 | options,
96 | );
97 |
98 | // 6. Calculate metrics and confidence scores
99 | const updateMetrics = this.calculateUpdateMetrics(
100 | comparison,
101 | recommendations,
102 | );
103 |
104 | return {
105 | success: true,
106 | analysisPerformed: comparison,
107 | recommendations,
108 | memoryInsights: this.memoryInsights,
109 | updateMetrics,
110 | nextSteps: this.generateMemoryInformedNextSteps(
111 | comparison,
112 | recommendations,
113 | ),
114 | };
115 | }
116 |
117 | private async getRepositoryAnalysis(analysisId: string): Promise<any> {
118 | // Try to get analysis from memory system first
119 | try {
120 | const memoryRecall = await handleMemoryRecall({
121 | query: analysisId,
122 | type: "analysis",
123 | limit: 1,
124 | });
125 |
126 | // Handle the memory recall result structure
127 | if (
128 | memoryRecall &&
129 | memoryRecall.memories &&
130 | memoryRecall.memories.length > 0
131 | ) {
132 | const memory = memoryRecall.memories[0];
133 |
134 | // Handle wrapped content structure
135 | if (
136 | memory.data &&
137 | memory.data.content &&
138 | Array.isArray(memory.data.content)
139 | ) {
140 | // Extract the JSON from the first text content
141 | const firstContent = memory.data.content[0];
142 | if (
143 | firstContent &&
144 | firstContent.type === "text" &&
145 | firstContent.text
146 | ) {
147 | try {
148 | return JSON.parse(firstContent.text);
149 | } catch (parseError) {
150 | console.warn(
151 | "Failed to parse analysis content from memory:",
152 | parseError,
153 | );
154 | return memory.data;
155 | }
156 | }
157 | }
158 |
159 | // Try direct content access (legacy format)
160 | if (memory.content) {
161 | return memory.content;
162 | }
163 |
164 | // Try data field
165 | if (memory.data) {
166 | return memory.data;
167 | }
168 | }
169 | } catch (error) {
170 | console.warn("Failed to retrieve from memory system:", error);
171 | }
172 |
173 | // Fallback to reading from cached analysis file
174 | const analysisPath = path.join(
175 | ".documcp",
176 | "analyses",
177 | `${analysisId}.json`,
178 | );
179 | try {
180 | const content = await fs.readFile(analysisPath, "utf-8");
181 | return JSON.parse(content);
182 | } catch {
183 | throw new Error(
184 | `Repository analysis with ID '${analysisId}' not found. Please run analyze_repository first.`,
185 | );
186 | }
187 | }
188 |
189 | private async loadMemoryInsights(
190 | analysis: any,
191 | options: UpdateOptions,
192 | ): Promise<void> {
193 | try {
194 | // Get similar projects that had successful documentation updates
195 | const similarProjectsQuery = `${
196 | analysis.metadata?.primaryLanguage || ""
197 | } ${analysis.metadata?.ecosystem || ""} documentation update`;
198 | const similarProjects = await handleMemoryRecall({
199 | query: similarProjectsQuery,
200 | type: "recommendation",
201 | limit: 10,
202 | });
203 |
204 | // Get patterns for successful documentation updates
205 | const updatePatternsQuery =
206 | "documentation update successful patterns gaps outdated";
207 | const updatePatterns = await handleMemoryRecall({
208 | query: updatePatternsQuery,
209 | type: "configuration",
210 | limit: 5,
211 | });
212 |
213 | // Get memory-enhanced analysis for this specific update task
214 | const enhancedAnalysis = await handleMemoryIntelligentAnalysis({
215 | projectPath: analysis.projectPath || "",
216 | baseAnalysis: analysis,
217 | });
218 |
219 | // Get memory-enhanced recommendations for update strategy
220 | const enhancedRecommendations = await handleMemoryEnhancedRecommendation({
221 | projectPath: analysis.projectPath || "",
222 | baseRecommendation: {
223 | updateStrategy: options.updateStrategy,
224 | compareMode: options.compareMode,
225 | focusAreas: options.focusAreas || [],
226 | },
227 | projectFeatures: {
228 | ecosystem: analysis.metadata?.ecosystem || "unknown",
229 | primaryLanguage: analysis.metadata?.primaryLanguage || "unknown",
230 | complexity: analysis.complexity || "medium",
231 | hasTests: analysis.structure?.hasTests || false,
232 | hasCI: analysis.structure?.hasCI || false,
233 | docStructure: "existing", // Indicates we're updating existing docs
234 | },
235 | });
236 |
237 | this.memoryInsights = {
238 | similarProjects: similarProjects.memories || [],
239 | updatePatterns: updatePatterns.memories || [],
240 | enhancedAnalysis: enhancedAnalysis,
241 | enhancedRecommendations: enhancedRecommendations,
242 | successfulUpdatePatterns: this.extractUpdatePatterns(
243 | similarProjects.memories || [],
244 | ),
245 | commonGapTypes: this.extractCommonGapTypes(
246 | similarProjects.memories || [],
247 | ),
248 | };
249 | } catch (error) {
250 | console.warn("Failed to load memory insights:", error);
251 | this.memoryInsights = {
252 | similarProjects: [],
253 | updatePatterns: [],
254 | enhancedAnalysis: null,
255 | enhancedRecommendations: null,
256 | successfulUpdatePatterns: [],
257 | commonGapTypes: {},
258 | };
259 | }
260 | }
261 |
262 | private extractUpdatePatterns(projects: any[]): any[] {
263 | return projects
264 | .filter(
265 | (p) => p.content?.updatePatterns || p.content?.documentationUpdates,
266 | )
267 | .map((p) => p.content?.updatePatterns || p.content?.documentationUpdates)
268 | .flat()
269 | .filter(Boolean);
270 | }
271 |
272 | private extractCommonGapTypes(projects: any[]): Record<string, number> {
273 | const gapTypes: Record<string, number> = {};
274 |
275 | projects.forEach((p) => {
276 | const gaps = p.content?.documentationGaps || [];
277 | gaps.forEach((gap: any) => {
278 | const type = gap.type || "unknown";
279 | gapTypes[type] = (gapTypes[type] || 0) + 1;
280 | });
281 | });
282 |
283 | return gapTypes;
284 | }
285 |
286 | private async analyzeExistingDocumentation(
287 | docsPath: string,
288 | ): Promise<Map<string, any>> {
289 | const docs = new Map<string, any>();
290 |
291 | try {
292 | await this.recursivelyAnalyzeDocuments(docsPath, docs);
293 | } catch (error) {
294 | console.warn("Failed to analyze existing documentation:", error);
295 | }
296 |
297 | return docs;
298 | }
299 |
300 | private async recursivelyAnalyzeDocuments(
301 | dirPath: string,
302 | docs: Map<string, any>,
303 | relativePath: string = "",
304 | ): Promise<void> {
305 | try {
306 | const entries = await fs.readdir(dirPath, { withFileTypes: true });
307 |
308 | for (const entry of entries) {
309 | const fullPath = path.join(dirPath, entry.name);
310 | const docPath = path.join(relativePath, entry.name);
311 |
312 | if (entry.isDirectory()) {
313 | await this.recursivelyAnalyzeDocuments(fullPath, docs, docPath);
314 | } else if (entry.name.endsWith(".md") || entry.name.endsWith(".mdx")) {
315 | try {
316 | const content = await fs.readFile(fullPath, "utf-8");
317 | const analysis = this.analyzeDocumentContent(content, docPath);
318 | docs.set(docPath, {
319 | content,
320 | analysis,
321 | lastModified: (await fs.stat(fullPath)).mtime,
322 | path: fullPath,
323 | });
324 | } catch (error) {
325 | console.warn(`Failed to read document ${fullPath}:`, error);
326 | }
327 | }
328 | }
329 | } catch (error) {
330 | console.warn(`Failed to read directory ${dirPath}:`, error);
331 | }
332 | }
333 |
334 | private analyzeDocumentContent(content: string, filePath: string): any {
335 | return {
336 | type: this.inferDocumentType(filePath, content),
337 | sections: this.extractSections(content),
338 | codeBlocks: this.extractCodeBlocks(content),
339 | links: this.extractLinks(content),
340 | lastUpdated: this.extractLastUpdated(content),
341 | version: this.extractVersion(content),
342 | dependencies: this.extractMentionedDependencies(content),
343 | features: this.extractDocumentedFeatures(content),
344 | wordCount: content.split(/\s+/).length,
345 | headingStructure: this.extractHeadingStructure(content),
346 | };
347 | }
348 |
349 | private inferDocumentType(filePath: string, content: string): string {
350 | const fileName = path.basename(filePath).toLowerCase();
351 | const pathParts = filePath.toLowerCase().split(path.sep);
352 |
353 | // Diataxis categories
354 | if (pathParts.includes("tutorials")) return "tutorial";
355 | if (pathParts.includes("how-to") || pathParts.includes("howto"))
356 | return "how-to";
357 | if (pathParts.includes("reference")) return "reference";
358 | if (pathParts.includes("explanation")) return "explanation";
359 |
360 | // Common documentation types
361 | if (fileName.includes("readme")) return "readme";
362 | if (fileName.includes("getting-started") || fileName.includes("quickstart"))
363 | return "getting-started";
364 | if (fileName.includes("api")) return "api-reference";
365 | if (fileName.includes("install") || fileName.includes("setup"))
366 | return "installation";
367 | if (fileName.includes("deploy")) return "deployment";
368 | if (fileName.includes("config")) return "configuration";
369 |
370 | // Infer from content
371 | if (
372 | content.includes("# Getting Started") ||
373 | content.includes("## Getting Started")
374 | )
375 | return "getting-started";
376 | if (content.includes("# API") || content.includes("## API"))
377 | return "api-reference";
378 | if (
379 | content.includes("# Installation") ||
380 | content.includes("## Installation")
381 | )
382 | return "installation";
383 |
384 | return "general";
385 | }
386 |
387 | private extractSections(content: string): any[] {
388 | const sections: any[] = [];
389 | const lines = content.split("\n");
390 | let currentSection: any = null;
391 |
392 | for (let i = 0; i < lines.length; i++) {
393 | const line = lines[i];
394 | const headingMatch = line.match(/^(#{1,6})\s+(.+)/);
395 |
396 | if (headingMatch) {
397 | if (currentSection) {
398 | sections.push(currentSection);
399 | }
400 |
401 | currentSection = {
402 | level: headingMatch[1].length,
403 | title: headingMatch[2],
404 | startLine: i + 1,
405 | content: [],
406 | };
407 | } else if (currentSection) {
408 | currentSection.content.push(line);
409 | }
410 | }
411 |
412 | if (currentSection) {
413 | sections.push(currentSection);
414 | }
415 |
416 | return sections.map((section) => ({
417 | ...section,
418 | content: section.content.join("\n"),
419 | wordCount: section.content.join(" ").split(/\s+/).length,
420 | }));
421 | }
422 |
423 | private extractCodeBlocks(content: string): any[] {
424 | const codeBlocks: any[] = [];
425 | const codeBlockRegex = /```(\w+)?\n([\s\S]*?)```/g;
426 | let match;
427 |
428 | while ((match = codeBlockRegex.exec(content)) !== null) {
429 | codeBlocks.push({
430 | language: match[1] || "text",
431 | code: match[2],
432 | startIndex: match.index,
433 | endIndex: match.index + match[0].length,
434 | });
435 | }
436 |
437 | return codeBlocks;
438 | }
439 |
440 | private extractLinks(content: string): any[] {
441 | const links: any[] = [];
442 | const linkRegex = /\[([^\]]+)\]\(([^)]+)\)/g;
443 | let match;
444 |
445 | while ((match = linkRegex.exec(content)) !== null) {
446 | links.push({
447 | text: match[1],
448 | url: match[2],
449 | isInternal: !match[2].startsWith("http"),
450 | startIndex: match.index,
451 | });
452 | }
453 |
454 | return links;
455 | }
456 |
457 | private extractLastUpdated(content: string): string | null {
458 | const updateMatch = content.match(
459 | /(?:last updated|updated|modified):\s*(.+)/i,
460 | );
461 | return updateMatch ? updateMatch[1] : null;
462 | }
463 |
464 | private extractVersion(content: string): string | null {
465 | const versionMatch = content.match(/(?:version|v)[\s:]+([\d.]+)/i);
466 | return versionMatch ? versionMatch[1] : null;
467 | }
468 |
469 | private extractMentionedDependencies(content: string): string[] {
470 | const dependencies: Set<string> = new Set();
471 |
472 | // Extract from npm install commands
473 | const npmMatches = content.match(/npm install\s+([^`\n]+)/g);
474 | if (npmMatches) {
475 | npmMatches.forEach((match) => {
476 | const packages = match.replace("npm install", "").trim().split(/\s+/);
477 | packages.forEach((pkg) => {
478 | if (pkg && !pkg.startsWith("-")) {
479 | dependencies.add(pkg);
480 | }
481 | });
482 | });
483 | }
484 |
485 | // Extract from import statements
486 | const importMatches = content.match(/import.*from\s+['"]([^'"]+)['"]/g);
487 | if (importMatches) {
488 | importMatches.forEach((match) => {
489 | const packageMatch = match.match(/from\s+['"]([^'"]+)['"]/);
490 | if (packageMatch && !packageMatch[1].startsWith(".")) {
491 | dependencies.add(packageMatch[1]);
492 | }
493 | });
494 | }
495 |
496 | return Array.from(dependencies);
497 | }
498 |
499 | private extractDocumentedFeatures(content: string): string[] {
500 | const features: Set<string> = new Set();
501 |
502 | // Extract function names from code blocks
503 | const functionMatches = content.match(
504 | /(?:function|const|let|var)\s+(\w+)/g,
505 | );
506 | if (functionMatches) {
507 | functionMatches.forEach((match) => {
508 | const functionMatch = match.match(/(?:function|const|let|var)\s+(\w+)/);
509 | if (functionMatch) {
510 | features.add(functionMatch[1]);
511 | }
512 | });
513 | }
514 |
515 | // Extract API endpoints
516 | const apiMatches = content.match(
517 | /(?:GET|POST|PUT|DELETE|PATCH)\s+([/\w-]+)/g,
518 | );
519 | if (apiMatches) {
520 | apiMatches.forEach((match) => {
521 | const endpointMatch = match.match(
522 | /(?:GET|POST|PUT|DELETE|PATCH)\s+([/\w-]+)/,
523 | );
524 | if (endpointMatch) {
525 | features.add(endpointMatch[1]);
526 | }
527 | });
528 | }
529 |
530 | // Extract mentioned features from headings
531 | const headings = content.match(/#{1,6}\s+(.+)/g);
532 | if (headings) {
533 | headings.forEach((heading) => {
534 | const headingText = heading.replace(/#{1,6}\s+/, "").toLowerCase();
535 | if (
536 | headingText.includes("feature") ||
537 | headingText.includes("functionality")
538 | ) {
539 | features.add(headingText);
540 | }
541 | });
542 | }
543 |
544 | return Array.from(features);
545 | }
546 |
547 | private extractHeadingStructure(content: string): any[] {
548 | const headings: any[] = [];
549 | const lines = content.split("\n");
550 |
551 | lines.forEach((line, index) => {
552 | const headingMatch = line.match(/^(#{1,6})\s+(.+)/);
553 | if (headingMatch) {
554 | headings.push({
555 | level: headingMatch[1].length,
556 | text: headingMatch[2],
557 | line: index + 1,
558 | });
559 | }
560 | });
561 |
562 | return headings;
563 | }
564 |
565 | private async performCodeDocumentationComparison(
566 | analysis: any,
567 | existingDocs: Map<string, any>,
568 | _options: UpdateOptions,
569 | ): Promise<CodeDocumentationComparison> {
570 | const codeFeatures = this.extractCodeFeatures(analysis);
571 | const documentedFeatures = this.extractAllDocumentedFeatures(existingDocs);
572 |
573 | const gaps = await this.detectDocumentationGaps(
574 | codeFeatures,
575 | documentedFeatures,
576 | _options,
577 | );
578 | const outdatedSections = await this.detectOutdatedSections(
579 | analysis,
580 | existingDocs,
581 | );
582 | const accuracyIssues = await this.detectAccuracyIssues(
583 | analysis,
584 | existingDocs,
585 | );
586 |
587 | return {
588 | codeFeatures,
589 | documentedFeatures,
590 | gaps,
591 | outdatedSections,
592 | accuracyIssues,
593 | };
594 | }
595 |
596 | private extractCodeFeatures(analysis: any): any[] {
597 | const features: any[] = [];
598 |
599 | // Extract from dependencies
600 | if (analysis.dependencies?.packages) {
601 | analysis.dependencies.packages.forEach((pkg: string) => {
602 | features.push({
603 | type: "dependency",
604 | name: pkg,
605 | source: "package.json",
606 | });
607 | });
608 | }
609 |
610 | // Extract from scripts
611 | const packageJson = this.findPackageJsonInAnalysis(analysis);
612 | if (packageJson?.scripts) {
613 | Object.keys(packageJson.scripts).forEach((script) => {
614 | features.push({
615 | type: "script",
616 | name: script,
617 | command: packageJson.scripts[script],
618 | source: "package.json",
619 | });
620 | });
621 | }
622 |
623 | // Extract from file structure
624 | if (analysis.structure) {
625 | if (analysis.structure.hasTests) {
626 | features.push({
627 | type: "testing",
628 | name: "test suite",
629 | source: "structure",
630 | });
631 | }
632 | if (analysis.structure.hasCI) {
633 | features.push({
634 | type: "ci-cd",
635 | name: "continuous integration",
636 | source: "structure",
637 | });
638 | }
639 | }
640 |
641 | // Extract from technologies
642 | if (analysis.technologies) {
643 | Object.entries(analysis.technologies).forEach(([key, value]) => {
644 | if (value) {
645 | features.push({
646 | type: "technology",
647 | name: key,
648 | value: value,
649 | source: "analysis",
650 | });
651 | }
652 | });
653 | }
654 |
655 | return features;
656 | }
657 |
658 | private findPackageJsonInAnalysis(analysis: any): any {
659 | const files = analysis.files || [];
660 | const packageFile = files.find((f: any) => f.name === "package.json");
661 |
662 | if (packageFile?.content) {
663 | try {
664 | return JSON.parse(packageFile.content);
665 | } catch {
666 | return null;
667 | }
668 | }
669 |
670 | return null;
671 | }
672 |
673 | private extractAllDocumentedFeatures(existingDocs: Map<string, any>): any[] {
674 | const allFeatures: any[] = [];
675 |
676 | existingDocs.forEach((doc, docPath) => {
677 | const features = doc.analysis?.features || [];
678 | const dependencies = doc.analysis?.dependencies || [];
679 |
680 | features.forEach((feature: string) => {
681 | allFeatures.push({
682 | name: feature,
683 | source: docPath,
684 | type: "documented-feature",
685 | });
686 | });
687 |
688 | dependencies.forEach((dep: string) => {
689 | allFeatures.push({
690 | name: dep,
691 | source: docPath,
692 | type: "documented-dependency",
693 | });
694 | });
695 | });
696 |
697 | return allFeatures;
698 | }
699 |
700 | private async detectDocumentationGaps(
701 | codeFeatures: any[],
702 | documentedFeatures: any[],
703 | _options: UpdateOptions,
704 | ): Promise<DocumentationGap[]> {
705 | const gaps: DocumentationGap[] = [];
706 | const memoryGapPatterns = this.memoryInsights?.commonGapTypes || {};
707 |
708 | // Find features in code that aren't documented
709 | codeFeatures.forEach((codeFeature) => {
710 | const isDocumented = documentedFeatures.some((docFeature) =>
711 | this.featuresMatch(codeFeature, docFeature),
712 | );
713 |
714 | if (!isDocumented) {
715 | const severity = this.determineGapSeverity(
716 | codeFeature,
717 | memoryGapPatterns,
718 | );
719 | const suggestedUpdate = this.generateGapSuggestion(
720 | codeFeature,
721 | _options,
722 | );
723 |
724 | gaps.push({
725 | type: "missing",
726 | location: `${codeFeature.source} -> documentation`,
727 | description: `${codeFeature.type} '${codeFeature.name}' exists in code but is not documented`,
728 | severity,
729 | suggestedUpdate,
730 | memoryEvidence: this.findMemoryEvidenceForGap(codeFeature),
731 | });
732 | }
733 | });
734 |
735 | // Find documented features that no longer exist in code
736 | documentedFeatures.forEach((docFeature) => {
737 | const existsInCode = codeFeatures.some((codeFeature) =>
738 | this.featuresMatch(codeFeature, docFeature),
739 | );
740 |
741 | if (!existsInCode) {
742 | gaps.push({
743 | type: "outdated",
744 | location: docFeature.source,
745 | description: `Documented feature '${docFeature.name}' no longer exists in code`,
746 | severity: "medium",
747 | suggestedUpdate: `Remove or update documentation for '${docFeature.name}'`,
748 | memoryEvidence: this.findMemoryEvidenceForOutdated(docFeature),
749 | });
750 | }
751 | });
752 |
753 | return gaps;
754 | }
755 |
756 | private featuresMatch(codeFeature: any, docFeature: any): boolean {
757 | // Exact name match
758 | if (codeFeature.name === docFeature.name) return true;
759 |
760 | // Type-specific matching
761 | if (
762 | codeFeature.type === "dependency" &&
763 | docFeature.type === "documented-dependency"
764 | ) {
765 | return codeFeature.name === docFeature.name;
766 | }
767 |
768 | // Partial match for similar names
769 | const codeName = codeFeature.name.toLowerCase();
770 | const docName = docFeature.name.toLowerCase();
771 |
772 | return codeName.includes(docName) || docName.includes(codeName);
773 | }
774 |
775 | private determineGapSeverity(
776 | codeFeature: any,
777 | memoryGapPatterns: Record<string, number>,
778 | ): "low" | "medium" | "high" | "critical" {
779 | // High importance features
780 | if (
781 | codeFeature.type === "script" &&
782 | ["start", "dev", "build", "test"].includes(codeFeature.name)
783 | ) {
784 | return "high";
785 | }
786 |
787 | if (
788 | codeFeature.type === "dependency" &&
789 | this.isCriticalDependency(codeFeature.name)
790 | ) {
791 | return "high";
792 | }
793 |
794 | if (codeFeature.type === "testing" || codeFeature.type === "ci-cd") {
795 | return "medium";
796 | }
797 |
798 | // Check memory patterns for common gaps
799 | const gapFrequency = memoryGapPatterns[codeFeature.type] || 0;
800 | if (gapFrequency > 5) return "medium"; // Common gap type
801 | if (gapFrequency > 2) return "low";
802 |
803 | return "low";
804 | }
805 |
806 | private isCriticalDependency(depName: string): boolean {
807 | const criticalDeps = [
808 | "react",
809 | "vue",
810 | "angular",
811 | "express",
812 | "fastify",
813 | "next",
814 | "nuxt",
815 | "gatsby",
816 | "typescript",
817 | "jest",
818 | "mocha",
819 | "webpack",
820 | "vite",
821 | "rollup",
822 | ];
823 |
824 | return criticalDeps.some((critical) => depName.includes(critical));
825 | }
826 |
827 | private generateGapSuggestion(
828 | codeFeature: any,
829 | _options: UpdateOptions,
830 | ): string {
831 | switch (codeFeature.type) {
832 | case "script":
833 | return `Add documentation for the '${codeFeature.name}' script: \`npm run ${codeFeature.name}\``;
834 | case "dependency":
835 | return `Document the '${codeFeature.name}' dependency and its usage`;
836 | case "testing":
837 | return `Add testing documentation explaining how to run and write tests`;
838 | case "ci-cd":
839 | return `Document the CI/CD pipeline and deployment process`;
840 | case "technology":
841 | return `Add explanation for ${codeFeature.name}: ${codeFeature.value}`;
842 | default:
843 | return `Document the ${codeFeature.type} '${codeFeature.name}'`;
844 | }
845 | }
846 |
847 | private findMemoryEvidenceForGap(codeFeature: any): any[] {
848 | return (
849 | this.memoryInsights?.similarProjects
850 | .filter(
851 | (p: any) =>
852 | p.content?.gaps?.some((gap: any) => gap.type === codeFeature.type),
853 | )
854 | .slice(0, 3) || []
855 | );
856 | }
857 |
858 | private findMemoryEvidenceForOutdated(docFeature: any): any[] {
859 | return (
860 | this.memoryInsights?.similarProjects
861 | .filter(
862 | (p: any) =>
863 | p.content?.outdatedSections?.some(
864 | (section: any) => section.feature === docFeature.name,
865 | ),
866 | )
867 | .slice(0, 3) || []
868 | );
869 | }
870 |
871 | private async detectOutdatedSections(
872 | analysis: any,
873 | existingDocs: Map<string, any>,
874 | ): Promise<any[]> {
875 | const outdatedSections: any[] = [];
876 |
877 | existingDocs.forEach((doc, docPath) => {
878 | const sections = doc.analysis?.sections || [];
879 |
880 | sections.forEach((section: any) => {
881 | const isOutdated = this.checkSectionOutdated(section, analysis);
882 |
883 | if (isOutdated) {
884 | outdatedSections.push({
885 | location: docPath,
886 | section: section.title,
887 | reason: isOutdated.reason,
888 | confidence: isOutdated.confidence,
889 | suggestedUpdate: isOutdated.suggestedUpdate,
890 | });
891 | }
892 | });
893 | });
894 |
895 | return outdatedSections;
896 | }
897 |
898 | private checkSectionOutdated(section: any, analysis: any): any {
899 | const sectionContent = section.content.toLowerCase();
900 |
901 | // Check for outdated Node.js versions
902 | const nodeVersionMatch = sectionContent.match(/node(?:\.js)?\s+(\d+)/);
903 | if (nodeVersionMatch) {
904 | const documentedVersion = parseInt(nodeVersionMatch[1], 10);
905 | const currentRecommended = 18; // Current LTS
906 |
907 | if (documentedVersion < currentRecommended - 2) {
908 | return {
909 | reason: `Documented Node.js version ${documentedVersion} is outdated`,
910 | confidence: 0.9,
911 | suggestedUpdate: `Update to recommend Node.js ${currentRecommended}+`,
912 | };
913 | }
914 | }
915 |
916 | // Check for outdated package names
917 | const packageJson = this.findPackageJsonInAnalysis(analysis);
918 | if (packageJson?.dependencies) {
919 | const currentDeps = Object.keys(packageJson.dependencies);
920 |
921 | // Look for documented packages that are no longer dependencies
922 | for (const dep of currentDeps) {
923 | if (sectionContent.includes(dep)) {
924 | const version = packageJson.dependencies[dep];
925 | if (
926 | sectionContent.includes(dep) &&
927 | !sectionContent.includes(version)
928 | ) {
929 | return {
930 | reason: `Package version information may be outdated for ${dep}`,
931 | confidence: 0.7,
932 | suggestedUpdate: `Update ${dep} version references to ${version}`,
933 | };
934 | }
935 | }
936 | }
937 | }
938 |
939 | return null;
940 | }
941 |
942 | private async detectAccuracyIssues(
943 | analysis: any,
944 | existingDocs: Map<string, any>,
945 | ): Promise<any[]> {
946 | const accuracyIssues: any[] = [];
947 |
948 | existingDocs.forEach((doc, docPath) => {
949 | const codeBlocks = doc.analysis?.codeBlocks || [];
950 |
951 | codeBlocks.forEach((codeBlock: any, index: number) => {
952 | const issues = this.validateCodeBlock(codeBlock, analysis);
953 |
954 | issues.forEach((issue) => {
955 | accuracyIssues.push({
956 | location: `${docPath}:code-block-${index}`,
957 | type: issue.type,
958 | description: issue.description,
959 | severity: issue.severity,
960 | suggestedFix: issue.suggestedFix,
961 | });
962 | });
963 | });
964 | });
965 |
966 | return accuracyIssues;
967 | }
968 |
969 | private validateCodeBlock(codeBlock: any, analysis: any): any[] {
970 | const issues: any[] = [];
971 | const code = codeBlock.code;
972 |
973 | // Check npm install commands against actual dependencies
974 | const npmInstallMatches = code.match(/npm install\s+([^`\n]+)/g);
975 | if (npmInstallMatches) {
976 | const packageJson = this.findPackageJsonInAnalysis(analysis);
977 | const actualDeps = packageJson
978 | ? Object.keys(packageJson.dependencies || {})
979 | : [];
980 |
981 | npmInstallMatches.forEach((match: string) => {
982 | const packages = match.replace("npm install", "").trim().split(/\s+/);
983 | packages.forEach((pkg: string) => {
984 | if (pkg && !pkg.startsWith("-") && !actualDeps.includes(pkg)) {
985 | issues.push({
986 | type: "incorrect-dependency",
987 | description: `npm install command includes '${pkg}' which is not in package.json`,
988 | severity: "medium",
989 | suggestedFix: `Remove '${pkg}' or add it to dependencies`,
990 | });
991 | }
992 | });
993 | });
994 | }
995 |
996 | // Check for outdated import syntax
997 | if (
998 | code.includes("require(") &&
999 | analysis.metadata?.primaryLanguage === "TypeScript"
1000 | ) {
1001 | issues.push({
1002 | type: "outdated-syntax",
1003 | description: "Using require() syntax in TypeScript project",
1004 | severity: "low",
1005 | suggestedFix: "Update to ES6 import syntax",
1006 | });
1007 | }
1008 |
1009 | return issues;
1010 | }
1011 |
1012 | private async generateUpdateRecommendations(
1013 | comparison: CodeDocumentationComparison,
1014 | _options: UpdateOptions,
1015 | ): Promise<UpdateRecommendation[]> {
1016 | const recommendations: UpdateRecommendation[] = [];
1017 |
1018 | // Generate recommendations for gaps
1019 | for (const gap of comparison.gaps) {
1020 | if (
1021 | gap.severity === "critical" ||
1022 | gap.severity === "high" ||
1023 | (gap.severity === "medium" &&
1024 | _options.updateStrategy !== "conservative")
1025 | ) {
1026 | const recommendation = await this.generateGapRecommendation(
1027 | gap,
1028 | _options,
1029 | );
1030 | recommendations.push(recommendation);
1031 | }
1032 | }
1033 |
1034 | // Generate recommendations for outdated sections
1035 | for (const outdated of comparison.outdatedSections) {
1036 | const recommendation = await this.generateOutdatedRecommendation(
1037 | outdated,
1038 | _options,
1039 | );
1040 | recommendations.push(recommendation);
1041 | }
1042 |
1043 | // Generate recommendations for accuracy issues
1044 | for (const issue of comparison.accuracyIssues) {
1045 | if (
1046 | issue.severity !== "low" ||
1047 | _options.updateStrategy === "aggressive"
1048 | ) {
1049 | const recommendation = await this.generateAccuracyRecommendation(
1050 | issue,
1051 | _options,
1052 | );
1053 | recommendations.push(recommendation);
1054 | }
1055 | }
1056 |
1057 | return recommendations.sort((a, b) => b.confidence - a.confidence);
1058 | }
1059 |
1060 | private async generateGapRecommendation(
1061 | gap: DocumentationGap,
1062 | _options: UpdateOptions,
1063 | ): Promise<UpdateRecommendation> {
1064 | const memoryEvidence = gap.memoryEvidence || [];
1065 | const successfulPatterns =
1066 | this.memoryInsights?.successfulUpdatePatterns || [];
1067 |
1068 | return {
1069 | section: gap.location,
1070 | currentContent: "", // No current content for missing items
1071 | suggestedContent: this.generateContentForGap(gap, successfulPatterns),
1072 | reasoning: `${gap.description}. ${memoryEvidence.length} similar projects had similar gaps.`,
1073 | memoryEvidence,
1074 | confidence: this.calculateGapConfidence(gap, memoryEvidence),
1075 | effort: this.estimateGapEffort(gap),
1076 | };
1077 | }
1078 |
1079 | private generateContentForGap(
1080 | gap: DocumentationGap,
1081 | patterns: any[],
1082 | ): string {
1083 | // Use memory patterns to generate appropriate content
1084 | const relevantPatterns = patterns.filter((p) => p.gapType === gap.type);
1085 |
1086 | if (relevantPatterns.length > 0) {
1087 | const bestPattern = relevantPatterns[0];
1088 | return this.adaptPatternToGap(bestPattern, gap);
1089 | }
1090 |
1091 | return gap.suggestedUpdate;
1092 | }
1093 |
1094 | private adaptPatternToGap(pattern: any, gap: DocumentationGap): string {
1095 | let content = pattern.template || pattern.content || gap.suggestedUpdate;
1096 |
1097 | // Replace placeholders with actual gap information
1098 | content = content.replace(/\{feature\}/g, gap.description);
1099 | content = content.replace(/\{location\}/g, gap.location);
1100 |
1101 | return content;
1102 | }
1103 |
1104 | private calculateGapConfidence(
1105 | gap: DocumentationGap,
1106 | evidence: any[],
1107 | ): number {
1108 | let confidence = 0.5; // Base confidence
1109 |
1110 | // Increase confidence based on severity
1111 | switch (gap.severity) {
1112 | case "critical":
1113 | confidence += 0.4;
1114 | break;
1115 | case "high":
1116 | confidence += 0.3;
1117 | break;
1118 | case "medium":
1119 | confidence += 0.2;
1120 | break;
1121 | case "low":
1122 | confidence += 0.1;
1123 | break;
1124 | }
1125 |
1126 | // Increase confidence based on memory evidence
1127 | confidence += Math.min(evidence.length * 0.1, 0.3);
1128 |
1129 | return Math.min(confidence, 1.0);
1130 | }
1131 |
1132 | private estimateGapEffort(gap: DocumentationGap): "low" | "medium" | "high" {
1133 | switch (gap.type) {
1134 | case "missing":
1135 | return gap.severity === "critical" ? "high" : "medium";
1136 | case "outdated":
1137 | return "low";
1138 | case "incorrect":
1139 | return "medium";
1140 | case "incomplete":
1141 | return "low";
1142 | default:
1143 | return "medium";
1144 | }
1145 | }
1146 |
1147 | private async generateOutdatedRecommendation(
1148 | outdated: any,
1149 | _options: UpdateOptions,
1150 | ): Promise<UpdateRecommendation> {
1151 | return {
1152 | section: outdated.location,
1153 | currentContent: outdated.section,
1154 | suggestedContent: outdated.suggestedUpdate,
1155 | reasoning: outdated.reason,
1156 | memoryEvidence: [],
1157 | confidence: outdated.confidence || 0.8,
1158 | effort: "low",
1159 | };
1160 | }
1161 |
1162 | private async generateAccuracyRecommendation(
1163 | issue: any,
1164 | _options: UpdateOptions,
1165 | ): Promise<UpdateRecommendation> {
1166 | return {
1167 | section: issue.location,
1168 | currentContent: "Code block with accuracy issues",
1169 | suggestedContent: issue.suggestedFix,
1170 | reasoning: issue.description,
1171 | memoryEvidence: [],
1172 | confidence: issue.severity === "high" ? 0.9 : 0.7,
1173 | effort: issue.severity === "high" ? "medium" : "low",
1174 | };
1175 | }
1176 |
1177 | private calculateUpdateMetrics(
1178 | comparison: CodeDocumentationComparison,
1179 | recommendations: UpdateRecommendation[],
1180 | ): any {
1181 | const totalGaps = comparison.gaps.length;
1182 | const totalRecommendations = recommendations.length;
1183 | const avgConfidence =
1184 | recommendations.reduce((sum, r) => sum + r.confidence, 0) /
1185 | recommendations.length || 0;
1186 |
1187 | const effortCounts = recommendations.reduce(
1188 | (acc, r) => {
1189 | acc[r.effort] = (acc[r.effort] || 0) + 1;
1190 | return acc;
1191 | },
1192 | {} as Record<string, number>,
1193 | );
1194 |
1195 | let estimatedEffort = "low";
1196 | if (effortCounts.high > 0) estimatedEffort = "high";
1197 | else if (effortCounts.medium > effortCounts.low) estimatedEffort = "medium";
1198 |
1199 | return {
1200 | gapsDetected: totalGaps,
1201 | recommendationsGenerated: totalRecommendations,
1202 | confidenceScore: Math.round(avgConfidence * 100) / 100,
1203 | estimatedEffort,
1204 | };
1205 | }
1206 |
1207 | private generateMemoryInformedNextSteps(
1208 | comparison: CodeDocumentationComparison,
1209 | recommendations: UpdateRecommendation[],
1210 | ): string[] {
1211 | const nextSteps = [];
1212 | const highConfidenceRecs = recommendations.filter(
1213 | (r) => r.confidence > 0.8,
1214 | );
1215 | const criticalGaps = comparison.gaps.filter(
1216 | (g) => g.severity === "critical",
1217 | );
1218 |
1219 | if (criticalGaps.length > 0) {
1220 | nextSteps.push(
1221 | `Address ${criticalGaps.length} critical documentation gaps immediately`,
1222 | );
1223 | }
1224 |
1225 | if (highConfidenceRecs.length > 0) {
1226 | nextSteps.push(
1227 | `Implement ${highConfidenceRecs.length} high-confidence recommendations first`,
1228 | );
1229 | }
1230 |
1231 | if (comparison.accuracyIssues.length > 0) {
1232 | nextSteps.push(
1233 | `Fix ${comparison.accuracyIssues.length} code accuracy issues in documentation`,
1234 | );
1235 | }
1236 |
1237 | nextSteps.push(
1238 | "Review and validate all recommended changes before implementation",
1239 | );
1240 | nextSteps.push("Test updated code examples to ensure they work correctly");
1241 |
1242 | const memoryInsights = this.memoryInsights?.similarProjects?.length || 0;
1243 | if (memoryInsights > 0) {
1244 | nextSteps.push(
1245 | `Leverage patterns from ${memoryInsights} similar projects for additional improvements`,
1246 | );
1247 | }
1248 |
1249 | return nextSteps;
1250 | }
1251 | }
1252 |
// Export the tool implementation
/**
 * MCP tool definition for `update_existing_documentation`.
 *
 * Declares the JSON schema for the tool's arguments; only `analysisId`
 * and `docsPath` are required, the remaining fields default as described
 * below. Invocations are handled by `handleUpdateExistingDocumentation`.
 */
export const updateExistingDocumentation: Tool = {
  name: "update_existing_documentation",
  description:
    "Intelligently analyze and update existing documentation using memory insights and code comparison",
  inputSchema: {
    type: "object",
    properties: {
      analysisId: {
        type: "string",
        description: "Repository analysis ID from analyze_repository tool",
      },
      docsPath: {
        type: "string",
        description: "Path to existing documentation directory",
      },
      // Which detection passes to run over the code/doc comparison.
      compareMode: {
        type: "string",
        enum: ["comprehensive", "gap-detection", "accuracy-check"],
        default: "comprehensive",
        description: "Mode of comparison between code and documentation",
      },
      // Controls which severities are turned into recommendations.
      updateStrategy: {
        type: "string",
        enum: ["conservative", "moderate", "aggressive"],
        default: "moderate",
        description: "How aggressively to suggest updates",
      },
      preserveStyle: {
        type: "boolean",
        default: true,
        description: "Preserve existing documentation style and formatting",
      },
      focusAreas: {
        type: "array",
        items: { type: "string" },
        description:
          'Specific areas to focus updates on (e.g., "dependencies", "scripts", "api")',
      },
    },
    required: ["analysisId", "docsPath"],
  },
};
1296 |
1297 | export async function handleUpdateExistingDocumentation(
1298 | args: any,
1299 | ): Promise<UpdateResult> {
1300 | const engine = new DocumentationUpdateEngine();
1301 | return await engine.updateExistingDocumentation(args);
1302 | }
1303 |
```