This is page 24 of 33. Use http://codebase.md/tosin2013/documcp?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .dockerignore
├── .eslintignore
├── .eslintrc.json
├── .github
│ ├── agents
│ │ ├── documcp-ast.md
│ │ ├── documcp-deploy.md
│ │ ├── documcp-memory.md
│ │ ├── documcp-test.md
│ │ └── documcp-tool.md
│ ├── copilot-instructions.md
│ ├── dependabot.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── automated-changelog.md
│ │ ├── bug_report.md
│ │ ├── bug_report.yml
│ │ ├── documentation_issue.md
│ │ ├── feature_request.md
│ │ ├── feature_request.yml
│ │ ├── npm-publishing-fix.md
│ │ └── release_improvements.md
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── release-drafter.yml
│ └── workflows
│ ├── auto-merge.yml
│ ├── ci.yml
│ ├── codeql.yml
│ ├── dependency-review.yml
│ ├── deploy-docs.yml
│ ├── README.md
│ ├── release-drafter.yml
│ └── release.yml
├── .gitignore
├── .husky
│ ├── commit-msg
│ └── pre-commit
├── .linkcheck.config.json
├── .markdown-link-check.json
├── .nvmrc
├── .pre-commit-config.yaml
├── .versionrc.json
├── ARCHITECTURAL_CHANGES_SUMMARY.md
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── commitlint.config.js
├── CONTRIBUTING.md
├── docker-compose.docs.yml
├── Dockerfile.docs
├── docs
│ ├── .docusaurus
│ │ ├── docusaurus-plugin-content-docs
│ │ │ └── default
│ │ │ └── __mdx-loader-dependency.json
│ │ └── docusaurus-plugin-content-pages
│ │ └── default
│ │ └── __plugin.json
│ ├── adrs
│ │ ├── adr-0001-mcp-server-architecture.md
│ │ ├── adr-0002-repository-analysis-engine.md
│ │ ├── adr-0003-static-site-generator-recommendation-engine.md
│ │ ├── adr-0004-diataxis-framework-integration.md
│ │ ├── adr-0005-github-pages-deployment-automation.md
│ │ ├── adr-0006-mcp-tools-api-design.md
│ │ ├── adr-0007-mcp-prompts-and-resources-integration.md
│ │ ├── adr-0008-intelligent-content-population-engine.md
│ │ ├── adr-0009-content-accuracy-validation-framework.md
│ │ ├── adr-0010-mcp-resource-pattern-redesign.md
│ │ ├── adr-0011-ce-mcp-compatibility.md
│ │ ├── adr-0012-priority-scoring-system-for-documentation-drift.md
│ │ ├── adr-0013-release-pipeline-and-package-distribution.md
│ │ └── README.md
│ ├── api
│ │ ├── .nojekyll
│ │ ├── assets
│ │ │ ├── hierarchy.js
│ │ │ ├── highlight.css
│ │ │ ├── icons.js
│ │ │ ├── icons.svg
│ │ │ ├── main.js
│ │ │ ├── navigation.js
│ │ │ ├── search.js
│ │ │ └── style.css
│ │ ├── hierarchy.html
│ │ ├── index.html
│ │ ├── modules.html
│ │ └── variables
│ │ └── TOOLS.html
│ ├── assets
│ │ └── logo.svg
│ ├── CE-MCP-FINDINGS.md
│ ├── development
│ │ └── MCP_INSPECTOR_TESTING.md
│ ├── docusaurus.config.js
│ ├── explanation
│ │ ├── architecture.md
│ │ └── index.md
│ ├── guides
│ │ ├── link-validation.md
│ │ ├── playwright-integration.md
│ │ └── playwright-testing-workflow.md
│ ├── how-to
│ │ ├── analytics-setup.md
│ │ ├── change-watcher.md
│ │ ├── custom-domains.md
│ │ ├── documentation-freshness-tracking.md
│ │ ├── drift-priority-scoring.md
│ │ ├── github-pages-deployment.md
│ │ ├── index.md
│ │ ├── llm-integration.md
│ │ ├── local-testing.md
│ │ ├── performance-optimization.md
│ │ ├── prompting-guide.md
│ │ ├── repository-analysis.md
│ │ ├── seo-optimization.md
│ │ ├── site-monitoring.md
│ │ ├── troubleshooting.md
│ │ └── usage-examples.md
│ ├── index.md
│ ├── knowledge-graph.md
│ ├── package-lock.json
│ ├── package.json
│ ├── phase-2-intelligence.md
│ ├── reference
│ │ ├── api-overview.md
│ │ ├── cli.md
│ │ ├── configuration.md
│ │ ├── deploy-pages.md
│ │ ├── index.md
│ │ ├── mcp-tools.md
│ │ └── prompt-templates.md
│ ├── research
│ │ ├── cross-domain-integration
│ │ │ └── README.md
│ │ ├── domain-1-mcp-architecture
│ │ │ ├── index.md
│ │ │ └── mcp-performance-research.md
│ │ ├── domain-2-repository-analysis
│ │ │ └── README.md
│ │ ├── domain-3-ssg-recommendation
│ │ │ ├── index.md
│ │ │ └── ssg-performance-analysis.md
│ │ ├── domain-4-diataxis-integration
│ │ │ └── README.md
│ │ ├── domain-5-github-deployment
│ │ │ ├── github-pages-security-analysis.md
│ │ │ └── index.md
│ │ ├── domain-6-api-design
│ │ │ └── README.md
│ │ ├── README.md
│ │ ├── research-integration-summary-2025-01-14.md
│ │ ├── research-progress-template.md
│ │ └── research-questions-2025-01-14.md
│ ├── robots.txt
│ ├── sidebars.js
│ ├── sitemap.xml
│ ├── src
│ │ └── css
│ │ └── custom.css
│ └── tutorials
│ ├── development-setup.md
│ ├── environment-setup.md
│ ├── first-deployment.md
│ ├── getting-started.md
│ ├── index.md
│ ├── memory-workflows.md
│ └── user-onboarding.md
├── ISSUE_IMPLEMENTATION_SUMMARY.md
├── jest.config.js
├── LICENSE
├── Makefile
├── MCP_PHASE2_IMPLEMENTATION.md
├── mcp-config-example.json
├── mcp.json
├── package-lock.json
├── package.json
├── README.md
├── release.sh
├── scripts
│ └── check-package-structure.cjs
├── SECURITY.md
├── setup-precommit.sh
├── src
│ ├── benchmarks
│ │ └── performance.ts
│ ├── index.ts
│ ├── memory
│ │ ├── contextual-retrieval.ts
│ │ ├── deployment-analytics.ts
│ │ ├── enhanced-manager.ts
│ │ ├── export-import.ts
│ │ ├── freshness-kg-integration.ts
│ │ ├── index.ts
│ │ ├── integration.ts
│ │ ├── kg-code-integration.ts
│ │ ├── kg-health.ts
│ │ ├── kg-integration.ts
│ │ ├── kg-link-validator.ts
│ │ ├── kg-storage.ts
│ │ ├── knowledge-graph.ts
│ │ ├── learning.ts
│ │ ├── manager.ts
│ │ ├── multi-agent-sharing.ts
│ │ ├── pruning.ts
│ │ ├── schemas.ts
│ │ ├── storage.ts
│ │ ├── temporal-analysis.ts
│ │ ├── user-preferences.ts
│ │ └── visualization.ts
│ ├── prompts
│ │ └── technical-writer-prompts.ts
│ ├── scripts
│ │ └── benchmark.ts
│ ├── templates
│ │ └── playwright
│ │ ├── accessibility.spec.template.ts
│ │ ├── Dockerfile.template
│ │ ├── docs-e2e.workflow.template.yml
│ │ ├── link-validation.spec.template.ts
│ │ └── playwright.config.template.ts
│ ├── tools
│ │ ├── analyze-deployments.ts
│ │ ├── analyze-readme.ts
│ │ ├── analyze-repository.ts
│ │ ├── change-watcher.ts
│ │ ├── check-documentation-links.ts
│ │ ├── cleanup-agent-artifacts.ts
│ │ ├── deploy-pages.ts
│ │ ├── detect-gaps.ts
│ │ ├── evaluate-readme-health.ts
│ │ ├── generate-config.ts
│ │ ├── generate-contextual-content.ts
│ │ ├── generate-llm-context.ts
│ │ ├── generate-readme-template.ts
│ │ ├── generate-technical-writer-prompts.ts
│ │ ├── kg-health-check.ts
│ │ ├── manage-preferences.ts
│ │ ├── manage-sitemap.ts
│ │ ├── optimize-readme.ts
│ │ ├── populate-content.ts
│ │ ├── readme-best-practices.ts
│ │ ├── recommend-ssg.ts
│ │ ├── setup-playwright-tests.ts
│ │ ├── setup-structure.ts
│ │ ├── simulate-execution.ts
│ │ ├── sync-code-to-docs.ts
│ │ ├── test-local-deployment.ts
│ │ ├── track-documentation-freshness.ts
│ │ ├── update-existing-documentation.ts
│ │ ├── validate-content.ts
│ │ ├── validate-documentation-freshness.ts
│ │ ├── validate-readme-checklist.ts
│ │ └── verify-deployment.ts
│ ├── types
│ │ └── api.ts
│ ├── utils
│ │ ├── artifact-detector.ts
│ │ ├── ast-analyzer.ts
│ │ ├── change-watcher.ts
│ │ ├── code-scanner.ts
│ │ ├── content-extractor.ts
│ │ ├── drift-detector.ts
│ │ ├── execution-simulator.ts
│ │ ├── freshness-tracker.ts
│ │ ├── language-parsers-simple.ts
│ │ ├── llm-client.ts
│ │ ├── permission-checker.ts
│ │ ├── semantic-analyzer.ts
│ │ ├── sitemap-generator.ts
│ │ ├── usage-metadata.ts
│ │ └── user-feedback-integration.ts
│ └── workflows
│ └── documentation-workflow.ts
├── test-docs-local.sh
├── tests
│ ├── api
│ │ └── mcp-responses.test.ts
│ ├── benchmarks
│ │ └── performance.test.ts
│ ├── call-graph-builder.test.ts
│ ├── change-watcher-priority.integration.test.ts
│ ├── change-watcher.test.ts
│ ├── edge-cases
│ │ └── error-handling.test.ts
│ ├── execution-simulator.test.ts
│ ├── functional
│ │ └── tools.test.ts
│ ├── integration
│ │ ├── kg-documentation-workflow.test.ts
│ │ ├── knowledge-graph-workflow.test.ts
│ │ ├── mcp-readme-tools.test.ts
│ │ ├── memory-mcp-tools.test.ts
│ │ ├── readme-technical-writer.test.ts
│ │ └── workflow.test.ts
│ ├── memory
│ │ ├── contextual-retrieval.test.ts
│ │ ├── enhanced-manager.test.ts
│ │ ├── export-import.test.ts
│ │ ├── freshness-kg-integration.test.ts
│ │ ├── kg-code-integration.test.ts
│ │ ├── kg-health.test.ts
│ │ ├── kg-link-validator.test.ts
│ │ ├── kg-storage-validation.test.ts
│ │ ├── kg-storage.test.ts
│ │ ├── knowledge-graph-documentation-examples.test.ts
│ │ ├── knowledge-graph-enhanced.test.ts
│ │ ├── knowledge-graph.test.ts
│ │ ├── learning.test.ts
│ │ ├── manager-advanced.test.ts
│ │ ├── manager.test.ts
│ │ ├── mcp-resource-integration.test.ts
│ │ ├── mcp-tool-persistence.test.ts
│ │ ├── schemas-documentation-examples.test.ts
│ │ ├── schemas.test.ts
│ │ ├── storage.test.ts
│ │ ├── temporal-analysis.test.ts
│ │ └── user-preferences.test.ts
│ ├── performance
│ │ ├── memory-load-testing.test.ts
│ │ └── memory-stress-testing.test.ts
│ ├── prompts
│ │ ├── guided-workflow-prompts.test.ts
│ │ └── technical-writer-prompts.test.ts
│ ├── server.test.ts
│ ├── setup.ts
│ ├── tools
│ │ ├── all-tools.test.ts
│ │ ├── analyze-coverage.test.ts
│ │ ├── analyze-deployments.test.ts
│ │ ├── analyze-readme.test.ts
│ │ ├── analyze-repository.test.ts
│ │ ├── check-documentation-links.test.ts
│ │ ├── cleanup-agent-artifacts.test.ts
│ │ ├── deploy-pages-kg-retrieval.test.ts
│ │ ├── deploy-pages-tracking.test.ts
│ │ ├── deploy-pages.test.ts
│ │ ├── detect-gaps.test.ts
│ │ ├── evaluate-readme-health.test.ts
│ │ ├── generate-contextual-content.test.ts
│ │ ├── generate-llm-context.test.ts
│ │ ├── generate-readme-template.test.ts
│ │ ├── generate-technical-writer-prompts.test.ts
│ │ ├── kg-health-check.test.ts
│ │ ├── manage-sitemap.test.ts
│ │ ├── optimize-readme.test.ts
│ │ ├── readme-best-practices.test.ts
│ │ ├── recommend-ssg-historical.test.ts
│ │ ├── recommend-ssg-preferences.test.ts
│ │ ├── recommend-ssg.test.ts
│ │ ├── simple-coverage.test.ts
│ │ ├── sync-code-to-docs.test.ts
│ │ ├── test-local-deployment.test.ts
│ │ ├── tool-error-handling.test.ts
│ │ ├── track-documentation-freshness.test.ts
│ │ ├── validate-content.test.ts
│ │ ├── validate-documentation-freshness.test.ts
│ │ └── validate-readme-checklist.test.ts
│ ├── types
│ │ └── type-safety.test.ts
│ └── utils
│ ├── artifact-detector.test.ts
│ ├── ast-analyzer.test.ts
│ ├── content-extractor.test.ts
│ ├── drift-detector-diataxis.test.ts
│ ├── drift-detector-priority.test.ts
│ ├── drift-detector.test.ts
│ ├── freshness-tracker.test.ts
│ ├── llm-client.test.ts
│ ├── semantic-analyzer.test.ts
│ ├── sitemap-generator.test.ts
│ ├── usage-metadata.test.ts
│ └── user-feedback-integration.test.ts
├── tsconfig.json
└── typedoc.json
```
# Files
--------------------------------------------------------------------------------
/tests/utils/drift-detector.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | /**
2 | * Drift Detector Tests (Phase 3)
3 | */
4 |
5 | import {
6 | DriftDetector,
7 | DriftDetectionResult,
8 | } from "../../src/utils/drift-detector.js";
9 | import { promises as fs } from "fs";
10 | import { tmpdir } from "os";
11 | import { join } from "path";
12 | import { mkdtemp, rm } from "fs/promises";
13 |
14 | describe("DriftDetector", () => {
15 | let detector: DriftDetector;
16 | let tempDir: string;
17 | let projectPath: string;
18 | let docsPath: string;
19 |
20 | beforeAll(async () => {
21 | tempDir = await mkdtemp(join(tmpdir(), "drift-test-"));
22 | projectPath = join(tempDir, "project");
23 | docsPath = join(tempDir, "docs");
24 |
25 | await fs.mkdir(projectPath, { recursive: true });
26 | await fs.mkdir(join(projectPath, "src"), { recursive: true });
27 | await fs.mkdir(docsPath, { recursive: true });
28 |
29 | detector = new DriftDetector(tempDir);
30 | await detector.initialize();
31 | });
32 |
33 | afterAll(async () => {
34 | await rm(tempDir, { recursive: true, force: true });
35 | });
36 |
37 | describe("Snapshot Creation", () => {
38 | test("should create snapshot of codebase and documentation", async () => {
39 | // Create sample source file
40 | const sourceCode = `
41 | export function calculateSum(a: number, b: number): number {
42 | return a + b;
43 | }
44 | `.trim();
45 |
46 | await fs.writeFile(join(projectPath, "src", "math.ts"), sourceCode);
47 |
48 | // Create sample documentation
49 | const docContent = `
50 | # Math Module
51 |
52 | ## calculateSum
53 |
54 | Adds two numbers together.
55 |
56 | \`\`\`typescript
57 | calculateSum(a: number, b: number): number
58 | \`\`\`
59 | `.trim();
60 |
61 | await fs.writeFile(join(docsPath, "math.md"), docContent);
62 |
63 | const snapshot = await detector.createSnapshot(projectPath, docsPath);
64 |
65 | expect(snapshot).toBeDefined();
66 | expect(snapshot.projectPath).toBe(projectPath);
67 | expect(snapshot.timestamp).toBeTruthy();
68 | expect(snapshot.files.size).toBeGreaterThan(0);
69 | expect(snapshot.documentation.size).toBeGreaterThan(0);
70 | });
71 |
72 | test("should store snapshot to disk", async () => {
73 | const sourceCode = `export function test(): void {}`;
74 | await fs.writeFile(join(projectPath, "src", "test.ts"), sourceCode);
75 |
76 | const snapshot = await detector.createSnapshot(projectPath, docsPath);
77 |
78 | // Check that snapshot directory was created
79 | const snapshotDir = join(tempDir, ".documcp", "snapshots");
80 | const files = await fs.readdir(snapshotDir);
81 |
82 | expect(files.length).toBeGreaterThan(0);
83 | expect(files.some((f) => f.startsWith("snapshot-"))).toBe(true);
84 | });
85 |
86 | test("should load latest snapshot", async () => {
87 | const sourceCode = `export function loadTest(): void {}`;
88 | await fs.writeFile(join(projectPath, "src", "load-test.ts"), sourceCode);
89 |
90 | await detector.createSnapshot(projectPath, docsPath);
91 |
92 | const loaded = await detector.loadLatestSnapshot();
93 |
94 | expect(loaded).toBeDefined();
95 | expect(loaded?.projectPath).toBe(projectPath);
96 | });
97 | });
98 |
99 | describe("Drift Detection", () => {
100 | test("should detect when function signature changes", async () => {
101 | // Create initial version
102 | const oldCode = `
103 | export function processData(data: string): void {
104 | console.log(data);
105 | }
106 | `.trim();
107 |
108 | await fs.writeFile(join(projectPath, "src", "processor.ts"), oldCode);
109 |
110 | const oldDoc = `
111 | # Processor
112 |
113 | ## processData(data: string): void
114 |
115 | Processes string data.
116 | `.trim();
117 |
118 | await fs.writeFile(join(docsPath, "processor.md"), oldDoc);
119 |
120 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
121 |
122 | // Modify function signature
123 | const newCode = `
124 | export function processData(data: string, options: object): Promise<string> {
125 | console.log(data, options);
126 | return Promise.resolve("done");
127 | }
128 | `.trim();
129 |
130 | await fs.writeFile(join(projectPath, "src", "processor.ts"), newCode);
131 |
132 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
133 |
134 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
135 |
136 | expect(drifts.length).toBeGreaterThan(0);
137 |
138 | const processorDrift = drifts.find((d) =>
139 | d.filePath.includes("processor.ts"),
140 | );
141 |
142 | expect(processorDrift).toBeDefined();
143 | expect(processorDrift?.hasDrift).toBe(true);
144 | expect(processorDrift?.drifts.length).toBeGreaterThan(0);
145 | });
146 |
147 | test("should detect when functions are removed", async () => {
148 | // Initial code with two functions
149 | const oldCode = `
150 | export function keepMe(): void {}
151 | export function removeMe(): void {}
152 | `.trim();
153 |
154 | await fs.writeFile(join(projectPath, "src", "removal.ts"), oldCode);
155 |
156 | const oldDoc = `
157 | # Functions
158 |
159 | ## keepMe
160 | ## removeMe
161 | `.trim();
162 |
163 | await fs.writeFile(join(docsPath, "removal.md"), oldDoc);
164 |
165 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
166 |
167 | // Remove one function
168 | const newCode = `
169 | export function keepMe(): void {}
170 | `.trim();
171 |
172 | await fs.writeFile(join(projectPath, "src", "removal.ts"), newCode);
173 |
174 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
175 |
176 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
177 |
178 | const removalDrift = drifts.find((d) =>
179 | d.filePath.includes("removal.ts"),
180 | );
181 |
182 | expect(removalDrift).toBeDefined();
183 | expect(
184 | removalDrift?.drifts.some((drift) => drift.type === "breaking"),
185 | ).toBe(true);
186 | });
187 |
188 | test("should detect when new functions are added", async () => {
189 | const oldCode = `
190 | export function existing(): void {}
191 | `.trim();
192 |
193 | await fs.writeFile(join(projectPath, "src", "addition.ts"), oldCode);
194 |
195 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
196 |
197 | const newCode = `
198 | export function existing(): void {}
199 | export function newFunction(): void {}
200 | `.trim();
201 |
202 | await fs.writeFile(join(projectPath, "src", "addition.ts"), newCode);
203 |
204 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
205 |
206 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
207 |
208 | const additionDrift = drifts.find((d) =>
209 | d.filePath.includes("addition.ts"),
210 | );
211 |
212 | expect(additionDrift).toBeDefined();
213 | expect(
214 | additionDrift?.drifts.some((drift) => drift.type === "missing"),
215 | ).toBe(true);
216 | });
217 |
218 | test("should classify drift severity correctly", async () => {
219 | // Breaking change
220 | const oldCode = `
221 | export function criticalFunction(param: string): void {}
222 | `.trim();
223 |
224 | await fs.writeFile(join(projectPath, "src", "severity.ts"), oldCode);
225 |
226 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
227 |
228 | // Remove exported function - breaking change
229 | const newCode = `
230 | function criticalFunction(param: string): void {}
231 | `.trim();
232 |
233 | await fs.writeFile(join(projectPath, "src", "severity.ts"), newCode);
234 |
235 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
236 |
237 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
238 |
239 | const severityDrift = drifts.find((d) =>
240 | d.filePath.includes("severity.ts"),
241 | );
242 |
243 | expect(severityDrift).toBeDefined();
244 | expect(severityDrift?.severity).toBe("critical"); // Removing export is breaking
245 | });
246 | });
247 |
248 | describe("Suggestion Generation", () => {
249 | test("should generate suggestions for outdated documentation", async () => {
250 | const oldCode = `
251 | export function calculate(x: number): number {
252 | return x * 2;
253 | }
254 | `.trim();
255 |
256 | await fs.writeFile(join(projectPath, "src", "calc.ts"), oldCode);
257 |
258 | const oldDoc = `
259 | # Calculator
260 |
261 | ## calculate(x: number): number
262 |
263 | Doubles the input.
264 | `.trim();
265 |
266 | await fs.writeFile(join(docsPath, "calc.md"), oldDoc);
267 |
268 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
269 |
270 | // Change function signature
271 | const newCode = `
272 | export function calculate(x: number, y: number): number {
273 | return x * y;
274 | }
275 | `.trim();
276 |
277 | await fs.writeFile(join(projectPath, "src", "calc.ts"), newCode);
278 |
279 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
280 |
281 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
282 |
283 | const calcDrift = drifts.find((d) => d.filePath.includes("calc.ts"));
284 |
285 | expect(calcDrift).toBeDefined();
286 | expect(calcDrift?.suggestions.length).toBeGreaterThan(0);
287 |
288 | const suggestion = calcDrift?.suggestions[0];
289 | expect(suggestion).toBeDefined();
290 | expect(suggestion?.suggestedContent).toBeTruthy();
291 | expect(suggestion?.confidence).toBeGreaterThan(0);
292 | });
293 |
294 | test("should provide auto-applicable flag for safe changes", async () => {
295 | const oldCode = `
296 | export function simpleChange(a: number): number {
297 | return a;
298 | }
299 | `.trim();
300 |
301 | await fs.writeFile(join(projectPath, "src", "simple.ts"), oldCode);
302 |
303 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
304 |
305 | // Minor change
306 | const newCode = `
307 | export function simpleChange(a: number): number {
308 | return a * 2;
309 | }
310 | `.trim();
311 |
312 | await fs.writeFile(join(projectPath, "src", "simple.ts"), newCode);
313 |
314 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
315 |
316 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
317 |
318 | // Minor internal changes shouldn't require doc updates if signature is same
319 | const simpleDrift = drifts.find((d) => d.filePath.includes("simple.ts"));
320 |
321 | if (simpleDrift && simpleDrift.suggestions.length > 0) {
322 | const suggestion = simpleDrift.suggestions[0];
323 | expect(typeof suggestion.autoApplicable).toBe("boolean");
324 | }
325 | });
326 | });
327 |
328 | describe("Impact Analysis", () => {
329 | test("should analyze impact of changes", async () => {
330 | const oldCode = `
331 | export function breaking(): void {}
332 | export function major(): void {}
333 | `.trim();
334 |
335 | await fs.writeFile(join(projectPath, "src", "impact.ts"), oldCode);
336 |
337 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
338 |
339 | // Breaking change - remove function
340 | const newCode = `
341 | export function major(): void {}
342 | `.trim();
343 |
344 | await fs.writeFile(join(projectPath, "src", "impact.ts"), newCode);
345 |
346 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
347 |
348 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
349 |
350 | const impactDrift = drifts.find((d) => d.filePath.includes("impact.ts"));
351 |
352 | expect(impactDrift?.impactAnalysis).toBeDefined();
353 | expect(impactDrift?.impactAnalysis.breakingChanges).toBeGreaterThan(0);
354 | expect(impactDrift?.impactAnalysis.estimatedUpdateEffort).toBeDefined();
355 | });
356 |
357 | test("should identify affected documentation files", async () => {
358 | const code = `
359 | export function documented(): void {}
360 | `.trim();
361 |
362 | await fs.writeFile(join(projectPath, "src", "documented.ts"), code);
363 |
364 | const doc = `
365 | # Documentation
366 |
367 | \`documented()\` is a function.
368 | `.trim();
369 |
370 | await fs.writeFile(join(docsPath, "documented.md"), doc);
371 |
372 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
373 |
374 | // Change the function
375 | const newCode = `
376 | export function documented(param: string): void {}
377 | `.trim();
378 |
379 | await fs.writeFile(join(projectPath, "src", "documented.ts"), newCode);
380 |
381 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
382 |
383 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
384 |
385 | const docDrift = drifts.find((d) => d.filePath.includes("documented.ts"));
386 |
387 | expect(docDrift?.impactAnalysis.affectedDocFiles.length).toBeGreaterThan(
388 | 0,
389 | );
390 | });
391 | });
392 |
393 | describe("Edge Cases", () => {
394 | test("should handle no drift scenario", async () => {
395 | const code = `
396 | export function unchangedFunction(): void {}
397 | `.trim();
398 |
399 | await fs.writeFile(join(projectPath, "src", "unchanged.ts"), code);
400 |
401 | const snapshot1 = await detector.createSnapshot(projectPath, docsPath);
402 | const snapshot2 = await detector.createSnapshot(projectPath, docsPath);
403 |
404 | const drifts = await detector.detectDrift(snapshot1, snapshot2);
405 |
406 | // No changes should mean no drifts
407 | const unchangedDrift = drifts.find((d) =>
408 | d.filePath.includes("unchanged.ts"),
409 | );
410 |
411 | if (unchangedDrift) {
412 | expect(unchangedDrift.hasDrift).toBe(false);
413 | }
414 | });
415 |
416 | test("should handle missing documentation gracefully", async () => {
417 | const code = `
418 | export function undocumentedFunction(): void {}
419 | `.trim();
420 |
421 | await fs.writeFile(join(projectPath, "src", "undocumented.ts"), code);
422 |
423 | // Don't create documentation
424 | const snapshot = await detector.createSnapshot(projectPath, docsPath);
425 |
426 | expect(snapshot).toBeDefined();
427 | expect(snapshot.documentation.size).toBeGreaterThanOrEqual(0);
428 | });
429 |
430 | test("should handle new files correctly", async () => {
431 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
432 |
433 | // Add new file
434 | const newCode = `
435 | export function brandNew(): void {}
436 | `.trim();
437 |
438 | await fs.writeFile(join(projectPath, "src", "brand-new.ts"), newCode);
439 |
440 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
441 |
442 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
443 |
444 | // New files might not show as drift if they have no corresponding docs
445 | expect(Array.isArray(drifts)).toBe(true);
446 | });
447 | });
448 |
449 | describe("Documentation Section Extraction", () => {
450 | test("should extract documentation sections", async () => {
451 | const doc = `
452 | # Main Title
453 |
454 | This is the introduction.
455 |
456 | ## Section 1
457 |
458 | Content for section 1.
459 |
460 | \`\`\`typescript
461 | function example(): void {}
462 | \`\`\`
463 |
464 | ## Section 2
465 |
466 | Content for section 2.
467 | `.trim();
468 |
469 | await fs.writeFile(join(docsPath, "sections.md"), doc);
470 |
471 | const snapshot = await detector.createSnapshot(projectPath, docsPath);
472 |
473 | const docSnapshot = snapshot.documentation.get(
474 | join(docsPath, "sections.md"),
475 | );
476 |
477 | expect(docSnapshot).toBeDefined();
478 | expect(docSnapshot?.sections.length).toBeGreaterThan(0);
479 |
480 | const section1 = docSnapshot?.sections.find(
481 | (s) => s.title === "Section 1",
482 | );
483 | expect(section1).toBeDefined();
484 | expect(section1?.codeExamples.length).toBeGreaterThan(0);
485 | });
486 |
487 | test("should extract code references from documentation", async () => {
488 | const doc = `
489 | # API Reference
490 |
491 | See \`calculateSum()\` for details.
492 |
493 | The function is in \`src/math.ts\`.
494 |
495 | Check out the \`MathUtils\` class.
496 | `.trim();
497 |
498 | await fs.writeFile(join(docsPath, "references.md"), doc);
499 |
500 | const snapshot = await detector.createSnapshot(projectPath, docsPath);
501 |
502 | const docSnapshot = snapshot.documentation.get(
503 | join(docsPath, "references.md"),
504 | );
505 |
506 | expect(docSnapshot).toBeDefined();
507 |
508 | const section = docSnapshot?.sections[0];
509 | expect(section?.referencedFunctions.length).toBeGreaterThan(0);
510 | });
511 | });
512 |
513 | describe("Suggestion Generation Helper Methods", () => {
514 | test("should generate removal suggestion with deprecation notice", async () => {
515 | const oldCode = `
516 | export function deprecatedFunc(x: number): number {
517 | return x;
518 | }
519 | `.trim();
520 |
521 | await fs.writeFile(join(projectPath, "src", "deprecated.ts"), oldCode);
522 |
523 | const oldDoc = `
524 | # API
525 |
526 | ## deprecatedFunc(x: number): number
527 |
528 | This function does something.
529 | `.trim();
530 |
531 | await fs.writeFile(join(docsPath, "deprecated.md"), oldDoc);
532 |
533 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
534 |
535 | // Remove the function
536 | const newCode = `// Function removed`;
537 |
538 | await fs.writeFile(join(projectPath, "src", "deprecated.ts"), newCode);
539 |
540 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
541 |
542 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
543 | const deprecatedDrift = drifts.find((d) =>
544 | d.filePath.includes("deprecated.ts"),
545 | );
546 |
547 | expect(deprecatedDrift).toBeDefined();
548 | expect(deprecatedDrift?.suggestions.length).toBeGreaterThan(0);
549 |
550 | const suggestion = deprecatedDrift?.suggestions[0];
551 | expect(suggestion?.suggestedContent).toContain("removed");
552 | expect(suggestion?.suggestedContent).toContain("Note");
553 | });
554 |
555 | test("should generate addition suggestion with code signature", async () => {
556 | const oldCode = `export function existing(): void {}`;
557 |
558 | await fs.writeFile(join(projectPath, "src", "additions.ts"), oldCode);
559 |
560 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
561 |
562 | // Add new function
563 | const newCode = `
564 | export function existing(): void {}
565 | export function newAddedFunc(a: number, b: string): boolean {
566 | return true;
567 | }
568 | `.trim();
569 |
570 | await fs.writeFile(join(projectPath, "src", "additions.ts"), newCode);
571 |
572 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
573 |
574 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
575 | const additionDrift = drifts.find((d) =>
576 | d.filePath.includes("additions.ts"),
577 | );
578 |
579 | expect(additionDrift?.drifts.some((d) => d.type === "missing")).toBe(
580 | true,
581 | );
582 | });
583 |
584 | test("should generate modification suggestion with signature update", async () => {
585 | const oldCode = `
586 | export function modifyMe(x: number): number {
587 | return x;
588 | }
589 | `.trim();
590 |
591 | await fs.writeFile(join(projectPath, "src", "modify.ts"), oldCode);
592 |
593 | const oldDoc = `
594 | # API
595 |
596 | ## modifyMe(x: number): number
597 |
598 | Returns the input number.
599 | `.trim();
600 |
601 | await fs.writeFile(join(docsPath, "modify.md"), oldDoc);
602 |
603 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
604 |
605 | // Modify the function signature
606 | const newCode = `
607 | export function modifyMe(x: number, y: number): number {
608 | return x + y;
609 | }
610 | `.trim();
611 |
612 | await fs.writeFile(join(projectPath, "src", "modify.ts"), newCode);
613 |
614 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
615 |
616 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
617 | const modifyDrift = drifts.find((d) => d.filePath.includes("modify.ts"));
618 |
619 | expect(modifyDrift).toBeDefined();
620 | expect(modifyDrift?.suggestions.length).toBeGreaterThan(0);
621 |
622 | const suggestion = modifyDrift?.suggestions[0];
623 | expect(suggestion?.suggestedContent).toBeTruthy();
624 | });
625 |
626 | test("should set auto-applicable flag correctly for safe changes", async () => {
627 | const oldCode = `
628 | export function safeChange(x: number): number {
629 | return x;
630 | }
631 | `.trim();
632 |
633 | await fs.writeFile(join(projectPath, "src", "safe.ts"), oldCode);
634 |
635 | const oldDoc = `
636 | # API
637 |
638 | ## safeChange(x: number): number
639 | `.trim();
640 |
641 | await fs.writeFile(join(docsPath, "safe.md"), oldDoc);
642 |
643 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
644 |
645 | // Internal implementation change (patch level)
646 | const newCode = `
647 | export function safeChange(x: number): number {
648 | return x * 2; // Changed implementation but not signature
649 | }
650 | `.trim();
651 |
652 | await fs.writeFile(join(projectPath, "src", "safe.ts"), newCode);
653 |
654 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
655 |
656 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
657 |
658 | // If there are any drifts, check their suggestions
659 | if (drifts.length > 0) {
660 | const safeDrift = drifts.find((d) => d.filePath.includes("safe.ts"));
661 | if (safeDrift && safeDrift.suggestions.length > 0) {
662 | const suggestion = safeDrift.suggestions[0];
663 | expect(typeof suggestion.autoApplicable).toBe("boolean");
664 | }
665 | }
666 | });
667 | });
668 |
669 | describe("Comparison Helper Methods", () => {
670 | test("should correctly identify affected sections by function name", async () => {
671 | const code = `
672 | export function targetFunc(): void {}
673 | export function otherFunc(): void {}
674 | `.trim();
675 |
676 | await fs.writeFile(join(projectPath, "src", "target.ts"), code);
677 |
678 | const doc = `
679 | # API
680 |
681 | See \`targetFunc()\` for details.
682 |
683 | ## targetFunc
684 |
685 | This documents the target function.
686 |
687 | ## otherFunc
688 |
689 | This documents another function.
690 | `.trim();
691 |
692 | await fs.writeFile(join(docsPath, "target.md"), doc);
693 |
694 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
695 |
696 | // Modify only targetFunc
697 | const newCode = `
698 | export function targetFunc(param: string): void {}
699 | export function otherFunc(): void {}
700 | `.trim();
701 |
702 | await fs.writeFile(join(projectPath, "src", "target.ts"), newCode);
703 |
704 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
705 |
706 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
707 | const targetDrift = drifts.find((d) => d.filePath.includes("target.ts"));
708 |
709 | expect(targetDrift).toBeDefined();
710 | // Drift was detected, and impact analysis was performed
711 | expect(targetDrift?.impactAnalysis).toBeDefined();
712 | expect(
713 | targetDrift?.impactAnalysis.affectedDocFiles.length,
714 | ).toBeGreaterThanOrEqual(0);
715 | });
716 |
717 | test("should correctly classify drift types", async () => {
718 | const oldCode = `
719 | export function removedFunc(): void {}
720 | export function modifiedFunc(): void {}
721 | `.trim();
722 |
723 | await fs.writeFile(join(projectPath, "src", "classify.ts"), oldCode);
724 |
725 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
726 |
727 | // Remove one function, keep the other unchanged
728 | const newCode = `
729 | export function modifiedFunc(): void {}
730 | `.trim();
731 |
732 | await fs.writeFile(join(projectPath, "src", "classify.ts"), newCode);
733 |
734 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
735 |
736 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
737 | const classifyDrift = drifts.find((d) =>
738 | d.filePath.includes("classify.ts"),
739 | );
740 |
741 | expect(classifyDrift).toBeDefined();
742 | expect(classifyDrift?.drifts.length).toBeGreaterThan(0);
743 |
744 | // Verify drift types are correctly classified
745 | const driftTypes = classifyDrift?.drifts.map((d) => d.type) || [];
746 | expect(driftTypes.length).toBeGreaterThan(0);
747 |
748 | // Should have breaking or incorrect drift for removed function
749 | const hasRemovalDrift = classifyDrift?.drifts.some(
750 | (d) => d.type === "breaking" || d.type === "incorrect",
751 | );
752 | expect(hasRemovalDrift).toBe(true);
753 | });
754 |
755 | test("should map impact levels to severity correctly", async () => {
756 | const oldCode = `
757 | export function critical(): void {}
758 | export function major(): void {}
759 | export function minor(): void {}
760 | `.trim();
761 |
762 | await fs.writeFile(join(projectPath, "src", "severity-map.ts"), oldCode);
763 |
764 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
765 |
766 | // Breaking change
767 | const newCode = `
768 | export function major(): void {}
769 | export function minor(): void {}
770 | `.trim();
771 |
772 | await fs.writeFile(join(projectPath, "src", "severity-map.ts"), newCode);
773 |
774 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
775 |
776 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
777 | const severityDrift = drifts.find((d) =>
778 | d.filePath.includes("severity-map.ts"),
779 | );
780 |
781 | expect(severityDrift).toBeDefined();
782 | expect(severityDrift?.severity).toBe("critical");
783 | });
784 |
785 | test("should estimate update effort based on drift count", async () => {
786 | const oldCode = `
787 | export function func1(): void {}
788 | export function func2(): void {}
789 | export function func3(): void {}
790 | export function func4(): void {}
791 | export function func5(): void {}
792 | `.trim();
793 |
794 | await fs.writeFile(join(projectPath, "src", "effort.ts"), oldCode);
795 |
796 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
797 |
798 | // Remove multiple functions - high effort
799 | const newCode = `
800 | export function func5(): void {}
801 | `.trim();
802 |
803 | await fs.writeFile(join(projectPath, "src", "effort.ts"), newCode);
804 |
805 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
806 |
807 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
808 | const effortDrift = drifts.find((d) => d.filePath.includes("effort.ts"));
809 |
810 | expect(effortDrift).toBeDefined();
811 | expect(effortDrift?.impactAnalysis.estimatedUpdateEffort).toBeDefined();
812 | expect(
813 | ["low", "medium", "high"].includes(
814 | effortDrift!.impactAnalysis.estimatedUpdateEffort,
815 | ),
816 | ).toBe(true);
817 | });
818 |
819 | test("should calculate overall severity from multiple drifts", async () => {
820 | const oldCode = `
821 | export function criticalChange(): void {}
822 | export function minorChange(): void {}
823 | `.trim();
824 |
825 | await fs.writeFile(
826 | join(projectPath, "src", "overall-severity.ts"),
827 | oldCode,
828 | );
829 |
830 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
831 |
832 | // Breaking change dominates
833 | const newCode = `
834 | export function minorChange(x: number): void {}
835 | `.trim();
836 |
837 | await fs.writeFile(
838 | join(projectPath, "src", "overall-severity.ts"),
839 | newCode,
840 | );
841 |
842 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
843 |
844 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
845 | const overallDrift = drifts.find((d) =>
846 | d.filePath.includes("overall-severity.ts"),
847 | );
848 |
849 | expect(overallDrift).toBeDefined();
850 | expect(
851 | ["none", "low", "medium", "high", "critical"].includes(
852 | overallDrift!.severity,
853 | ),
854 | ).toBe(true);
855 | });
856 |
857 | test("should handle multiple documentation files referencing same code", async () => {
858 | const code = `
859 | export function sharedFunc(): void {}
860 | `.trim();
861 |
862 | await fs.writeFile(join(projectPath, "src", "shared.ts"), code);
863 |
864 | const doc1 = `
865 | # Guide 1
866 |
867 | See \`sharedFunc()\` for details.
868 | `.trim();
869 |
870 | const doc2 = `
871 | # Guide 2
872 |
873 | Also uses \`sharedFunc()\`.
874 | `.trim();
875 |
876 | await fs.writeFile(join(docsPath, "guide1.md"), doc1);
877 | await fs.writeFile(join(docsPath, "guide2.md"), doc2);
878 |
879 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
880 |
881 | // Change the shared function
882 | const newCode = `
883 | export function sharedFunc(param: string): void {}
884 | `.trim();
885 |
886 | await fs.writeFile(join(projectPath, "src", "shared.ts"), newCode);
887 |
888 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
889 |
890 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
891 | const sharedDrift = drifts.find((d) => d.filePath.includes("shared.ts"));
892 |
893 | expect(sharedDrift).toBeDefined();
894 | // Should affect both documentation files
895 | expect(
896 | sharedDrift?.impactAnalysis.affectedDocFiles.length,
897 | ).toBeGreaterThanOrEqual(1);
898 | });
899 | });
900 |
901 | describe("Advanced Suggestion Generation", () => {
902 | test("should generate suggestions for added functions with signatures", async () => {
903 | const oldCode = `export function existing(): void {}`;
904 |
905 | await fs.writeFile(
906 | join(projectPath, "src", "added-with-sig.ts"),
907 | oldCode,
908 | );
909 |
910 | const oldDoc = `
911 | # API
912 |
913 | ## existing
914 |
915 | Existing function documentation.
916 | `.trim();
917 |
918 | await fs.writeFile(join(docsPath, "added-with-sig.md"), oldDoc);
919 |
920 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
921 |
922 | // Add new function with signature
923 | const newCode = `
924 | export function existing(): void {}
925 | export async function newFunction(param: string, count: number): Promise<boolean> {
926 | return true;
927 | }
928 | `.trim();
929 |
930 | await fs.writeFile(
931 | join(projectPath, "src", "added-with-sig.ts"),
932 | newCode,
933 | );
934 |
935 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
936 |
937 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
938 | const addedDrift = drifts.find((d) =>
939 | d.filePath.includes("added-with-sig.ts"),
940 | );
941 |
942 | expect(addedDrift).toBeDefined();
943 | expect(addedDrift?.drifts.some((d) => d.type === "missing")).toBe(true);
944 |
945 | // Should detect the added function
946 | const hasAddedFunction = addedDrift?.drifts.some((d) =>
947 | d.codeChanges.some((c) => c.name === "newFunction"),
948 | );
949 | expect(hasAddedFunction).toBe(true);
950 | });
951 |
952 | test("should handle class changes in suggestions", async () => {
953 | const oldCode = `
954 | export class OldClass {
955 | method(): void {}
956 | }
957 | `.trim();
958 |
959 | await fs.writeFile(join(projectPath, "src", "class-change.ts"), oldCode);
960 |
961 | const oldDoc = `
962 | # Classes
963 |
964 | ## OldClass
965 |
966 | Documentation for OldClass.
967 | `.trim();
968 |
969 | await fs.writeFile(join(docsPath, "class-change.md"), oldDoc);
970 |
971 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
972 |
973 | // Modify class
974 | const newCode = `
975 | export class OldClass {
976 | method(): void {}
977 | newMethod(): void {}
978 | }
979 | `.trim();
980 |
981 | await fs.writeFile(join(projectPath, "src", "class-change.ts"), newCode);
982 |
983 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
984 |
985 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
986 |
987 | expect(drifts.length).toBeGreaterThanOrEqual(0);
988 | });
989 |
990 | test("should handle interface changes in suggestions", async () => {
991 | const oldCode = `
992 | export interface UserInterface {
993 | id: string;
994 | }
995 | `.trim();
996 |
997 | await fs.writeFile(
998 | join(projectPath, "src", "interface-change.ts"),
999 | oldCode,
1000 | );
1001 |
1002 | const oldDoc = `
1003 | # Interfaces
1004 |
1005 | ## UserInterface
1006 |
1007 | The UserInterface interface.
1008 | `.trim();
1009 |
1010 | await fs.writeFile(join(docsPath, "interface-change.md"), oldDoc);
1011 |
1012 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
1013 |
1014 | // Modify interface
1015 | const newCode = `
1016 | export interface UserInterface {
1017 | id: string;
1018 | name: string;
1019 | }
1020 | `.trim();
1021 |
1022 | await fs.writeFile(
1023 | join(projectPath, "src", "interface-change.ts"),
1024 | newCode,
1025 | );
1026 |
1027 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
1028 |
1029 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
1030 |
1031 | expect(drifts.length).toBeGreaterThanOrEqual(0);
1032 | });
1033 |
1034 | test("should handle type alias changes in suggestions", async () => {
1035 | const oldCode = `
1036 | export type Status = "active" | "inactive";
1037 | `.trim();
1038 |
1039 | await fs.writeFile(join(projectPath, "src", "type-change.ts"), oldCode);
1040 |
1041 | const oldDoc = `
1042 | # Types
1043 |
1044 | ## Status
1045 |
1046 | The Status type.
1047 | `.trim();
1048 |
1049 | await fs.writeFile(join(docsPath, "type-change.md"), oldDoc);
1050 |
1051 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
1052 |
1053 | // Modify type
1054 | const newCode = `
1055 | export type Status = "active" | "inactive" | "pending";
1056 | `.trim();
1057 |
1058 | await fs.writeFile(join(projectPath, "src", "type-change.ts"), newCode);
1059 |
1060 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
1061 |
1062 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
1063 |
1064 | expect(drifts.length).toBeGreaterThanOrEqual(0);
1065 | });
1066 |
1067 | test("should detect documentation referencing classes", async () => {
1068 | const code = `
1069 | export class DocumentedClass {
1070 | public property: string;
1071 | constructor(prop: string) {
1072 | this.property = prop;
1073 | }
1074 | }
1075 | `.trim();
1076 |
1077 | await fs.writeFile(join(projectPath, "src", "doc-class.ts"), code);
1078 |
1079 | const doc = `
1080 | # Classes
1081 |
1082 | See the \`DocumentedClass\` for details.
1083 |
1084 | ## DocumentedClass
1085 |
1086 | This class does something important.
1087 | `.trim();
1088 |
1089 | await fs.writeFile(join(docsPath, "doc-class.md"), doc);
1090 |
1091 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
1092 |
1093 | // Modify class
1094 | const newCode = `
1095 | export class DocumentedClass {
1096 | public property: string;
1097 | public newProperty: number;
1098 | constructor(prop: string, num: number) {
1099 | this.property = prop;
1100 | this.newProperty = num;
1101 | }
1102 | }
1103 | `.trim();
1104 |
1105 | await fs.writeFile(join(projectPath, "src", "doc-class.ts"), newCode);
1106 |
1107 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
1108 |
1109 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
1110 | const classDrift = drifts.find((d) =>
1111 | d.filePath.includes("doc-class.ts"),
1112 | );
1113 |
1114 | // Check that affected docs were identified
1115 | if (classDrift && classDrift.hasDrift) {
1116 | expect(classDrift.impactAnalysis).toBeDefined();
1117 | }
1118 | });
1119 |
1120 | test("should detect documentation referencing types", async () => {
1121 | const code = `
1122 | export type ConfigType = {
1123 | apiKey: string;
1124 | timeout: number;
1125 | };
1126 | `.trim();
1127 |
1128 | await fs.writeFile(join(projectPath, "src", "doc-type.ts"), code);
1129 |
1130 | const doc = `
1131 | # Configuration
1132 |
1133 | The \`ConfigType\` defines configuration options.
1134 | `.trim();
1135 |
1136 | await fs.writeFile(join(docsPath, "doc-type.md"), doc);
1137 |
1138 | const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
1139 |
1140 | // Modify type
1141 | const newCode = `
1142 | export type ConfigType = {
1143 | apiKey: string;
1144 | timeout: number;
1145 | retries: number;
1146 | };
1147 | `.trim();
1148 |
1149 | await fs.writeFile(join(projectPath, "src", "doc-type.ts"), newCode);
1150 |
1151 | const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
1152 |
1153 | const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
1154 | const typeDrift = drifts.find((d) => d.filePath.includes("doc-type.ts"));
1155 |
1156 | if (typeDrift && typeDrift.hasDrift) {
1157 | expect(typeDrift.impactAnalysis).toBeDefined();
1158 | }
1159 | });
1160 | });
1161 | });
1162 |
```
--------------------------------------------------------------------------------
/src/memory/knowledge-graph.ts:
--------------------------------------------------------------------------------
```typescript
1 | /**
2 | * Knowledge Graph Architecture for DocuMCP
3 | * Implements Phase 1.1: Enhanced Knowledge Graph Schema Implementation
4 | * Previously: Issue #48: Knowledge Graph Architecture
5 | *
6 | * Creates entity relationship graphs for projects, technologies, patterns, and dependencies
7 | * to enable advanced reasoning and recommendation improvements.
8 | *
9 | * Enhanced with comprehensive entity types and relationship schemas following NEW_PRD.md
10 | */
11 |
12 | import { MemoryManager } from "./manager.js";
13 | import { MemoryEntry } from "./storage.js";
14 | import {
15 | validateEntity,
16 | validateRelationship,
17 | SCHEMA_METADATA,
18 | } from "./schemas.js";
19 |
20 | export interface GraphNode {
21 | id: string;
22 | type:
23 | | "project"
24 | | "technology"
25 | | "pattern"
26 | | "user"
27 | | "outcome"
28 | | "recommendation"
29 | | "configuration"
30 | | "documentation"
31 | | "code_file"
32 | | "documentation_section"
33 | | "link_validation"
34 | | "sync_event"
35 | | "documentation_freshness_event"
36 | | "documentation_example"
37 | | "example_validation"
38 | | "call_graph";
39 | label: string;
40 | properties: Record<string, any>;
41 | weight: number;
42 | lastUpdated: string;
43 | }
44 |
45 | export interface GraphEdge {
46 | id: string;
47 | source: string;
48 | target: string;
49 | type:
50 | | "uses"
51 | | "similar_to"
52 | | "depends_on"
53 | | "recommends"
54 | | "results_in"
55 | | "created_by"
56 | | "project_uses_technology"
57 | | "user_prefers_ssg"
58 | | "project_deployed_with"
59 | | "documents"
60 | | "references"
61 | | "outdated_for"
62 | | "has_link_validation"
63 | | "requires_fix"
64 | | "project_has_freshness_event"
65 | | "has_example"
66 | | "validates"
67 | | "has_call_graph"
68 | | (string & NonNullable<unknown>); // Allow any string (for timestamped types like "project_deployed_with:2024-...")
69 | weight: number;
70 | properties: Record<string, any>;
71 | confidence: number;
72 | lastUpdated: string;
73 | }
74 |
75 | export interface GraphPath {
76 | nodes: GraphNode[];
77 | edges: GraphEdge[];
78 | totalWeight: number;
79 | confidence: number;
80 | }
81 |
82 | export interface GraphQuery {
83 | nodeTypes?: string[];
84 | edgeTypes?: string[];
85 | properties?: Record<string, any>;
86 | minWeight?: number;
87 | maxDepth?: number;
88 | startNode?: string;
89 | }
90 |
91 | export interface RecommendationPath {
92 | from: GraphNode;
93 | to: GraphNode;
94 | path: GraphPath;
95 | reasoning: string[];
96 | confidence: number;
97 | }
98 |
99 | export class KnowledgeGraph {
100 | private nodes: Map<string, GraphNode>;
101 | private edges: Map<string, GraphEdge>;
102 | private adjacencyList: Map<string, Set<string>>;
103 | private memoryManager: MemoryManager;
104 | private lastUpdate: string;
105 |
106 | constructor(memoryManager: MemoryManager) {
107 | this.nodes = new Map();
108 | this.edges = new Map();
109 | this.adjacencyList = new Map();
110 | this.memoryManager = memoryManager;
111 | this.lastUpdate = new Date().toISOString();
112 | }
113 |
114 | async initialize(): Promise<void> {
115 | await this.loadFromMemory();
116 | await this.buildFromMemories();
117 | }
118 |
119 | /**
120 | * Add or update a node in the knowledge graph
121 | */
122 | addNode(node: Omit<GraphNode, "lastUpdated">): GraphNode {
123 | const fullNode: GraphNode = {
124 | ...node,
125 | lastUpdated: new Date().toISOString(),
126 | };
127 |
128 | this.nodes.set(node.id, fullNode);
129 |
130 | if (!this.adjacencyList.has(node.id)) {
131 | this.adjacencyList.set(node.id, new Set());
132 | }
133 |
134 | return fullNode;
135 | }
136 |
137 | /**
138 | * Add or update an edge in the knowledge graph
139 | */
140 | addEdge(edge: Omit<GraphEdge, "id" | "lastUpdated">): GraphEdge {
141 | const edgeId = `${edge.source}-${edge.type}-${edge.target}`;
142 | const fullEdge: GraphEdge = {
143 | ...edge,
144 | id: edgeId,
145 | lastUpdated: new Date().toISOString(),
146 | };
147 |
148 | this.edges.set(edgeId, fullEdge);
149 |
150 | // Update adjacency list
151 | if (!this.adjacencyList.has(edge.source)) {
152 | this.adjacencyList.set(edge.source, new Set());
153 | }
154 | if (!this.adjacencyList.has(edge.target)) {
155 | this.adjacencyList.set(edge.target, new Set());
156 | }
157 |
158 | this.adjacencyList.get(edge.source)!.add(edge.target);
159 |
160 | return fullEdge;
161 | }
162 |
163 | /**
164 | * Build knowledge graph from memory entries
165 | */
166 | async buildFromMemories(): Promise<void> {
167 | const memories = await this.memoryManager.search("", {
168 | sortBy: "timestamp",
169 | });
170 |
171 | for (const memory of memories) {
172 | await this.processMemoryEntry(memory);
173 | }
174 |
175 | await this.computeRelationships();
176 | await this.updateWeights();
177 | }
178 |
179 | /**
180 | * Process a single memory entry to extract graph entities
181 | */
182 | private async processMemoryEntry(memory: MemoryEntry): Promise<void> {
183 | // Create project node
184 | if (memory.metadata.projectId) {
185 | const projectNode = this.addNode({
186 | id: `project:${memory.metadata.projectId}`,
187 | type: "project",
188 | label: memory.metadata.projectId,
189 | properties: {
190 | repository: memory.metadata.repository,
191 | lastActivity: memory.timestamp,
192 | },
193 | weight: 1.0,
194 | });
195 |
196 | // Create technology nodes
197 | if (memory.type === "analysis" && memory.data.language) {
198 | const langNode = this.addNode({
199 | id: `tech:${memory.data.language.primary}`,
200 | type: "technology",
201 | label: memory.data.language.primary,
202 | properties: {
203 | category: "language",
204 | popularity: this.getTechnologyPopularity(
205 | memory.data.language.primary,
206 | ),
207 | },
208 | weight: 1.0,
209 | });
210 |
211 | this.addEdge({
212 | source: projectNode.id,
213 | target: langNode.id,
214 | type: "uses",
215 | weight: 1.0,
216 | confidence: 0.9,
217 | properties: { source: "analysis" },
218 | });
219 | }
220 |
221 | // Create framework nodes
222 | if (memory.data.framework?.name) {
223 | const frameworkNode = this.addNode({
224 | id: `tech:${memory.data.framework.name}`,
225 | type: "technology",
226 | label: memory.data.framework.name,
227 | properties: {
228 | category: "framework",
229 | version: memory.data.framework.version,
230 | },
231 | weight: 1.0,
232 | });
233 |
234 | this.addEdge({
235 | source: projectNode.id,
236 | target: frameworkNode.id,
237 | type: "uses",
238 | weight: 1.0,
239 | confidence: 0.8,
240 | properties: { source: "analysis" },
241 | });
242 | }
243 |
244 | // Create SSG recommendation nodes
245 | if (memory.type === "recommendation" && memory.data.recommended) {
246 | const ssgNode = this.addNode({
247 | id: `tech:${memory.data.recommended}`,
248 | type: "technology",
249 | label: memory.data.recommended,
250 | properties: {
251 | category: "ssg",
252 | score: memory.data.score,
253 | },
254 | weight: 1.0,
255 | });
256 |
257 | this.addEdge({
258 | source: projectNode.id,
259 | target: ssgNode.id,
260 | type: "recommends",
261 | weight: memory.data.score || 1.0,
262 | confidence: memory.data.confidence || 0.5,
263 | properties: {
264 | source: "recommendation",
265 | reasoning: memory.data.reasoning,
266 | },
267 | });
268 | }
269 |
270 | // Create outcome nodes
271 | if (memory.type === "deployment") {
272 | const outcomeNode = this.addNode({
273 | id: `outcome:${memory.data.status}:${memory.metadata.ssg}`,
274 | type: "outcome",
275 | label: `${memory.data.status} with ${memory.metadata.ssg}`,
276 | properties: {
277 | status: memory.data.status,
278 | ssg: memory.metadata.ssg,
279 | duration: memory.data.duration,
280 | },
281 | weight: memory.data.status === "success" ? 1.0 : 0.5,
282 | });
283 |
284 | this.addEdge({
285 | source: projectNode.id,
286 | target: outcomeNode.id,
287 | type: "results_in",
288 | weight: 1.0,
289 | confidence: 1.0,
290 | properties: {
291 | timestamp: memory.timestamp,
292 | details: memory.data.details,
293 | },
294 | });
295 | }
296 | }
297 | }
298 |
299 | /**
300 | * Compute additional relationships based on patterns
301 | */
302 | private async computeRelationships(): Promise<void> {
303 | // Find similar projects
304 | await this.computeProjectSimilarity();
305 |
306 | // Find technology dependencies
307 | await this.computeTechnologyDependencies();
308 |
309 | // Find pattern relationships
310 | await this.computePatternRelationships();
311 | }
312 |
313 | /**
314 | * Compute project similarity relationships
315 | */
316 | private async computeProjectSimilarity(): Promise<void> {
317 | const projectNodes = Array.from(this.nodes.values()).filter(
318 | (node) => node.type === "project",
319 | );
320 |
321 | for (let i = 0; i < projectNodes.length; i++) {
322 | for (let j = i + 1; j < projectNodes.length; j++) {
323 | const similarity = this.calculateProjectSimilarity(
324 | projectNodes[i],
325 | projectNodes[j],
326 | );
327 |
328 | if (similarity > 0.7) {
329 | this.addEdge({
330 | source: projectNodes[i].id,
331 | target: projectNodes[j].id,
332 | type: "similar_to",
333 | weight: similarity,
334 | confidence: similarity,
335 | properties: {
336 | computed: true,
337 | similarityScore: similarity,
338 | },
339 | });
340 | }
341 | }
342 | }
343 | }
344 |
345 | /**
346 | * Calculate similarity between two projects
347 | */
348 | private calculateProjectSimilarity(
349 | project1: GraphNode,
350 | project2: GraphNode,
351 | ): number {
352 | const tech1 = this.getConnectedTechnologies(project1.id);
353 | const tech2 = this.getConnectedTechnologies(project2.id);
354 |
355 | if (tech1.size === 0 || tech2.size === 0) return 0;
356 |
357 | const intersection = new Set([...tech1].filter((x) => tech2.has(x)));
358 | const union = new Set([...tech1, ...tech2]);
359 |
360 | return intersection.size / union.size; // Jaccard similarity
361 | }
362 |
363 | /**
364 | * Get technologies connected to a project
365 | */
366 | private getConnectedTechnologies(projectId: string): Set<string> {
367 | const technologies = new Set<string>();
368 | const adjacents = this.adjacencyList.get(projectId) || new Set();
369 |
370 | for (const nodeId of adjacents) {
371 | const node = this.nodes.get(nodeId);
372 | if (node && node.type === "technology") {
373 | technologies.add(nodeId);
374 | }
375 | }
376 |
377 | return technologies;
378 | }
379 |
380 | /**
381 | * Compute technology dependency relationships
382 | */
383 | private async computeTechnologyDependencies(): Promise<void> {
384 | // Define known technology dependencies
385 | const dependencies = new Map([
386 | ["tech:react", ["tech:javascript", "tech:nodejs"]],
387 | ["tech:vue", ["tech:javascript", "tech:nodejs"]],
388 | ["tech:angular", ["tech:typescript", "tech:nodejs"]],
389 | ["tech:gatsby", ["tech:react", "tech:graphql"]],
390 | ["tech:next.js", ["tech:react", "tech:nodejs"]],
391 | ["tech:nuxt.js", ["tech:vue", "tech:nodejs"]],
392 | ["tech:docusaurus", ["tech:react", "tech:markdown"]],
393 | ["tech:jekyll", ["tech:ruby", "tech:markdown"]],
394 | ["tech:hugo", ["tech:go", "tech:markdown"]],
395 | ["tech:mkdocs", ["tech:python", "tech:markdown"]],
396 | ]);
397 |
398 | for (const [tech, deps] of dependencies) {
399 | for (const dep of deps) {
400 | const techNode = this.nodes.get(tech);
401 | const depNode = this.nodes.get(dep);
402 |
403 | if (techNode && depNode) {
404 | this.addEdge({
405 | source: tech,
406 | target: dep,
407 | type: "depends_on",
408 | weight: 0.8,
409 | confidence: 0.9,
410 | properties: {
411 | computed: true,
412 | dependency_type: "runtime",
413 | },
414 | });
415 | }
416 | }
417 | }
418 | }
419 |
420 | /**
421 | * Compute pattern relationships from successful combinations
422 | */
423 | private async computePatternRelationships(): Promise<void> {
424 | const successfulOutcomes = Array.from(this.nodes.values()).filter(
425 | (node) => node.type === "outcome" && node.properties.status === "success",
426 | );
427 |
428 | for (const outcome of successfulOutcomes) {
429 | // Find the path that led to this successful outcome
430 | const incomingEdges = Array.from(this.edges.values()).filter(
431 | (edge) => edge.target === outcome.id,
432 | );
433 |
434 | for (const edge of incomingEdges) {
435 | const sourceNode = this.nodes.get(edge.source);
436 | if (sourceNode && sourceNode.type === "project") {
437 | // Strengthen relationships for successful patterns
438 | this.strengthenSuccessPattern(sourceNode.id, outcome.properties.ssg);
439 | }
440 | }
441 | }
442 | }
443 |
444 | /**
445 | * Strengthen relationships for successful patterns
446 | */
447 | private strengthenSuccessPattern(projectId: string, ssg: string): void {
448 | const ssgNodeId = `tech:${ssg}`;
449 | const edgeId = `${projectId}-recommends-${ssgNodeId}`;
450 | const edge = this.edges.get(edgeId);
451 |
452 | if (edge) {
453 | edge.weight = Math.min(edge.weight * 1.2, 2.0);
454 | edge.confidence = Math.min(edge.confidence * 1.1, 1.0);
455 | }
456 | }
457 |
458 | /**
459 | * Update node and edge weights based on usage patterns
460 | */
461 | private async updateWeights(): Promise<void> {
462 | // Update node weights based on connections
463 | for (const node of this.nodes.values()) {
464 | const connections = this.adjacencyList.get(node.id)?.size || 0;
465 | node.weight = Math.log(connections + 1) / Math.log(10); // Logarithmic scaling
466 | }
467 |
468 | // Update edge weights based on frequency and success
469 | for (const edge of this.edges.values()) {
470 | if (edge.type === "recommends") {
471 | // Find successful outcomes for this recommendation
472 | const targetNode = this.nodes.get(edge.target);
473 | if (targetNode && targetNode.type === "technology") {
474 | const successRate = this.calculateSuccessRate(targetNode.id);
475 | edge.weight *= 1 + successRate;
476 | }
477 | }
478 | }
479 | }
480 |
481 | /**
482 | * Calculate success rate for a technology
483 | */
484 | private calculateSuccessRate(techId: string): number {
485 | const tech = techId.replace("tech:", "");
486 | const outcomes = Array.from(this.nodes.values()).filter(
487 | (node) => node.type === "outcome" && node.properties.ssg === tech,
488 | );
489 |
490 | if (outcomes.length === 0) return 0;
491 |
492 | const successes = outcomes.filter(
493 | (node) => node.properties.status === "success",
494 | ).length;
495 | return successes / outcomes.length;
496 | }
497 |
498 | /**
499 | * Find the shortest path between two nodes
500 | */
501 | findPath(
502 | sourceId: string,
503 | targetId: string,
504 | maxDepth: number = 5,
505 | ): GraphPath | null {
506 | const visited = new Set<string>();
507 | const queue: { nodeId: string; path: GraphPath }[] = [
508 | {
509 | nodeId: sourceId,
510 | path: {
511 | nodes: [this.nodes.get(sourceId)!],
512 | edges: [],
513 | totalWeight: 0,
514 | confidence: 1.0,
515 | },
516 | },
517 | ];
518 |
519 | while (queue.length > 0) {
520 | const current = queue.shift()!;
521 |
522 | if (current.nodeId === targetId) {
523 | return current.path;
524 | }
525 |
526 | if (current.path.nodes.length >= maxDepth) {
527 | continue;
528 | }
529 |
530 | visited.add(current.nodeId);
531 | const neighbors = this.adjacencyList.get(current.nodeId) || new Set();
532 |
533 | for (const neighborId of neighbors) {
534 | if (visited.has(neighborId)) continue;
535 |
536 | const edge = this.findEdge(current.nodeId, neighborId);
537 | const neighborNode = this.nodes.get(neighborId);
538 |
539 | if (edge && neighborNode) {
540 | const newPath: GraphPath = {
541 | nodes: [...current.path.nodes, neighborNode],
542 | edges: [...current.path.edges, edge],
543 | totalWeight: current.path.totalWeight + edge.weight,
544 | confidence: current.path.confidence * edge.confidence,
545 | };
546 |
547 | queue.push({ nodeId: neighborId, path: newPath });
548 | }
549 | }
550 | }
551 |
552 | return null;
553 | }
554 |
555 | /**
556 | * Find edge between two nodes
557 | */
558 | private findEdge(sourceId: string, targetId: string): GraphEdge | null {
559 | for (const edge of this.edges.values()) {
560 | if (edge.source === sourceId && edge.target === targetId) {
561 | return edge;
562 | }
563 | }
564 | return null;
565 | }
566 |
567 | /**
568 | * Query the knowledge graph
569 | */
570 | query(query: GraphQuery): {
571 | nodes: GraphNode[];
572 | edges: GraphEdge[];
573 | paths?: GraphPath[];
574 | } {
575 | let nodes = Array.from(this.nodes.values());
576 | let edges = Array.from(this.edges.values());
577 |
578 | // Filter by node types
579 | if (query.nodeTypes) {
580 | nodes = nodes.filter((node) => query.nodeTypes!.includes(node.type));
581 | }
582 |
583 | // Filter by edge types
584 | if (query.edgeTypes) {
585 | edges = edges.filter((edge) => query.edgeTypes!.includes(edge.type));
586 | }
587 |
588 | // Filter by properties
589 | if (query.properties) {
590 | nodes = nodes.filter((node) =>
591 | Object.entries(query.properties!).every(
592 | ([key, value]) => node.properties[key] === value,
593 | ),
594 | );
595 | }
596 |
597 | // Filter by minimum weight
598 | if (query.minWeight) {
599 | nodes = nodes.filter((node) => node.weight >= query.minWeight!);
600 | edges = edges.filter((edge) => edge.weight >= query.minWeight!);
601 | }
602 |
603 | const result = { nodes, edges };
604 |
605 | // Find paths from start node if specified
606 | if (query.startNode && query.maxDepth) {
607 | const paths: GraphPath[] = [];
608 | const visited = new Set<string>();
609 |
610 | const emptyPath: GraphPath = {
611 | nodes: [],
612 | edges: [],
613 | totalWeight: 0,
614 | confidence: 1.0,
615 | };
616 | this.explorePaths(
617 | query.startNode,
618 | emptyPath,
619 | paths,
620 | visited,
621 | query.maxDepth,
622 | );
623 | (result as any).paths = paths;
624 | }
625 |
626 | return result;
627 | }
628 |
629 | /**
630 | * Explore paths from a starting node
631 | */
632 | private explorePaths(
633 | nodeId: string,
634 | currentPath: GraphPath,
635 | allPaths: GraphPath[],
636 | visited: Set<string>,
637 | maxDepth: number,
638 | ): void {
639 | if (currentPath.nodes.length >= maxDepth) return;
640 |
641 | visited.add(nodeId);
642 | const neighbors = this.adjacencyList.get(nodeId) || new Set();
643 |
644 | for (const neighborId of neighbors) {
645 | if (visited.has(neighborId)) continue;
646 |
647 | const edge = this.findEdge(nodeId, neighborId);
648 | const neighborNode = this.nodes.get(neighborId);
649 |
650 | if (edge && neighborNode) {
651 | const newPath: GraphPath = {
652 | nodes: [...(currentPath.nodes || []), neighborNode],
653 | edges: [...(currentPath.edges || []), edge],
654 | totalWeight: (currentPath.totalWeight || 0) + edge.weight,
655 | confidence: (currentPath.confidence || 1.0) * edge.confidence,
656 | };
657 |
658 | allPaths.push(newPath);
659 | this.explorePaths(
660 | neighborId,
661 | newPath,
662 | allPaths,
663 | new Set(visited),
664 | maxDepth,
665 | );
666 | }
667 | }
668 | }
669 |
670 | /**
671 | * Get enhanced recommendations using knowledge graph
672 | */
673 | async getGraphBasedRecommendation(
674 | projectFeatures: any,
675 | candidateSSGs: string[],
676 | ): Promise<RecommendationPath[]> {
677 | const recommendations: RecommendationPath[] = [];
678 |
679 | // Create a temporary project node
680 | const tempProjectId = `temp:${Date.now()}`;
681 | const projectNode = this.addNode({
682 | id: tempProjectId,
683 | type: "project",
684 | label: "Query Project",
685 | properties: projectFeatures,
686 | weight: 1.0,
687 | });
688 |
689 | for (const ssg of candidateSSGs) {
690 | const ssgNodeId = `tech:${ssg}`;
691 | const ssgNode = this.nodes.get(ssgNodeId);
692 |
693 | if (ssgNode) {
694 | // Find paths from similar projects to this SSG
695 | const similarProjects = this.findSimilarProjects(projectFeatures);
696 |
697 | for (const similarProject of similarProjects) {
698 | const path = this.findPath(similarProject.id, ssgNodeId);
699 |
700 | if (path) {
701 | const reasoning = this.generateReasoning(path);
702 | const confidence = this.calculatePathConfidence(
703 | path,
704 | projectFeatures,
705 | );
706 |
707 | recommendations.push({
708 | from: projectNode,
709 | to: ssgNode,
710 | path,
711 | reasoning,
712 | confidence,
713 | });
714 | }
715 | }
716 | }
717 | }
718 |
719 | // Clean up temporary node
720 | this.nodes.delete(tempProjectId);
721 |
722 | return recommendations.sort((a, b) => b.confidence - a.confidence);
723 | }
724 |
725 | /**
726 | * Find projects similar to given features
727 | */
728 | private findSimilarProjects(features: any): GraphNode[] {
729 | const projectNodes = Array.from(this.nodes.values()).filter(
730 | (node) => node.type === "project",
731 | );
732 |
733 | return projectNodes
734 | .map((project) => ({
735 | project,
736 | similarity: this.calculateFeatureSimilarity(
737 | features,
738 | project.properties,
739 | ),
740 | }))
741 | .filter(({ similarity }) => similarity > 0.6)
742 | .sort((a, b) => b.similarity - a.similarity)
743 | .slice(0, 5)
744 | .map(({ project }) => project);
745 | }
746 |
747 | /**
748 | * Calculate similarity between features and project properties
749 | */
750 | private calculateFeatureSimilarity(features: any, properties: any): number {
751 | let score = 0;
752 | let factors = 0;
753 |
754 | if (features.language === properties.language) {
755 | score += 0.4;
756 | }
757 | factors++;
758 |
759 | if (features.framework === properties.framework) {
760 | score += 0.3;
761 | }
762 | factors++;
763 |
764 | if (features.size === properties.size) {
765 | score += 0.2;
766 | }
767 | factors++;
768 |
769 | if (features.complexity === properties.complexity) {
770 | score += 0.1;
771 | }
772 | factors++;
773 |
774 | return factors > 0 ? score / factors : 0;
775 | }
776 |
777 | /**
778 | * Generate human-readable reasoning for a recommendation path
779 | */
780 | private generateReasoning(path: GraphPath): string[] {
781 | const reasoning: string[] = [];
782 |
783 | for (let i = 0; i < path.edges.length; i++) {
784 | const edge = path.edges[i];
785 | const sourceNode = path.nodes[i];
786 | const targetNode = path.nodes[i + 1];
787 |
788 | switch (edge.type) {
789 | case "similar_to":
790 | reasoning.push(
791 | `Similar to ${sourceNode.label} (${(edge.confidence * 100).toFixed(
792 | 0,
793 | )}% similarity)`,
794 | );
795 | break;
796 | case "recommends":
797 | reasoning.push(
798 | `Successfully used ${
799 | targetNode.label
800 | } (score: ${edge.weight.toFixed(1)})`,
801 | );
802 | break;
803 | case "results_in":
804 | reasoning.push(
805 | `Resulted in ${targetNode.properties.status} deployment`,
806 | );
807 | break;
808 | case "uses":
809 | reasoning.push(`Uses ${targetNode.label}`);
810 | break;
811 | }
812 | }
813 |
814 | return reasoning;
815 | }
816 |
817 | /**
818 | * Calculate confidence for a recommendation path
819 | */
820 | private calculatePathConfidence(path: GraphPath, _features: any): number {
821 | let confidence = path.confidence;
822 |
823 | // Boost confidence for shorter paths
824 | confidence *= 1 / Math.max(path.edges.length, 1);
825 |
826 | // Boost confidence for recent data
827 | const avgAge =
828 | path.nodes.reduce((sum, node) => {
829 | const age = Date.now() - new Date(node.lastUpdated).getTime();
830 | return sum + age;
831 | }, 0) / path.nodes.length;
832 |
833 | const daysSinceUpdate = avgAge / (1000 * 60 * 60 * 24);
834 | confidence *= Math.exp(-daysSinceUpdate / 30); // Exponential decay over 30 days
835 |
836 | return Math.min(confidence, 1.0);
837 | }
838 |
839 | /**
840 | * Get technology popularity score
841 | */
842 | private getTechnologyPopularity(tech: string): number {
843 | // Simple popularity scoring - could be enhanced with real data
844 | const popularityMap = new Map([
845 | ["javascript", 0.9],
846 | ["typescript", 0.8],
847 | ["python", 0.8],
848 | ["react", 0.9],
849 | ["vue", 0.7],
850 | ["angular", 0.6],
851 | ["go", 0.7],
852 | ["ruby", 0.5],
853 | ["rust", 0.6],
854 | ]);
855 |
856 | return popularityMap.get(tech.toLowerCase()) || 0.3;
857 | }
858 |
859 | /**
860 | * Save knowledge graph to persistent memory
861 | */
862 | async saveToMemory(): Promise<void> {
863 | const graphData = {
864 | nodes: Array.from(this.nodes.entries()),
865 | edges: Array.from(this.edges.entries()),
866 | lastUpdate: this.lastUpdate,
867 | statistics: this.getStatistics(),
868 | };
869 |
870 | await this.memoryManager.remember(
871 | "interaction",
872 | {
873 | graph: graphData,
874 | type: "knowledge_graph",
875 | },
876 | {
877 | tags: ["knowledge_graph", "structure"],
878 | },
879 | );
880 | }
881 |
882 | /**
883 | * Load knowledge graph from persistent memory
884 | */
885 | async loadFromMemory(): Promise<void> {
886 | try {
887 | const graphMemories = await this.memoryManager.search("knowledge_graph");
888 |
889 | if (graphMemories.length > 0) {
890 | const latestGraph = graphMemories.sort(
891 | (a, b) =>
892 | new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime(),
893 | )[0];
894 |
895 | if (latestGraph.data.graph) {
896 | const { nodes, edges } = latestGraph.data.graph;
897 |
898 | // Restore nodes
899 | for (const [id, node] of nodes) {
900 | this.nodes.set(id, node);
901 | }
902 |
903 | // Restore edges and adjacency list
904 | for (const [id, edge] of edges) {
905 | this.edges.set(id, edge);
906 |
907 | if (!this.adjacencyList.has(edge.source)) {
908 | this.adjacencyList.set(edge.source, new Set());
909 | }
910 | this.adjacencyList.get(edge.source)!.add(edge.target);
911 | }
912 |
913 | this.lastUpdate = latestGraph.data.graph.lastUpdate;
914 | }
915 | }
916 | } catch (error) {
917 | console.error("Failed to load knowledge graph from memory:", error);
918 | }
919 | }
920 |
921 | /**
922 | * Get all nodes in the knowledge graph
923 | */
924 | async getAllNodes(): Promise<GraphNode[]> {
925 | return Array.from(this.nodes.values());
926 | }
927 |
928 | /**
929 | * Get all edges in the knowledge graph
930 | */
931 | async getAllEdges(): Promise<GraphEdge[]> {
932 | return Array.from(this.edges.values());
933 | }
934 |
935 | /**
936 | * Get a node by its ID
937 | */
938 | async getNodeById(nodeId: string): Promise<GraphNode | null> {
939 | return this.nodes.get(nodeId) || null;
940 | }
941 |
942 | /**
943 | * Remove a node from the knowledge graph
944 | */
945 | async removeNode(nodeId: string): Promise<boolean> {
946 | const node = this.nodes.get(nodeId);
947 | if (!node) {
948 | return false;
949 | }
950 |
951 | // Remove the node
952 | this.nodes.delete(nodeId);
953 |
954 | // Remove all edges connected to this node
955 | const edgesToRemove: string[] = [];
956 | for (const [edgeId, edge] of this.edges) {
957 | if (edge.source === nodeId || edge.target === nodeId) {
958 | edgesToRemove.push(edgeId);
959 | }
960 | }
961 |
962 | for (const edgeId of edgesToRemove) {
963 | this.edges.delete(edgeId);
964 | }
965 |
966 | // Update adjacency list
967 | this.adjacencyList.delete(nodeId);
968 | for (const [, targets] of this.adjacencyList) {
969 | targets.delete(nodeId);
970 | }
971 |
972 | return true;
973 | }
974 |
975 | /**
976 | * Get connections for a specific node
977 | */
978 | async getConnections(nodeId: string): Promise<string[]> {
979 | const connections = this.adjacencyList.get(nodeId);
980 | return connections ? Array.from(connections) : [];
981 | }
982 |
983 | /**
984 | * Get knowledge graph statistics
985 | */
986 | async getStatistics(): Promise<{
987 | nodeCount: number;
988 | edgeCount: number;
989 | nodesByType: Record<string, number>;
990 | edgesByType: Record<string, number>;
991 | averageConnectivity: number;
992 | mostConnectedNodes: Array<{ id: string; connections: number }>;
993 | }> {
994 | const nodesByType: Record<string, number> = {};
995 | const edgesByType: Record<string, number> = {};
996 |
997 | for (const node of this.nodes.values()) {
998 | nodesByType[node.type] = (nodesByType[node.type] || 0) + 1;
999 | }
1000 |
1001 | for (const edge of this.edges.values()) {
1002 | edgesByType[edge.type] = (edgesByType[edge.type] || 0) + 1;
1003 | }
1004 |
1005 | const connectivityCounts = Array.from(this.adjacencyList.entries())
1006 | .map(([id, connections]) => ({ id, connections: connections.size }))
1007 | .sort((a, b) => b.connections - a.connections);
1008 |
1009 | const averageConnectivity =
1010 | connectivityCounts.length > 0
1011 | ? connectivityCounts.reduce(
1012 | (sum, { connections }) => sum + connections,
1013 | 0,
1014 | ) / connectivityCounts.length
1015 | : 0;
1016 |
1017 | return {
1018 | nodeCount: this.nodes.size,
1019 | edgeCount: this.edges.size,
1020 | nodesByType,
1021 | edgesByType,
1022 | averageConnectivity,
1023 | mostConnectedNodes: connectivityCounts.slice(0, 10),
1024 | };
1025 | }
1026 |
1027 | // ============================================================================
1028 | // Phase 1.1: Enhanced Node Query Methods
1029 | // ============================================================================
1030 |
1031 | /**
1032 | * Find a single node matching criteria
1033 | */
1034 | async findNode(criteria: {
1035 | type?: string;
1036 | properties?: Record<string, any>;
1037 | }): Promise<GraphNode | null> {
1038 | for (const node of this.nodes.values()) {
1039 | if (criteria.type && node.type !== criteria.type) continue;
1040 |
1041 | if (criteria.properties) {
1042 | let matches = true;
1043 | for (const [key, value] of Object.entries(criteria.properties)) {
1044 | if (node.properties[key] !== value) {
1045 | matches = false;
1046 | break;
1047 | }
1048 | }
1049 | if (!matches) continue;
1050 | }
1051 |
1052 | return node;
1053 | }
1054 |
1055 | return null;
1056 | }
1057 |
1058 | /**
1059 | * Find all nodes matching criteria
1060 | */
1061 | async findNodes(criteria: {
1062 | type?: string;
1063 | properties?: Record<string, any>;
1064 | }): Promise<GraphNode[]> {
1065 | const results: GraphNode[] = [];
1066 |
1067 | for (const node of this.nodes.values()) {
1068 | if (criteria.type && node.type !== criteria.type) continue;
1069 |
1070 | if (criteria.properties) {
1071 | let matches = true;
1072 | for (const [key, value] of Object.entries(criteria.properties)) {
1073 | if (node.properties[key] !== value) {
1074 | matches = false;
1075 | break;
1076 | }
1077 | }
1078 | if (!matches) continue;
1079 | }
1080 |
1081 | results.push(node);
1082 | }
1083 |
1084 | return results;
1085 | }
1086 |
1087 | /**
1088 | * Find edges matching criteria
1089 | */
1090 | async findEdges(criteria: {
1091 | source?: string;
1092 | target?: string;
1093 | type?: string;
1094 | properties?: Record<string, any>;
1095 | }): Promise<GraphEdge[]> {
1096 | const results: GraphEdge[] = [];
1097 |
1098 | for (const edge of this.edges.values()) {
1099 | if (criteria.source && edge.source !== criteria.source) continue;
1100 | if (criteria.target && edge.target !== criteria.target) continue;
1101 | if (criteria.type && edge.type !== criteria.type) continue;
1102 |
1103 | // Match properties if provided
1104 | if (criteria.properties) {
1105 | let propertiesMatch = true;
1106 | for (const [key, value] of Object.entries(criteria.properties)) {
1107 | if (edge.properties[key] !== value) {
1108 | propertiesMatch = false;
1109 | break;
1110 | }
1111 | }
1112 | if (!propertiesMatch) continue;
1113 | }
1114 |
1115 | results.push(edge);
1116 | }
1117 |
1118 | return results;
1119 | }
1120 |
1121 | /**
1122 | * Find all paths between two nodes up to a maximum depth
1123 | */
1124 | async findPaths(criteria: {
1125 | startNode: string;
1126 | endNode?: string;
1127 | edgeTypes?: string[];
1128 | maxDepth: number;
1129 | }): Promise<GraphPath[]> {
1130 | const paths: GraphPath[] = [];
1131 | const visited = new Set<string>();
1132 |
1133 | const emptyPath: GraphPath = {
1134 | nodes: [this.nodes.get(criteria.startNode)!],
1135 | edges: [],
1136 | totalWeight: 0,
1137 | confidence: 1.0,
1138 | };
1139 |
1140 | this.findPathsRecursive(
1141 | criteria.startNode,
1142 | emptyPath,
1143 | paths,
1144 | visited,
1145 | criteria.maxDepth,
1146 | criteria.endNode,
1147 | criteria.edgeTypes,
1148 | );
1149 |
1150 | return paths;
1151 | }
1152 |
  /**
   * Recursive helper for finding paths.
   *
   * Depth-first walk from `currentNodeId`, extending `currentPath` one
   * edge at a time. When `endNode` is given, only paths terminating at
   * that node are collected and exploration stops there; otherwise every
   * intermediate path is recorded. Each recursive call receives its own
   * copy of `visited`, so sibling branches can revisit nodes
   * independently (at exponential cost in dense graphs).
   */
  private findPathsRecursive(
    currentNodeId: string,
    currentPath: GraphPath,
    allPaths: GraphPath[],
    visited: Set<string>,
    maxDepth: number,
    endNode?: string,
    edgeTypes?: string[],
  ): void {
    // Depth counts nodes, so a path holds at most maxDepth - 1 edges.
    if (currentPath.nodes.length >= maxDepth) return;

    visited.add(currentNodeId);
    const neighbors = this.adjacencyList.get(currentNodeId) || new Set();

    for (const neighborId of neighbors) {
      if (visited.has(neighborId)) continue;

      const edge = this.findEdge(currentNodeId, neighborId);
      if (!edge) continue;

      // Filter by edge type if specified
      if (edgeTypes && !edgeTypes.includes(edge.type)) continue;

      const neighborNode = this.nodes.get(neighborId);
      if (!neighborNode) continue;

      // Extend the path immutably so already-collected paths stay intact.
      const newPath: GraphPath = {
        nodes: [...currentPath.nodes, neighborNode],
        edges: [...currentPath.edges, edge],
        totalWeight: currentPath.totalWeight + edge.weight,
        confidence: currentPath.confidence * edge.confidence,
      };

      // If we've reached the end node, add this path (and stop here —
      // we do not explore past the target)
      if (endNode && neighborId === endNode) {
        allPaths.push(newPath);
        continue;
      }

      // If no end node specified, add all paths
      if (!endNode) {
        allPaths.push(newPath);
      }

      // Continue exploring with a fresh visited copy per branch
      this.findPathsRecursive(
        neighborId,
        newPath,
        allPaths,
        new Set(visited),
        maxDepth,
        endNode,
        edgeTypes,
      );
    }
  }
1212 |
1213 | /**
1214 | * Get node history (all changes to a node over time)
1215 | */
1216 | async getNodeHistory(nodeId: string): Promise<MemoryEntry[]> {
1217 | const node = this.nodes.get(nodeId);
1218 | if (!node) return [];
1219 |
1220 | // Search memory for all entries related to this node
1221 | const projectId = node.properties.projectId || node.properties.path;
1222 | if (!projectId) return [];
1223 |
1224 | return await this.memoryManager.search(projectId);
1225 | }
1226 |
1227 | /**
1228 | * Get schema version
1229 | */
1230 | getSchemaVersion(): string {
1231 | return SCHEMA_METADATA.version;
1232 | }
1233 |
1234 | /**
1235 | * Validate node against schema
1236 | */
1237 | validateNode(node: GraphNode): boolean {
1238 | try {
1239 | const entityData = {
1240 | ...node.properties,
1241 | type: node.type,
1242 | };
1243 | validateEntity(entityData);
1244 | return true;
1245 | } catch (error) {
1246 | console.error(`Node validation failed for ${node.id}:`, error);
1247 | return false;
1248 | }
1249 | }
1250 |
1251 | /**
1252 | * Validate edge against schema
1253 | */
1254 | validateEdge(edge: GraphEdge): boolean {
1255 | try {
1256 | const relationshipData = {
1257 | type: edge.type,
1258 | weight: edge.weight,
1259 | confidence: edge.confidence,
1260 | createdAt: edge.lastUpdated,
1261 | lastUpdated: edge.lastUpdated,
1262 | metadata: edge.properties,
1263 | ...edge.properties,
1264 | };
1265 | validateRelationship(relationshipData);
1266 | return true;
1267 | } catch (error) {
1268 | console.error(`Edge validation failed for ${edge.id}:`, error);
1269 | return false;
1270 | }
1271 | }
1272 | }
1273 |
1274 | export default KnowledgeGraph;
1275 |
```
--------------------------------------------------------------------------------
/tests/tools/evaluate-readme-health.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { evaluateReadmeHealth } from "../../src/tools/evaluate-readme-health.js";
2 | import { writeFile, mkdir, rm } from "fs/promises";
3 | import { join } from "path";
4 |
5 | describe("evaluateReadmeHealth", () => {
6 | const testDir = join(process.cwd(), "test-readme-temp");
7 |
8 | beforeEach(async () => {
9 | // Create test directory
10 | await mkdir(testDir, { recursive: true });
11 | });
12 |
13 | afterEach(async () => {
14 | // Clean up test directory
15 | try {
16 | await rm(testDir, { recursive: true, force: true });
17 | } catch (error) {
18 | // Ignore cleanup errors
19 | }
20 | });
21 |
22 | describe("Basic Functionality", () => {
23 | test("should evaluate README health with default parameters", async () => {
24 | const readmePath = join(testDir, "README.md");
25 | await writeFile(
26 | readmePath,
27 | `# Test Project
28 |
29 | ## Description
30 | This is a test project for evaluating README health.
31 |
32 | ## Installation
33 | \`\`\`bash
34 | npm install test-project
35 | \`\`\`
36 |
37 | ## Usage
38 | \`\`\`javascript
39 | const test = require('test-project');
40 | \`\`\`
41 |
42 | ## Contributing
43 | Please read CONTRIBUTING.md for details.
44 |
45 | ## License
46 | MIT License
47 | `,
48 | );
49 |
50 | const result = await evaluateReadmeHealth({
51 | readme_path: readmePath,
52 | });
53 |
54 | expect(result.content).toBeDefined();
55 | expect(result.content.length).toBeGreaterThan(0);
56 | expect(result.isError).toBe(false);
57 |
58 | // Check that it contains health report data
59 | const healthData = result.content.find((c) =>
60 | c.text.includes("healthReport"),
61 | );
62 | expect(healthData).toBeDefined();
63 | });
64 |
65 | test("should handle different project types", async () => {
66 | const readmePath = join(testDir, "README.md");
67 | await writeFile(
68 | readmePath,
69 | "# Enterprise Tool\n\nA professional enterprise tool.",
70 | );
71 |
72 | const result = await evaluateReadmeHealth({
73 | readme_path: readmePath,
74 | project_type: "enterprise_tool",
75 | });
76 |
77 | expect(result.content).toBeDefined();
78 | expect(result.isError).toBe(false);
79 | });
80 |
81 | test("should include repository context when provided", async () => {
82 | const readmePath = join(testDir, "README.md");
83 | await writeFile(readmePath, "# Project with Repo Context");
84 |
85 | // Create a simple repository structure
86 | await writeFile(join(testDir, "package.json"), '{"name": "test"}');
87 |
88 | const result = await evaluateReadmeHealth({
89 | readme_path: readmePath,
90 | repository_path: testDir,
91 | });
92 |
93 | expect(result.content).toBeDefined();
94 | expect(result.isError).toBe(false);
95 | });
96 | });
97 |
98 | describe("Error Handling", () => {
99 | test("should handle missing README file", async () => {
100 | const result = await evaluateReadmeHealth({
101 | readme_path: join(testDir, "nonexistent.md"),
102 | });
103 |
104 | expect(result.isError).toBe(true);
105 | expect(result.content[0].text).toContain(
106 | "Failed to evaluate README health",
107 | );
108 | });
109 |
110 | test("should handle invalid project type", async () => {
111 | const readmePath = join(testDir, "README.md");
112 | await writeFile(readmePath, "# Test");
113 |
114 | const result = await evaluateReadmeHealth({
115 | readme_path: readmePath,
116 | project_type: "invalid_type" as any,
117 | });
118 |
119 | expect(result.isError).toBe(true);
120 | });
121 | });
122 |
123 | describe("Health Report Structure", () => {
124 | test("should include all required health components", async () => {
125 | const readmePath = join(testDir, "README.md");
126 | await writeFile(
127 | readmePath,
128 | `# Complete Project
129 |
130 | ## Table of Contents
131 | - [Installation](#installation)
132 | - [Usage](#usage)
133 |
134 | ## Description
135 | Comprehensive project description here.
136 |
137 | ## Installation
138 | Installation instructions.
139 |
140 | ## Usage
141 | Usage examples.
142 |
143 | ## Contributing
144 | How to contribute.
145 |
146 | ## License
147 | MIT
148 | `,
149 | );
150 |
151 | const result = await evaluateReadmeHealth({
152 | readme_path: readmePath,
153 | });
154 |
155 | const dataContent = result.content.find((c) =>
156 | c.text.includes("healthReport"),
157 | );
158 | expect(dataContent).toBeDefined();
159 |
160 | const data = JSON.parse(dataContent!.text);
161 | expect(data.healthReport).toBeDefined();
162 | expect(data.healthReport.components).toBeDefined();
163 | expect(data.healthReport.components.communityHealth).toBeDefined();
164 | expect(data.healthReport.components.accessibility).toBeDefined();
165 | expect(data.healthReport.components.onboarding).toBeDefined();
166 | expect(data.healthReport.components.contentQuality).toBeDefined();
167 | });
168 |
169 | test("should provide grade and score", async () => {
170 | const readmePath = join(testDir, "README.md");
171 | await writeFile(readmePath, "# Basic Project\n\nMinimal content.");
172 |
173 | const result = await evaluateReadmeHealth({
174 | readme_path: readmePath,
175 | });
176 |
177 | const dataContent = result.content.find((c) =>
178 | c.text.includes("healthReport"),
179 | );
180 | const data = JSON.parse(dataContent!.text);
181 |
182 | expect(data.healthReport.overallScore).toBeGreaterThanOrEqual(0);
183 | expect(data.healthReport.overallScore).toBeLessThanOrEqual(100);
184 | expect(["A", "B", "C", "D", "F"]).toContain(data.healthReport.grade);
185 | });
186 |
187 | test("should include recommendations and next steps", async () => {
188 | const readmePath = join(testDir, "README.md");
189 | await writeFile(readmePath, "# Incomplete Project");
190 |
191 | const result = await evaluateReadmeHealth({
192 | readme_path: readmePath,
193 | });
194 |
195 | const dataContent = result.content.find((c) =>
196 | c.text.includes("recommendations"),
197 | );
198 | expect(dataContent).toBeDefined();
199 |
200 | const data = JSON.parse(dataContent!.text);
201 | expect(data.healthReport.recommendations).toBeDefined();
202 | expect(Array.isArray(data.healthReport.recommendations)).toBe(true);
203 | expect(data.nextSteps).toBeDefined();
204 | expect(Array.isArray(data.nextSteps)).toBe(true);
205 | });
206 | });
207 |
208 | describe("Response Format", () => {
209 | test("should return properly formatted MCP response", async () => {
210 | const readmePath = join(testDir, "README.md");
211 | await writeFile(readmePath, "# Test Project");
212 |
213 | const result = await evaluateReadmeHealth({
214 | readme_path: readmePath,
215 | });
216 |
217 | expect(result.content).toBeDefined();
218 | expect(Array.isArray(result.content)).toBe(true);
219 | expect(result.content.length).toBeGreaterThan(0);
220 |
221 | // Should include execution metadata
222 | const metadataContent = result.content.find((c) =>
223 | c.text.includes("Execution completed"),
224 | );
225 | expect(metadataContent).toBeDefined();
226 | });
227 | });
228 |
229 | describe("Repository Context Analysis", () => {
230 | test("should analyze repository context when path is provided", async () => {
231 | const readmePath = join(testDir, "README.md");
232 | await writeFile(readmePath, "# Project with Context");
233 |
234 | // Create repository files
235 | await writeFile(join(testDir, "CODE_OF_CONDUCT.md"), "# Code of Conduct");
236 | await writeFile(join(testDir, "CONTRIBUTING.md"), "# Contributing");
237 | await writeFile(join(testDir, "SECURITY.md"), "# Security Policy");
238 | await mkdir(join(testDir, ".github"), { recursive: true });
239 | await writeFile(join(testDir, "package.json"), '{"name": "test"}');
240 |
241 | const result = await evaluateReadmeHealth({
242 | readme_path: readmePath,
243 | repository_path: testDir,
244 | });
245 |
246 | expect(result.isError).toBe(false);
247 | const dataContent = result.content.find((c) =>
248 | c.text.includes("healthReport"),
249 | );
250 | expect(dataContent).toBeDefined();
251 | });
252 |
253 | test("should handle repository context analysis errors gracefully", async () => {
254 | const readmePath = join(testDir, "README.md");
255 | await writeFile(readmePath, "# Project");
256 |
257 | const result = await evaluateReadmeHealth({
258 | readme_path: readmePath,
259 | repository_path: "/nonexistent/path",
260 | });
261 |
262 | expect(result.isError).toBe(false); // Should not fail, just return null context
263 | });
264 | });
265 |
266 | describe("Community Health Evaluation", () => {
267 | test("should detect code of conduct references", async () => {
268 | const readmePath = join(testDir, "README.md");
269 | await writeFile(
270 | readmePath,
271 | `# Project
272 |
273 | Please read our [Code of Conduct](CODE_OF_CONDUCT.md) before contributing.
274 | `,
275 | );
276 |
277 | const result = await evaluateReadmeHealth({
278 | readme_path: readmePath,
279 | });
280 |
281 | const dataContent = result.content.find((c) =>
282 | c.text.includes("healthReport"),
283 | );
284 | const data = JSON.parse(dataContent!.text);
285 |
286 | const conductCheck =
287 | data.healthReport.components.communityHealth.details.find(
288 | (d: any) => d.check === "Code of Conduct linked",
289 | );
290 | expect(conductCheck.passed).toBe(true);
291 | expect(conductCheck.points).toBe(5);
292 | });
293 |
294 | test("should detect contributing guidelines", async () => {
295 | const readmePath = join(testDir, "README.md");
296 | await writeFile(
297 | readmePath,
298 | `# Project
299 |
300 | See [CONTRIBUTING.md](CONTRIBUTING.md) for contribution guidelines.
301 | `,
302 | );
303 |
304 | const result = await evaluateReadmeHealth({
305 | readme_path: readmePath,
306 | });
307 |
308 | const dataContent = result.content.find((c) =>
309 | c.text.includes("healthReport"),
310 | );
311 | const data = JSON.parse(dataContent!.text);
312 |
313 | const contributingCheck =
314 | data.healthReport.components.communityHealth.details.find(
315 | (d: any) => d.check === "Contributing guidelines visible",
316 | );
317 | expect(contributingCheck.passed).toBe(true);
318 | });
319 |
320 | test("should detect security policy references", async () => {
321 | const readmePath = join(testDir, "README.md");
322 | await writeFile(
323 | readmePath,
324 | `# Project
325 |
326 | Report security issues via our [Security Policy](SECURITY.md).
327 | `,
328 | );
329 |
330 | const result = await evaluateReadmeHealth({
331 | readme_path: readmePath,
332 | });
333 |
334 | const dataContent = result.content.find((c) =>
335 | c.text.includes("healthReport"),
336 | );
337 | const data = JSON.parse(dataContent!.text);
338 |
339 | const securityCheck =
340 | data.healthReport.components.communityHealth.details.find(
341 | (d: any) => d.check === "Security policy linked",
342 | );
343 | expect(securityCheck.passed).toBe(true);
344 | });
345 |
346 | test("should detect support channels", async () => {
347 | const readmePath = join(testDir, "README.md");
348 | await writeFile(
349 | readmePath,
350 | `# Project
351 |
352 | Join our Discord community for support and discussions.
353 | `,
354 | );
355 |
356 | const result = await evaluateReadmeHealth({
357 | readme_path: readmePath,
358 | });
359 |
360 | const dataContent = result.content.find((c) =>
361 | c.text.includes("healthReport"),
362 | );
363 | const data = JSON.parse(dataContent!.text);
364 |
365 | const supportCheck =
366 | data.healthReport.components.communityHealth.details.find(
367 | (d: any) => d.check === "Support channels provided",
368 | );
369 | expect(supportCheck.passed).toBe(true);
370 | });
371 | });
372 |
373 | describe("Accessibility Evaluation", () => {
374 | test("should detect proper spacing and structure", async () => {
375 | const readmePath = join(testDir, "README.md");
376 | await writeFile(
377 | readmePath,
378 | `# Project
379 |
380 | ## Description
381 |
382 | This is a well-structured README with proper spacing.
383 |
384 | ## Installation
385 |
386 | Instructions here.
387 |
388 | ## Usage
389 |
390 | Usage examples here.
391 |
392 | ## Contributing
393 |
394 | Contributing guidelines.
395 |
396 | ## License
397 |
398 | MIT License
399 | `,
400 | );
401 |
402 | const result = await evaluateReadmeHealth({
403 | readme_path: readmePath,
404 | });
405 |
406 | const dataContent = result.content.find((c) =>
407 | c.text.includes("healthReport"),
408 | );
409 | const data = JSON.parse(dataContent!.text);
410 |
411 | const spacingCheck =
412 | data.healthReport.components.accessibility.details.find(
413 | (d: any) => d.check === "Scannable structure with proper spacing",
414 | );
415 | expect(spacingCheck.passed).toBe(true);
416 | });
417 |
418 | test("should detect heading hierarchy", async () => {
419 | const readmePath = join(testDir, "README.md");
420 | await writeFile(
421 | readmePath,
422 | `# Main Title
423 |
424 | ## Section 1
425 |
426 | ### Subsection 1.1
427 |
428 | ## Section 2
429 |
430 | ### Subsection 2.1
431 | `,
432 | );
433 |
434 | const result = await evaluateReadmeHealth({
435 | readme_path: readmePath,
436 | });
437 |
438 | const dataContent = result.content.find((c) =>
439 | c.text.includes("healthReport"),
440 | );
441 | const data = JSON.parse(dataContent!.text);
442 |
443 | const headingCheck =
444 | data.healthReport.components.accessibility.details.find(
445 | (d: any) => d.check === "Clear heading hierarchy",
446 | );
447 | expect(headingCheck.passed).toBe(true);
448 | });
449 |
450 | test("should detect images with alt text", async () => {
451 | const readmePath = join(testDir, "README.md");
452 | await writeFile(
453 | readmePath,
454 | `# Project
455 |
456 | 
457 | 
458 | `,
459 | );
460 |
461 | const result = await evaluateReadmeHealth({
462 | readme_path: readmePath,
463 | });
464 |
465 | const dataContent = result.content.find((c) =>
466 | c.text.includes("healthReport"),
467 | );
468 | const data = JSON.parse(dataContent!.text);
469 |
470 | const altTextCheck =
471 | data.healthReport.components.accessibility.details.find(
472 | (d: any) => d.check === "Alt text for images",
473 | );
474 | expect(altTextCheck.passed).toBe(true);
475 | });
476 |
477 | test("should detect images without alt text", async () => {
478 | const readmePath = join(testDir, "README.md");
479 | await writeFile(
480 | readmePath,
481 | `# Project
482 |
483 | 
484 | `,
485 | );
486 |
487 | const result = await evaluateReadmeHealth({
488 | readme_path: readmePath,
489 | });
490 |
491 | const dataContent = result.content.find((c) =>
492 | c.text.includes("healthReport"),
493 | );
494 | const data = JSON.parse(dataContent!.text);
495 |
496 | const altTextCheck =
497 | data.healthReport.components.accessibility.details.find(
498 | (d: any) => d.check === "Alt text for images",
499 | );
500 | expect(altTextCheck.passed).toBe(false);
501 | });
502 |
503 | test("should detect inclusive language violations", async () => {
504 | const readmePath = join(testDir, "README.md");
505 | await writeFile(
506 | readmePath,
507 | `# Project
508 |
509 | Hey guys, this project uses a master branch and maintains a whitelist of contributors.
510 | `,
511 | );
512 |
513 | const result = await evaluateReadmeHealth({
514 | readme_path: readmePath,
515 | });
516 |
517 | const dataContent = result.content.find((c) =>
518 | c.text.includes("healthReport"),
519 | );
520 | const data = JSON.parse(dataContent!.text);
521 |
522 | const inclusiveCheck =
523 | data.healthReport.components.accessibility.details.find(
524 | (d: any) => d.check === "Inclusive language",
525 | );
526 | expect(inclusiveCheck.passed).toBe(false);
527 | });
528 |
529 | test("should pass inclusive language check with good content", async () => {
530 | const readmePath = join(testDir, "README.md");
531 | await writeFile(
532 | readmePath,
533 | `# Project
534 |
535 | Welcome team! This project uses the main branch and maintains an allowlist of contributors.
536 | `,
537 | );
538 |
539 | const result = await evaluateReadmeHealth({
540 | readme_path: readmePath,
541 | });
542 |
543 | const dataContent = result.content.find((c) =>
544 | c.text.includes("healthReport"),
545 | );
546 | const data = JSON.parse(dataContent!.text);
547 |
548 | const inclusiveCheck =
549 | data.healthReport.components.accessibility.details.find(
550 | (d: any) => d.check === "Inclusive language",
551 | );
552 | expect(inclusiveCheck.passed).toBe(true);
553 | });
554 | });
555 |
556 | describe("Onboarding Evaluation", () => {
557 | test("should detect quick start sections", async () => {
558 | const readmePath = join(testDir, "README.md");
559 | await writeFile(
560 | readmePath,
561 | `# Project
562 |
563 | ## Quick Start
564 |
565 | Get up and running in minutes!
566 | `,
567 | );
568 |
569 | const result = await evaluateReadmeHealth({
570 | readme_path: readmePath,
571 | });
572 |
573 | const dataContent = result.content.find((c) =>
574 | c.text.includes("healthReport"),
575 | );
576 | const data = JSON.parse(dataContent!.text);
577 |
578 | const quickStartCheck =
579 | data.healthReport.components.onboarding.details.find(
580 | (d: any) => d.check === "Quick start section",
581 | );
582 | expect(quickStartCheck.passed).toBe(true);
583 | });
584 |
585 | test("should detect prerequisites", async () => {
586 | const readmePath = join(testDir, "README.md");
587 | await writeFile(
588 | readmePath,
589 | `# Project
590 |
591 | ## Prerequisites
592 |
593 | - Node.js 16+
594 | - npm or yarn
595 | `,
596 | );
597 |
598 | const result = await evaluateReadmeHealth({
599 | readme_path: readmePath,
600 | });
601 |
602 | const dataContent = result.content.find((c) =>
603 | c.text.includes("healthReport"),
604 | );
605 | const data = JSON.parse(dataContent!.text);
606 |
607 | const prereqCheck = data.healthReport.components.onboarding.details.find(
608 | (d: any) => d.check === "Prerequisites clearly listed",
609 | );
610 | expect(prereqCheck.passed).toBe(true);
611 | });
612 |
613 | test("should detect first contribution guidance", async () => {
614 | const readmePath = join(testDir, "README.md");
615 | await writeFile(
616 | readmePath,
617 | `# Project
618 |
619 | ## For New Contributors
620 |
621 | Welcome first-time contributors! Here's how to get started.
622 | `,
623 | );
624 |
625 | const result = await evaluateReadmeHealth({
626 | readme_path: readmePath,
627 | });
628 |
629 | const dataContent = result.content.find((c) =>
630 | c.text.includes("healthReport"),
631 | );
632 | const data = JSON.parse(dataContent!.text);
633 |
634 | const firstContribCheck =
635 | data.healthReport.components.onboarding.details.find(
636 | (d: any) => d.check === "First contribution guide",
637 | );
638 | expect(firstContribCheck.passed).toBe(true);
639 | });
640 |
641 | test("should detect good first issues", async () => {
642 | const readmePath = join(testDir, "README.md");
643 | await writeFile(
644 | readmePath,
645 | `# Project
646 |
647 | Check out our good first issues for beginners!
648 | `,
649 | );
650 |
651 | const result = await evaluateReadmeHealth({
652 | readme_path: readmePath,
653 | });
654 |
655 | const dataContent = result.content.find((c) =>
656 | c.text.includes("healthReport"),
657 | );
658 | const data = JSON.parse(dataContent!.text);
659 |
660 | const goodFirstCheck =
661 | data.healthReport.components.onboarding.details.find(
662 | (d: any) => d.check === "Good first issues mentioned",
663 | );
664 | expect(goodFirstCheck.passed).toBe(true);
665 | });
666 | });
667 |
668 | describe("Content Quality Evaluation", () => {
669 | test("should evaluate adequate content length", async () => {
670 | const readmePath = join(testDir, "README.md");
671 | const content =
672 | "# Project\n\n" +
673 | "This is a well-sized README with adequate content. ".repeat(20);
674 | await writeFile(readmePath, content);
675 |
676 | const result = await evaluateReadmeHealth({
677 | readme_path: readmePath,
678 | });
679 |
680 | const dataContent = result.content.find((c) =>
681 | c.text.includes("healthReport"),
682 | );
683 | const data = JSON.parse(dataContent!.text);
684 |
685 | const lengthCheck =
686 | data.healthReport.components.contentQuality.details.find(
687 | (d: any) => d.check === "Adequate content length",
688 | );
689 | expect(lengthCheck.passed).toBe(true);
690 | });
691 |
692 | test("should detect insufficient content length", async () => {
693 | const readmePath = join(testDir, "README.md");
694 | await writeFile(readmePath, "# Project\n\nToo short.");
695 |
696 | const result = await evaluateReadmeHealth({
697 | readme_path: readmePath,
698 | });
699 |
700 | const dataContent = result.content.find((c) =>
701 | c.text.includes("healthReport"),
702 | );
703 | const data = JSON.parse(dataContent!.text);
704 |
705 | const lengthCheck =
706 | data.healthReport.components.contentQuality.details.find(
707 | (d: any) => d.check === "Adequate content length",
708 | );
709 | expect(lengthCheck.passed).toBe(false);
710 | });
711 |
712 | test("should detect code examples", async () => {
713 | const readmePath = join(testDir, "README.md");
714 | await writeFile(
715 | readmePath,
716 | `# Project
717 |
718 | ## Installation
719 |
720 | \`\`\`bash
721 | npm install project
722 | \`\`\`
723 |
724 | ## Usage
725 |
726 | \`\`\`javascript
727 | const project = require('project');
728 | project.run();
729 | \`\`\`
730 | `,
731 | );
732 |
733 | const result = await evaluateReadmeHealth({
734 | readme_path: readmePath,
735 | });
736 |
737 | const dataContent = result.content.find((c) =>
738 | c.text.includes("healthReport"),
739 | );
740 | const data = JSON.parse(dataContent!.text);
741 |
742 | const codeCheck =
743 | data.healthReport.components.contentQuality.details.find(
744 | (d: any) => d.check === "Code examples provided",
745 | );
746 | expect(codeCheck.passed).toBe(true);
747 | });
748 |
749 | test("should detect external links", async () => {
750 | const readmePath = join(testDir, "README.md");
751 | await writeFile(
752 | readmePath,
753 | `# Project
754 |
755 | Check out our [documentation](https://docs.example.com),
756 | [demo](https://demo.example.com), and [related project](https://github.com/example/related).
757 | `,
758 | );
759 |
760 | const result = await evaluateReadmeHealth({
761 | readme_path: readmePath,
762 | });
763 |
764 | const dataContent = result.content.find((c) =>
765 | c.text.includes("healthReport"),
766 | );
767 | const data = JSON.parse(dataContent!.text);
768 |
769 | const linksCheck =
770 | data.healthReport.components.contentQuality.details.find(
771 | (d: any) => d.check === "External links present",
772 | );
773 | expect(linksCheck.passed).toBe(true);
774 | });
775 |
776 | test("should evaluate project description clarity", async () => {
777 | const readmePath = join(testDir, "README.md");
778 | const longContent = `# Project
779 |
780 | ## Description
781 |
782 | This is a comprehensive project description that provides detailed information about what the project does, how it works, and why it's useful. The description is long enough and well-structured to meet the clarity requirements. This content needs to be over 500 characters to pass the clarity check, so I'm adding more detailed information about the project features, installation process, usage examples, and comprehensive documentation that explains all aspects of the project in great detail.
783 |
784 | ## Features
785 |
786 | - Feature 1: Advanced functionality
787 | - Feature 2: Enhanced performance
788 | - Feature 3: User-friendly interface
789 |
790 | ## Installation
791 |
792 | Detailed installation instructions here with step-by-step guidance.
793 |
794 | ## Usage
795 |
796 | Comprehensive usage examples and documentation with code samples.
797 | `;
798 | await writeFile(readmePath, longContent);
799 |
800 | const result = await evaluateReadmeHealth({
801 | readme_path: readmePath,
802 | });
803 |
804 | const dataContent = result.content.find((c) =>
805 | c.text.includes("healthReport"),
806 | );
807 | const data = JSON.parse(dataContent!.text);
808 |
809 | const clarityCheck =
810 | data.healthReport.components.contentQuality.details.find(
811 | (d: any) => d.check === "Project description clarity",
812 | );
813 | expect(clarityCheck.passed).toBe(true);
814 | });
815 | });
816 |
817 | describe("Grade Calculation", () => {
818 | test("should assign grade A for 90%+ score", async () => {
819 | const readmePath = join(testDir, "README.md");
820 | // Create comprehensive README that should score high
821 | await writeFile(
822 | readmePath,
823 | `# Excellent Project
824 |
825 | ## Table of Contents
826 | - [Description](#description)
827 | - [Installation](#installation)
828 | - [Usage](#usage)
829 |
830 | ## Description
831 |
832 | This is a comprehensive project with excellent documentation. It includes all necessary sections and follows best practices for community health, accessibility, onboarding, and content quality.
833 |
834 | ## Quick Start
835 |
836 | Get started in minutes with our simple installation process.
837 |
838 | ## Prerequisites
839 |
840 | - Node.js 16+
841 | - npm or yarn
842 |
843 | ## Installation
844 |
845 | \`\`\`bash
846 | npm install excellent-project
847 | \`\`\`
848 |
849 | ## Usage
850 |
851 | \`\`\`javascript
852 | const project = require('excellent-project');
853 | project.start();
854 | \`\`\`
855 |
856 | ## Contributing
857 |
858 | Please read [CONTRIBUTING.md](CONTRIBUTING.md) for details on our code of conduct and the process for submitting pull requests.
859 |
860 | ## First Contribution
861 |
862 | New contributors welcome! Check out our good first issues for beginners.
863 |
864 | ## Support
865 |
866 | Join our Discord community for help and discussions.
867 |
868 | ## Security
869 |
870 | Report security issues via our [Security Policy](SECURITY.md).
871 |
872 | ## Links
873 |
874 | - [Documentation](https://docs.example.com)
875 | - [Demo](https://demo.example.com)
876 | - [API Reference](https://api.example.com)
877 | - [GitHub Issues](https://github.com/example/issues)
878 |
879 | ## License
880 |
881 | MIT License
882 | `,
883 | );
884 |
885 | const result = await evaluateReadmeHealth({
886 | readme_path: readmePath,
887 | });
888 |
889 | const dataContent = result.content.find((c) =>
890 | c.text.includes("healthReport"),
891 | );
892 | const data = JSON.parse(dataContent!.text);
893 |
894 | expect(data.healthReport.overallScore).toBeGreaterThanOrEqual(90);
895 | expect(data.healthReport.grade).toBe("A");
896 | });
897 |
898 | test("should assign grade F for very low scores", async () => {
899 | const readmePath = join(testDir, "README.md");
900 | await writeFile(readmePath, "# Bad\n\nMinimal.");
901 |
902 | const result = await evaluateReadmeHealth({
903 | readme_path: readmePath,
904 | });
905 |
906 | const dataContent = result.content.find((c) =>
907 | c.text.includes("healthReport"),
908 | );
909 | const data = JSON.parse(dataContent!.text);
910 |
911 | expect(data.healthReport.overallScore).toBeLessThan(60);
912 | expect(data.healthReport.grade).toBe("F");
913 | });
914 | });
915 |
916 | describe("Recommendations and Critical Issues", () => {
917 | test("should identify critical issues for low-scoring components", async () => {
918 | const readmePath = join(testDir, "README.md");
919 | await writeFile(readmePath, "# Minimal Project\n\nVery basic content.");
920 |
921 | const result = await evaluateReadmeHealth({
922 | readme_path: readmePath,
923 | });
924 |
925 | const dataContent = result.content.find((c) =>
926 | c.text.includes("healthReport"),
927 | );
928 | const data = JSON.parse(dataContent!.text);
929 |
930 | expect(data.healthReport.criticalIssues.length).toBeGreaterThan(0);
931 | expect(
932 | data.healthReport.criticalIssues.some((issue: string) =>
933 | issue.includes("Critical:"),
934 | ),
935 | ).toBe(true);
936 | });
937 |
938 | test("should generate appropriate recommendations", async () => {
939 | const readmePath = join(testDir, "README.md");
940 | await writeFile(
941 | readmePath,
942 | "# Project\n\nBasic project with minimal content that will fail most health checks.",
943 | );
944 |
945 | const result = await evaluateReadmeHealth({
946 | readme_path: readmePath,
947 | });
948 |
949 | const dataContent = result.content.find((c) =>
950 | c.text.includes("healthReport"),
951 | );
952 | const data = JSON.parse(dataContent!.text);
953 |
954 | // Should have recommendations since most checks will fail with minimal content
955 | expect(data.healthReport.recommendations.length).toBeGreaterThan(0);
956 | expect(data.healthReport.recommendations.length).toBeLessThanOrEqual(10);
957 | });
958 |
959 | test("should identify strengths in well-structured components", async () => {
960 | const readmePath = join(testDir, "README.md");
961 | await writeFile(
962 | readmePath,
963 | `# Project
964 |
965 | ## Description
966 |
967 | This project has good content quality with proper structure and adequate length.
968 |
969 | ## Installation
970 |
971 | \`\`\`bash
972 | npm install
973 | \`\`\`
974 |
975 | ## Usage
976 |
977 | \`\`\`javascript
978 | const app = require('./app');
979 | app.start();
980 | \`\`\`
981 |
982 | ## Links
983 |
984 | - [Docs](https://example.com)
985 | - [Demo](https://demo.com)
986 | - [API](https://api.com)
987 | - [Support](https://support.com)
988 | `,
989 | );
990 |
991 | const result = await evaluateReadmeHealth({
992 | readme_path: readmePath,
993 | });
994 |
995 | const dataContent = result.content.find((c) =>
996 | c.text.includes("healthReport"),
997 | );
998 | const data = JSON.parse(dataContent!.text);
999 |
1000 | expect(data.healthReport.strengths.length).toBeGreaterThan(0);
1001 | });
1002 | });
1003 |
1004 | describe("Time Estimation", () => {
1005 | test("should estimate time in minutes for quick fixes", async () => {
1006 | const readmePath = join(testDir, "README.md");
1007 | await writeFile(
1008 | readmePath,
1009 | `# Excellent Project
1010 |
1011 | ## Table of Contents
1012 | - [Description](#description)
1013 | - [Installation](#installation)
1014 |
1015 | ## Description
1016 | This is a comprehensive project with excellent documentation. It includes all necessary sections and follows best practices for community health, accessibility, onboarding, and content quality.
1017 |
1018 | ## Quick Start
1019 | Get started in minutes with our simple installation process.
1020 |
1021 | ## Prerequisites
1022 | - Node.js 16+
1023 | - npm or yarn
1024 |
1025 | ## Installation
1026 | \`\`\`bash
1027 | npm install excellent-project
1028 | \`\`\`
1029 |
1030 | ## Usage
1031 | \`\`\`javascript
1032 | const project = require('excellent-project');
1033 | project.start();
1034 | \`\`\`
1035 |
1036 | ## Contributing
1037 | Please read [CONTRIBUTING.md](CONTRIBUTING.md) for details.
1038 |
1039 | ## First Contribution
1040 | New contributors welcome! Check out our good first issues for beginners.
1041 |
1042 | ## Support
1043 | Join our Discord community for help and discussions.
1044 |
1045 | ## Security
1046 | Report security issues via our [Security Policy](SECURITY.md).
1047 |
1048 | ## Links
1049 | - [Documentation](https://docs.example.com)
1050 | - [Demo](https://demo.example.com)
1051 | - [API Reference](https://api.example.com)
1052 |
1053 | ## License
1054 | MIT License
1055 | `,
1056 | );
1057 |
1058 | const result = await evaluateReadmeHealth({
1059 | readme_path: readmePath,
1060 | });
1061 |
1062 | const dataContent = result.content.find((c) =>
1063 | c.text.includes("healthReport"),
1064 | );
1065 | const data = JSON.parse(dataContent!.text);
1066 |
1067 | // Should have very few recommendations, resulting in minutes
1068 | expect(data.healthReport.estimatedImprovementTime).toMatch(/\d+ minutes/);
1069 | });
1070 |
1071 | test("should estimate time in hours for moderate improvements", async () => {
1072 | const readmePath = join(testDir, "README.md");
1073 | await writeFile(
1074 | readmePath,
1075 | "# Project\n\nBasic project needing improvements.",
1076 | );
1077 |
1078 | const result = await evaluateReadmeHealth({
1079 | readme_path: readmePath,
1080 | });
1081 |
1082 | const dataContent = result.content.find((c) =>
1083 | c.text.includes("healthReport"),
1084 | );
1085 | const data = JSON.parse(dataContent!.text);
1086 |
1087 | // Should have enough recommendations to warrant hours
1088 | expect(data.healthReport.estimatedImprovementTime).toMatch(/\d+ hours?/);
1089 | });
1090 | });
1091 |
1092 | describe("Next Steps Generation", () => {
1093 | test("should prioritize critical issues in next steps", async () => {
1094 | const readmePath = join(testDir, "README.md");
1095 | await writeFile(readmePath, "# Minimal\n\nBad.");
1096 |
1097 | const result = await evaluateReadmeHealth({
1098 | readme_path: readmePath,
1099 | });
1100 |
1101 | const dataContent = result.content.find((c) =>
1102 | c.text.includes("nextSteps"),
1103 | );
1104 | const data = JSON.parse(dataContent!.text);
1105 |
1106 | expect(
1107 | data.nextSteps.some((step: string) => step.includes("critical issues")),
1108 | ).toBe(true);
1109 | });
1110 |
1111 | test("should suggest targeting 85+ score for low-scoring READMEs", async () => {
1112 | const readmePath = join(testDir, "README.md");
1113 | await writeFile(readmePath, "# Project\n\nNeeds improvement.");
1114 |
1115 | const result = await evaluateReadmeHealth({
1116 | readme_path: readmePath,
1117 | });
1118 |
1119 | const dataContent = result.content.find((c) =>
1120 | c.text.includes("nextSteps"),
1121 | );
1122 | const data = JSON.parse(dataContent!.text);
1123 |
1124 | expect(
1125 | data.nextSteps.some((step: string) =>
1126 | step.includes("85+ health score"),
1127 | ),
1128 | ).toBe(true);
1129 | });
1130 |
1131 | test("should always include re-evaluation step", async () => {
1132 | const readmePath = join(testDir, "README.md");
1133 | await writeFile(readmePath, "# Any Project");
1134 |
1135 | const result = await evaluateReadmeHealth({
1136 | readme_path: readmePath,
1137 | });
1138 |
1139 | const dataContent = result.content.find((c) =>
1140 | c.text.includes("nextSteps"),
1141 | );
1142 | const data = JSON.parse(dataContent!.text);
1143 |
1144 | expect(
1145 | data.nextSteps.some((step: string) => step.includes("Re-evaluate")),
1146 | ).toBe(true);
1147 | });
1148 | });
1149 |
1150 | describe("Project Type Variations", () => {
1151 | test("should handle enterprise_tool project type", async () => {
1152 | const readmePath = join(testDir, "README.md");
1153 | await writeFile(
1154 | readmePath,
1155 | "# Enterprise Tool\n\nProfessional enterprise solution.",
1156 | );
1157 |
1158 | const result = await evaluateReadmeHealth({
1159 | readme_path: readmePath,
1160 | project_type: "enterprise_tool",
1161 | });
1162 |
1163 | expect(result.isError).toBe(false);
1164 | const dataContent = result.content.find((c) =>
1165 | c.text.includes("projectType"),
1166 | );
1167 | const data = JSON.parse(dataContent!.text);
1168 | expect(data.projectType).toBe("enterprise_tool");
1169 | });
1170 |
1171 | test("should handle personal_project project type", async () => {
1172 | const readmePath = join(testDir, "README.md");
1173 | await writeFile(
1174 | readmePath,
1175 | "# Personal Project\n\nMy personal coding project.",
1176 | );
1177 |
1178 | const result = await evaluateReadmeHealth({
1179 | readme_path: readmePath,
1180 | project_type: "personal_project",
1181 | });
1182 |
1183 | expect(result.isError).toBe(false);
1184 | const dataContent = result.content.find((c) =>
1185 | c.text.includes("projectType"),
1186 | );
1187 | const data = JSON.parse(dataContent!.text);
1188 | expect(data.projectType).toBe("personal_project");
1189 | });
1190 |
1191 | test("should handle documentation project type", async () => {
1192 | const readmePath = join(testDir, "README.md");
1193 | await writeFile(
1194 | readmePath,
1195 | "# Documentation Project\n\nComprehensive documentation.",
1196 | );
1197 |
1198 | const result = await evaluateReadmeHealth({
1199 | readme_path: readmePath,
1200 | project_type: "documentation",
1201 | });
1202 |
1203 | expect(result.isError).toBe(false);
1204 | const dataContent = result.content.find((c) =>
1205 | c.text.includes("projectType"),
1206 | );
1207 | const data = JSON.parse(dataContent!.text);
1208 | expect(data.projectType).toBe("documentation");
1209 | });
1210 | });
1211 | });
1212 |
```