This is page 15 of 20. Use http://codebase.md/tosin2013/documcp?page={x} to view the full context.
# Directory Structure
```
├── .dockerignore
├── .eslintignore
├── .eslintrc.json
├── .github
│ ├── agents
│ │ ├── documcp-ast.md
│ │ ├── documcp-deploy.md
│ │ ├── documcp-memory.md
│ │ ├── documcp-test.md
│ │ └── documcp-tool.md
│ ├── copilot-instructions.md
│ ├── dependabot.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── automated-changelog.md
│ │ ├── bug_report.md
│ │ ├── bug_report.yml
│ │ ├── documentation_issue.md
│ │ ├── feature_request.md
│ │ ├── feature_request.yml
│ │ ├── npm-publishing-fix.md
│ │ └── release_improvements.md
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── release-drafter.yml
│ └── workflows
│ ├── auto-merge.yml
│ ├── ci.yml
│ ├── codeql.yml
│ ├── dependency-review.yml
│ ├── deploy-docs.yml
│ ├── README.md
│ ├── release-drafter.yml
│ └── release.yml
├── .gitignore
├── .husky
│ ├── commit-msg
│ └── pre-commit
├── .linkcheck.config.json
├── .markdown-link-check.json
├── .nvmrc
├── .pre-commit-config.yaml
├── .versionrc.json
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── commitlint.config.js
├── CONTRIBUTING.md
├── docker-compose.docs.yml
├── Dockerfile.docs
├── docs
│ ├── .docusaurus
│ │ ├── docusaurus-plugin-content-docs
│ │ │ └── default
│ │ │ └── __mdx-loader-dependency.json
│ │ └── docusaurus-plugin-content-pages
│ │ └── default
│ │ └── __plugin.json
│ ├── adrs
│ │ ├── 001-mcp-server-architecture.md
│ │ ├── 002-repository-analysis-engine.md
│ │ ├── 003-static-site-generator-recommendation-engine.md
│ │ ├── 004-diataxis-framework-integration.md
│ │ ├── 005-github-pages-deployment-automation.md
│ │ ├── 006-mcp-tools-api-design.md
│ │ ├── 007-mcp-prompts-and-resources-integration.md
│ │ ├── 008-intelligent-content-population-engine.md
│ │ ├── 009-content-accuracy-validation-framework.md
│ │ ├── 010-mcp-resource-pattern-redesign.md
│ │ └── README.md
│ ├── api
│ │ ├── .nojekyll
│ │ ├── assets
│ │ │ ├── hierarchy.js
│ │ │ ├── highlight.css
│ │ │ ├── icons.js
│ │ │ ├── icons.svg
│ │ │ ├── main.js
│ │ │ ├── navigation.js
│ │ │ ├── search.js
│ │ │ └── style.css
│ │ ├── hierarchy.html
│ │ ├── index.html
│ │ ├── modules.html
│ │ └── variables
│ │ └── TOOLS.html
│ ├── assets
│ │ └── logo.svg
│ ├── development
│ │ └── MCP_INSPECTOR_TESTING.md
│ ├── docusaurus.config.js
│ ├── explanation
│ │ ├── architecture.md
│ │ └── index.md
│ ├── guides
│ │ ├── link-validation.md
│ │ ├── playwright-integration.md
│ │ └── playwright-testing-workflow.md
│ ├── how-to
│ │ ├── analytics-setup.md
│ │ ├── custom-domains.md
│ │ ├── documentation-freshness-tracking.md
│ │ ├── github-pages-deployment.md
│ │ ├── index.md
│ │ ├── local-testing.md
│ │ ├── performance-optimization.md
│ │ ├── prompting-guide.md
│ │ ├── repository-analysis.md
│ │ ├── seo-optimization.md
│ │ ├── site-monitoring.md
│ │ ├── troubleshooting.md
│ │ └── usage-examples.md
│ ├── index.md
│ ├── knowledge-graph.md
│ ├── package-lock.json
│ ├── package.json
│ ├── phase-2-intelligence.md
│ ├── reference
│ │ ├── api-overview.md
│ │ ├── cli.md
│ │ ├── configuration.md
│ │ ├── deploy-pages.md
│ │ ├── index.md
│ │ ├── mcp-tools.md
│ │ └── prompt-templates.md
│ ├── research
│ │ ├── cross-domain-integration
│ │ │ └── README.md
│ │ ├── domain-1-mcp-architecture
│ │ │ ├── index.md
│ │ │ └── mcp-performance-research.md
│ │ ├── domain-2-repository-analysis
│ │ │ └── README.md
│ │ ├── domain-3-ssg-recommendation
│ │ │ ├── index.md
│ │ │ └── ssg-performance-analysis.md
│ │ ├── domain-4-diataxis-integration
│ │ │ └── README.md
│ │ ├── domain-5-github-deployment
│ │ │ ├── github-pages-security-analysis.md
│ │ │ └── index.md
│ │ ├── domain-6-api-design
│ │ │ └── README.md
│ │ ├── README.md
│ │ ├── research-integration-summary-2025-01-14.md
│ │ ├── research-progress-template.md
│ │ └── research-questions-2025-01-14.md
│ ├── robots.txt
│ ├── sidebars.js
│ ├── sitemap.xml
│ ├── src
│ │ └── css
│ │ └── custom.css
│ └── tutorials
│ ├── development-setup.md
│ ├── environment-setup.md
│ ├── first-deployment.md
│ ├── getting-started.md
│ ├── index.md
│ ├── memory-workflows.md
│ └── user-onboarding.md
├── jest.config.js
├── LICENSE
├── Makefile
├── MCP_PHASE2_IMPLEMENTATION.md
├── mcp-config-example.json
├── mcp.json
├── package-lock.json
├── package.json
├── README.md
├── release.sh
├── scripts
│ └── check-package-structure.cjs
├── SECURITY.md
├── setup-precommit.sh
├── src
│ ├── benchmarks
│ │ └── performance.ts
│ ├── index.ts
│ ├── memory
│ │ ├── contextual-retrieval.ts
│ │ ├── deployment-analytics.ts
│ │ ├── enhanced-manager.ts
│ │ ├── export-import.ts
│ │ ├── freshness-kg-integration.ts
│ │ ├── index.ts
│ │ ├── integration.ts
│ │ ├── kg-code-integration.ts
│ │ ├── kg-health.ts
│ │ ├── kg-integration.ts
│ │ ├── kg-link-validator.ts
│ │ ├── kg-storage.ts
│ │ ├── knowledge-graph.ts
│ │ ├── learning.ts
│ │ ├── manager.ts
│ │ ├── multi-agent-sharing.ts
│ │ ├── pruning.ts
│ │ ├── schemas.ts
│ │ ├── storage.ts
│ │ ├── temporal-analysis.ts
│ │ ├── user-preferences.ts
│ │ └── visualization.ts
│ ├── prompts
│ │ └── technical-writer-prompts.ts
│ ├── scripts
│ │ └── benchmark.ts
│ ├── templates
│ │ └── playwright
│ │ ├── accessibility.spec.template.ts
│ │ ├── Dockerfile.template
│ │ ├── docs-e2e.workflow.template.yml
│ │ ├── link-validation.spec.template.ts
│ │ └── playwright.config.template.ts
│ ├── tools
│ │ ├── analyze-deployments.ts
│ │ ├── analyze-readme.ts
│ │ ├── analyze-repository.ts
│ │ ├── check-documentation-links.ts
│ │ ├── deploy-pages.ts
│ │ ├── detect-gaps.ts
│ │ ├── evaluate-readme-health.ts
│ │ ├── generate-config.ts
│ │ ├── generate-contextual-content.ts
│ │ ├── generate-llm-context.ts
│ │ ├── generate-readme-template.ts
│ │ ├── generate-technical-writer-prompts.ts
│ │ ├── kg-health-check.ts
│ │ ├── manage-preferences.ts
│ │ ├── manage-sitemap.ts
│ │ ├── optimize-readme.ts
│ │ ├── populate-content.ts
│ │ ├── readme-best-practices.ts
│ │ ├── recommend-ssg.ts
│ │ ├── setup-playwright-tests.ts
│ │ ├── setup-structure.ts
│ │ ├── sync-code-to-docs.ts
│ │ ├── test-local-deployment.ts
│ │ ├── track-documentation-freshness.ts
│ │ ├── update-existing-documentation.ts
│ │ ├── validate-content.ts
│ │ ├── validate-documentation-freshness.ts
│ │ ├── validate-readme-checklist.ts
│ │ └── verify-deployment.ts
│ ├── types
│ │ └── api.ts
│ ├── utils
│ │ ├── ast-analyzer.ts
│ │ ├── code-scanner.ts
│ │ ├── content-extractor.ts
│ │ ├── drift-detector.ts
│ │ ├── freshness-tracker.ts
│ │ ├── language-parsers-simple.ts
│ │ ├── permission-checker.ts
│ │ └── sitemap-generator.ts
│ └── workflows
│ └── documentation-workflow.ts
├── test-docs-local.sh
├── tests
│ ├── api
│ │ └── mcp-responses.test.ts
│ ├── benchmarks
│ │ └── performance.test.ts
│ ├── edge-cases
│ │ └── error-handling.test.ts
│ ├── functional
│ │ └── tools.test.ts
│ ├── integration
│ │ ├── kg-documentation-workflow.test.ts
│ │ ├── knowledge-graph-workflow.test.ts
│ │ ├── mcp-readme-tools.test.ts
│ │ ├── memory-mcp-tools.test.ts
│ │ ├── readme-technical-writer.test.ts
│ │ └── workflow.test.ts
│ ├── memory
│ │ ├── contextual-retrieval.test.ts
│ │ ├── enhanced-manager.test.ts
│ │ ├── export-import.test.ts
│ │ ├── freshness-kg-integration.test.ts
│ │ ├── kg-code-integration.test.ts
│ │ ├── kg-health.test.ts
│ │ ├── kg-link-validator.test.ts
│ │ ├── kg-storage-validation.test.ts
│ │ ├── kg-storage.test.ts
│ │ ├── knowledge-graph-enhanced.test.ts
│ │ ├── knowledge-graph.test.ts
│ │ ├── learning.test.ts
│ │ ├── manager-advanced.test.ts
│ │ ├── manager.test.ts
│ │ ├── mcp-resource-integration.test.ts
│ │ ├── mcp-tool-persistence.test.ts
│ │ ├── schemas.test.ts
│ │ ├── storage.test.ts
│ │ ├── temporal-analysis.test.ts
│ │ └── user-preferences.test.ts
│ ├── performance
│ │ ├── memory-load-testing.test.ts
│ │ └── memory-stress-testing.test.ts
│ ├── prompts
│ │ ├── guided-workflow-prompts.test.ts
│ │ └── technical-writer-prompts.test.ts
│ ├── server.test.ts
│ ├── setup.ts
│ ├── tools
│ │ ├── all-tools.test.ts
│ │ ├── analyze-coverage.test.ts
│ │ ├── analyze-deployments.test.ts
│ │ ├── analyze-readme.test.ts
│ │ ├── analyze-repository.test.ts
│ │ ├── check-documentation-links.test.ts
│ │ ├── deploy-pages-kg-retrieval.test.ts
│ │ ├── deploy-pages-tracking.test.ts
│ │ ├── deploy-pages.test.ts
│ │ ├── detect-gaps.test.ts
│ │ ├── evaluate-readme-health.test.ts
│ │ ├── generate-contextual-content.test.ts
│ │ ├── generate-llm-context.test.ts
│ │ ├── generate-readme-template.test.ts
│ │ ├── generate-technical-writer-prompts.test.ts
│ │ ├── kg-health-check.test.ts
│ │ ├── manage-sitemap.test.ts
│ │ ├── optimize-readme.test.ts
│ │ ├── readme-best-practices.test.ts
│ │ ├── recommend-ssg-historical.test.ts
│ │ ├── recommend-ssg-preferences.test.ts
│ │ ├── recommend-ssg.test.ts
│ │ ├── simple-coverage.test.ts
│ │ ├── sync-code-to-docs.test.ts
│ │ ├── test-local-deployment.test.ts
│ │ ├── tool-error-handling.test.ts
│ │ ├── track-documentation-freshness.test.ts
│ │ ├── validate-content.test.ts
│ │ ├── validate-documentation-freshness.test.ts
│ │ └── validate-readme-checklist.test.ts
│ ├── types
│ │ └── type-safety.test.ts
│ └── utils
│ ├── ast-analyzer.test.ts
│ ├── content-extractor.test.ts
│ ├── drift-detector.test.ts
│ ├── freshness-tracker.test.ts
│ └── sitemap-generator.test.ts
├── tsconfig.json
└── typedoc.json
```
# Files
--------------------------------------------------------------------------------
/src/tools/analyze-repository.ts:
--------------------------------------------------------------------------------
```typescript
import { promises as fs } from "fs";
import path from "path";
import { z } from "zod";
import { MCPToolResponse, formatMCPResponse } from "../types/api.js";
import {
createOrUpdateProject,
getProjectContext,
} from "../memory/kg-integration.js";
import {
extractRepositoryContent,
ExtractedContent,
} from "../utils/content-extractor.js";
// Analysis result schema based on ADR-002
/**
 * Complete result of one repository analysis run.
 *
 * Aggregates the four analysis phases (structure, dependencies,
 * documentation, recommendations) plus identifying metadata. The `id` is
 * referenced by downstream tools (e.g. recommend_ssg) to link an analysis
 * to later recommendations and deployments.
 */
export interface RepositoryAnalysis {
  id: string; // Unique analysis identifier (see generateAnalysisId)
  timestamp: string; // ISO-8601 time the analysis was produced
  path: string; // Repository path that was analyzed
  structure: {
    totalFiles: number; // Non-hidden files found within the depth limit
    totalDirectories: number;
    languages: Record<string, number>; // Keyed by file extension (e.g. ".ts"), not language name
    hasTests: boolean;
    hasCI: boolean;
    hasDocs: boolean;
  };
  dependencies: {
    ecosystem: "javascript" | "python" | "ruby" | "go" | "unknown";
    packages: string[]; // Runtime dependency names
    devPackages: string[]; // Dev-only dependency names (currently only for javascript)
  };
  documentation: {
    hasReadme: boolean;
    hasContributing: boolean;
    hasLicense: boolean;
    existingDocs: string[]; // Paths (relative to repo root) of discovered doc files
    estimatedComplexity: "simple" | "moderate" | "complex";
    extractedContent?: ExtractedContent; // Optional deep content extraction; absent on failure
  };
  recommendations: {
    primaryLanguage: string;
    projectType: string;
    teamSize: "solo" | "small" | "medium" | "large";
  };
}
/**
 * Input schema for the analyze_repository tool: a repository path plus an
 * optional analysis depth ("quick" | "standard" | "deep", default "standard").
 */
const inputSchema = z.object({
  path: z.string(),
  depth: z.enum(["quick", "standard", "deep"]).optional().default("standard"),
});
/**
* Analyzes a repository to understand its structure, dependencies, and documentation needs.
*
* This is the core function of DocuMCP that performs multi-layered analysis of a codebase
* to provide intelligent insights for documentation generation and deployment recommendations.
* The analysis includes structural analysis, dependency detection, documentation assessment,
* and generates recommendations for optimal documentation strategies.
*
* @param args - The input arguments for repository analysis
* @param args.path - The file system path to the repository to analyze
* @param args.depth - The analysis depth level: "quick" (basic), "standard" (comprehensive), or "deep" (thorough)
*
* @returns Promise resolving to analysis results with content array and error status
* @returns content - Array containing the analysis results in MCP tool response format
* @returns isError - Boolean flag indicating if the analysis encountered errors
*
* @throws {Error} When the repository path is inaccessible or invalid
* @throws {Error} When permission is denied to read the repository
* @throws {Error} When the repository structure cannot be analyzed
*
* @example
* ```typescript
* // Basic repository analysis
* const result = await analyzeRepository({
* path: "/path/to/my/repository",
* depth: "standard"
* });
*
* // Quick analysis for large repositories
* const quickResult = await analyzeRepository({
* path: "/path/to/large/repo",
* depth: "quick"
* });
* ```
*
* @since 1.0.0
* @version 1.2.0 - Added Knowledge Graph integration and historical context
*/
export async function analyzeRepository(
  args: unknown,
  context?: any,
): Promise<{ content: any[]; isError?: boolean }> {
  const startTime = Date.now();
  // Validate input up front; malformed args throw before any I/O happens.
  const { path: repoPath, depth } = inputSchema.parse(args);

  try {
    // Report initial progress (only when the client supplied a progress token).
    // NOTE(review): `context` is untyped — assumed to follow the MCP tool
    // context shape (meta.progressToken / reportProgress / info); confirm.
    if (context?.meta?.progressToken) {
      await context.meta.reportProgress?.({
        progress: 0,
        total: 100,
      });
    }

    await context?.info?.("🔍 Starting repository analysis...");

    // Verify path exists and is accessible
    await context?.info?.(`📂 Verifying access to ${repoPath}...`);
    await fs.access(repoPath, fs.constants.R_OK);

    // Try to read the directory to catch permission issues early
    try {
      await fs.readdir(repoPath);
    } catch (error: any) {
      if (error.code === "EACCES" || error.code === "EPERM") {
        throw new Error(`Permission denied: Cannot read directory ${repoPath}`);
      }
      throw error;
    }

    if (context?.meta?.progressToken) {
      await context.meta.reportProgress?.({
        progress: 10,
        total: 100,
      });
    }

    // Phase 1.2: Get historical context from Knowledge Graph
    await context?.info?.(
      "📊 Retrieving historical context from Knowledge Graph...",
    );
    let projectContext;
    try {
      projectContext = await getProjectContext(repoPath);
      if (projectContext.previousAnalyses > 0) {
        await context?.info?.(
          `✨ Found ${projectContext.previousAnalyses} previous analysis(es) of this project`,
        );
      }
    } catch (error) {
      // KG access is best-effort: fall back to an empty context so the
      // core analysis still succeeds without historical data.
      console.warn("Failed to retrieve project context:", error);
      projectContext = {
        previousAnalyses: 0,
        lastAnalyzed: null,
        knownTechnologies: [],
        similarProjects: [],
      };
    }

    if (context?.meta?.progressToken) {
      await context.meta.reportProgress?.({
        progress: 20,
        total: 100,
      });
    }

    // Core analysis phases (progress milestones: 40 / 60 / 75).
    await context?.info?.("🔎 Analyzing repository structure...");
    const structure = await analyzeStructure(repoPath, depth);

    if (context?.meta?.progressToken) {
      await context.meta.reportProgress?.({
        progress: 40,
        total: 100,
      });
    }

    await context?.info?.("📦 Analyzing dependencies...");
    const dependencies = await analyzeDependencies(repoPath);

    if (context?.meta?.progressToken) {
      await context.meta.reportProgress?.({
        progress: 60,
        total: 100,
      });
    }

    await context?.info?.("📝 Analyzing documentation...");
    const documentation = await analyzeDocumentation(repoPath);

    if (context?.meta?.progressToken) {
      await context.meta.reportProgress?.({
        progress: 75,
        total: 100,
      });
    }

    await context?.info?.("💡 Generating recommendations...");
    const recommendations = await generateRecommendations(repoPath);

    const analysis: RepositoryAnalysis = {
      id: generateAnalysisId(),
      timestamp: new Date().toISOString(),
      path: repoPath,
      structure,
      dependencies,
      documentation,
      recommendations,
    };

    // Phase 1.2: Store project in Knowledge Graph
    if (context?.meta?.progressToken) {
      await context.meta.reportProgress?.({
        progress: 85,
        total: 100,
      });
    }

    await context?.info?.("💾 Storing analysis in Knowledge Graph...");
    try {
      await createOrUpdateProject(analysis);
    } catch (error) {
      // Non-fatal: the analysis result is still returned if persistence fails.
      console.warn("Failed to store project in Knowledge Graph:", error);
    }

    if (context?.meta?.progressToken) {
      await context.meta.reportProgress?.({
        progress: 90,
        total: 100,
      });
    }

    // Phase 1.3: Get intelligent analysis enrichment
    await context?.info?.("🧠 Enriching analysis with historical insights...");
    let intelligentAnalysis;
    let documentationHealth;
    try {
      // Dynamic imports keep the memory subsystem optional and lazily loaded;
      // any failure here degrades gracefully to a plain analysis response.
      const { getProjectInsights, getSimilarProjects } = await import(
        "../memory/index.js"
      );
      const { getKnowledgeGraph } = await import("../memory/kg-integration.js");

      const insights = await getProjectInsights(repoPath);
      const similar = await getSimilarProjects(analysis, 5);

      // Check documentation health from KG
      try {
        const kg = await getKnowledgeGraph();
        const allEdges = await kg.getAllEdges();

        // Find outdated documentation
        const outdatedEdges = allEdges.filter((e) => e.type === "outdated_for");

        // Find documentation coverage
        const documentsEdges = allEdges.filter((e) => e.type === "documents");
        const totalCodeFiles = allEdges.filter(
          (e) => e.type === "depends_on" && e.target.startsWith("code_file:"),
        ).length;
        const documentedFiles = new Set(documentsEdges.map((e) => e.source))
          .size;
        const coveragePercent =
          totalCodeFiles > 0
            ? Math.round((documentedFiles / totalCodeFiles) * 100)
            : 0;

        documentationHealth = {
          outdatedCount: outdatedEdges.length,
          coveragePercent,
          totalCodeFiles,
          documentedFiles,
        };
      } catch (error) {
        console.warn("Failed to calculate documentation health:", error);
      }

      intelligentAnalysis = {
        insights,
        similarProjects: similar.slice(0, 3).map((p: any) => ({
          path: p.projectPath,
          similarity: Math.round((p.similarity || 0) * 100) + "%",
          technologies: p.technologies?.join(", ") || "unknown",
        })),
        ...(documentationHealth && { documentationHealth }),
        recommendations: [
          // Documentation health recommendations
          ...(documentationHealth && documentationHealth.outdatedCount > 0
            ? [
                `${documentationHealth.outdatedCount} documentation section(s) may be outdated - code has changed since docs were updated`,
              ]
            : []),
          ...(documentationHealth &&
          documentationHealth.coveragePercent < 50 &&
          documentationHealth.totalCodeFiles > 0
            ? [
                `Documentation covers only ${documentationHealth.coveragePercent}% of code files - consider documenting more`,
              ]
            : []),
          // Only suggest creating README if it truly doesn't exist
          // Don't suggest improvements yet - that requires deeper analysis
          ...(analysis.documentation.hasReadme
            ? []
            : ["Consider creating a README.md for project documentation"]),
          // Only suggest docs structure if no docs folder exists at all
          ...(analysis.structure.hasDocs
            ? []
            : analysis.documentation.existingDocs.length === 0
              ? [
                  "Consider setting up documentation structure using Diataxis framework",
                ]
              : []),
          // Infrastructure recommendations are safe as they're objective
          ...(analysis.structure.hasTests
            ? []
            : ["Consider adding test coverage to improve reliability"]),
          ...(analysis.structure.hasCI
            ? []
            : ["Consider setting up CI/CD pipeline for automation"]),
        ],
      };
    } catch (error) {
      console.warn("Failed to get intelligent analysis:", error);
    }

    // Enhance response with historical context
    const contextInfo: string[] = [];
    if (projectContext.previousAnalyses > 0) {
      contextInfo.push(
        `📊 Previously analyzed ${projectContext.previousAnalyses} time(s)`,
      );
      if (projectContext.lastAnalyzed) {
        const lastDate = new Date(
          projectContext.lastAnalyzed,
        ).toLocaleDateString();
        contextInfo.push(`📅 Last analyzed: ${lastDate}`);
      }
    }
    if (projectContext.knownTechnologies.length > 0) {
      contextInfo.push(
        `💡 Known technologies: ${projectContext.knownTechnologies.join(", ")}`,
      );
    }
    if (projectContext.similarProjects.length > 0) {
      contextInfo.push(
        `🔗 Found ${projectContext.similarProjects.length} similar project(s) in knowledge graph`,
      );
    }

    if (context?.meta?.progressToken) {
      await context.meta.reportProgress?.({
        progress: 100,
        total: 100,
      });
    }

    const executionTime = Date.now() - startTime;
    await context?.info?.(
      `✅ Analysis complete! Processed ${
        analysis.structure.totalFiles
      } files in ${Math.round(executionTime / 1000)}s`,
    );

    // Assemble the structured MCP response: data + metadata + human-readable
    // recommendations + suggested follow-up tool invocations.
    const response: MCPToolResponse<RepositoryAnalysis> = {
      success: true,
      data: analysis,
      metadata: {
        toolVersion: "1.0.0",
        executionTime,
        timestamp: new Date().toISOString(),
        analysisId: analysis.id,
        ...(intelligentAnalysis && { intelligentAnalysis }),
      },
      recommendations: [
        {
          type: "info",
          title: "Analysis Complete",
          description: `Successfully analyzed ${analysis.structure.totalFiles} files across ${analysis.structure.totalDirectories} directories`,
        },
        ...(contextInfo.length > 0
          ? [
              {
                type: "info" as const,
                title: "Historical Context",
                description: contextInfo.join("\n"),
              },
            ]
          : []),
        ...(intelligentAnalysis?.recommendations &&
        intelligentAnalysis.recommendations.length > 0
          ? [
              {
                type: "info" as const,
                title: "AI Recommendations",
                description: intelligentAnalysis.recommendations.join("\n• "),
              },
            ]
          : []),
        ...(intelligentAnalysis?.similarProjects &&
        intelligentAnalysis.similarProjects.length > 0
          ? [
              {
                type: "info" as const,
                title: "Similar Projects",
                description: intelligentAnalysis.similarProjects
                  .map(
                    (p: any) =>
                      `${p.path} (${p.similarity} similar, ${p.technologies})`,
                  )
                  .join("\n"),
              },
            ]
          : []),
      ],
      nextSteps: [
        ...(analysis.documentation.hasReadme
          ? [
              {
                action: "Analyze README Quality",
                toolRequired: "analyze_readme",
                description:
                  "Evaluate README completeness and suggest improvements",
                priority: "medium" as const,
              },
            ]
          : []),
        {
          action: "Get SSG Recommendation",
          toolRequired: "recommend_ssg",
          description: `Use analysis ID: ${analysis.id}`,
          priority: "high",
        },
      ],
    };

    return formatMCPResponse(response);
  } catch (error) {
    // Any failure (bad path, permissions, unexpected error) becomes a
    // structured error response rather than a thrown exception.
    const errorResponse: MCPToolResponse = {
      success: false,
      error: {
        code: "ANALYSIS_FAILED",
        message: `Failed to analyze repository: ${error}`,
        resolution: "Ensure the repository path exists and is accessible",
      },
      metadata: {
        toolVersion: "1.0.0",
        executionTime: Date.now() - startTime,
        timestamp: new Date().toISOString(),
      },
    };
    return formatMCPResponse(errorResponse);
  }
}
// Helper function to generate unique analysis ID
/**
 * Creates a unique identifier for a single repository analysis run.
 *
 * The ID combines the current timestamp (base-36) with a short random
 * suffix, yielding values of the form `analysis_<timestamp>_<random>`,
 * e.g. "analysis_m1x2y3z_ab12c". It links an analysis to later
 * recommendations and deployments.
 *
 * @returns A unique analysis identifier string
 *
 * @since 1.0.0
 */
function generateAnalysisId(): string {
  const timePart = Date.now().toString(36);
  const randomPart = Math.random().toString(36).slice(2, 7);
  return ["analysis", timePart, randomPart].join("_");
}
// Map file extensions to languages
/**
 * Resolves a lowercase file extension (with its leading dot) to the
 * programming language it conventionally represents.
 *
 * @param ext - Extension such as ".ts" or ".py"; callers lowercase it first
 * @returns The language name, or null when the extension is not recognized
 */
function getLanguageFromExtension(ext: string): string | null {
  const extensionToLanguage: Record<string, string> = {
    // JavaScript / TypeScript family
    ".js": "javascript",
    ".jsx": "javascript",
    ".ts": "typescript",
    ".tsx": "typescript",
    // General-purpose languages
    ".py": "python",
    ".rb": "ruby",
    ".go": "go",
    ".java": "java",
    ".c": "c",
    ".cpp": "cpp",
    ".cs": "csharp",
    ".php": "php",
    ".rs": "rust",
    ".kt": "kotlin",
    ".swift": "swift",
    ".scala": "scala",
    ".dart": "dart",
    ".lua": "lua",
    ".pl": "perl",
    ".r": "r",
    ".d": "d",
    ".jl": "julia",
    ".nim": "nim",
    ".cr": "crystal",
    ".zig": "zig",
    // Shells and scripting
    ".sh": "shell",
    ".bash": "shell",
    ".zsh": "shell",
    ".fish": "shell",
    ".ps1": "powershell",
    // Web and styling
    ".html": "html",
    ".css": "css",
    ".scss": "scss",
    ".sass": "sass",
    ".less": "less",
    ".vue": "vue",
    ".svelte": "svelte",
    // Functional / BEAM languages
    ".elm": "elm",
    ".clj": "clojure",
    ".ex": "elixir",
    ".exs": "elixir",
    ".erl": "erlang",
    ".hrl": "erlang",
    ".hs": "haskell",
    ".ml": "ocaml",
    ".fs": "fsharp",
    // Data / query
    ".sql": "sql",
  };
  return extensionToLanguage[ext] ?? null;
}
// Analyze repository structure
/**
 * Walks the repository tree and gathers structural statistics.
 *
 * Counts non-hidden files and all directories, tallies recognized source
 * files by extension, and detects the presence of tests, CI configuration,
 * and documentation directories. Unreadable directories are skipped
 * silently (best-effort traversal).
 *
 * @param repoPath - The file system path to the repository
 * @param depth - Analysis depth level; controls how deep the walk recurses
 *   ("quick" = 2 levels, "standard" = 5, "deep" = 10)
 *
 * @returns Structural statistics matching RepositoryAnalysis["structure"]
 *
 * @since 1.0.0
 */
async function analyzeStructure(
  repoPath: string,
  depth: "quick" | "standard" | "deep",
): Promise<RepositoryAnalysis["structure"]> {
  const result = {
    totalFiles: 0,
    totalDirectories: 0,
    languages: {} as Record<string, number>,
    hasTests: false,
    hasCI: false,
    hasDocs: false,
  };

  // Depth budget per analysis level.
  const depthLimits = { quick: 2, standard: 5, deep: 10 } as const;
  const maxDepth = depthLimits[depth];

  // Directories never descended into (vendored / generated content).
  const skippedDirs = new Set([
    "node_modules",
    ".git",
    "dist",
    "build",
    ".next",
    ".nuxt",
  ]);
  const ciDirNames = new Set([".github", ".gitlab-ci", ".circleci"]);
  const docDirNames = new Set(["docs", "documentation", "doc"]);
  const ciFilePattern = /\.(yml|yaml)$/;

  async function walk(dir: string, level: number = 0): Promise<void> {
    if (level > maxDepth) return;

    let entries;
    try {
      entries = await fs.readdir(dir, { withFileTypes: true });
    } catch {
      // Skip directories we can't read.
      return;
    }

    for (const entry of entries) {
      const childPath = path.join(dir, entry.name);

      if (entry.isDirectory()) {
        result.totalDirectories++;

        // Special directory names flag project capabilities.
        if (
          entry.name.includes("test") ||
          entry.name.includes("spec") ||
          entry.name === "__tests__"
        ) {
          result.hasTests = true;
        }
        if (ciDirNames.has(entry.name)) {
          result.hasCI = true;
        }
        if (docDirNames.has(entry.name)) {
          result.hasDocs = true;
        }

        if (!skippedDirs.has(entry.name)) {
          await walk(childPath, level + 1);
        }
      } else if (entry.isFile() && !entry.name.startsWith(".")) {
        // Hidden files are excluded from all file statistics.
        result.totalFiles++;

        // Track languages by file extension.
        const ext = path.extname(entry.name).toLowerCase();
        if (ext && getLanguageFromExtension(ext)) {
          result.languages[ext] = (result.languages[ext] || 0) + 1;
        }

        // YAML files whose name mentions "ci" indicate CI configuration.
        if (ciFilePattern.test(entry.name) && entry.name.includes("ci")) {
          result.hasCI = true;
        }
        if (entry.name.includes("test") || entry.name.includes("spec")) {
          result.hasTests = true;
        }
      }
    }
  }

  await walk(repoPath);
  return result;
}
// Analyze project dependencies
/**
 * Detects the project's package ecosystem and lists its dependencies.
 *
 * Manifests are checked in priority order: package.json (JavaScript/
 * TypeScript), requirements.txt / pyproject.toml (Python), Gemfile (Ruby),
 * go.mod (Go). The first readable manifest wins; if none is found the
 * ecosystem stays "unknown" with empty package lists.
 *
 * @param repoPath - Path to the repository root
 * @returns Detected ecosystem plus runtime and dev package names
 */
async function analyzeDependencies(
  repoPath: string,
): Promise<RepositoryAnalysis["dependencies"]> {
  const result: RepositoryAnalysis["dependencies"] = {
    ecosystem: "unknown",
    packages: [],
    devPackages: [],
  };

  try {
    // Check for package.json (JavaScript/TypeScript)
    const packageJsonPath = path.join(repoPath, "package.json");
    try {
      const packageJsonContent = await fs.readFile(packageJsonPath, "utf-8");
      const packageJson = JSON.parse(packageJsonContent);
      result.ecosystem = "javascript";
      result.packages = Object.keys(packageJson.dependencies || {});
      result.devPackages = Object.keys(packageJson.devDependencies || {});
      return result;
    } catch {
      // Continue to check other ecosystems
    }

    // Check for requirements.txt or pyproject.toml (Python)
    const requirementsPath = path.join(repoPath, "requirements.txt");
    const pyprojectPath = path.join(repoPath, "pyproject.toml");
    try {
      try {
        const requirementsContent = await fs.readFile(
          requirementsPath,
          "utf-8",
        );
        result.ecosystem = "python";
        result.packages = requirementsContent
          .split("\n")
          .map((line) => line.trim())
          // Skip blanks, comments, and pip option lines such as
          // "-r base.txt", "-e .", or "--index-url ..." — they are not
          // package names.
          .filter(
            (line) =>
              line && !line.startsWith("#") && !line.startsWith("-"),
          )
          .map(extractRequirementName)
          .filter((name) => name.length > 0);
        return result;
      } catch {
        const pyprojectContent = await fs.readFile(pyprojectPath, "utf-8");
        result.ecosystem = "python";
        // Basic parsing for pyproject.toml dependencies
        const dependencyMatches = pyprojectContent.match(
          /dependencies\s*=\s*\[([\s\S]*?)\]/,
        );
        if (dependencyMatches) {
          result.packages = dependencyMatches[1]
            .split(",")
            .map((dep) =>
              extractRequirementName(dep.trim().replace(/["']/g, "")),
            )
            .filter((dep) => dep.length > 0);
        }
        return result;
      }
    } catch {
      // Continue to check other ecosystems
    }

    // Check for Gemfile (Ruby)
    const gemfilePath = path.join(repoPath, "Gemfile");
    try {
      const gemfileContent = await fs.readFile(gemfilePath, "utf-8");
      result.ecosystem = "ruby";
      const gemMatches = gemfileContent.match(/gem\s+['"]([^'"]+)['"]/g);
      if (gemMatches) {
        result.packages = gemMatches.map(
          (match) => match.match(/gem\s+['"]([^'"]+)['"]/)![1],
        );
      }
      return result;
    } catch {
      // Continue to check other ecosystems
    }

    // Check for go.mod (Go)
    const goModPath = path.join(repoPath, "go.mod");
    try {
      const goModContent = await fs.readFile(goModPath, "utf-8");
      result.ecosystem = "go";
      const requireMatches = goModContent.match(/require\s+\(([\s\S]*?)\)/);
      if (requireMatches) {
        result.packages = requireMatches[1]
          .split("\n")
          .map((line) => line.trim().split(" ")[0])
          .filter((pkg) => pkg && !pkg.startsWith("//"));
      }
      return result;
    } catch {
      // No recognized dependency files found
    }

    return result;
  } catch (error) {
    // Best-effort: return whatever was determined before the failure.
    return result;
  }
}

/**
 * Extracts the bare distribution name from a PEP 508 requirement string.
 *
 * Cuts the input at the first version operator (==, >=, <=, ~=, !=, <, >),
 * extras bracket ("["), environment marker (";"), inline comment ("#"),
 * or whitespace. Example:
 *   "requests[security]>=2.0 ; python_version>'3'"  ->  "requests"
 *
 * @param line - A single requirement specifier (already trimmed)
 * @returns The package name, possibly empty if the line held no name
 */
function extractRequirementName(line: string): string {
  const cut = line.search(/[\s;#[=<>!~]/);
  return (cut === -1 ? line : line.slice(0, cut)).trim();
}
// Analyze documentation structure
/**
 * Surveys the repository's existing documentation.
 *
 * Detects standard files (README*, CONTRIBUTING*, LICENSE*), collects
 * documentation files (.md/.rst/.txt/.adoc) from the repository root and
 * common doc directories, estimates complexity from the file count, and
 * attempts deep content extraction. All steps are best-effort: a single
 * unreadable entry (e.g. a broken symlink) no longer aborts the whole scan.
 *
 * @param repoPath - Path to the repository root
 * @returns Documentation assessment matching RepositoryAnalysis["documentation"]
 */
async function analyzeDocumentation(
  repoPath: string,
): Promise<RepositoryAnalysis["documentation"]> {
  const result: RepositoryAnalysis["documentation"] = {
    hasReadme: false,
    hasContributing: false,
    hasLicense: false,
    existingDocs: [],
    estimatedComplexity: "simple",
  };

  try {
    const entries = await fs.readdir(repoPath);

    // Check for standard files (case-insensitive prefix match)
    for (const entry of entries) {
      const lowerEntry = entry.toLowerCase();
      if (lowerEntry.startsWith("readme")) {
        result.hasReadme = true;
      } else if (lowerEntry.startsWith("contributing")) {
        result.hasContributing = true;
      } else if (lowerEntry.startsWith("license")) {
        result.hasLicense = true;
      }
    }

    // Find documentation files
    const docExtensions = [".md", ".rst", ".txt", ".adoc"];
    const commonDocDirs = ["docs", "documentation", "doc", "wiki"];

    // Check root directory for docs
    for (const entry of entries) {
      const entryPath = path.join(repoPath, entry);

      // A failing stat (broken symlink, permission) previously propagated
      // to the outer catch and silently discarded everything gathered so
      // far. Skip just the offending entry instead.
      let stat;
      try {
        stat = await fs.stat(entryPath);
      } catch {
        continue;
      }

      if (
        stat.isFile() &&
        docExtensions.some((ext) => entry.toLowerCase().endsWith(ext))
      ) {
        result.existingDocs.push(entry);
      } else if (
        stat.isDirectory() &&
        commonDocDirs.includes(entry.toLowerCase())
      ) {
        try {
          const docFiles = await fs.readdir(entryPath);
          for (const docFile of docFiles) {
            if (
              docExtensions.some((ext) => docFile.toLowerCase().endsWith(ext))
            ) {
              result.existingDocs.push(path.join(entry, docFile));
            }
          }
        } catch {
          // Skip if can't read directory
        }
      }
    }

    // Estimate complexity based on documentation found
    const docCount = result.existingDocs.length;
    if (docCount <= 3) {
      result.estimatedComplexity = "simple";
    } else if (docCount <= 10) {
      result.estimatedComplexity = "moderate";
    } else {
      result.estimatedComplexity = "complex";
    }

    // Extract comprehensive documentation content
    try {
      result.extractedContent = await extractRepositoryContent(repoPath);
    } catch (error) {
      console.warn("Failed to extract repository content:", error);
      // Continue without extracted content
    }

    return result;
  } catch (error) {
    // Best-effort: return whatever was collected before the failure.
    return result;
  }
}
// Helper function to count languages in a directory
/**
 * Recursively tallies recognized source-file extensions under `dirPath`.
 *
 * Mutates the provided `languages` map in place (extension -> file count).
 * Recursion stops three levels deep, skips vendored/generated directories,
 * and silently ignores directories it cannot read.
 *
 * @param dirPath - Directory to scan
 * @param languages - Accumulator keyed by lowercase extension (e.g. ".ts")
 * @param depth - Current recursion level (callers leave this at 0)
 */
async function countLanguagesInDirectory(
  dirPath: string,
  languages: Record<string, number>,
  depth: number = 0,
): Promise<void> {
  if (depth > 3) return; // Limit depth for performance

  let entries;
  try {
    entries = await fs.readdir(dirPath, { withFileTypes: true });
  } catch {
    return; // Skip directories we can't read
  }

  const ignoredDirs = ["node_modules", ".git", "dist"];
  for (const entry of entries) {
    if (entry.isFile()) {
      const ext = path.extname(entry.name).toLowerCase();
      if (ext && getLanguageFromExtension(ext)) {
        languages[ext] = (languages[ext] || 0) + 1;
      }
    } else if (entry.isDirectory() && !ignoredDirs.includes(entry.name)) {
      await countLanguagesInDirectory(
        path.join(dirPath, entry.name),
        languages,
        depth + 1,
      );
    }
  }
}
// Generate recommendations based on analysis
/**
 * Derives high-level project recommendations — primary language, project
 * type, and estimated team size — from simple filesystem heuristics.
 *
 * All heuristics are best-effort: on any filesystem error the defaults
 * ("unknown" language/type, "solo" team) are returned.
 *
 * @param repoPath - Path to the repository root
 * @returns Heuristic recommendations matching RepositoryAnalysis["recommendations"]
 */
async function generateRecommendations(
  repoPath: string,
): Promise<RepositoryAnalysis["recommendations"]> {
  const recommendations: RepositoryAnalysis["recommendations"] = {
    primaryLanguage: "unknown",
    projectType: "unknown",
    teamSize: "solo",
  };

  try {
    // Tally source files per extension to find the dominant language.
    const languageCounts: Record<string, number> = {};
    await countLanguagesInDirectory(repoPath, languageCounts);

    // Pick the extension with the strictly highest count (first wins on ties).
    let primaryExt = "";
    let highestCount = -Infinity;
    for (const [ext, count] of Object.entries(languageCounts)) {
      if (count > highestCount) {
        highestCount = count;
        primaryExt = ext;
      }
    }
    if (primaryExt) {
      recommendations.primaryLanguage =
        getLanguageFromExtension(primaryExt) || "unknown";
    }

    // Classify the project from tell-tale root-level entries.
    const entries = await fs.readdir(repoPath);
    const hasPackageJson = entries.includes("package.json");
    const hasDockerfile = entries.includes("Dockerfile");
    // NOTE: any root YAML file is treated as a deployment signal — crude,
    // but matches the original heuristic.
    const hasK8sFiles = entries.some(
      (entry) => entry.endsWith(".yaml") || entry.endsWith(".yml"),
    );
    const hasTests = entries.some(
      (entry) => entry.includes("test") || entry.includes("spec"),
    );

    if (hasPackageJson && entries.includes("src") && hasTests) {
      recommendations.projectType = "library";
    } else if (hasDockerfile || hasK8sFiles) {
      recommendations.projectType = "application";
    } else if (entries.includes("docs") || entries.includes("documentation")) {
      recommendations.projectType = "documentation";
    } else if (hasTests && primaryExt && languageCounts[primaryExt] > 10) {
      recommendations.projectType = "application";
    } else {
      recommendations.projectType = "script";
    }

    // Estimate team size from project scale and process signals.
    let totalFiles = 0;
    for (const count of Object.values(languageCounts)) {
      totalFiles += count;
    }
    const hasCI = entries.some(
      (entry) => entry.includes(".github") || entry.includes(".gitlab"),
    );
    const hasContributing = entries.some((entry) =>
      entry.toLowerCase().includes("contributing"),
    );

    if (totalFiles > 100 || (hasCI && hasContributing)) {
      recommendations.teamSize = "large";
    } else if (totalFiles > 50 || hasCI) {
      recommendations.teamSize = "medium";
    } else if (totalFiles > 20 || hasTests) {
      recommendations.teamSize = "small";
    } else {
      recommendations.teamSize = "solo";
    }

    return recommendations;
  } catch (error) {
    return recommendations;
  }
}
```
--------------------------------------------------------------------------------
/tests/tools/check-documentation-links.test.ts:
--------------------------------------------------------------------------------
```typescript
import { checkDocumentationLinks } from "../../src/tools/check-documentation-links.js";
import { formatMCPResponse } from "../../src/types/api.js";
import { writeFile, mkdir, rm } from "fs/promises";
import { join } from "path";
describe("checkDocumentationLinks", () => {
const testDir = join(process.cwd(), "test-docs-temp");
beforeEach(async () => {
// Create test directory structure
await mkdir(testDir, { recursive: true });
await mkdir(join(testDir, "guides"), { recursive: true });
await mkdir(join(testDir, "api"), { recursive: true });
});
afterEach(async () => {
// Clean up test directory
try {
await rm(testDir, { recursive: true, force: true });
} catch (error) {
// Ignore cleanup errors
}
});
describe("Input Validation", () => {
test("should use default values for optional parameters", async () => {
await writeFile(
join(testDir, "README.md"),
"# Test\n[Link](./guides/test.md)",
);
await writeFile(join(testDir, "guides", "test.md"), "# Guide");
const result = await checkDocumentationLinks({
documentation_path: testDir,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"totalLinks": 1');
});
test("should validate timeout_ms parameter", async () => {
const result = await checkDocumentationLinks({
documentation_path: testDir,
timeout_ms: 500, // Below minimum
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(true);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain(
"Number must be greater than or equal to 1000",
);
});
test("should validate max_concurrent_checks parameter", async () => {
const result = await checkDocumentationLinks({
documentation_path: testDir,
max_concurrent_checks: 25, // Above maximum
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(true);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain("Number must be less than or equal to 20");
});
});
describe("File Scanning", () => {
test("should find markdown files in nested directories", async () => {
await writeFile(join(testDir, "README.md"), "# Root");
await writeFile(join(testDir, "guides", "guide1.md"), "# Guide 1");
await writeFile(join(testDir, "api", "reference.mdx"), "# API Reference");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"filesScanned": 3');
});
test("should handle empty documentation directory", async () => {
const result = await checkDocumentationLinks({
documentation_path: testDir,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(true);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain("No documentation files found");
});
test("should handle non-existent directory", async () => {
const result = await checkDocumentationLinks({
documentation_path: "/non/existent/path",
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(true);
});
});
describe("Link Extraction", () => {
test("should extract markdown links", async () => {
const content = `# Test Document
[Internal Link](./guides/test.md)
[External Link](https://example.com)
`;
await writeFile(join(testDir, "test.md"), content);
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
check_internal_links: true,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"totalLinks": 1');
});
test("should extract HTML links", async () => {
const content = `# Test Document
<a href="./guides/test.md">Internal Link</a>
<a href="https://example.com">External Link</a>
`;
await writeFile(join(testDir, "test.md"), content);
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
});
test("should skip mailto and tel links", async () => {
const content = `# Contact
[Email](mailto:[email protected])
[Phone](tel:+1234567890)
[Valid Link](./test.md)
`;
await writeFile(join(testDir, "contact.md"), content);
await writeFile(join(testDir, "test.md"), "# Test");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
// Should only check the valid link, not mailto/tel
});
});
describe("Internal Link Checking", () => {
test("should validate existing internal links", async () => {
await writeFile(join(testDir, "README.md"), "[Valid](./guides/test.md)");
await mkdir(join(testDir, "guides"), { recursive: true });
await writeFile(join(testDir, "guides", "test.md"), "# Test");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
check_internal_links: true,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"status": "valid"');
});
test("should detect broken internal links", async () => {
await writeFile(join(testDir, "README.md"), "[Broken](./missing.md)");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
check_internal_links: true,
fail_on_broken_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"status": "broken"');
});
test("should handle relative path navigation", async () => {
await writeFile(
join(testDir, "guides", "guide1.md"),
"[Back](../README.md)",
);
await writeFile(join(testDir, "README.md"), "# Root");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
check_internal_links: true,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"status": "valid"');
});
test("should handle anchor links in internal files", async () => {
await writeFile(
join(testDir, "README.md"),
"[Section](./guide.md#section)",
);
await writeFile(join(testDir, "guide.md"), "# Guide\n## Section");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
check_internal_links: true,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
});
});
describe("External Link Checking", () => {
test("should skip external links when disabled", async () => {
await writeFile(
join(testDir, "README.md"),
"[External](https://example.com)",
);
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
// Should have 0 links checked since external checking is disabled
});
test("should respect allowed domains", async () => {
await writeFile(
join(testDir, "README.md"),
`
[Allowed](https://github.com/test)
[Not Allowed](https://example.com)
`,
);
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: true,
allowed_domains: ["github.com"],
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
});
test("should handle timeout for slow external links", async () => {
await writeFile(
join(testDir, "README.md"),
"[Slow](https://httpstat.us/200?sleep=10000)",
);
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: true,
timeout_ms: 1000,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
// Should timeout and mark as warning
});
});
describe("Link Filtering", () => {
test("should ignore links matching ignore patterns", async () => {
await writeFile(
join(testDir, "README.md"),
`
[Ignored](./temp/file.md)
[Valid](./guides/test.md)
`,
);
await writeFile(join(testDir, "guides", "test.md"), "# Test");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
ignore_patterns: ["temp/"],
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
// Should only check the valid link, ignore the temp/ link
});
test("should filter by link types", async () => {
await writeFile(
join(testDir, "README.md"),
`
[Internal](./test.md)
[External](https://example.com)
[Anchor](#section)
`,
);
await writeFile(join(testDir, "test.md"), "# Test");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
check_internal_links: true,
check_anchor_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
// Should only check internal links
});
});
describe("Failure Modes", () => {
test("should fail when fail_on_broken_links is true and links are broken", async () => {
await writeFile(join(testDir, "README.md"), "[Broken](./missing.md)");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
fail_on_broken_links: true,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(true);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain("Found 1 broken links");
});
test("should not fail when fail_on_broken_links is false", async () => {
await writeFile(join(testDir, "README.md"), "[Broken](./missing.md)");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
fail_on_broken_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
});
});
describe("Report Generation", () => {
test("should generate comprehensive report with summary", async () => {
await writeFile(
join(testDir, "README.md"),
`
[Valid Internal](./test.md)
[Broken Internal](./missing.md)
`,
);
await writeFile(join(testDir, "test.md"), "# Test");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
fail_on_broken_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"summary"');
expect(contentText).toContain('"results"');
expect(contentText).toContain('"recommendations"');
expect(contentText).toContain('"totalLinks": 2');
});
test("should include execution metrics", async () => {
await writeFile(join(testDir, "README.md"), "[Test](./test.md)");
await writeFile(join(testDir, "test.md"), "# Test");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"executionTime"');
expect(contentText).toContain('"filesScanned"');
});
test("should provide recommendations based on results", async () => {
await writeFile(join(testDir, "README.md"), "[Valid](./test.md)");
await writeFile(join(testDir, "test.md"), "# Test");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain(
"All links are valid - excellent documentation quality!",
);
});
});
describe("Concurrency Control", () => {
test("should respect max_concurrent_checks limit", async () => {
// Create multiple files with links
for (let i = 0; i < 10; i++) {
await writeFile(
join(testDir, `file${i}.md`),
`[Link](./target${i}.md)`,
);
await writeFile(join(testDir, `target${i}.md`), `# Target ${i}`);
}
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
max_concurrent_checks: 2,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
// Should complete successfully with concurrency control
});
});
describe("Edge Cases", () => {
test("should handle files with no links", async () => {
await writeFile(
join(testDir, "README.md"),
"# No Links Here\nJust plain text.",
);
const result = await checkDocumentationLinks({
documentation_path: testDir,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"totalLinks": 0');
});
test("should handle malformed markdown", async () => {
await writeFile(
join(testDir, "README.md"),
`
# Malformed
[Incomplete link](
[Missing closing](test.md
[Valid](./test.md)
`,
);
await writeFile(join(testDir, "test.md"), "# Test");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
// Should handle malformed links gracefully
});
test("should handle binary files gracefully", async () => {
await writeFile(join(testDir, "README.md"), "[Test](./test.md)");
await writeFile(join(testDir, "test.md"), "# Test");
// Create a binary file that should be ignored
await writeFile(
join(testDir, "image.png"),
Buffer.from([0x89, 0x50, 0x4e, 0x47]),
);
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
// Should ignore binary files and process markdown files
});
});
describe("Advanced Branch Coverage Tests", () => {
test("should handle reference links", async () => {
const content = `# Test Document
[Reference Link][ref1]
[Another Reference][ref2]
[ref1]: ./guides/test.md
[ref2]: https://example.com
`;
await writeFile(join(testDir, "test.md"), content);
await writeFile(join(testDir, "guides", "test.md"), "# Guide");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"totalLinks": 1');
});
test("should handle anchor-only links", async () => {
const content = `# Test Document
[Anchor Only](#section)
[Valid Link](./test.md)
`;
await writeFile(join(testDir, "README.md"), content);
await writeFile(join(testDir, "test.md"), "# Test");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
check_anchor_links: true,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
});
test("should handle empty URL in links", async () => {
const content = `# Test Document
[Empty Link]()
[Valid Link](./test.md)
`;
await writeFile(join(testDir, "README.md"), content);
await writeFile(join(testDir, "test.md"), "# Test");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
});
test("should handle different internal link path formats", async () => {
await mkdir(join(testDir, "subdir"), { recursive: true });
await mkdir(join(testDir, "guides"), { recursive: true });
await writeFile(
join(testDir, "subdir", "nested.md"),
`
[Current Dir](./file.md)
[Parent Dir](../README.md)
[Absolute](/guides/test.md)
[Relative](file.md)
`,
);
await writeFile(join(testDir, "subdir", "file.md"), "# File");
await writeFile(join(testDir, "README.md"), "# Root");
await writeFile(join(testDir, "guides", "test.md"), "# Guide");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
});
test("should handle external link domain filtering", async () => {
await writeFile(
join(testDir, "README.md"),
`
[GitHub](https://github.com/test)
[Subdomain](https://api.github.com/test)
[Not Allowed](https://example.com)
`,
);
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: true,
allowed_domains: ["github.com"],
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
});
test("should handle external link fetch errors", async () => {
await writeFile(
join(testDir, "README.md"),
"[Invalid URL](https://invalid-domain-that-does-not-exist-12345.com)",
);
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: true,
timeout_ms: 2000,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"status": "broken"');
});
test("should handle HTTP error status codes", async () => {
await writeFile(
join(testDir, "README.md"),
"[Not Found](https://httpstat.us/404)",
);
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: true,
timeout_ms: 5000,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
});
test("should handle directory scanning errors", async () => {
// Create a directory with restricted permissions
await mkdir(join(testDir, "restricted"), { recursive: true });
await writeFile(join(testDir, "README.md"), "[Test](./test.md)");
await writeFile(join(testDir, "test.md"), "# Test");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
});
test("should handle file reading errors gracefully", async () => {
await writeFile(join(testDir, "README.md"), "[Test](./test.md)");
await writeFile(join(testDir, "test.md"), "# Test");
// Create a file that might cause reading issues
await writeFile(join(testDir, "problematic.md"), "# Test\x00\x01\x02");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
});
test("should generate recommendations for large link counts", async () => {
let content = "# Test Document\n";
for (let i = 0; i < 101; i++) {
content += `[Link ${i}](./file${i}.md)\n`;
await writeFile(join(testDir, `file${i}.md`), `# File ${i}`);
}
await writeFile(join(testDir, "README.md"), content);
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain(
"Consider implementing automated link checking in CI/CD pipeline",
);
});
test("should handle mixed link types with warnings", async () => {
await writeFile(
join(testDir, "README.md"),
`
[Valid](./test.md)
[Broken](./missing.md)
[Timeout](https://httpstat.us/200?sleep=10000)
`,
);
await writeFile(join(testDir, "test.md"), "# Test");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: true,
timeout_ms: 1000,
fail_on_broken_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"brokenLinks"');
expect(contentText).toContain('"warningLinks"');
});
test("should handle node_modules and hidden directory exclusion", async () => {
await mkdir(join(testDir, "node_modules"), { recursive: true });
await mkdir(join(testDir, ".hidden"), { recursive: true });
await writeFile(
join(testDir, "node_modules", "package.md"),
"# Should be ignored",
);
await writeFile(
join(testDir, ".hidden", "secret.md"),
"# Should be ignored",
);
await writeFile(join(testDir, "README.md"), "[Test](./test.md)");
await writeFile(join(testDir, "test.md"), "# Test");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"filesScanned": 2'); // Only README.md and test.md
});
test("should handle different markdown file extensions", async () => {
await writeFile(join(testDir, "README.md"), "[MD](./test.md)");
await writeFile(join(testDir, "guide.mdx"), "[MDX](./test.md)");
await writeFile(join(testDir, "doc.markdown"), "[MARKDOWN](./test.md)");
await writeFile(join(testDir, "test.md"), "# Test");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"filesScanned": 4');
});
});
describe("Special Link Types", () => {
test("should skip mailto links during filtering", async () => {
await writeFile(
join(testDir, "README.md"),
"[Email](mailto:[email protected])\n[Valid](./guide.md)",
);
await writeFile(join(testDir, "guide.md"), "# Guide");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: true,
check_internal_links: true,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
// Should find only the internal link, mailto is filtered
expect(contentText).toContain('"totalLinks": 1');
expect(contentText).toContain("./guide.md");
});
test("should skip tel links during filtering", async () => {
await writeFile(
join(testDir, "README.md"),
"[Phone](tel:+1234567890)\n[Valid](./guide.md)",
);
await writeFile(join(testDir, "guide.md"), "# Guide");
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: true,
check_internal_links: true,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
// Should find only the internal link, tel is filtered
expect(contentText).toContain('"totalLinks": 1');
expect(contentText).toContain("./guide.md");
});
test("should check anchor links when enabled and file exists", async () => {
await writeFile(
join(testDir, "README.md"),
"[Guide Anchor](./guide.md#introduction)",
);
await writeFile(
join(testDir, "guide.md"),
"# Guide\n\n## Introduction\n\nContent here.",
);
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_anchor_links: true,
check_internal_links: true,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"totalLinks": 1');
expect(contentText).toContain("./guide.md#introduction");
});
test("should handle anchor links to other files", async () => {
await writeFile(
join(testDir, "README.md"),
"[Guide Section](./guide.md#setup)",
);
await writeFile(
join(testDir, "guide.md"),
"# Guide\n\n## Setup\n\nSetup instructions.",
);
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_anchor_links: true,
check_internal_links: true,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
expect(contentText).toContain('"totalLinks": 1');
});
});
describe("Error Handling Edge Cases", () => {
test("should handle internal link check errors gracefully", async () => {
await writeFile(
join(testDir, "README.md"),
"[Broken](./nonexistent/deeply/nested/file.md)",
);
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_internal_links: true,
check_external_links: false,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
// Should report broken link
expect(contentText).toContain('"brokenLinks": 1');
});
test("should handle network errors for external links", async () => {
await writeFile(
join(testDir, "README.md"),
"[Invalid](https://this-domain-should-not-exist-12345.com)",
);
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_external_links: true,
timeout_ms: 2000,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
// Should handle as broken or warning
});
test("should handle multiple link types in same document", async () => {
await writeFile(
join(testDir, "README.md"),
`
# Documentation
[Internal](./guide.md)
[External](https://github.com)
[Email](mailto:[email protected])
[Phone](tel:123-456-7890)
[Anchor](./guide.md#section)
## Section
Content here.
`,
);
await writeFile(
join(testDir, "guide.md"),
"# Guide\n\n## Section\n\nContent",
);
const result = await checkDocumentationLinks({
documentation_path: testDir,
check_internal_links: true,
check_external_links: true,
check_anchor_links: true,
});
const formatted = formatMCPResponse(result);
expect(formatted.isError).toBe(false);
const contentText = formatted.content.map((c) => c.text).join(" ");
// Should process checkable link types (internal, external, anchor to file)
// mailto and tel are filtered out
expect(contentText).toContain('"totalLinks": 3');
expect(contentText).toContain("./guide.md");
expect(contentText).toContain("https://github.com");
});
});
});
```
--------------------------------------------------------------------------------
/src/utils/ast-analyzer.ts:
--------------------------------------------------------------------------------
```typescript
/**
* AST-based Code Analyzer (Phase 3)
*
* Uses tree-sitter parsers for multi-language AST analysis
* Provides deep code structure extraction for drift detection
*/
import { parse as parseTypeScript } from "@typescript-eslint/typescript-estree";
import { promises as fs } from "fs";
import path from "path";
import crypto from "crypto";
// Language configuration
/**
 * Maps a language identifier to its tree-sitter parser package name and the
 * file extensions that select it (extensions are matched lowercase).
 * NOTE(review): only TypeScript/JavaScript are currently parsed (via
 * typescript-estree); the tree-sitter entries here appear to be placeholders
 * for future language support — confirm before relying on them.
 */
const LANGUAGE_CONFIGS: Record<
  string,
  { parser: string; extensions: string[] }
> = {
  typescript: { parser: "tree-sitter-typescript", extensions: [".ts", ".tsx"] },
  javascript: {
    parser: "tree-sitter-javascript",
    extensions: [".js", ".jsx", ".mjs"],
  },
  python: { parser: "tree-sitter-python", extensions: [".py"] },
  rust: { parser: "tree-sitter-rust", extensions: [".rs"] },
  go: { parser: "tree-sitter-go", extensions: [".go"] },
  java: { parser: "tree-sitter-java", extensions: [".java"] },
  ruby: { parser: "tree-sitter-ruby", extensions: [".rb"] },
  bash: { parser: "tree-sitter-bash", extensions: [".sh", ".bash"] },
};
/** Extracted signature and metadata for a single function or method. */
export interface FunctionSignature {
  name: string;
  parameters: ParameterInfo[];
  returnType: string | null; // null when no return type could be extracted
  isAsync: boolean;
  isExported: boolean;
  isPublic: boolean;
  docComment: string | null; // attached doc comment text, if any
  startLine: number; // source line range of the declaration
  endLine: number;
  complexity: number; // per-function complexity score computed by the analyzer
  dependencies: string[];
}
/** A single parameter within a function or method signature. */
export interface ParameterInfo {
  name: string;
  type: string | null; // null when the parameter carries no type annotation
  optional: boolean;
  defaultValue: string | null; // source text of the default, if present
}
/** Extracted structure of a class declaration. */
export interface ClassInfo {
  name: string;
  isExported: boolean;
  extends: string | null; // superclass name, or null when there is none
  implements: string[]; // implemented interface names
  methods: FunctionSignature[];
  properties: PropertyInfo[];
  docComment: string | null;
  startLine: number;
  endLine: number;
}
/** A property/field on a class or interface. */
export interface PropertyInfo {
  name: string;
  type: string | null; // null when no type annotation was extracted
  isStatic: boolean;
  isReadonly: boolean;
  visibility: "public" | "private" | "protected";
}
/** Extracted structure of a TypeScript interface declaration. */
export interface InterfaceInfo {
  name: string;
  isExported: boolean;
  extends: string[]; // names of extended interfaces (may be empty)
  properties: PropertyInfo[];
  methods: FunctionSignature[];
  docComment: string | null;
  startLine: number;
  endLine: number;
}
/** Extracted type alias declaration. */
export interface TypeInfo {
  name: string;
  isExported: boolean;
  definition: string; // source text of the aliased type
  docComment: string | null;
  startLine: number;
  endLine: number;
}
/** A single import statement found in the analyzed file. */
export interface ImportInfo {
  source: string; // module specifier being imported from
  imports: Array<{ name: string; alias?: string }>; // imported bindings
  isDefault: boolean;
  startLine: number;
}
/** Complete AST extraction result for one source file. */
export interface ASTAnalysisResult {
  filePath: string;
  language: string; // detected language key (see LANGUAGE_CONFIGS)
  functions: FunctionSignature[];
  classes: ClassInfo[];
  interfaces: InterfaceInfo[];
  types: TypeInfo[];
  imports: ImportInfo[];
  exports: string[]; // names of exported symbols
  contentHash: string; // sha256 hex digest of the file contents
  lastModified: string; // file mtime as an ISO-8601 string
  linesOfCode: number; // raw line count (includes blanks/comments)
  complexity: number; // aggregate complexity across functions and classes
}
/** One structural difference between two AST analysis snapshots. */
export interface CodeDiff {
  type: "added" | "removed" | "modified" | "unchanged";
  category: "function" | "class" | "interface" | "type" | "import" | "export";
  name: string; // symbol the diff applies to
  details: string; // human-readable description of the change
  oldSignature?: string; // present for modified/removed symbols
  newSignature?: string; // present for modified/added symbols
  impactLevel: "breaking" | "major" | "minor" | "patch"; // semver-style impact
}
/**
 * Main AST Analyzer class
 */
export class ASTAnalyzer {
  // Reserved for per-language tree-sitter parser instances (unused so far).
  private parsers: Map<string, any> = new Map();
  // Guards initialize() so it runs at most once.
  private initialized = false;
  /**
   * Initialize tree-sitter parsers for all languages
   *
   * Idempotent; currently only flips the flag because TS/JS parsing goes
   * through typescript-estree and needs no parser registration.
   */
  async initialize(): Promise<void> {
    if (this.initialized) return;
    // Note: Tree-sitter initialization would happen here in a full implementation
    // For now, we're primarily using TypeScript/JavaScript parser
    // console.log(
    //   "AST Analyzer initialized with language support:",
    //   Object.keys(LANGUAGE_CONFIGS),
    // );
    this.initialized = true;
  }
  /**
   * Analyze a single file and extract AST information
   *
   * @param filePath path to the source file
   * @returns analysis result, or null when the extension is unsupported
   *          (fs read/stat errors propagate to the caller)
   */
  async analyzeFile(filePath: string): Promise<ASTAnalysisResult | null> {
    if (!this.initialized) {
      await this.initialize();
    }
    const ext = path.extname(filePath);
    const language = this.detectLanguage(ext);
    if (!language) {
      console.warn(`Unsupported file extension: ${ext}`);
      return null;
    }
    const content = await fs.readFile(filePath, "utf-8");
    const stats = await fs.stat(filePath);
    // Use TypeScript parser for .ts/.tsx files
    if (language === "typescript" || language === "javascript") {
      return this.analyzeTypeScript(
        filePath,
        content,
        stats.mtime.toISOString(),
      );
    }
    // For other languages, use tree-sitter (placeholder)
    return this.analyzeWithTreeSitter(
      filePath,
      content,
      language,
      stats.mtime.toISOString(),
    );
  }
  /**
   * Analyze TypeScript/JavaScript using typescript-estree
   *
   * Parse failures are logged and swallowed: the returned result still
   * carries the content hash, line count, and (then-empty) structure lists.
   */
  private async analyzeTypeScript(
    filePath: string,
    content: string,
    lastModified: string,
  ): Promise<ASTAnalysisResult> {
    const functions: FunctionSignature[] = [];
    const classes: ClassInfo[] = [];
    const interfaces: InterfaceInfo[] = [];
    const types: TypeInfo[] = [];
    const imports: ImportInfo[] = [];
    const exports: string[] = [];
    try {
      const ast = parseTypeScript(content, {
        loc: true,
        range: true,
        tokens: false,
        comment: true,
        // JSX syntax only for .tsx/.jsx files.
        jsx: filePath.endsWith(".tsx") || filePath.endsWith(".jsx"),
      });
      // Extract functions
      this.extractFunctions(ast, content, functions);
      // Extract classes
      this.extractClasses(ast, content, classes);
      // Extract interfaces
      this.extractInterfaces(ast, content, interfaces);
      // Extract type aliases
      this.extractTypes(ast, content, types);
      // Extract imports
      this.extractImports(ast, imports);
      // Extract exports
      this.extractExports(ast, exports);
    } catch (error) {
      console.warn(`Failed to parse TypeScript file ${filePath}:`, error);
    }
    const contentHash = crypto
      .createHash("sha256")
      .update(content)
      .digest("hex");
    const linesOfCode = content.split("\n").length;
    const complexity = this.calculateComplexity(functions, classes);
    return {
      filePath,
      // Language is keyed off the extension, not the parse result.
      language:
        filePath.endsWith(".ts") || filePath.endsWith(".tsx")
          ? "typescript"
          : "javascript",
      functions,
      classes,
      interfaces,
      types,
      imports,
      exports,
      contentHash,
      lastModified,
      linesOfCode,
      complexity,
    };
  }
/**
* Analyze using tree-sitter (placeholder for other languages)
*/
private async analyzeWithTreeSitter(
filePath: string,
content: string,
language: string,
lastModified: string,
): Promise<ASTAnalysisResult> {
// Placeholder for tree-sitter analysis
// In a full implementation, we'd parse the content using tree-sitter
// and extract language-specific constructs
const contentHash = crypto
.createHash("sha256")
.update(content)
.digest("hex");
const linesOfCode = content.split("\n").length;
return {
filePath,
language,
functions: [],
classes: [],
interfaces: [],
types: [],
imports: [],
exports: [],
contentHash,
lastModified,
linesOfCode,
complexity: 0,
};
}
  /**
   * Extract function declarations from AST
   *
   * Collects top-level function declarations and arrow functions bound to
   * variables. Export wrappers are unwrapped so the inner declaration is
   * recorded with isExported = true.
   */
  private extractFunctions(
    ast: any,
    content: string,
    functions: FunctionSignature[],
  ): void {
    const lines = content.split("\n");
    const traverse = (node: any, isExported = false) => {
      if (!node) return;
      // Handle export declarations
      if (
        node.type === "ExportNamedDeclaration" ||
        node.type === "ExportDefaultDeclaration"
      ) {
        if (node.declaration) {
          traverse(node.declaration, true);
        }
        // Re-export specifiers carry no declaration, so stop here.
        return;
      }
      // Function declarations
      if (node.type === "FunctionDeclaration") {
        const func = this.parseFunctionNode(node, lines, isExported);
        if (func) functions.push(func);
      }
      // Arrow functions assigned to variables
      if (node.type === "VariableDeclaration") {
        for (const declarator of node.declarations || []) {
          if (declarator.init?.type === "ArrowFunctionExpression") {
            const func = this.parseArrowFunction(declarator, lines, isExported);
            if (func) functions.push(func);
          }
        }
      }
      // Traverse children generically: every object/array-valued property is
      // assumed to be a child node. Export flag does not propagate deeper.
      for (const key in node) {
        if (typeof node[key] === "object" && node[key] !== null) {
          if (Array.isArray(node[key])) {
            node[key].forEach((child: any) => traverse(child, false));
          } else {
            traverse(node[key], false);
          }
        }
      }
    };
    traverse(ast);
  }
  /**
   * Parse function node
   *
   * Returns null for anonymous functions (no identifier).
   */
  private parseFunctionNode(
    node: any,
    lines: string[],
    isExported: boolean,
  ): FunctionSignature | null {
    if (!node.id?.name) return null;
    // loc lines are 1-based; passing line - 1 hands extractDocComment the
    // 0-based index of the declaration line itself.
    const docComment = this.extractDocComment(node.loc?.start.line - 1, lines);
    const parameters = this.extractParameters(node.params);
    return {
      name: node.id.name,
      parameters,
      returnType: this.extractReturnType(node),
      isAsync: node.async || false,
      isExported,
      // Free functions are always considered public.
      isPublic: true,
      docComment,
      startLine: node.loc?.start.line || 0,
      endLine: node.loc?.end.line || 0,
      complexity: this.calculateFunctionComplexity(node),
      dependencies: [],
    };
  }
  /**
   * Parse arrow function
   *
   * The declarator carries the name/loc; the init node carries the
   * parameters, async flag, and return type.
   */
  private parseArrowFunction(
    declarator: any,
    lines: string[],
    isExported: boolean,
  ): FunctionSignature | null {
    if (!declarator.id?.name) return null;
    const node = declarator.init;
    const docComment = this.extractDocComment(
      declarator.loc?.start.line - 1,
      lines,
    );
    const parameters = this.extractParameters(node.params);
    return {
      name: declarator.id.name,
      parameters,
      returnType: this.extractReturnType(node),
      isAsync: node.async || false,
      isExported,
      isPublic: true,
      docComment,
      startLine: declarator.loc?.start.line || 0,
      endLine: declarator.loc?.end.line || 0,
      complexity: this.calculateFunctionComplexity(node),
      dependencies: [],
    };
  }
  /**
   * Extract classes from AST
   *
   * Same traversal strategy as extractFunctions: unwrap export wrappers,
   * then walk every object/array property as a potential child node.
   */
  private extractClasses(
    ast: any,
    content: string,
    classes: ClassInfo[],
  ): void {
    const lines = content.split("\n");
    const traverse = (node: any, isExported = false) => {
      if (!node) return;
      // Handle export declarations
      if (
        node.type === "ExportNamedDeclaration" ||
        node.type === "ExportDefaultDeclaration"
      ) {
        if (node.declaration) {
          traverse(node.declaration, true);
        }
        return;
      }
      // Anonymous classes (e.g. `export default class {}`) are skipped.
      if (node.type === "ClassDeclaration" && node.id?.name) {
        const classInfo = this.parseClassNode(node, lines, isExported);
        if (classInfo) classes.push(classInfo);
      }
      for (const key in node) {
        if (typeof node[key] === "object" && node[key] !== null) {
          if (Array.isArray(node[key])) {
            node[key].forEach((child: any) => traverse(child, false));
          } else {
            traverse(node[key], false);
          }
        }
      }
    };
    traverse(ast);
  }
  /**
   * Parse class node
   *
   * Collects methods (MethodDefinition) and fields (PropertyDefinition)
   * from the class body.
   */
  private parseClassNode(
    node: any,
    lines: string[],
    isExported: boolean,
  ): ClassInfo | null {
    const methods: FunctionSignature[] = [];
    const properties: PropertyInfo[] = [];
    // Extract methods and properties
    if (node.body?.body) {
      for (const member of node.body.body) {
        if (member.type === "MethodDefinition") {
          const method = this.parseMethodNode(member, lines);
          if (method) methods.push(method);
        } else if (member.type === "PropertyDefinition") {
          const property = this.parsePropertyNode(member);
          if (property) properties.push(property);
        }
      }
    }
    return {
      name: node.id.name,
      isExported,
      // assumes the superclass is a plain identifier — a member-expression
      // base (e.g. `extends ns.Base`) yields null here; TODO confirm
      extends: node.superClass?.name || null,
      implements:
        node.implements?.map((i: any) => i.expression?.name || "unknown") || [],
      methods,
      properties,
      docComment: this.extractDocComment(node.loc?.start.line - 1, lines),
      startLine: node.loc?.start.line || 0,
      endLine: node.loc?.end.line || 0,
    };
  }
  /**
   * Parse method node
   *
   * Method details (params, async, body) live on node.value; the name and
   * loc live on the MethodDefinition itself.
   */
  private parseMethodNode(
    node: any,
    lines: string[],
  ): FunctionSignature | null {
    if (!node.key?.name) return null;
    return {
      name: node.key.name,
      parameters: this.extractParameters(node.value?.params || []),
      returnType: this.extractReturnType(node.value),
      isAsync: node.value?.async || false,
      isExported: false,
      // Convention-based: a leading underscore marks a non-public method.
      isPublic: !node.key.name.startsWith("_"),
      docComment: this.extractDocComment(node.loc?.start.line - 1, lines),
      startLine: node.loc?.start.line || 0,
      endLine: node.loc?.end.line || 0,
      complexity: this.calculateFunctionComplexity(node.value),
      dependencies: [],
    };
  }
  /**
   * Parse property node
   */
  private parsePropertyNode(node: any): PropertyInfo | null {
    if (!node.key?.name) return null;
    return {
      name: node.key.name,
      type: this.extractTypeAnnotation(node.typeAnnotation),
      isStatic: node.static || false,
      isReadonly: node.readonly || false,
      visibility: this.determineVisibility(node),
    };
  }
  /**
   * Extract interfaces from AST
   *
   * Finds TSInterfaceDeclaration nodes, unwrapping export wrappers so the
   * exported flag is recorded.
   */
  private extractInterfaces(
    ast: any,
    content: string,
    interfaces: InterfaceInfo[],
  ): void {
    const lines = content.split("\n");
    const traverse = (node: any, isExported = false) => {
      if (!node) return;
      // Handle export declarations
      if (
        node.type === "ExportNamedDeclaration" ||
        node.type === "ExportDefaultDeclaration"
      ) {
        if (node.declaration) {
          traverse(node.declaration, true);
        }
        return;
      }
      if (node.type === "TSInterfaceDeclaration" && node.id?.name) {
        const interfaceInfo = this.parseInterfaceNode(node, lines, isExported);
        if (interfaceInfo) interfaces.push(interfaceInfo);
      }
      for (const key in node) {
        if (typeof node[key] === "object" && node[key] !== null) {
          if (Array.isArray(node[key])) {
            node[key].forEach((child: any) => traverse(child, false));
          } else {
            traverse(node[key], false);
          }
        }
      }
    };
    traverse(ast);
  }
  /**
   * Parse interface node
   *
   * Splits members into property signatures and method signatures; other
   * member kinds (index/call signatures) are ignored.
   */
  private parseInterfaceNode(
    node: any,
    lines: string[],
    isExported: boolean,
  ): InterfaceInfo | null {
    const properties: PropertyInfo[] = [];
    const methods: FunctionSignature[] = [];
    if (node.body?.body) {
      for (const member of node.body.body) {
        if (member.type === "TSPropertySignature") {
          const prop = this.parseInterfaceProperty(member);
          if (prop) properties.push(prop);
        } else if (member.type === "TSMethodSignature") {
          const method = this.parseInterfaceMethod(member);
          if (method) methods.push(method);
        }
      }
    }
    return {
      name: node.id.name,
      isExported,
      extends:
        node.extends?.map((e: any) => e.expression?.name || "unknown") || [],
      properties,
      methods,
      docComment: this.extractDocComment(node.loc?.start.line - 1, lines),
      startLine: node.loc?.start.line || 0,
      endLine: node.loc?.end.line || 0,
    };
  }
  /**
   * Parse interface property
   */
  private parseInterfaceProperty(node: any): PropertyInfo | null {
    if (!node.key?.name) return null;
    return {
      name: node.key.name,
      type: this.extractTypeAnnotation(node.typeAnnotation),
      // Interface members are never static and always public.
      isStatic: false,
      isReadonly: node.readonly || false,
      visibility: "public",
    };
  }
  /**
   * Parse interface method
   *
   * Signatures have no body, so complexity is 0 and async is unknowable.
   */
  private parseInterfaceMethod(node: any): FunctionSignature | null {
    if (!node.key?.name) return null;
    return {
      name: node.key.name,
      parameters: this.extractParameters(node.params || []),
      returnType: this.extractTypeAnnotation(node.returnType),
      isAsync: false,
      isExported: false,
      isPublic: true,
      docComment: null,
      startLine: node.loc?.start.line || 0,
      endLine: node.loc?.end.line || 0,
      complexity: 0,
      dependencies: [],
    };
  }
  /**
   * Extract type aliases from AST
   *
   * Finds TSTypeAliasDeclaration nodes, unwrapping export wrappers.
   */
  private extractTypes(ast: any, content: string, types: TypeInfo[]): void {
    const lines = content.split("\n");
    const traverse = (node: any, isExported = false) => {
      if (!node) return;
      // Handle export declarations
      if (
        node.type === "ExportNamedDeclaration" ||
        node.type === "ExportDefaultDeclaration"
      ) {
        if (node.declaration) {
          traverse(node.declaration, true);
        }
        return;
      }
      if (node.type === "TSTypeAliasDeclaration" && node.id?.name) {
        const typeInfo = this.parseTypeNode(node, lines, isExported);
        if (typeInfo) types.push(typeInfo);
      }
      for (const key in node) {
        if (typeof node[key] === "object" && node[key] !== null) {
          if (Array.isArray(node[key])) {
            node[key].forEach((child: any) => traverse(child, false));
          } else {
            traverse(node[key], false);
          }
        }
      }
    };
    traverse(ast);
  }
  /**
   * Parse type alias node
   *
   * The rendered definition comes from extractTypeDefinition, so complex
   * right-hand sides degrade to "unknown".
   */
  private parseTypeNode(
    node: any,
    lines: string[],
    isExported: boolean,
  ): TypeInfo | null {
    return {
      name: node.id.name,
      isExported,
      definition: this.extractTypeDefinition(node.typeAnnotation),
      docComment: this.extractDocComment(node.loc?.start.line - 1, lines),
      startLine: node.loc?.start.line || 0,
      endLine: node.loc?.end.line || 0,
    };
  }
/**
* Extract imports from AST
*/
private extractImports(ast: any, imports: ImportInfo[]): void {
const traverse = (node: any) => {
if (!node) return;
if (node.type === "ImportDeclaration") {
const importInfo: ImportInfo = {
source: node.source?.value || "",
imports: [],
isDefault: false,
startLine: node.loc?.start.line || 0,
};
for (const specifier of node.specifiers || []) {
if (specifier.type === "ImportDefaultSpecifier") {
importInfo.isDefault = true;
importInfo.imports.push({
name: specifier.local?.name || "default",
});
} else if (specifier.type === "ImportSpecifier") {
importInfo.imports.push({
name: specifier.imported?.name || "",
alias:
specifier.local?.name !== specifier.imported?.name
? specifier.local?.name
: undefined,
});
}
}
imports.push(importInfo);
}
for (const key in node) {
if (typeof node[key] === "object" && node[key] !== null) {
if (Array.isArray(node[key])) {
node[key].forEach((child: any) => traverse(child));
} else {
traverse(node[key]);
}
}
}
};
traverse(ast);
}
  /**
   * Extract exports from AST
   *
   * Collects exported names from named exports (declarations, variable
   * lists, and specifiers) and records default exports as the declaration
   * name or the literal "default".
   */
  private extractExports(ast: any, exports: string[]): void {
    const traverse = (node: any) => {
      if (!node) return;
      // Named exports
      if (node.type === "ExportNamedDeclaration") {
        if (node.declaration) {
          if (node.declaration.id?.name) {
            exports.push(node.declaration.id.name);
          } else if (node.declaration.declarations) {
            // `export const a = ..., b = ...` — one name per declarator.
            for (const decl of node.declaration.declarations) {
              if (decl.id?.name) exports.push(decl.id.name);
            }
          }
        }
        for (const specifier of node.specifiers || []) {
          if (specifier.exported?.name) exports.push(specifier.exported.name);
        }
      }
      // Default export
      if (node.type === "ExportDefaultDeclaration") {
        if (node.declaration?.id?.name) {
          exports.push(node.declaration.id.name);
        } else {
          exports.push("default");
        }
      }
      for (const key in node) {
        if (typeof node[key] === "object" && node[key] !== null) {
          if (Array.isArray(node[key])) {
            node[key].forEach((child: any) => traverse(child));
          } else {
            traverse(node[key]);
          }
        }
      }
    };
    traverse(ast);
  }
// Helper methods
private extractParameters(params: any[]): ParameterInfo[] {
return params.map((param) => ({
name: param.name || param.argument?.name || param.left?.name || "unknown",
type: this.extractTypeAnnotation(param.typeAnnotation),
optional: param.optional || false,
defaultValue: param.right ? this.extractDefaultValue(param.right) : null,
}));
}
private extractReturnType(node: any): string | null {
return this.extractTypeAnnotation(node?.returnType);
}
private extractTypeAnnotation(typeAnnotation: any): string | null {
if (!typeAnnotation) return null;
if (typeAnnotation.typeAnnotation)
return this.extractTypeDefinition(typeAnnotation.typeAnnotation);
return this.extractTypeDefinition(typeAnnotation);
}
private extractTypeDefinition(typeNode: any): string {
if (!typeNode) return "unknown";
if (typeNode.type === "TSStringKeyword") return "string";
if (typeNode.type === "TSNumberKeyword") return "number";
if (typeNode.type === "TSBooleanKeyword") return "boolean";
if (typeNode.type === "TSAnyKeyword") return "any";
if (typeNode.type === "TSVoidKeyword") return "void";
if (typeNode.type === "TSTypeReference")
return typeNode.typeName?.name || "unknown";
return "unknown";
}
private extractDefaultValue(node: any): string | null {
if (node.type === "Literal") return String(node.value);
if (node.type === "Identifier") return node.name;
return null;
}
private extractDocComment(
lineNumber: number,
lines: string[],
): string | null {
if (lineNumber < 0 || lineNumber >= lines.length) return null;
const comment: string[] = [];
let currentLine = lineNumber;
// Look backwards for JSDoc comment
while (currentLine >= 0) {
const line = lines[currentLine].trim();
if (line.startsWith("*/")) {
comment.unshift(line);
currentLine--;
continue;
}
if (line.startsWith("*") || line.startsWith("/**")) {
comment.unshift(line);
if (line.startsWith("/**")) break;
currentLine--;
continue;
}
if (comment.length > 0) break;
currentLine--;
}
return comment.length > 0 ? comment.join("\n") : null;
}
  // NOTE(review): typescript-estree does not attach `parent` pointers by
  // default, so the upward walk below only ever inspects `node` itself —
  // confirm whether this helper is still used, or wire up parent links.
  private isExported(node: any): boolean {
    if (!node) return false;
    // Check parent for export
    let current = node;
    while (current) {
      if (
        current.type === "ExportNamedDeclaration" ||
        current.type === "ExportDefaultDeclaration"
      ) {
        return true;
      }
      current = current.parent;
    }
    return false;
  }
  /**
   * Classify member visibility: explicit accessibility modifier wins,
   * otherwise underscore/hash naming conventions imply private.
   */
  private determineVisibility(node: any): "public" | "private" | "protected" {
    if (node.accessibility) return node.accessibility;
    if (node.key?.name?.startsWith("_")) return "private";
    // NOTE(review): ES private fields parse as PrivateIdentifier whose name
    // usually excludes the "#", so this check may never match — confirm.
    if (node.key?.name?.startsWith("#")) return "private";
    return "public";
  }
private calculateFunctionComplexity(node: any): number {
// Simplified cyclomatic complexity
let complexity = 1;
const traverse = (n: any) => {
if (!n) return;
// Increment for control flow statements
if (
[
"IfStatement",
"ConditionalExpression",
"ForStatement",
"WhileStatement",
"DoWhileStatement",
"SwitchCase",
"CatchClause",
].includes(n.type)
) {
complexity++;
}
for (const key in n) {
if (typeof n[key] === "object" && n[key] !== null) {
if (Array.isArray(n[key])) {
n[key].forEach((child: any) => traverse(child));
} else {
traverse(n[key]);
}
}
}
};
traverse(node);
return complexity;
}
private calculateComplexity(
functions: FunctionSignature[],
classes: ClassInfo[],
): number {
const functionComplexity = functions.reduce(
(sum, f) => sum + f.complexity,
0,
);
const classComplexity = classes.reduce(
(sum, c) =>
sum + c.methods.reduce((methodSum, m) => methodSum + m.complexity, 0),
0,
);
return functionComplexity + classComplexity;
}
private detectLanguage(ext: string): string | null {
for (const [lang, config] of Object.entries(LANGUAGE_CONFIGS)) {
if (config.extensions.includes(ext)) return lang;
}
return null;
}
/**
* Compare two AST analysis results and detect changes
*/
async detectDrift(
oldAnalysis: ASTAnalysisResult,
newAnalysis: ASTAnalysisResult,
): Promise<CodeDiff[]> {
const diffs: CodeDiff[] = [];
// Compare functions
diffs.push(
...this.compareFunctions(oldAnalysis.functions, newAnalysis.functions),
);
// Compare classes
diffs.push(
...this.compareClasses(oldAnalysis.classes, newAnalysis.classes),
);
// Compare interfaces
diffs.push(
...this.compareInterfaces(oldAnalysis.interfaces, newAnalysis.interfaces),
);
// Compare types
diffs.push(...this.compareTypes(oldAnalysis.types, newAnalysis.types));
return diffs;
}
  /**
   * Diff two function lists by name: removed, added, then modified.
   * Impact: removing an exported function is breaking; additions are patch.
   */
  private compareFunctions(
    oldFuncs: FunctionSignature[],
    newFuncs: FunctionSignature[],
  ): CodeDiff[] {
    const diffs: CodeDiff[] = [];
    // Name-keyed maps: overloads/duplicates collapse to the last entry.
    const oldMap = new Map(oldFuncs.map((f) => [f.name, f]));
    const newMap = new Map(newFuncs.map((f) => [f.name, f]));
    // Check for removed functions
    for (const [name, func] of oldMap) {
      if (!newMap.has(name)) {
        diffs.push({
          type: "removed",
          category: "function",
          name,
          details: `Function '${name}' was removed`,
          oldSignature: this.formatFunctionSignature(func),
          impactLevel: func.isExported ? "breaking" : "minor",
        });
      }
    }
    // Check for added functions
    for (const [name, func] of newMap) {
      if (!oldMap.has(name)) {
        diffs.push({
          type: "added",
          category: "function",
          name,
          details: `Function '${name}' was added`,
          newSignature: this.formatFunctionSignature(func),
          impactLevel: "patch",
        });
      }
    }
    // Check for modified functions
    for (const [name, newFunc] of newMap) {
      const oldFunc = oldMap.get(name);
      if (oldFunc) {
        const changes = this.detectFunctionChanges(oldFunc, newFunc);
        if (changes.length > 0) {
          diffs.push({
            type: "modified",
            category: "function",
            name,
            details: changes.join("; "),
            oldSignature: this.formatFunctionSignature(oldFunc),
            newSignature: this.formatFunctionSignature(newFunc),
            impactLevel: this.determineFunctionImpact(oldFunc, newFunc),
          });
        }
      }
    }
    return diffs;
  }
  /**
   * Diff two class lists by name. Only additions and removals are
   * reported; member-level modifications are not diffed here.
   */
  private compareClasses(
    oldClasses: ClassInfo[],
    newClasses: ClassInfo[],
  ): CodeDiff[] {
    const diffs: CodeDiff[] = [];
    const oldMap = new Map(oldClasses.map((c) => [c.name, c]));
    const newMap = new Map(newClasses.map((c) => [c.name, c]));
    for (const [name, oldClass] of oldMap) {
      if (!newMap.has(name)) {
        diffs.push({
          type: "removed",
          category: "class",
          name,
          details: `Class '${name}' was removed`,
          impactLevel: oldClass.isExported ? "breaking" : "minor",
        });
      }
    }
    for (const [name] of newMap) {
      if (!oldMap.has(name)) {
        diffs.push({
          type: "added",
          category: "class",
          name,
          details: `Class '${name}' was added`,
          impactLevel: "patch",
        });
      }
    }
    return diffs;
  }
  /**
   * Diff two interface lists by name; same add/remove-only semantics as
   * compareClasses.
   */
  private compareInterfaces(
    oldInterfaces: InterfaceInfo[],
    newInterfaces: InterfaceInfo[],
  ): CodeDiff[] {
    const diffs: CodeDiff[] = [];
    const oldMap = new Map(oldInterfaces.map((i) => [i.name, i]));
    const newMap = new Map(newInterfaces.map((i) => [i.name, i]));
    for (const [name, oldInterface] of oldMap) {
      if (!newMap.has(name)) {
        diffs.push({
          type: "removed",
          category: "interface",
          name,
          details: `Interface '${name}' was removed`,
          impactLevel: oldInterface.isExported ? "breaking" : "minor",
        });
      }
    }
    for (const [name] of newMap) {
      if (!oldMap.has(name)) {
        diffs.push({
          type: "added",
          category: "interface",
          name,
          details: `Interface '${name}' was added`,
          impactLevel: "patch",
        });
      }
    }
    return diffs;
  }
private compareTypes(oldTypes: TypeInfo[], newTypes: TypeInfo[]): CodeDiff[] {
const diffs: CodeDiff[] = [];
const oldMap = new Map(oldTypes.map((t) => [t.name, t]));
const newMap = new Map(newTypes.map((t) => [t.name, t]));
for (const [name, oldType] of oldMap) {
if (!newMap.has(name)) {
diffs.push({
type: "removed",
category: "type",
name,
details: `Type '${name}' was removed`,
impactLevel: oldType.isExported ? "breaking" : "minor",
});
}
}
for (const [name] of newMap) {
if (!oldMap.has(name)) {
diffs.push({
type: "added",
category: "type",
name,
details: `Type '${name}' was added`,
impactLevel: "patch",
});
}
}
return diffs;
}
private detectFunctionChanges(
oldFunc: FunctionSignature,
newFunc: FunctionSignature,
): string[] {
const changes: string[] = [];
// Check parameter changes
if (oldFunc.parameters.length !== newFunc.parameters.length) {
changes.push(
`Parameter count changed from ${oldFunc.parameters.length} to ${newFunc.parameters.length}`,
);
}
// Check return type changes
if (oldFunc.returnType !== newFunc.returnType) {
changes.push(
`Return type changed from '${oldFunc.returnType}' to '${newFunc.returnType}'`,
);
}
// Check async changes
if (oldFunc.isAsync !== newFunc.isAsync) {
changes.push(
newFunc.isAsync
? "Function became async"
: "Function is no longer async",
);
}
// Check export changes
if (oldFunc.isExported !== newFunc.isExported) {
changes.push(
newFunc.isExported
? "Function is now exported"
: "Function is no longer exported",
);
}
return changes;
}
private determineFunctionImpact(
oldFunc: FunctionSignature,
newFunc: FunctionSignature,
): "breaking" | "major" | "minor" | "patch" {
// Breaking changes
if (oldFunc.isExported) {
if (oldFunc.parameters.length !== newFunc.parameters.length)
return "breaking";
if (oldFunc.returnType !== newFunc.returnType) return "breaking";
// If a function was exported and is no longer exported, that's breaking
if (oldFunc.isExported && !newFunc.isExported) return "breaking";
}
// Major changes
if (oldFunc.isAsync !== newFunc.isAsync) return "major";
// Minor changes (new API surface)
// If a function becomes exported, that's a minor change (new feature/API)
if (!oldFunc.isExported && newFunc.isExported) return "minor";
return "patch";
}
private formatFunctionSignature(func: FunctionSignature): string {
const params = func.parameters
.map((p) => `${p.name}: ${p.type || "any"}`)
.join(", ");
const returnType = func.returnType || "void";
const asyncPrefix = func.isAsync ? "async " : "";
return `${asyncPrefix}${func.name}(${params}): ${returnType}`;
}
}
```
--------------------------------------------------------------------------------
/src/memory/knowledge-graph.ts:
--------------------------------------------------------------------------------
```typescript
/**
* Knowledge Graph Architecture for DocuMCP
* Implements Phase 1.1: Enhanced Knowledge Graph Schema Implementation
* Previously: Issue #48: Knowledge Graph Architecture
*
* Creates entity relationship graphs for projects, technologies, patterns, and dependencies
* to enable advanced reasoning and recommendation improvements.
*
* Enhanced with comprehensive entity types and relationship schemas following NEW_PRD.md
*/
import { MemoryManager } from "./manager.js";
import { MemoryEntry } from "./storage.js";
import {
validateEntity,
validateRelationship,
SCHEMA_METADATA,
} from "./schemas.js";
/** A typed entity in the knowledge graph (project, technology, outcome, ...). */
export interface GraphNode {
  id: string;
  type:
    | "project"
    | "technology"
    | "pattern"
    | "user"
    | "outcome"
    | "recommendation"
    | "configuration"
    | "documentation"
    | "code_file"
    | "documentation_section"
    | "link_validation"
    | "sync_event"
    | "documentation_freshness_event";
  label: string;
  properties: Record<string, any>;
  // Importance score; recomputed from connectivity in updateWeights().
  weight: number;
  lastUpdated: string;
}
/** A directed, typed relationship between two nodes. */
export interface GraphEdge {
  // Derived id: `${source}-${type}-${target}` (see addEdge).
  id: string;
  source: string;
  target: string;
  type:
    | "uses"
    | "similar_to"
    | "depends_on"
    | "recommends"
    | "results_in"
    | "created_by"
    | "project_uses_technology"
    | "user_prefers_ssg"
    | "project_deployed_with"
    | "documents"
    | "references"
    | "outdated_for"
    | "has_link_validation"
    | "requires_fix"
    | "project_has_freshness_event"
    | (string & NonNullable<unknown>); // Allow any string (for timestamped types like "project_deployed_with:2024-...")
  weight: number;
  properties: Record<string, any>;
  // 0..1 belief that the relationship holds.
  confidence: number;
  lastUpdated: string;
}
/** A traversal result: an ordered node/edge sequence with aggregate scores. */
export interface GraphPath {
  nodes: GraphNode[];
  edges: GraphEdge[];
  totalWeight: number;
  confidence: number;
}
/** Filter criteria for graph queries; all fields are optional. */
export interface GraphQuery {
  nodeTypes?: string[];
  edgeTypes?: string[];
  properties?: Record<string, any>;
  minWeight?: number;
  maxDepth?: number;
  startNode?: string;
}
/** A recommendation justified by a concrete path through the graph. */
export interface RecommendationPath {
  from: GraphNode;
  to: GraphNode;
  path: GraphPath;
  reasoning: string[];
  confidence: number;
}
export class KnowledgeGraph {
  // Node/edge stores keyed by id; edges overwrite on id collision.
  private nodes: Map<string, GraphNode>;
  private edges: Map<string, GraphEdge>;
  // Directed adjacency: source id -> set of target ids.
  private adjacencyList: Map<string, Set<string>>;
  private memoryManager: MemoryManager;
  private lastUpdate: string;
  constructor(memoryManager: MemoryManager) {
    this.nodes = new Map();
    this.edges = new Map();
    this.adjacencyList = new Map();
    this.memoryManager = memoryManager;
    this.lastUpdate = new Date().toISOString();
  }
  // Load any persisted graph state, then (re)derive entities from memories.
  async initialize(): Promise<void> {
    await this.loadFromMemory();
    await this.buildFromMemories();
  }
/**
* Add or update a node in the knowledge graph
*/
addNode(node: Omit<GraphNode, "lastUpdated">): GraphNode {
const fullNode: GraphNode = {
...node,
lastUpdated: new Date().toISOString(),
};
this.nodes.set(node.id, fullNode);
if (!this.adjacencyList.has(node.id)) {
this.adjacencyList.set(node.id, new Set());
}
return fullNode;
}
  /**
   * Add or update an edge in the knowledge graph
   *
   * Edge identity is `${source}-${type}-${target}`, so re-adding the same
   * triple overwrites the previous edge. Only the forward (source→target)
   * direction is recorded in the adjacency list.
   */
  addEdge(edge: Omit<GraphEdge, "id" | "lastUpdated">): GraphEdge {
    const edgeId = `${edge.source}-${edge.type}-${edge.target}`;
    const fullEdge: GraphEdge = {
      ...edge,
      id: edgeId,
      lastUpdated: new Date().toISOString(),
    };
    this.edges.set(edgeId, fullEdge);
    // Update adjacency list
    if (!this.adjacencyList.has(edge.source)) {
      this.adjacencyList.set(edge.source, new Set());
    }
    if (!this.adjacencyList.has(edge.target)) {
      this.adjacencyList.set(edge.target, new Set());
    }
    this.adjacencyList.get(edge.source)!.add(edge.target);
    return fullEdge;
  }
  /**
   * Build knowledge graph from memory entries
   *
   * Empty-query search returns all memories (sorted by timestamp); each is
   * converted to nodes/edges, then derived relationships and weights are
   * recomputed.
   */
  async buildFromMemories(): Promise<void> {
    const memories = await this.memoryManager.search("", {
      sortBy: "timestamp",
    });
    for (const memory of memories) {
      await this.processMemoryEntry(memory);
    }
    await this.computeRelationships();
    await this.updateWeights();
  }
  /**
   * Process a single memory entry to extract graph entities
   *
   * Entries without a projectId are ignored entirely. Depending on the
   * memory type, this creates the project node plus technology (language,
   * framework, SSG) and outcome nodes, wired with typed edges.
   */
  private async processMemoryEntry(memory: MemoryEntry): Promise<void> {
    // Create project node
    if (memory.metadata.projectId) {
      const projectNode = this.addNode({
        id: `project:${memory.metadata.projectId}`,
        type: "project",
        label: memory.metadata.projectId,
        properties: {
          repository: memory.metadata.repository,
          lastActivity: memory.timestamp,
        },
        weight: 1.0,
      });
      // Create technology nodes
      // NOTE(review): assumes data.language.primary is set whenever
      // data.language is — a missing primary yields id "tech:undefined".
      if (memory.type === "analysis" && memory.data.language) {
        const langNode = this.addNode({
          id: `tech:${memory.data.language.primary}`,
          type: "technology",
          label: memory.data.language.primary,
          properties: {
            category: "language",
            popularity: this.getTechnologyPopularity(
              memory.data.language.primary,
            ),
          },
          weight: 1.0,
        });
        this.addEdge({
          source: projectNode.id,
          target: langNode.id,
          type: "uses",
          weight: 1.0,
          confidence: 0.9,
          properties: { source: "analysis" },
        });
      }
      // Create framework nodes
      if (memory.data.framework?.name) {
        const frameworkNode = this.addNode({
          id: `tech:${memory.data.framework.name}`,
          type: "technology",
          label: memory.data.framework.name,
          properties: {
            category: "framework",
            version: memory.data.framework.version,
          },
          weight: 1.0,
        });
        this.addEdge({
          source: projectNode.id,
          target: frameworkNode.id,
          type: "uses",
          weight: 1.0,
          confidence: 0.8,
          properties: { source: "analysis" },
        });
      }
      // Create SSG recommendation nodes
      if (memory.type === "recommendation" && memory.data.recommended) {
        const ssgNode = this.addNode({
          id: `tech:${memory.data.recommended}`,
          type: "technology",
          label: memory.data.recommended,
          properties: {
            category: "ssg",
            score: memory.data.score,
          },
          weight: 1.0,
        });
        this.addEdge({
          source: projectNode.id,
          target: ssgNode.id,
          type: "recommends",
          // Edge strength mirrors the recommendation score when present.
          weight: memory.data.score || 1.0,
          confidence: memory.data.confidence || 0.5,
          properties: {
            source: "recommendation",
            reasoning: memory.data.reasoning,
          },
        });
      }
      // Create outcome nodes
      if (memory.type === "deployment") {
        const outcomeNode = this.addNode({
          // Outcomes are shared per (status, ssg) pair, not per project.
          id: `outcome:${memory.data.status}:${memory.metadata.ssg}`,
          type: "outcome",
          label: `${memory.data.status} with ${memory.metadata.ssg}`,
          properties: {
            status: memory.data.status,
            ssg: memory.metadata.ssg,
            duration: memory.data.duration,
          },
          // Failures start with a lower baseline weight.
          weight: memory.data.status === "success" ? 1.0 : 0.5,
        });
        this.addEdge({
          source: projectNode.id,
          target: outcomeNode.id,
          type: "results_in",
          weight: 1.0,
          confidence: 1.0,
          properties: {
            timestamp: memory.timestamp,
            details: memory.data.details,
          },
        });
      }
    }
  }
/**
* Compute additional relationships based on patterns
*/
private async computeRelationships(): Promise<void> {
// Find similar projects
await this.computeProjectSimilarity();
// Find technology dependencies
await this.computeTechnologyDependencies();
// Find pattern relationships
await this.computePatternRelationships();
}
/**
* Compute project similarity relationships
*/
private async computeProjectSimilarity(): Promise<void> {
const projectNodes = Array.from(this.nodes.values()).filter(
(node) => node.type === "project",
);
for (let i = 0; i < projectNodes.length; i++) {
for (let j = i + 1; j < projectNodes.length; j++) {
const similarity = this.calculateProjectSimilarity(
projectNodes[i],
projectNodes[j],
);
if (similarity > 0.7) {
this.addEdge({
source: projectNodes[i].id,
target: projectNodes[j].id,
type: "similar_to",
weight: similarity,
confidence: similarity,
properties: {
computed: true,
similarityScore: similarity,
},
});
}
}
}
}
/**
* Calculate similarity between two projects
*/
private calculateProjectSimilarity(
project1: GraphNode,
project2: GraphNode,
): number {
const tech1 = this.getConnectedTechnologies(project1.id);
const tech2 = this.getConnectedTechnologies(project2.id);
if (tech1.size === 0 || tech2.size === 0) return 0;
const intersection = new Set([...tech1].filter((x) => tech2.has(x)));
const union = new Set([...tech1, ...tech2]);
return intersection.size / union.size; // Jaccard similarity
}
/**
* Get technologies connected to a project
*/
private getConnectedTechnologies(projectId: string): Set<string> {
const technologies = new Set<string>();
const adjacents = this.adjacencyList.get(projectId) || new Set();
for (const nodeId of adjacents) {
const node = this.nodes.get(nodeId);
if (node && node.type === "technology") {
technologies.add(nodeId);
}
}
return technologies;
}
  /**
   * Compute technology dependency relationships
   *
   * Seeds "depends_on" edges from a hard-coded table of well-known
   * runtime dependencies; edges are added only when both endpoints already
   * exist in the graph.
   */
  private async computeTechnologyDependencies(): Promise<void> {
    // Define known technology dependencies
    const dependencies = new Map([
      ["tech:react", ["tech:javascript", "tech:nodejs"]],
      ["tech:vue", ["tech:javascript", "tech:nodejs"]],
      ["tech:angular", ["tech:typescript", "tech:nodejs"]],
      ["tech:gatsby", ["tech:react", "tech:graphql"]],
      ["tech:next.js", ["tech:react", "tech:nodejs"]],
      ["tech:nuxt.js", ["tech:vue", "tech:nodejs"]],
      ["tech:docusaurus", ["tech:react", "tech:markdown"]],
      ["tech:jekyll", ["tech:ruby", "tech:markdown"]],
      ["tech:hugo", ["tech:go", "tech:markdown"]],
      ["tech:mkdocs", ["tech:python", "tech:markdown"]],
    ]);
    for (const [tech, deps] of dependencies) {
      for (const dep of deps) {
        const techNode = this.nodes.get(tech);
        const depNode = this.nodes.get(dep);
        // Never create nodes here — only connect ones already discovered.
        if (techNode && depNode) {
          this.addEdge({
            source: tech,
            target: dep,
            type: "depends_on",
            weight: 0.8,
            confidence: 0.9,
            properties: {
              computed: true,
              dependency_type: "runtime",
            },
          });
        }
      }
    }
  }
  /**
   * Compute pattern relationships from successful combinations
   *
   * For every successful deployment outcome, find the projects that led to
   * it and reinforce their project→SSG recommendation edges.
   */
  private async computePatternRelationships(): Promise<void> {
    const successfulOutcomes = Array.from(this.nodes.values()).filter(
      (node) => node.type === "outcome" && node.properties.status === "success",
    );
    for (const outcome of successfulOutcomes) {
      // Find the path that led to this successful outcome
      const incomingEdges = Array.from(this.edges.values()).filter(
        (edge) => edge.target === outcome.id,
      );
      for (const edge of incomingEdges) {
        const sourceNode = this.nodes.get(edge.source);
        if (sourceNode && sourceNode.type === "project") {
          // Strengthen relationships for successful patterns
          this.strengthenSuccessPattern(sourceNode.id, outcome.properties.ssg);
        }
      }
    }
  }
  /**
   * Strengthen relationships for successful patterns
   *
   * Mutates the existing "recommends" edge in place (id format must match
   * addEdge: `${source}-${type}-${target}`), capping weight at 2.0 and
   * confidence at 1.0. No-op when the edge does not exist.
   */
  private strengthenSuccessPattern(projectId: string, ssg: string): void {
    const ssgNodeId = `tech:${ssg}`;
    const edgeId = `${projectId}-recommends-${ssgNodeId}`;
    const edge = this.edges.get(edgeId);
    if (edge) {
      edge.weight = Math.min(edge.weight * 1.2, 2.0);
      edge.confidence = Math.min(edge.confidence * 1.1, 1.0);
    }
  }
/**
* Update node and edge weights based on usage patterns
*/
private async updateWeights(): Promise<void> {
// Update node weights based on connections
for (const node of this.nodes.values()) {
const connections = this.adjacencyList.get(node.id)?.size || 0;
node.weight = Math.log(connections + 1) / Math.log(10); // Logarithmic scaling
}
// Update edge weights based on frequency and success
for (const edge of this.edges.values()) {
if (edge.type === "recommends") {
// Find successful outcomes for this recommendation
const targetNode = this.nodes.get(edge.target);
if (targetNode && targetNode.type === "technology") {
const successRate = this.calculateSuccessRate(targetNode.id);
edge.weight *= 1 + successRate;
}
}
}
}
/**
* Calculate success rate for a technology
*/
private calculateSuccessRate(techId: string): number {
const tech = techId.replace("tech:", "");
const outcomes = Array.from(this.nodes.values()).filter(
(node) => node.type === "outcome" && node.properties.ssg === tech,
);
if (outcomes.length === 0) return 0;
const successes = outcomes.filter(
(node) => node.properties.status === "success",
).length;
return successes / outcomes.length;
}
/**
* Find the shortest path between two nodes
*/
findPath(
sourceId: string,
targetId: string,
maxDepth: number = 5,
): GraphPath | null {
const visited = new Set<string>();
const queue: { nodeId: string; path: GraphPath }[] = [
{
nodeId: sourceId,
path: {
nodes: [this.nodes.get(sourceId)!],
edges: [],
totalWeight: 0,
confidence: 1.0,
},
},
];
while (queue.length > 0) {
const current = queue.shift()!;
if (current.nodeId === targetId) {
return current.path;
}
if (current.path.nodes.length >= maxDepth) {
continue;
}
visited.add(current.nodeId);
const neighbors = this.adjacencyList.get(current.nodeId) || new Set();
for (const neighborId of neighbors) {
if (visited.has(neighborId)) continue;
const edge = this.findEdge(current.nodeId, neighborId);
const neighborNode = this.nodes.get(neighborId);
if (edge && neighborNode) {
const newPath: GraphPath = {
nodes: [...current.path.nodes, neighborNode],
edges: [...current.path.edges, edge],
totalWeight: current.path.totalWeight + edge.weight,
confidence: current.path.confidence * edge.confidence,
};
queue.push({ nodeId: neighborId, path: newPath });
}
}
}
return null;
}
/**
* Find edge between two nodes
*/
private findEdge(sourceId: string, targetId: string): GraphEdge | null {
for (const edge of this.edges.values()) {
if (edge.source === sourceId && edge.target === targetId) {
return edge;
}
}
return null;
}
/**
* Query the knowledge graph
*/
query(query: GraphQuery): {
nodes: GraphNode[];
edges: GraphEdge[];
paths?: GraphPath[];
} {
let nodes = Array.from(this.nodes.values());
let edges = Array.from(this.edges.values());
// Filter by node types
if (query.nodeTypes) {
nodes = nodes.filter((node) => query.nodeTypes!.includes(node.type));
}
// Filter by edge types
if (query.edgeTypes) {
edges = edges.filter((edge) => query.edgeTypes!.includes(edge.type));
}
// Filter by properties
if (query.properties) {
nodes = nodes.filter((node) =>
Object.entries(query.properties!).every(
([key, value]) => node.properties[key] === value,
),
);
}
// Filter by minimum weight
if (query.minWeight) {
nodes = nodes.filter((node) => node.weight >= query.minWeight!);
edges = edges.filter((edge) => edge.weight >= query.minWeight!);
}
const result = { nodes, edges };
// Find paths from start node if specified
if (query.startNode && query.maxDepth) {
const paths: GraphPath[] = [];
const visited = new Set<string>();
const emptyPath: GraphPath = {
nodes: [],
edges: [],
totalWeight: 0,
confidence: 1.0,
};
this.explorePaths(
query.startNode,
emptyPath,
paths,
visited,
query.maxDepth,
);
(result as any).paths = paths;
}
return result;
}
  /**
   * Explore all paths from a starting node (depth-first), appending every
   * discovered path to allPaths.
   *
   * NOTE(review): the caller (query) seeds this with an empty path, so the
   * start node itself is NOT included in emitted paths — findPathsRecursive
   * seeds the start node instead; confirm this asymmetry is intentional.
   *
   * @param nodeId - Node to expand from
   * @param currentPath - Path accumulated so far (not mutated)
   * @param allPaths - Output accumulator; every extension is pushed here
   * @param visited - Nodes on the current branch (cycle prevention)
   * @param maxDepth - Maximum number of nodes per path
   */
  private explorePaths(
    nodeId: string,
    currentPath: GraphPath,
    allPaths: GraphPath[],
    visited: Set<string>,
    maxDepth: number,
  ): void {
    // Depth limit counts nodes accumulated in the current path.
    if (currentPath.nodes.length >= maxDepth) return;
    visited.add(nodeId);
    const neighbors = this.adjacencyList.get(nodeId) || new Set();
    for (const neighborId of neighbors) {
      if (visited.has(neighborId)) continue;
      const edge = this.findEdge(nodeId, neighborId);
      const neighborNode = this.nodes.get(neighborId);
      if (edge && neighborNode) {
        // Extend the current path immutably with this neighbor; the `|| []`
        // fallbacks defend against callers passing partially-built paths.
        const newPath: GraphPath = {
          nodes: [...(currentPath.nodes || []), neighborNode],
          edges: [...(currentPath.edges || []), edge],
          totalWeight: (currentPath.totalWeight || 0) + edge.weight,
          confidence: (currentPath.confidence || 1.0) * edge.confidence,
        };
        allPaths.push(newPath);
        // Recurse with a COPY of `visited` so sibling branches may revisit
        // nodes used by this branch (cycle prevention is per-path only).
        this.explorePaths(
          neighborId,
          newPath,
          allPaths,
          new Set(visited),
          maxDepth,
        );
      }
    }
  }
/**
* Get enhanced recommendations using knowledge graph
*/
async getGraphBasedRecommendation(
projectFeatures: any,
candidateSSGs: string[],
): Promise<RecommendationPath[]> {
const recommendations: RecommendationPath[] = [];
// Create a temporary project node
const tempProjectId = `temp:${Date.now()}`;
const projectNode = this.addNode({
id: tempProjectId,
type: "project",
label: "Query Project",
properties: projectFeatures,
weight: 1.0,
});
for (const ssg of candidateSSGs) {
const ssgNodeId = `tech:${ssg}`;
const ssgNode = this.nodes.get(ssgNodeId);
if (ssgNode) {
// Find paths from similar projects to this SSG
const similarProjects = this.findSimilarProjects(projectFeatures);
for (const similarProject of similarProjects) {
const path = this.findPath(similarProject.id, ssgNodeId);
if (path) {
const reasoning = this.generateReasoning(path);
const confidence = this.calculatePathConfidence(
path,
projectFeatures,
);
recommendations.push({
from: projectNode,
to: ssgNode,
path,
reasoning,
confidence,
});
}
}
}
}
// Clean up temporary node
this.nodes.delete(tempProjectId);
return recommendations.sort((a, b) => b.confidence - a.confidence);
}
/**
* Find projects similar to given features
*/
private findSimilarProjects(features: any): GraphNode[] {
const projectNodes = Array.from(this.nodes.values()).filter(
(node) => node.type === "project",
);
return projectNodes
.map((project) => ({
project,
similarity: this.calculateFeatureSimilarity(
features,
project.properties,
),
}))
.filter(({ similarity }) => similarity > 0.6)
.sort((a, b) => b.similarity - a.similarity)
.slice(0, 5)
.map(({ project }) => project);
}
/**
* Calculate similarity between features and project properties
*/
private calculateFeatureSimilarity(features: any, properties: any): number {
let score = 0;
let factors = 0;
if (features.language === properties.language) {
score += 0.4;
}
factors++;
if (features.framework === properties.framework) {
score += 0.3;
}
factors++;
if (features.size === properties.size) {
score += 0.2;
}
factors++;
if (features.complexity === properties.complexity) {
score += 0.1;
}
factors++;
return factors > 0 ? score / factors : 0;
}
/**
* Generate human-readable reasoning for a recommendation path
*/
private generateReasoning(path: GraphPath): string[] {
const reasoning: string[] = [];
for (let i = 0; i < path.edges.length; i++) {
const edge = path.edges[i];
const sourceNode = path.nodes[i];
const targetNode = path.nodes[i + 1];
switch (edge.type) {
case "similar_to":
reasoning.push(
`Similar to ${sourceNode.label} (${(edge.confidence * 100).toFixed(
0,
)}% similarity)`,
);
break;
case "recommends":
reasoning.push(
`Successfully used ${
targetNode.label
} (score: ${edge.weight.toFixed(1)})`,
);
break;
case "results_in":
reasoning.push(
`Resulted in ${targetNode.properties.status} deployment`,
);
break;
case "uses":
reasoning.push(`Uses ${targetNode.label}`);
break;
}
}
return reasoning;
}
/**
* Calculate confidence for a recommendation path
*/
private calculatePathConfidence(path: GraphPath, _features: any): number {
let confidence = path.confidence;
// Boost confidence for shorter paths
confidence *= 1 / Math.max(path.edges.length, 1);
// Boost confidence for recent data
const avgAge =
path.nodes.reduce((sum, node) => {
const age = Date.now() - new Date(node.lastUpdated).getTime();
return sum + age;
}, 0) / path.nodes.length;
const daysSinceUpdate = avgAge / (1000 * 60 * 60 * 24);
confidence *= Math.exp(-daysSinceUpdate / 30); // Exponential decay over 30 days
return Math.min(confidence, 1.0);
}
/**
* Get technology popularity score
*/
private getTechnologyPopularity(tech: string): number {
// Simple popularity scoring - could be enhanced with real data
const popularityMap = new Map([
["javascript", 0.9],
["typescript", 0.8],
["python", 0.8],
["react", 0.9],
["vue", 0.7],
["angular", 0.6],
["go", 0.7],
["ruby", 0.5],
["rust", 0.6],
]);
return popularityMap.get(tech.toLowerCase()) || 0.3;
}
/**
* Save knowledge graph to persistent memory
*/
async saveToMemory(): Promise<void> {
const graphData = {
nodes: Array.from(this.nodes.entries()),
edges: Array.from(this.edges.entries()),
lastUpdate: this.lastUpdate,
statistics: this.getStatistics(),
};
await this.memoryManager.remember(
"interaction",
{
graph: graphData,
type: "knowledge_graph",
},
{
tags: ["knowledge_graph", "structure"],
},
);
}
/**
* Load knowledge graph from persistent memory
*/
async loadFromMemory(): Promise<void> {
try {
const graphMemories = await this.memoryManager.search("knowledge_graph");
if (graphMemories.length > 0) {
const latestGraph = graphMemories.sort(
(a, b) =>
new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime(),
)[0];
if (latestGraph.data.graph) {
const { nodes, edges } = latestGraph.data.graph;
// Restore nodes
for (const [id, node] of nodes) {
this.nodes.set(id, node);
}
// Restore edges and adjacency list
for (const [id, edge] of edges) {
this.edges.set(id, edge);
if (!this.adjacencyList.has(edge.source)) {
this.adjacencyList.set(edge.source, new Set());
}
this.adjacencyList.get(edge.source)!.add(edge.target);
}
this.lastUpdate = latestGraph.data.graph.lastUpdate;
}
}
} catch (error) {
console.error("Failed to load knowledge graph from memory:", error);
}
}
/**
* Get all nodes in the knowledge graph
*/
async getAllNodes(): Promise<GraphNode[]> {
return Array.from(this.nodes.values());
}
/**
* Get all edges in the knowledge graph
*/
async getAllEdges(): Promise<GraphEdge[]> {
return Array.from(this.edges.values());
}
/**
* Get a node by its ID
*/
async getNodeById(nodeId: string): Promise<GraphNode | null> {
return this.nodes.get(nodeId) || null;
}
/**
* Remove a node from the knowledge graph
*/
async removeNode(nodeId: string): Promise<boolean> {
const node = this.nodes.get(nodeId);
if (!node) {
return false;
}
// Remove the node
this.nodes.delete(nodeId);
// Remove all edges connected to this node
const edgesToRemove: string[] = [];
for (const [edgeId, edge] of this.edges) {
if (edge.source === nodeId || edge.target === nodeId) {
edgesToRemove.push(edgeId);
}
}
for (const edgeId of edgesToRemove) {
this.edges.delete(edgeId);
}
// Update adjacency list
this.adjacencyList.delete(nodeId);
for (const [, targets] of this.adjacencyList) {
targets.delete(nodeId);
}
return true;
}
/**
* Get connections for a specific node
*/
async getConnections(nodeId: string): Promise<string[]> {
const connections = this.adjacencyList.get(nodeId);
return connections ? Array.from(connections) : [];
}
/**
* Get knowledge graph statistics
*/
async getStatistics(): Promise<{
nodeCount: number;
edgeCount: number;
nodesByType: Record<string, number>;
edgesByType: Record<string, number>;
averageConnectivity: number;
mostConnectedNodes: Array<{ id: string; connections: number }>;
}> {
const nodesByType: Record<string, number> = {};
const edgesByType: Record<string, number> = {};
for (const node of this.nodes.values()) {
nodesByType[node.type] = (nodesByType[node.type] || 0) + 1;
}
for (const edge of this.edges.values()) {
edgesByType[edge.type] = (edgesByType[edge.type] || 0) + 1;
}
const connectivityCounts = Array.from(this.adjacencyList.entries())
.map(([id, connections]) => ({ id, connections: connections.size }))
.sort((a, b) => b.connections - a.connections);
const averageConnectivity =
connectivityCounts.length > 0
? connectivityCounts.reduce(
(sum, { connections }) => sum + connections,
0,
) / connectivityCounts.length
: 0;
return {
nodeCount: this.nodes.size,
edgeCount: this.edges.size,
nodesByType,
edgesByType,
averageConnectivity,
mostConnectedNodes: connectivityCounts.slice(0, 10),
};
}
// ============================================================================
// Phase 1.1: Enhanced Node Query Methods
// ============================================================================
/**
* Find a single node matching criteria
*/
async findNode(criteria: {
type?: string;
properties?: Record<string, any>;
}): Promise<GraphNode | null> {
for (const node of this.nodes.values()) {
if (criteria.type && node.type !== criteria.type) continue;
if (criteria.properties) {
let matches = true;
for (const [key, value] of Object.entries(criteria.properties)) {
if (node.properties[key] !== value) {
matches = false;
break;
}
}
if (!matches) continue;
}
return node;
}
return null;
}
/**
* Find all nodes matching criteria
*/
async findNodes(criteria: {
type?: string;
properties?: Record<string, any>;
}): Promise<GraphNode[]> {
const results: GraphNode[] = [];
for (const node of this.nodes.values()) {
if (criteria.type && node.type !== criteria.type) continue;
if (criteria.properties) {
let matches = true;
for (const [key, value] of Object.entries(criteria.properties)) {
if (node.properties[key] !== value) {
matches = false;
break;
}
}
if (!matches) continue;
}
results.push(node);
}
return results;
}
/**
* Find edges matching criteria
*/
async findEdges(criteria: {
source?: string;
target?: string;
type?: string;
properties?: Record<string, any>;
}): Promise<GraphEdge[]> {
const results: GraphEdge[] = [];
for (const edge of this.edges.values()) {
if (criteria.source && edge.source !== criteria.source) continue;
if (criteria.target && edge.target !== criteria.target) continue;
if (criteria.type && edge.type !== criteria.type) continue;
// Match properties if provided
if (criteria.properties) {
let propertiesMatch = true;
for (const [key, value] of Object.entries(criteria.properties)) {
if (edge.properties[key] !== value) {
propertiesMatch = false;
break;
}
}
if (!propertiesMatch) continue;
}
results.push(edge);
}
return results;
}
/**
* Find all paths between two nodes up to a maximum depth
*/
async findPaths(criteria: {
startNode: string;
endNode?: string;
edgeTypes?: string[];
maxDepth: number;
}): Promise<GraphPath[]> {
const paths: GraphPath[] = [];
const visited = new Set<string>();
const emptyPath: GraphPath = {
nodes: [this.nodes.get(criteria.startNode)!],
edges: [],
totalWeight: 0,
confidence: 1.0,
};
this.findPathsRecursive(
criteria.startNode,
emptyPath,
paths,
visited,
criteria.maxDepth,
criteria.endNode,
criteria.edgeTypes,
);
return paths;
}
  /**
   * Depth-first helper for findPaths, walking outward from currentNodeId.
   *
   * When endNode is given, only paths terminating at endNode are recorded;
   * otherwise every explored prefix path is recorded. edgeTypes, when
   * provided, restricts traversal to those edge types.
   *
   * @param currentNodeId - Node being expanded
   * @param currentPath - Path accumulated so far (not mutated)
   * @param allPaths - Output accumulator
   * @param visited - Nodes on the current branch (cycle prevention)
   * @param maxDepth - Maximum number of nodes per path
   */
  private findPathsRecursive(
    currentNodeId: string,
    currentPath: GraphPath,
    allPaths: GraphPath[],
    visited: Set<string>,
    maxDepth: number,
    endNode?: string,
    edgeTypes?: string[],
  ): void {
    // Depth limit counts nodes accumulated in the current path.
    if (currentPath.nodes.length >= maxDepth) return;
    visited.add(currentNodeId);
    const neighbors = this.adjacencyList.get(currentNodeId) || new Set();
    for (const neighborId of neighbors) {
      if (visited.has(neighborId)) continue;
      const edge = this.findEdge(currentNodeId, neighborId);
      if (!edge) continue;
      // Filter by edge type if specified
      if (edgeTypes && !edgeTypes.includes(edge.type)) continue;
      const neighborNode = this.nodes.get(neighborId);
      if (!neighborNode) continue;
      // Extend the current path immutably with this neighbor.
      const newPath: GraphPath = {
        nodes: [...currentPath.nodes, neighborNode],
        edges: [...currentPath.edges, edge],
        totalWeight: currentPath.totalWeight + edge.weight,
        confidence: currentPath.confidence * edge.confidence,
      };
      // If we've reached the end node, add this path
      if (endNode && neighborId === endNode) {
        allPaths.push(newPath);
        continue;
      }
      // If no end node specified, add all paths
      if (!endNode) {
        allPaths.push(newPath);
      }
      // Continue exploring with a COPY of `visited` so each branch tracks
      // its own cycle state independently.
      this.findPathsRecursive(
        neighborId,
        newPath,
        allPaths,
        new Set(visited),
        maxDepth,
        endNode,
        edgeTypes,
      );
    }
  }
/**
* Get node history (all changes to a node over time)
*/
async getNodeHistory(nodeId: string): Promise<MemoryEntry[]> {
const node = this.nodes.get(nodeId);
if (!node) return [];
// Search memory for all entries related to this node
const projectId = node.properties.projectId || node.properties.path;
if (!projectId) return [];
return await this.memoryManager.search(projectId);
}
/**
* Get schema version
*/
getSchemaVersion(): string {
return SCHEMA_METADATA.version;
}
/**
* Validate node against schema
*/
validateNode(node: GraphNode): boolean {
try {
const entityData = {
...node.properties,
type: node.type,
};
validateEntity(entityData);
return true;
} catch (error) {
console.error(`Node validation failed for ${node.id}:`, error);
return false;
}
}
/**
* Validate edge against schema
*/
validateEdge(edge: GraphEdge): boolean {
try {
const relationshipData = {
type: edge.type,
weight: edge.weight,
confidence: edge.confidence,
createdAt: edge.lastUpdated,
lastUpdated: edge.lastUpdated,
metadata: edge.properties,
...edge.properties,
};
validateRelationship(relationshipData);
return true;
} catch (error) {
console.error(`Edge validation failed for ${edge.id}:`, error);
return false;
}
}
}
export default KnowledgeGraph;
```
--------------------------------------------------------------------------------
/tests/utils/drift-detector.test.ts:
--------------------------------------------------------------------------------
```typescript
/**
* Drift Detector Tests (Phase 3)
*/
import {
DriftDetector,
DriftDetectionResult,
} from "../../src/utils/drift-detector.js";
import { promises as fs } from "fs";
import { tmpdir } from "os";
import { join } from "path";
import { mkdtemp, rm } from "fs/promises";
describe("DriftDetector", () => {
let detector: DriftDetector;
let tempDir: string;
let projectPath: string;
let docsPath: string;
beforeAll(async () => {
tempDir = await mkdtemp(join(tmpdir(), "drift-test-"));
projectPath = join(tempDir, "project");
docsPath = join(tempDir, "docs");
await fs.mkdir(projectPath, { recursive: true });
await fs.mkdir(join(projectPath, "src"), { recursive: true });
await fs.mkdir(docsPath, { recursive: true });
detector = new DriftDetector(tempDir);
await detector.initialize();
});
afterAll(async () => {
await rm(tempDir, { recursive: true, force: true });
});
  // Covers snapshot capture (code + docs), persistence to disk, and reload.
  describe("Snapshot Creation", () => {
    test("should create snapshot of codebase and documentation", async () => {
      // Create sample source file
      const sourceCode = `
export function calculateSum(a: number, b: number): number {
return a + b;
}
`.trim();
      await fs.writeFile(join(projectPath, "src", "math.ts"), sourceCode);
      // Create sample documentation
      const docContent = `
# Math Module
## calculateSum
Adds two numbers together.
\`\`\`typescript
calculateSum(a: number, b: number): number
\`\`\`
`.trim();
      await fs.writeFile(join(docsPath, "math.md"), docContent);
      const snapshot = await detector.createSnapshot(projectPath, docsPath);
      expect(snapshot).toBeDefined();
      expect(snapshot.projectPath).toBe(projectPath);
      expect(snapshot.timestamp).toBeTruthy();
      // Both the source tree and the docs tree should have been captured.
      expect(snapshot.files.size).toBeGreaterThan(0);
      expect(snapshot.documentation.size).toBeGreaterThan(0);
    });
    test("should store snapshot to disk", async () => {
      const sourceCode = `export function test(): void {}`;
      await fs.writeFile(join(projectPath, "src", "test.ts"), sourceCode);
      const snapshot = await detector.createSnapshot(projectPath, docsPath);
      // Check that snapshot directory was created
      const snapshotDir = join(tempDir, ".documcp", "snapshots");
      const files = await fs.readdir(snapshotDir);
      expect(files.length).toBeGreaterThan(0);
      expect(files.some((f) => f.startsWith("snapshot-"))).toBe(true);
    });
    test("should load latest snapshot", async () => {
      const sourceCode = `export function loadTest(): void {}`;
      await fs.writeFile(join(projectPath, "src", "load-test.ts"), sourceCode);
      await detector.createSnapshot(projectPath, docsPath);
      const loaded = await detector.loadLatestSnapshot();
      expect(loaded).toBeDefined();
      expect(loaded?.projectPath).toBe(projectPath);
    });
  });
  // Core drift scenarios between two snapshots: signature changes,
  // removals, additions, and severity classification.
  describe("Drift Detection", () => {
    test("should detect when function signature changes", async () => {
      // Create initial version
      const oldCode = `
export function processData(data: string): void {
console.log(data);
}
`.trim();
      await fs.writeFile(join(projectPath, "src", "processor.ts"), oldCode);
      const oldDoc = `
# Processor
## processData(data: string): void
Processes string data.
`.trim();
      await fs.writeFile(join(docsPath, "processor.md"), oldDoc);
      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
      // Modify function signature
      const newCode = `
export function processData(data: string, options: object): Promise<string> {
console.log(data, options);
return Promise.resolve("done");
}
`.trim();
      await fs.writeFile(join(projectPath, "src", "processor.ts"), newCode);
      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
      expect(drifts.length).toBeGreaterThan(0);
      const processorDrift = drifts.find((d) =>
        d.filePath.includes("processor.ts"),
      );
      expect(processorDrift).toBeDefined();
      expect(processorDrift?.hasDrift).toBe(true);
      expect(processorDrift?.drifts.length).toBeGreaterThan(0);
    });
    test("should detect when functions are removed", async () => {
      // Initial code with two functions
      const oldCode = `
export function keepMe(): void {}
export function removeMe(): void {}
`.trim();
      await fs.writeFile(join(projectPath, "src", "removal.ts"), oldCode);
      const oldDoc = `
# Functions
## keepMe
## removeMe
`.trim();
      await fs.writeFile(join(docsPath, "removal.md"), oldDoc);
      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
      // Remove one function
      const newCode = `
export function keepMe(): void {}
`.trim();
      await fs.writeFile(join(projectPath, "src", "removal.ts"), newCode);
      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
      const removalDrift = drifts.find((d) =>
        d.filePath.includes("removal.ts"),
      );
      expect(removalDrift).toBeDefined();
      // Removing an exported function should be flagged as breaking.
      expect(
        removalDrift?.drifts.some((drift) => drift.type === "breaking"),
      ).toBe(true);
    });
    test("should detect when new functions are added", async () => {
      const oldCode = `
export function existing(): void {}
`.trim();
      await fs.writeFile(join(projectPath, "src", "addition.ts"), oldCode);
      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
      const newCode = `
export function existing(): void {}
export function newFunction(): void {}
`.trim();
      await fs.writeFile(join(projectPath, "src", "addition.ts"), newCode);
      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
      const additionDrift = drifts.find((d) =>
        d.filePath.includes("addition.ts"),
      );
      expect(additionDrift).toBeDefined();
      // New undocumented functions surface as "missing" documentation drift.
      expect(
        additionDrift?.drifts.some((drift) => drift.type === "missing"),
      ).toBe(true);
    });
    test("should classify drift severity correctly", async () => {
      // Breaking change
      const oldCode = `
export function criticalFunction(param: string): void {}
`.trim();
      await fs.writeFile(join(projectPath, "src", "severity.ts"), oldCode);
      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
      // Remove exported function - breaking change
      const newCode = `
function criticalFunction(param: string): void {}
`.trim();
      await fs.writeFile(join(projectPath, "src", "severity.ts"), newCode);
      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
      const severityDrift = drifts.find((d) =>
        d.filePath.includes("severity.ts"),
      );
      expect(severityDrift).toBeDefined();
      expect(severityDrift?.severity).toBe("critical"); // Removing export is breaking
    });
  });
  // Verifies that drift results carry actionable documentation-update
  // suggestions with confidence scores and auto-apply flags.
  describe("Suggestion Generation", () => {
    test("should generate suggestions for outdated documentation", async () => {
      const oldCode = `
export function calculate(x: number): number {
return x * 2;
}
`.trim();
      await fs.writeFile(join(projectPath, "src", "calc.ts"), oldCode);
      const oldDoc = `
# Calculator
## calculate(x: number): number
Doubles the input.
`.trim();
      await fs.writeFile(join(docsPath, "calc.md"), oldDoc);
      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
      // Change function signature
      const newCode = `
export function calculate(x: number, y: number): number {
return x * y;
}
`.trim();
      await fs.writeFile(join(projectPath, "src", "calc.ts"), newCode);
      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
      const calcDrift = drifts.find((d) => d.filePath.includes("calc.ts"));
      expect(calcDrift).toBeDefined();
      expect(calcDrift?.suggestions.length).toBeGreaterThan(0);
      const suggestion = calcDrift?.suggestions[0];
      expect(suggestion).toBeDefined();
      expect(suggestion?.suggestedContent).toBeTruthy();
      expect(suggestion?.confidence).toBeGreaterThan(0);
    });
    test("should provide auto-applicable flag for safe changes", async () => {
      const oldCode = `
export function simpleChange(a: number): number {
return a;
}
`.trim();
      await fs.writeFile(join(projectPath, "src", "simple.ts"), oldCode);
      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
      // Minor change
      const newCode = `
export function simpleChange(a: number): number {
return a * 2;
}
`.trim();
      await fs.writeFile(join(projectPath, "src", "simple.ts"), newCode);
      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
      // Minor internal changes shouldn't require doc updates if signature is same
      const simpleDrift = drifts.find((d) => d.filePath.includes("simple.ts"));
      if (simpleDrift && simpleDrift.suggestions.length > 0) {
        const suggestion = simpleDrift.suggestions[0];
        expect(typeof suggestion.autoApplicable).toBe("boolean");
      }
    });
  });
  // Impact analysis: breaking-change counts, effort estimates, and the set
  // of documentation files affected by a code change.
  describe("Impact Analysis", () => {
    test("should analyze impact of changes", async () => {
      const oldCode = `
export function breaking(): void {}
export function major(): void {}
`.trim();
      await fs.writeFile(join(projectPath, "src", "impact.ts"), oldCode);
      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
      // Breaking change - remove function
      const newCode = `
export function major(): void {}
`.trim();
      await fs.writeFile(join(projectPath, "src", "impact.ts"), newCode);
      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
      const impactDrift = drifts.find((d) => d.filePath.includes("impact.ts"));
      expect(impactDrift?.impactAnalysis).toBeDefined();
      expect(impactDrift?.impactAnalysis.breakingChanges).toBeGreaterThan(0);
      expect(impactDrift?.impactAnalysis.estimatedUpdateEffort).toBeDefined();
    });
    test("should identify affected documentation files", async () => {
      const code = `
export function documented(): void {}
`.trim();
      await fs.writeFile(join(projectPath, "src", "documented.ts"), code);
      const doc = `
# Documentation
\`documented()\` is a function.
`.trim();
      await fs.writeFile(join(docsPath, "documented.md"), doc);
      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
      // Change the function
      const newCode = `
export function documented(param: string): void {}
`.trim();
      await fs.writeFile(join(projectPath, "src", "documented.ts"), newCode);
      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
      const docDrift = drifts.find((d) => d.filePath.includes("documented.ts"));
      // The markdown file referencing the changed function should be listed.
      expect(docDrift?.impactAnalysis.affectedDocFiles.length).toBeGreaterThan(
        0,
      );
    });
  });
  // Boundary conditions: identical snapshots, missing docs, brand-new files.
  describe("Edge Cases", () => {
    test("should handle no drift scenario", async () => {
      const code = `
export function unchangedFunction(): void {}
`.trim();
      await fs.writeFile(join(projectPath, "src", "unchanged.ts"), code);
      const snapshot1 = await detector.createSnapshot(projectPath, docsPath);
      const snapshot2 = await detector.createSnapshot(projectPath, docsPath);
      const drifts = await detector.detectDrift(snapshot1, snapshot2);
      // No changes should mean no drifts
      const unchangedDrift = drifts.find((d) =>
        d.filePath.includes("unchanged.ts"),
      );
      if (unchangedDrift) {
        expect(unchangedDrift.hasDrift).toBe(false);
      }
    });
    test("should handle missing documentation gracefully", async () => {
      const code = `
export function undocumentedFunction(): void {}
`.trim();
      await fs.writeFile(join(projectPath, "src", "undocumented.ts"), code);
      // Don't create documentation
      const snapshot = await detector.createSnapshot(projectPath, docsPath);
      expect(snapshot).toBeDefined();
      expect(snapshot.documentation.size).toBeGreaterThanOrEqual(0);
    });
    test("should handle new files correctly", async () => {
      const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
      // Add new file
      const newCode = `
export function brandNew(): void {}
`.trim();
      await fs.writeFile(join(projectPath, "src", "brand-new.ts"), newCode);
      const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
      const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
      // New files might not show as drift if they have no corresponding docs
      expect(Array.isArray(drifts)).toBe(true);
    });
  });
  // Parsing of markdown documentation into sections, code examples, and
  // referenced code symbols.
  describe("Documentation Section Extraction", () => {
    test("should extract documentation sections", async () => {
      const doc = `
# Main Title
This is the introduction.
## Section 1
Content for section 1.
\`\`\`typescript
function example(): void {}
\`\`\`
## Section 2
Content for section 2.
`.trim();
      await fs.writeFile(join(docsPath, "sections.md"), doc);
      const snapshot = await detector.createSnapshot(projectPath, docsPath);
      const docSnapshot = snapshot.documentation.get(
        join(docsPath, "sections.md"),
      );
      expect(docSnapshot).toBeDefined();
      expect(docSnapshot?.sections.length).toBeGreaterThan(0);
      const section1 = docSnapshot?.sections.find(
        (s) => s.title === "Section 1",
      );
      expect(section1).toBeDefined();
      // Fenced code blocks inside a section are captured as code examples.
      expect(section1?.codeExamples.length).toBeGreaterThan(0);
    });
    test("should extract code references from documentation", async () => {
      const doc = `
# API Reference
See \`calculateSum()\` for details.
The function is in \`src/math.ts\`.
Check out the \`MathUtils\` class.
`.trim();
      await fs.writeFile(join(docsPath, "references.md"), doc);
      const snapshot = await detector.createSnapshot(projectPath, docsPath);
      const docSnapshot = snapshot.documentation.get(
        join(docsPath, "references.md"),
      );
      expect(docSnapshot).toBeDefined();
      const section = docSnapshot?.sections[0];
      // Inline code like \`calculateSum()\` should be detected as a reference.
      expect(section?.referencedFunctions.length).toBeGreaterThan(0);
    });
  });
// Exercises the suggestion-generation helpers indirectly through detectDrift:
// removals should yield deprecation notes, additions should be flagged as
// missing docs, modifications should produce updated content, and the
// autoApplicable flag should be set for safe changes.
describe("Suggestion Generation Helper Methods", () => {
test("should generate removal suggestion with deprecation notice", async () => {
// Start with a documented, exported function...
const oldCode = `
export function deprecatedFunc(x: number): number {
return x;
}
`.trim();
await fs.writeFile(join(projectPath, "src", "deprecated.ts"), oldCode);
const oldDoc = `
# API
## deprecatedFunc(x: number): number
This function does something.
`.trim();
await fs.writeFile(join(docsPath, "deprecated.md"), oldDoc);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Remove the function
const newCode = `// Function removed`;
await fs.writeFile(join(projectPath, "src", "deprecated.ts"), newCode);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
const deprecatedDrift = drifts.find((d) =>
d.filePath.includes("deprecated.ts"),
);
expect(deprecatedDrift).toBeDefined();
expect(deprecatedDrift?.suggestions.length).toBeGreaterThan(0);
// The suggestion text should explicitly note the removal.
const suggestion = deprecatedDrift?.suggestions[0];
expect(suggestion?.suggestedContent).toContain("removed");
expect(suggestion?.suggestedContent).toContain("Note");
});
test("should generate addition suggestion with code signature", async () => {
const oldCode = `export function existing(): void {}`;
await fs.writeFile(join(projectPath, "src", "additions.ts"), oldCode);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Add new function
const newCode = `
export function existing(): void {}
export function newAddedFunc(a: number, b: string): boolean {
return true;
}
`.trim();
await fs.writeFile(join(projectPath, "src", "additions.ts"), newCode);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
const additionDrift = drifts.find((d) =>
d.filePath.includes("additions.ts"),
);
// An added-but-undocumented function should be classified as "missing".
expect(additionDrift?.drifts.some((d) => d.type === "missing")).toBe(
true,
);
});
test("should generate modification suggestion with signature update", async () => {
const oldCode = `
export function modifyMe(x: number): number {
return x;
}
`.trim();
await fs.writeFile(join(projectPath, "src", "modify.ts"), oldCode);
const oldDoc = `
# API
## modifyMe(x: number): number
Returns the input number.
`.trim();
await fs.writeFile(join(docsPath, "modify.md"), oldDoc);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Modify the function signature
const newCode = `
export function modifyMe(x: number, y: number): number {
return x + y;
}
`.trim();
await fs.writeFile(join(projectPath, "src", "modify.ts"), newCode);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
const modifyDrift = drifts.find((d) => d.filePath.includes("modify.ts"));
expect(modifyDrift).toBeDefined();
expect(modifyDrift?.suggestions.length).toBeGreaterThan(0);
// Only asserts that some replacement content was produced; the exact
// wording is not pinned here.
const suggestion = modifyDrift?.suggestions[0];
expect(suggestion?.suggestedContent).toBeTruthy();
});
test("should set auto-applicable flag correctly for safe changes", async () => {
const oldCode = `
export function safeChange(x: number): number {
return x;
}
`.trim();
await fs.writeFile(join(projectPath, "src", "safe.ts"), oldCode);
const oldDoc = `
# API
## safeChange(x: number): number
`.trim();
await fs.writeFile(join(docsPath, "safe.md"), oldDoc);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Internal implementation change (patch level)
const newCode = `
export function safeChange(x: number): number {
return x * 2; // Changed implementation but not signature
}
`.trim();
await fs.writeFile(join(projectPath, "src", "safe.ts"), newCode);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
// If there are any drifts, check their suggestions
// (a body-only change may legitimately produce no drift at all).
if (drifts.length > 0) {
const safeDrift = drifts.find((d) => d.filePath.includes("safe.ts"));
if (safeDrift && safeDrift.suggestions.length > 0) {
const suggestion = safeDrift.suggestions[0];
expect(typeof suggestion.autoApplicable).toBe("boolean");
}
}
});
});
// Covers the snapshot-comparison helpers: matching affected doc sections to
// changed symbols, classifying drift types, mapping impact to severity,
// estimating update effort, and fan-out to multiple referencing docs.
describe("Comparison Helper Methods", () => {
test("should correctly identify affected sections by function name", async () => {
const code = `
export function targetFunc(): void {}
export function otherFunc(): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "target.ts"), code);
// Docs reference targetFunc both inline and as a section heading.
const doc = `
# API
See \`targetFunc()\` for details.
## targetFunc
This documents the target function.
## otherFunc
This documents another function.
`.trim();
await fs.writeFile(join(docsPath, "target.md"), doc);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Modify only targetFunc
const newCode = `
export function targetFunc(param: string): void {}
export function otherFunc(): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "target.ts"), newCode);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
const targetDrift = drifts.find((d) => d.filePath.includes("target.ts"));
expect(targetDrift).toBeDefined();
// Drift was detected, and impact analysis was performed
expect(targetDrift?.impactAnalysis).toBeDefined();
expect(
targetDrift?.impactAnalysis.affectedDocFiles.length,
).toBeGreaterThanOrEqual(0);
});
test("should correctly classify drift types", async () => {
const oldCode = `
export function removedFunc(): void {}
export function modifiedFunc(): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "classify.ts"), oldCode);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Remove one function, keep the other unchanged
const newCode = `
export function modifiedFunc(): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "classify.ts"), newCode);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
const classifyDrift = drifts.find((d) =>
d.filePath.includes("classify.ts"),
);
expect(classifyDrift).toBeDefined();
expect(classifyDrift?.drifts.length).toBeGreaterThan(0);
// Verify drift types are correctly classified
const driftTypes = classifyDrift?.drifts.map((d) => d.type) || [];
expect(driftTypes.length).toBeGreaterThan(0);
// Should have breaking or incorrect drift for removed function
const hasRemovalDrift = classifyDrift?.drifts.some(
(d) => d.type === "breaking" || d.type === "incorrect",
);
expect(hasRemovalDrift).toBe(true);
});
test("should map impact levels to severity correctly", async () => {
const oldCode = `
export function critical(): void {}
export function major(): void {}
export function minor(): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "severity-map.ts"), oldCode);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Breaking change
const newCode = `
export function major(): void {}
export function minor(): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "severity-map.ts"), newCode);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
const severityDrift = drifts.find((d) =>
d.filePath.includes("severity-map.ts"),
);
expect(severityDrift).toBeDefined();
// Removing an exported function is expected to map to "critical".
expect(severityDrift?.severity).toBe("critical");
});
test("should estimate update effort based on drift count", async () => {
const oldCode = `
export function func1(): void {}
export function func2(): void {}
export function func3(): void {}
export function func4(): void {}
export function func5(): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "effort.ts"), oldCode);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Remove multiple functions - high effort
const newCode = `
export function func5(): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "effort.ts"), newCode);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
const effortDrift = drifts.find((d) => d.filePath.includes("effort.ts"));
expect(effortDrift).toBeDefined();
expect(effortDrift?.impactAnalysis.estimatedUpdateEffort).toBeDefined();
// Effort is an enum; only membership is asserted, not the exact level.
expect(
["low", "medium", "high"].includes(
effortDrift!.impactAnalysis.estimatedUpdateEffort,
),
).toBe(true);
});
test("should calculate overall severity from multiple drifts", async () => {
const oldCode = `
export function criticalChange(): void {}
export function minorChange(): void {}
`.trim();
await fs.writeFile(
join(projectPath, "src", "overall-severity.ts"),
oldCode,
);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Breaking change dominates
const newCode = `
export function minorChange(x: number): void {}
`.trim();
await fs.writeFile(
join(projectPath, "src", "overall-severity.ts"),
newCode,
);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
const overallDrift = drifts.find((d) =>
d.filePath.includes("overall-severity.ts"),
);
expect(overallDrift).toBeDefined();
// Overall severity must be one of the known levels.
expect(
["none", "low", "medium", "high", "critical"].includes(
overallDrift!.severity,
),
).toBe(true);
});
test("should handle multiple documentation files referencing same code", async () => {
const code = `
export function sharedFunc(): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "shared.ts"), code);
// Two separate docs both reference the same function.
const doc1 = `
# Guide 1
See \`sharedFunc()\` for details.
`.trim();
const doc2 = `
# Guide 2
Also uses \`sharedFunc()\`.
`.trim();
await fs.writeFile(join(docsPath, "guide1.md"), doc1);
await fs.writeFile(join(docsPath, "guide2.md"), doc2);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Change the shared function
const newCode = `
export function sharedFunc(param: string): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "shared.ts"), newCode);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
const sharedDrift = drifts.find((d) => d.filePath.includes("shared.ts"));
expect(sharedDrift).toBeDefined();
// Should affect both documentation files
expect(
sharedDrift?.impactAnalysis.affectedDocFiles.length,
).toBeGreaterThanOrEqual(1);
});
});
// Covers suggestion generation across declaration kinds — functions, classes,
// interfaces, and type aliases — and docs that reference each of them.
describe("Advanced Suggestion Generation", () => {
test("should generate suggestions for added functions with signatures", async () => {
const oldCode = `export function existing(): void {}`;
await fs.writeFile(
join(projectPath, "src", "added-with-sig.ts"),
oldCode,
);
const oldDoc = `
# API
## existing
Existing function documentation.
`.trim();
await fs.writeFile(join(docsPath, "added-with-sig.md"), oldDoc);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Add new function with signature
const newCode = `
export function existing(): void {}
export async function newFunction(param: string, count: number): Promise<boolean> {
return true;
}
`.trim();
await fs.writeFile(
join(projectPath, "src", "added-with-sig.ts"),
newCode,
);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
const addedDrift = drifts.find((d) =>
d.filePath.includes("added-with-sig.ts"),
);
expect(addedDrift).toBeDefined();
expect(addedDrift?.drifts.some((d) => d.type === "missing")).toBe(true);
// Should detect the added function
const hasAddedFunction = addedDrift?.drifts.some((d) =>
d.codeChanges.some((c) => c.name === "newFunction"),
);
expect(hasAddedFunction).toBe(true);
});
test("should handle class changes in suggestions", async () => {
const oldCode = `
export class OldClass {
method(): void {}
}
`.trim();
await fs.writeFile(join(projectPath, "src", "class-change.ts"), oldCode);
const oldDoc = `
# Classes
## OldClass
Documentation for OldClass.
`.trim();
await fs.writeFile(join(docsPath, "class-change.md"), oldDoc);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Modify class
const newCode = `
export class OldClass {
method(): void {}
newMethod(): void {}
}
`.trim();
await fs.writeFile(join(projectPath, "src", "class-change.ts"), newCode);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
// Smoke assertion only: detection must not throw on class changes.
expect(drifts.length).toBeGreaterThanOrEqual(0);
});
test("should handle interface changes in suggestions", async () => {
const oldCode = `
export interface UserInterface {
id: string;
}
`.trim();
await fs.writeFile(
join(projectPath, "src", "interface-change.ts"),
oldCode,
);
const oldDoc = `
# Interfaces
## UserInterface
The UserInterface interface.
`.trim();
await fs.writeFile(join(docsPath, "interface-change.md"), oldDoc);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Modify interface
const newCode = `
export interface UserInterface {
id: string;
name: string;
}
`.trim();
await fs.writeFile(
join(projectPath, "src", "interface-change.ts"),
newCode,
);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
// Smoke assertion only: detection must not throw on interface changes.
expect(drifts.length).toBeGreaterThanOrEqual(0);
});
test("should handle type alias changes in suggestions", async () => {
const oldCode = `
export type Status = "active" | "inactive";
`.trim();
await fs.writeFile(join(projectPath, "src", "type-change.ts"), oldCode);
const oldDoc = `
# Types
## Status
The Status type.
`.trim();
await fs.writeFile(join(docsPath, "type-change.md"), oldDoc);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Modify type
const newCode = `
export type Status = "active" | "inactive" | "pending";
`.trim();
await fs.writeFile(join(projectPath, "src", "type-change.ts"), newCode);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
// Smoke assertion only: detection must not throw on type-alias changes.
expect(drifts.length).toBeGreaterThanOrEqual(0);
});
test("should detect documentation referencing classes", async () => {
const code = `
export class DocumentedClass {
public property: string;
constructor(prop: string) {
this.property = prop;
}
}
`.trim();
await fs.writeFile(join(projectPath, "src", "doc-class.ts"), code);
const doc = `
# Classes
See the \`DocumentedClass\` for details.
## DocumentedClass
This class does something important.
`.trim();
await fs.writeFile(join(docsPath, "doc-class.md"), doc);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Modify class
const newCode = `
export class DocumentedClass {
public property: string;
public newProperty: number;
constructor(prop: string, num: number) {
this.property = prop;
this.newProperty = num;
}
}
`.trim();
await fs.writeFile(join(projectPath, "src", "doc-class.ts"), newCode);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
const classDrift = drifts.find((d) =>
d.filePath.includes("doc-class.ts"),
);
// Check that affected docs were identified
// (conditional because drift reporting for class changes may vary).
if (classDrift && classDrift.hasDrift) {
expect(classDrift.impactAnalysis).toBeDefined();
}
});
test("should detect documentation referencing types", async () => {
const code = `
export type ConfigType = {
apiKey: string;
timeout: number;
};
`.trim();
await fs.writeFile(join(projectPath, "src", "doc-type.ts"), code);
const doc = `
# Configuration
The \`ConfigType\` defines configuration options.
`.trim();
await fs.writeFile(join(docsPath, "doc-type.md"), doc);
const oldSnapshot = await detector.createSnapshot(projectPath, docsPath);
// Modify type
const newCode = `
export type ConfigType = {
apiKey: string;
timeout: number;
retries: number;
};
`.trim();
await fs.writeFile(join(projectPath, "src", "doc-type.ts"), newCode);
const newSnapshot = await detector.createSnapshot(projectPath, docsPath);
const drifts = await detector.detectDrift(oldSnapshot, newSnapshot);
const typeDrift = drifts.find((d) => d.filePath.includes("doc-type.ts"));
// Only assert impact analysis when drift was actually reported.
if (typeDrift && typeDrift.hasDrift) {
expect(typeDrift.impactAnalysis).toBeDefined();
}
});
});
});
```
--------------------------------------------------------------------------------
/tests/tools/evaluate-readme-health.test.ts:
--------------------------------------------------------------------------------
```typescript
import { evaluateReadmeHealth } from "../../src/tools/evaluate-readme-health.js";
import { writeFile, mkdir, rm } from "fs/promises";
import { join } from "path";
describe("evaluateReadmeHealth", () => {
// Scratch directory for README fixtures, created fresh for every test and
// removed afterwards so tests cannot leak state into each other.
const testDir = join(process.cwd(), "test-readme-temp");
beforeEach(async () => {
// Create test directory
await mkdir(testDir, { recursive: true });
});
afterEach(async () => {
// Clean up test directory
try {
await rm(testDir, { recursive: true, force: true });
} catch (error) {
// Ignore cleanup errors
}
});
// Happy-path coverage: a reasonable README evaluates without error across
// default options, explicit project types, and repository context.
describe("Basic Functionality", () => {
test("should evaluate README health with default parameters", async () => {
const readmePath = join(testDir, "README.md");
// A README with the common sections (description, install, usage,
// contributing, license) so evaluation has material to score.
await writeFile(
readmePath,
`# Test Project
## Description
This is a test project for evaluating README health.
## Installation
\`\`\`bash
npm install test-project
\`\`\`
## Usage
\`\`\`javascript
const test = require('test-project');
\`\`\`
## Contributing
Please read CONTRIBUTING.md for details.
## License
MIT License
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
expect(result.content).toBeDefined();
expect(result.content.length).toBeGreaterThan(0);
expect(result.isError).toBe(false);
// Check that it contains health report data
const healthData = result.content.find((c) =>
c.text.includes("healthReport"),
);
expect(healthData).toBeDefined();
});
test("should handle different project types", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(
readmePath,
"# Enterprise Tool\n\nA professional enterprise tool.",
);
// Explicit project_type should be accepted without error.
const result = await evaluateReadmeHealth({
readme_path: readmePath,
project_type: "enterprise_tool",
});
expect(result.content).toBeDefined();
expect(result.isError).toBe(false);
});
test("should include repository context when provided", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(readmePath, "# Project with Repo Context");
// Create a simple repository structure
await writeFile(join(testDir, "package.json"), '{"name": "test"}');
const result = await evaluateReadmeHealth({
readme_path: readmePath,
repository_path: testDir,
});
expect(result.content).toBeDefined();
expect(result.isError).toBe(false);
});
});
// Failure paths: a missing README file and a schema-invalid project type
// must both surface as error responses rather than throwing.
describe("Error Handling", () => {
  test("should handle missing README file", async () => {
    // Point the tool at a path that was never created.
    const missingReadme = join(testDir, "nonexistent.md");
    const outcome = await evaluateReadmeHealth({ readme_path: missingReadme });
    expect(outcome.isError).toBe(true);
    expect(outcome.content[0].text).toContain(
      "Failed to evaluate README health",
    );
  });

  test("should handle invalid project type", async () => {
    const readmeFile = join(testDir, "README.md");
    await writeFile(readmeFile, "# Test");
    // An unknown enum value should be rejected by input validation.
    const outcome = await evaluateReadmeHealth({
      readme_path: readmeFile,
      project_type: "invalid_type" as any,
    });
    expect(outcome.isError).toBe(true);
  });
});
// Asserts the shape of the returned health report: the four scored
// components, a 0-100 overall score with a letter grade, and the
// recommendations / nextSteps arrays.
describe("Health Report Structure", () => {
test("should include all required health components", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(
readmePath,
`# Complete Project
## Table of Contents
- [Installation](#installation)
- [Usage](#usage)
## Description
Comprehensive project description here.
## Installation
Installation instructions.
## Usage
Usage examples.
## Contributing
How to contribute.
## License
MIT
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
// The report is embedded as JSON inside one of the content entries.
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
expect(dataContent).toBeDefined();
const data = JSON.parse(dataContent!.text);
expect(data.healthReport).toBeDefined();
expect(data.healthReport.components).toBeDefined();
expect(data.healthReport.components.communityHealth).toBeDefined();
expect(data.healthReport.components.accessibility).toBeDefined();
expect(data.healthReport.components.onboarding).toBeDefined();
expect(data.healthReport.components.contentQuality).toBeDefined();
});
test("should provide grade and score", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(readmePath, "# Basic Project\n\nMinimal content.");
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
// Score is clamped to 0-100 and grade is a letter.
expect(data.healthReport.overallScore).toBeGreaterThanOrEqual(0);
expect(data.healthReport.overallScore).toBeLessThanOrEqual(100);
expect(["A", "B", "C", "D", "F"]).toContain(data.healthReport.grade);
});
test("should include recommendations and next steps", async () => {
const readmePath = join(testDir, "README.md");
// A deliberately sparse README so recommendations are generated.
await writeFile(readmePath, "# Incomplete Project");
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("recommendations"),
);
expect(dataContent).toBeDefined();
const data = JSON.parse(dataContent!.text);
expect(data.healthReport.recommendations).toBeDefined();
expect(Array.isArray(data.healthReport.recommendations)).toBe(true);
expect(data.nextSteps).toBeDefined();
expect(Array.isArray(data.nextSteps)).toBe(true);
});
});
// Verifies the MCP response envelope: a non-empty content array that
// includes the execution-metadata entry.
describe("Response Format", () => {
  test("should return properly formatted MCP response", async () => {
    const readmeFile = join(testDir, "README.md");
    await writeFile(readmeFile, "# Test Project");

    const response = await evaluateReadmeHealth({ readme_path: readmeFile });

    // Content array must exist and carry at least one entry.
    expect(response.content).toBeDefined();
    expect(Array.isArray(response.content)).toBe(true);
    expect(response.content.length).toBeGreaterThan(0);

    // Should include execution metadata
    const metadataEntry = response.content.find((entry) =>
      entry.text.includes("Execution completed"),
    );
    expect(metadataEntry).toBeDefined();
  });
});
// Repository context: community-health files next to the README should be
// analyzed when repository_path is supplied, and an unreadable path must
// degrade gracefully instead of erroring.
describe("Repository Context Analysis", () => {
test("should analyze repository context when path is provided", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(readmePath, "# Project with Context");
// Create repository files
await writeFile(join(testDir, "CODE_OF_CONDUCT.md"), "# Code of Conduct");
await writeFile(join(testDir, "CONTRIBUTING.md"), "# Contributing");
await writeFile(join(testDir, "SECURITY.md"), "# Security Policy");
await mkdir(join(testDir, ".github"), { recursive: true });
await writeFile(join(testDir, "package.json"), '{"name": "test"}');
const result = await evaluateReadmeHealth({
readme_path: readmePath,
repository_path: testDir,
});
expect(result.isError).toBe(false);
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
expect(dataContent).toBeDefined();
});
test("should handle repository context analysis errors gracefully", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(readmePath, "# Project");
const result = await evaluateReadmeHealth({
readme_path: readmePath,
repository_path: "/nonexistent/path",
});
expect(result.isError).toBe(false); // Should not fail, just return null context
});
});
// Community-health checks: each test plants one signal in the README text
// (conduct link, contributing link, security policy, support channel) and
// asserts the matching check in the report passes.
describe("Community Health Evaluation", () => {
test("should detect code of conduct references", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(
readmePath,
`# Project
Please read our [Code of Conduct](CODE_OF_CONDUCT.md) before contributing.
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
// Checks are located by their human-readable "check" label.
const conductCheck =
data.healthReport.components.communityHealth.details.find(
(d: any) => d.check === "Code of Conduct linked",
);
expect(conductCheck.passed).toBe(true);
// This check is worth a fixed 5 points when passed.
expect(conductCheck.points).toBe(5);
});
test("should detect contributing guidelines", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(
readmePath,
`# Project
See [CONTRIBUTING.md](CONTRIBUTING.md) for contribution guidelines.
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const contributingCheck =
data.healthReport.components.communityHealth.details.find(
(d: any) => d.check === "Contributing guidelines visible",
);
expect(contributingCheck.passed).toBe(true);
});
test("should detect security policy references", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(
readmePath,
`# Project
Report security issues via our [Security Policy](SECURITY.md).
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const securityCheck =
data.healthReport.components.communityHealth.details.find(
(d: any) => d.check === "Security policy linked",
);
expect(securityCheck.passed).toBe(true);
});
test("should detect support channels", async () => {
const readmePath = join(testDir, "README.md");
// A plain-text mention of a community channel (Discord) is enough.
await writeFile(
readmePath,
`# Project
Join our Discord community for support and discussions.
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const supportCheck =
data.healthReport.components.communityHealth.details.find(
(d: any) => d.check === "Support channels provided",
);
expect(supportCheck.passed).toBe(true);
});
});
// Accessibility checks: scannable structure, heading hierarchy, image alt
// text (positive and negative cases), and inclusive language (both a
// violating and a clean README).
describe("Accessibility Evaluation", () => {
test("should detect proper spacing and structure", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(
readmePath,
`# Project
## Description
This is a well-structured README with proper spacing.
## Installation
Instructions here.
## Usage
Usage examples here.
## Contributing
Contributing guidelines.
## License
MIT License
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const spacingCheck =
data.healthReport.components.accessibility.details.find(
(d: any) => d.check === "Scannable structure with proper spacing",
);
expect(spacingCheck.passed).toBe(true);
});
test("should detect heading hierarchy", async () => {
const readmePath = join(testDir, "README.md");
// H1 -> H2 -> H3 nesting without skipped levels.
await writeFile(
readmePath,
`# Main Title
## Section 1
### Subsection 1.1
## Section 2
### Subsection 2.1
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const headingCheck =
data.healthReport.components.accessibility.details.find(
(d: any) => d.check === "Clear heading hierarchy",
);
expect(headingCheck.passed).toBe(true);
});
test("should detect images with alt text", async () => {
const readmePath = join(testDir, "README.md");
// Both images carry non-empty alt text.
await writeFile(
readmePath,
`# Project

![Screenshot of main interface](screenshot.png)
![Logo](logo.png)
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const altTextCheck =
data.healthReport.components.accessibility.details.find(
(d: any) => d.check === "Alt text for images",
);
expect(altTextCheck.passed).toBe(true);
});
test("should detect images without alt text", async () => {
const readmePath = join(testDir, "README.md");
// Empty alt text should fail the check.
await writeFile(
readmePath,
`# Project

![](screenshot.png)
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const altTextCheck =
data.healthReport.components.accessibility.details.find(
(d: any) => d.check === "Alt text for images",
);
expect(altTextCheck.passed).toBe(false);
});
test("should detect inclusive language violations", async () => {
const readmePath = join(testDir, "README.md");
// Contains several flagged terms ("guys", "master", "whitelist").
await writeFile(
readmePath,
`# Project
Hey guys, this project uses a master branch and maintains a whitelist of contributors.
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const inclusiveCheck =
data.healthReport.components.accessibility.details.find(
(d: any) => d.check === "Inclusive language",
);
expect(inclusiveCheck.passed).toBe(false);
});
test("should pass inclusive language check with good content", async () => {
const readmePath = join(testDir, "README.md");
// Uses the preferred alternatives ("team", "main", "allowlist").
await writeFile(
readmePath,
`# Project
Welcome team! This project uses the main branch and maintains an allowlist of contributors.
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const inclusiveCheck =
data.healthReport.components.accessibility.details.find(
(d: any) => d.check === "Inclusive language",
);
expect(inclusiveCheck.passed).toBe(true);
});
});
// Onboarding checks: quick-start section, prerequisites list, first-time
// contributor guidance, and good-first-issue mentions.
describe("Onboarding Evaluation", () => {
test("should detect quick start sections", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(
readmePath,
`# Project
## Quick Start
Get up and running in minutes!
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const quickStartCheck =
data.healthReport.components.onboarding.details.find(
(d: any) => d.check === "Quick start section",
);
expect(quickStartCheck.passed).toBe(true);
});
test("should detect prerequisites", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(
readmePath,
`# Project
## Prerequisites
- Node.js 16+
- npm or yarn
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const prereqCheck = data.healthReport.components.onboarding.details.find(
(d: any) => d.check === "Prerequisites clearly listed",
);
expect(prereqCheck.passed).toBe(true);
});
test("should detect first contribution guidance", async () => {
const readmePath = join(testDir, "README.md");
// "first-time contributors" wording should trigger the check.
await writeFile(
readmePath,
`# Project
## For New Contributors
Welcome first-time contributors! Here's how to get started.
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const firstContribCheck =
data.healthReport.components.onboarding.details.find(
(d: any) => d.check === "First contribution guide",
);
expect(firstContribCheck.passed).toBe(true);
});
test("should detect good first issues", async () => {
const readmePath = join(testDir, "README.md");
// A plain-text "good first issues" mention is sufficient.
await writeFile(
readmePath,
`# Project
Check out our good first issues for beginners!
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const goodFirstCheck =
data.healthReport.components.onboarding.details.find(
(d: any) => d.check === "Good first issues mentioned",
);
expect(goodFirstCheck.passed).toBe(true);
});
});
describe("Content Quality Evaluation", () => {
// Content-quality checks (enclosing describe continues past this excerpt):
// content length thresholds in both directions, plus code-example detection.
test("should evaluate adequate content length", async () => {
const readmePath = join(testDir, "README.md");
// Repeat a sentence 20x so the README clears the length threshold.
const content =
"# Project\n\n" +
"This is a well-sized README with adequate content. ".repeat(20);
await writeFile(readmePath, content);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const lengthCheck =
data.healthReport.components.contentQuality.details.find(
(d: any) => d.check === "Adequate content length",
);
expect(lengthCheck.passed).toBe(true);
});
test("should detect insufficient content length", async () => {
const readmePath = join(testDir, "README.md");
// Deliberately tiny README: the length check must fail.
await writeFile(readmePath, "# Project\n\nToo short.");
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const lengthCheck =
data.healthReport.components.contentQuality.details.find(
(d: any) => d.check === "Adequate content length",
);
expect(lengthCheck.passed).toBe(false);
});
test("should detect code examples", async () => {
const readmePath = join(testDir, "README.md");
// Two fenced code blocks (bash + javascript) should satisfy the check.
await writeFile(
readmePath,
`# Project
## Installation
\`\`\`bash
npm install project
\`\`\`
## Usage
\`\`\`javascript
const project = require('project');
project.run();
\`\`\`
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const codeCheck =
data.healthReport.components.contentQuality.details.find(
(d: any) => d.check === "Code examples provided",
);
expect(codeCheck.passed).toBe(true);
});
test("should detect external links", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(
readmePath,
`# Project
Check out our [documentation](https://docs.example.com),
[demo](https://demo.example.com), and [related project](https://github.com/example/related).
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const linksCheck =
data.healthReport.components.contentQuality.details.find(
(d: any) => d.check === "External links present",
);
expect(linksCheck.passed).toBe(true);
});
test("should evaluate project description clarity", async () => {
const readmePath = join(testDir, "README.md");
const longContent = `# Project
## Description
This is a comprehensive project description that provides detailed information about what the project does, how it works, and why it's useful. The description is long enough and well-structured to meet the clarity requirements. This content needs to be over 500 characters to pass the clarity check, so I'm adding more detailed information about the project features, installation process, usage examples, and comprehensive documentation that explains all aspects of the project in great detail.
## Features
- Feature 1: Advanced functionality
- Feature 2: Enhanced performance
- Feature 3: User-friendly interface
## Installation
Detailed installation instructions here with step-by-step guidance.
## Usage
Comprehensive usage examples and documentation with code samples.
`;
await writeFile(readmePath, longContent);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
const clarityCheck =
data.healthReport.components.contentQuality.details.find(
(d: any) => d.check === "Project description clarity",
);
expect(clarityCheck.passed).toBe(true);
});
});
// Tests for the letter grade derived from the aggregate health score.
describe("Grade Calculation", () => {
// A README covering every evaluated area (TOC, description, quick start,
// prerequisites, code samples, contributing/support/security sections,
// external links) should score >= 90 and earn an "A".
test("should assign grade A for 90%+ score", async () => {
const readmePath = join(testDir, "README.md");
// Create comprehensive README that should score high
await writeFile(
readmePath,
`# Excellent Project
## Table of Contents
- [Description](#description)
- [Installation](#installation)
- [Usage](#usage)
## Description
This is a comprehensive project with excellent documentation. It includes all necessary sections and follows best practices for community health, accessibility, onboarding, and content quality.
## Quick Start
Get started in minutes with our simple installation process.
## Prerequisites
- Node.js 16+
- npm or yarn
## Installation
\`\`\`bash
npm install excellent-project
\`\`\`
## Usage
\`\`\`javascript
const project = require('excellent-project');
project.start();
\`\`\`
## Contributing
Please read [CONTRIBUTING.md](CONTRIBUTING.md) for details on our code of conduct and the process for submitting pull requests.
## First Contribution
New contributors welcome! Check out our good first issues for beginners.
## Support
Join our Discord community for help and discussions.
## Security
Report security issues via our [Security Policy](SECURITY.md).
## Links
- [Documentation](https://docs.example.com)
- [Demo](https://demo.example.com)
- [API Reference](https://api.example.com)
- [GitHub Issues](https://github.com/example/issues)
## License
MIT License
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
// Extract the response part that embeds the health report JSON.
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
expect(data.healthReport.overallScore).toBeGreaterThanOrEqual(90);
expect(data.healthReport.grade).toBe("A");
});
// A near-empty README should score under 60 and receive an "F".
test("should assign grade F for very low scores", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(readmePath, "# Bad\n\nMinimal.");
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
expect(data.healthReport.overallScore).toBeLessThan(60);
expect(data.healthReport.grade).toBe("F");
});
});
// Tests for the criticalIssues / recommendations / strengths lists in the
// health report.
describe("Recommendations and Critical Issues", () => {
  /** Extracts and parses the health-report JSON payload from a tool result. */
  const readHealthReport = (result: any) => {
    const payload = result.content.find((c: any) =>
      c.text.includes("healthReport"),
    );
    return JSON.parse(payload!.text);
  };

  test("should identify critical issues for low-scoring components", async () => {
    const readmePath = join(testDir, "README.md");
    await writeFile(readmePath, "# Minimal Project\n\nVery basic content.");

    const data = readHealthReport(
      await evaluateReadmeHealth({ readme_path: readmePath }),
    );

    expect(data.healthReport.criticalIssues.length).toBeGreaterThan(0);
    const hasCriticalPrefix = data.healthReport.criticalIssues.some(
      (issue: string) => issue.includes("Critical:"),
    );
    expect(hasCriticalPrefix).toBe(true);
  });

  test("should generate appropriate recommendations", async () => {
    const readmePath = join(testDir, "README.md");
    await writeFile(
      readmePath,
      "# Project\n\nBasic project with minimal content that will fail most health checks.",
    );

    const data = readHealthReport(
      await evaluateReadmeHealth({ readme_path: readmePath }),
    );

    // Minimal content fails most checks, so recommendations are expected;
    // the list is asserted to stay within ten entries.
    const { recommendations } = data.healthReport;
    expect(recommendations.length).toBeGreaterThan(0);
    expect(recommendations.length).toBeLessThanOrEqual(10);
  });

  test("should identify strengths in well-structured components", async () => {
    const readmePath = join(testDir, "README.md");
    const readme = `# Project
## Description
This project has good content quality with proper structure and adequate length.
## Installation
\`\`\`bash
npm install
\`\`\`
## Usage
\`\`\`javascript
const app = require('./app');
app.start();
\`\`\`
## Links
- [Docs](https://example.com)
- [Demo](https://demo.com)
- [API](https://api.com)
- [Support](https://support.com)
`;
    await writeFile(readmePath, readme);

    const data = readHealthReport(
      await evaluateReadmeHealth({ readme_path: readmePath }),
    );

    expect(data.healthReport.strengths.length).toBeGreaterThan(0);
  });
});
// Tests for the estimatedImprovementTime string in the health report:
// few pending recommendations -> minutes, many -> hours.
describe("Time Estimation", () => {
// A nearly complete README should leave so little to fix that the
// estimate is expressed in minutes.
test("should estimate time in minutes for quick fixes", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(
readmePath,
`# Excellent Project
## Table of Contents
- [Description](#description)
- [Installation](#installation)
## Description
This is a comprehensive project with excellent documentation. It includes all necessary sections and follows best practices for community health, accessibility, onboarding, and content quality.
## Quick Start
Get started in minutes with our simple installation process.
## Prerequisites
- Node.js 16+
- npm or yarn
## Installation
\`\`\`bash
npm install excellent-project
\`\`\`
## Usage
\`\`\`javascript
const project = require('excellent-project');
project.start();
\`\`\`
## Contributing
Please read [CONTRIBUTING.md](CONTRIBUTING.md) for details.
## First Contribution
New contributors welcome! Check out our good first issues for beginners.
## Support
Join our Discord community for help and discussions.
## Security
Report security issues via our [Security Policy](SECURITY.md).
## Links
- [Documentation](https://docs.example.com)
- [Demo](https://demo.example.com)
- [API Reference](https://api.example.com)
## License
MIT License
`,
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
// Extract the response part that embeds the health report JSON.
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
// Should have very few recommendations, resulting in minutes
expect(data.healthReport.estimatedImprovementTime).toMatch(/\d+ minutes/);
});
// A bare-bones README accumulates enough recommendations that the
// estimate is expressed in hours.
test("should estimate time in hours for moderate improvements", async () => {
const readmePath = join(testDir, "README.md");
await writeFile(
readmePath,
"# Project\n\nBasic project needing improvements.",
);
const result = await evaluateReadmeHealth({
readme_path: readmePath,
});
const dataContent = result.content.find((c) =>
c.text.includes("healthReport"),
);
const data = JSON.parse(dataContent!.text);
// Should have enough recommendations to warrant hours
expect(data.healthReport.estimatedImprovementTime).toMatch(/\d+ hours?/);
});
});
// Tests for the actionable "nextSteps" list attached to the evaluation result.
describe("Next Steps Generation", () => {
  /** Finds and parses the response part that carries the nextSteps payload. */
  const readNextSteps = (result: any) => {
    const payload = result.content.find((c: any) =>
      c.text.includes("nextSteps"),
    );
    return JSON.parse(payload!.text);
  };

  test("should prioritize critical issues in next steps", async () => {
    const readmePath = join(testDir, "README.md");
    await writeFile(readmePath, "# Minimal\n\nBad.");

    const data = readNextSteps(
      await evaluateReadmeHealth({ readme_path: readmePath }),
    );

    const mentionsCritical = data.nextSteps.some((step: string) =>
      step.includes("critical issues"),
    );
    expect(mentionsCritical).toBe(true);
  });

  test("should suggest targeting 85+ score for low-scoring READMEs", async () => {
    const readmePath = join(testDir, "README.md");
    await writeFile(readmePath, "# Project\n\nNeeds improvement.");

    const data = readNextSteps(
      await evaluateReadmeHealth({ readme_path: readmePath }),
    );

    const suggestsTarget = data.nextSteps.some((step: string) =>
      step.includes("85+ health score"),
    );
    expect(suggestsTarget).toBe(true);
  });

  test("should always include re-evaluation step", async () => {
    const readmePath = join(testDir, "README.md");
    await writeFile(readmePath, "# Any Project");

    const data = readNextSteps(
      await evaluateReadmeHealth({ readme_path: readmePath }),
    );

    const hasReEvaluate = data.nextSteps.some((step: string) =>
      step.includes("Re-evaluate"),
    );
    expect(hasReEvaluate).toBe(true);
  });
});
// Tests that each supported project_type value is accepted and echoed back
// in the result payload.
describe("Project Type Variations", () => {
  /** Finds and parses the response part containing the projectType field. */
  const readPayload = (result: any) => {
    const payload = result.content.find((c: any) =>
      c.text.includes("projectType"),
    );
    return JSON.parse(payload!.text);
  };

  test("should handle enterprise_tool project type", async () => {
    const readmePath = join(testDir, "README.md");
    await writeFile(
      readmePath,
      "# Enterprise Tool\n\nProfessional enterprise solution.",
    );

    const result = await evaluateReadmeHealth({
      readme_path: readmePath,
      project_type: "enterprise_tool",
    });

    expect(result.isError).toBe(false);
    expect(readPayload(result).projectType).toBe("enterprise_tool");
  });

  test("should handle personal_project project type", async () => {
    const readmePath = join(testDir, "README.md");
    await writeFile(
      readmePath,
      "# Personal Project\n\nMy personal coding project.",
    );

    const result = await evaluateReadmeHealth({
      readme_path: readmePath,
      project_type: "personal_project",
    });

    expect(result.isError).toBe(false);
    expect(readPayload(result).projectType).toBe("personal_project");
  });

  test("should handle documentation project type", async () => {
    const readmePath = join(testDir, "README.md");
    await writeFile(
      readmePath,
      "# Documentation Project\n\nComprehensive documentation.",
    );

    const result = await evaluateReadmeHealth({
      readme_path: readmePath,
      project_type: "documentation",
    });

    expect(result.isError).toBe(false);
    expect(readPayload(result).projectType).toBe("documentation");
  });
});
});
```