This is page 10 of 29. Use http://codebase.md/tosin2013/documcp?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .dockerignore
├── .eslintignore
├── .eslintrc.json
├── .github
│   ├── agents
│   │   ├── documcp-ast.md
│   │   ├── documcp-deploy.md
│   │   ├── documcp-memory.md
│   │   ├── documcp-test.md
│   │   └── documcp-tool.md
│   ├── copilot-instructions.md
│   ├── dependabot.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── automated-changelog.md
│   │   ├── bug_report.md
│   │   ├── bug_report.yml
│   │   ├── documentation_issue.md
│   │   ├── feature_request.md
│   │   ├── feature_request.yml
│   │   ├── npm-publishing-fix.md
│   │   └── release_improvements.md
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── release-drafter.yml
│   └── workflows
│       ├── auto-merge.yml
│       ├── ci.yml
│       ├── codeql.yml
│       ├── dependency-review.yml
│       ├── deploy-docs.yml
│       ├── README.md
│       ├── release-drafter.yml
│       └── release.yml
├── .gitignore
├── .husky
│   ├── commit-msg
│   └── pre-commit
├── .linkcheck.config.json
├── .markdown-link-check.json
├── .nvmrc
├── .pre-commit-config.yaml
├── .versionrc.json
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── commitlint.config.js
├── CONTRIBUTING.md
├── docker-compose.docs.yml
├── Dockerfile.docs
├── docs
│   ├── .docusaurus
│   │   ├── docusaurus-plugin-content-docs
│   │   │   └── default
│   │   │       └── __mdx-loader-dependency.json
│   │   └── docusaurus-plugin-content-pages
│   │       └── default
│   │           └── __plugin.json
│   ├── adrs
│   │   ├── 001-mcp-server-architecture.md
│   │   ├── 002-repository-analysis-engine.md
│   │   ├── 003-static-site-generator-recommendation-engine.md
│   │   ├── 004-diataxis-framework-integration.md
│   │   ├── 005-github-pages-deployment-automation.md
│   │   ├── 006-mcp-tools-api-design.md
│   │   ├── 007-mcp-prompts-and-resources-integration.md
│   │   ├── 008-intelligent-content-population-engine.md
│   │   ├── 009-content-accuracy-validation-framework.md
│   │   ├── 010-mcp-resource-pattern-redesign.md
│   │   └── README.md
│   ├── api
│   │   ├── .nojekyll
│   │   ├── assets
│   │   │   ├── hierarchy.js
│   │   │   ├── highlight.css
│   │   │   ├── icons.js
│   │   │   ├── icons.svg
│   │   │   ├── main.js
│   │   │   ├── navigation.js
│   │   │   ├── search.js
│   │   │   └── style.css
│   │   ├── hierarchy.html
│   │   ├── index.html
│   │   ├── modules.html
│   │   └── variables
│   │       └── TOOLS.html
│   ├── assets
│   │   └── logo.svg
│   ├── development
│   │   └── MCP_INSPECTOR_TESTING.md
│   ├── docusaurus.config.js
│   ├── explanation
│   │   ├── architecture.md
│   │   └── index.md
│   ├── guides
│   │   ├── link-validation.md
│   │   ├── playwright-integration.md
│   │   └── playwright-testing-workflow.md
│   ├── how-to
│   │   ├── analytics-setup.md
│   │   ├── custom-domains.md
│   │   ├── documentation-freshness-tracking.md
│   │   ├── github-pages-deployment.md
│   │   ├── index.md
│   │   ├── local-testing.md
│   │   ├── performance-optimization.md
│   │   ├── prompting-guide.md
│   │   ├── repository-analysis.md
│   │   ├── seo-optimization.md
│   │   ├── site-monitoring.md
│   │   ├── troubleshooting.md
│   │   └── usage-examples.md
│   ├── index.md
│   ├── knowledge-graph.md
│   ├── package-lock.json
│   ├── package.json
│   ├── phase-2-intelligence.md
│   ├── reference
│   │   ├── api-overview.md
│   │   ├── cli.md
│   │   ├── configuration.md
│   │   ├── deploy-pages.md
│   │   ├── index.md
│   │   ├── mcp-tools.md
│   │   └── prompt-templates.md
│   ├── research
│   │   ├── cross-domain-integration
│   │   │   └── README.md
│   │   ├── domain-1-mcp-architecture
│   │   │   ├── index.md
│   │   │   └── mcp-performance-research.md
│   │   ├── domain-2-repository-analysis
│   │   │   └── README.md
│   │   ├── domain-3-ssg-recommendation
│   │   │   ├── index.md
│   │   │   └── ssg-performance-analysis.md
│   │   ├── domain-4-diataxis-integration
│   │   │   └── README.md
│   │   ├── domain-5-github-deployment
│   │   │   ├── github-pages-security-analysis.md
│   │   │   └── index.md
│   │   ├── domain-6-api-design
│   │   │   └── README.md
│   │   ├── README.md
│   │   ├── research-integration-summary-2025-01-14.md
│   │   ├── research-progress-template.md
│   │   └── research-questions-2025-01-14.md
│   ├── robots.txt
│   ├── sidebars.js
│   ├── sitemap.xml
│   ├── src
│   │   └── css
│   │       └── custom.css
│   └── tutorials
│       ├── development-setup.md
│       ├── environment-setup.md
│       ├── first-deployment.md
│       ├── getting-started.md
│       ├── index.md
│       ├── memory-workflows.md
│       └── user-onboarding.md
├── jest.config.js
├── LICENSE
├── Makefile
├── MCP_PHASE2_IMPLEMENTATION.md
├── mcp-config-example.json
├── mcp.json
├── package-lock.json
├── package.json
├── README.md
├── release.sh
├── scripts
│   └── check-package-structure.cjs
├── SECURITY.md
├── setup-precommit.sh
├── src
│   ├── benchmarks
│   │   └── performance.ts
│   ├── index.ts
│   ├── memory
│   │   ├── contextual-retrieval.ts
│   │   ├── deployment-analytics.ts
│   │   ├── enhanced-manager.ts
│   │   ├── export-import.ts
│   │   ├── freshness-kg-integration.ts
│   │   ├── index.ts
│   │   ├── integration.ts
│   │   ├── kg-code-integration.ts
│   │   ├── kg-health.ts
│   │   ├── kg-integration.ts
│   │   ├── kg-link-validator.ts
│   │   ├── kg-storage.ts
│   │   ├── knowledge-graph.ts
│   │   ├── learning.ts
│   │   ├── manager.ts
│   │   ├── multi-agent-sharing.ts
│   │   ├── pruning.ts
│   │   ├── schemas.ts
│   │   ├── storage.ts
│   │   ├── temporal-analysis.ts
│   │   ├── user-preferences.ts
│   │   └── visualization.ts
│   ├── prompts
│   │   └── technical-writer-prompts.ts
│   ├── scripts
│   │   └── benchmark.ts
│   ├── templates
│   │   └── playwright
│   │       ├── accessibility.spec.template.ts
│   │       ├── Dockerfile.template
│   │       ├── docs-e2e.workflow.template.yml
│   │       ├── link-validation.spec.template.ts
│   │       └── playwright.config.template.ts
│   ├── tools
│   │   ├── analyze-deployments.ts
│   │   ├── analyze-readme.ts
│   │   ├── analyze-repository.ts
│   │   ├── check-documentation-links.ts
│   │   ├── deploy-pages.ts
│   │   ├── detect-gaps.ts
│   │   ├── evaluate-readme-health.ts
│   │   ├── generate-config.ts
│   │   ├── generate-contextual-content.ts
│   │   ├── generate-llm-context.ts
│   │   ├── generate-readme-template.ts
│   │   ├── generate-technical-writer-prompts.ts
│   │   ├── kg-health-check.ts
│   │   ├── manage-preferences.ts
│   │   ├── manage-sitemap.ts
│   │   ├── optimize-readme.ts
│   │   ├── populate-content.ts
│   │   ├── readme-best-practices.ts
│   │   ├── recommend-ssg.ts
│   │   ├── setup-playwright-tests.ts
│   │   ├── setup-structure.ts
│   │   ├── sync-code-to-docs.ts
│   │   ├── test-local-deployment.ts
│   │   ├── track-documentation-freshness.ts
│   │   ├── update-existing-documentation.ts
│   │   ├── validate-content.ts
│   │   ├── validate-documentation-freshness.ts
│   │   ├── validate-readme-checklist.ts
│   │   └── verify-deployment.ts
│   ├── types
│   │   └── api.ts
│   ├── utils
│   │   ├── ast-analyzer.ts
│   │   ├── code-scanner.ts
│   │   ├── content-extractor.ts
│   │   ├── drift-detector.ts
│   │   ├── freshness-tracker.ts
│   │   ├── language-parsers-simple.ts
│   │   ├── permission-checker.ts
│   │   └── sitemap-generator.ts
│   └── workflows
│       └── documentation-workflow.ts
├── test-docs-local.sh
├── tests
│   ├── api
│   │   └── mcp-responses.test.ts
│   ├── benchmarks
│   │   └── performance.test.ts
│   ├── edge-cases
│   │   └── error-handling.test.ts
│   ├── functional
│   │   └── tools.test.ts
│   ├── integration
│   │   ├── kg-documentation-workflow.test.ts
│   │   ├── knowledge-graph-workflow.test.ts
│   │   ├── mcp-readme-tools.test.ts
│   │   ├── memory-mcp-tools.test.ts
│   │   ├── readme-technical-writer.test.ts
│   │   └── workflow.test.ts
│   ├── memory
│   │   ├── contextual-retrieval.test.ts
│   │   ├── enhanced-manager.test.ts
│   │   ├── export-import.test.ts
│   │   ├── freshness-kg-integration.test.ts
│   │   ├── kg-code-integration.test.ts
│   │   ├── kg-health.test.ts
│   │   ├── kg-link-validator.test.ts
│   │   ├── kg-storage-validation.test.ts
│   │   ├── kg-storage.test.ts
│   │   ├── knowledge-graph-enhanced.test.ts
│   │   ├── knowledge-graph.test.ts
│   │   ├── learning.test.ts
│   │   ├── manager-advanced.test.ts
│   │   ├── manager.test.ts
│   │   ├── mcp-resource-integration.test.ts
│   │   ├── mcp-tool-persistence.test.ts
│   │   ├── schemas.test.ts
│   │   ├── storage.test.ts
│   │   ├── temporal-analysis.test.ts
│   │   └── user-preferences.test.ts
│   ├── performance
│   │   ├── memory-load-testing.test.ts
│   │   └── memory-stress-testing.test.ts
│   ├── prompts
│   │   ├── guided-workflow-prompts.test.ts
│   │   └── technical-writer-prompts.test.ts
│   ├── server.test.ts
│   ├── setup.ts
│   ├── tools
│   │   ├── all-tools.test.ts
│   │   ├── analyze-coverage.test.ts
│   │   ├── analyze-deployments.test.ts
│   │   ├── analyze-readme.test.ts
│   │   ├── analyze-repository.test.ts
│   │   ├── check-documentation-links.test.ts
│   │   ├── deploy-pages-kg-retrieval.test.ts
│   │   ├── deploy-pages-tracking.test.ts
│   │   ├── deploy-pages.test.ts
│   │   ├── detect-gaps.test.ts
│   │   ├── evaluate-readme-health.test.ts
│   │   ├── generate-contextual-content.test.ts
│   │   ├── generate-llm-context.test.ts
│   │   ├── generate-readme-template.test.ts
│   │   ├── generate-technical-writer-prompts.test.ts
│   │   ├── kg-health-check.test.ts
│   │   ├── manage-sitemap.test.ts
│   │   ├── optimize-readme.test.ts
│   │   ├── readme-best-practices.test.ts
│   │   ├── recommend-ssg-historical.test.ts
│   │   ├── recommend-ssg-preferences.test.ts
│   │   ├── recommend-ssg.test.ts
│   │   ├── simple-coverage.test.ts
│   │   ├── sync-code-to-docs.test.ts
│   │   ├── test-local-deployment.test.ts
│   │   ├── tool-error-handling.test.ts
│   │   ├── track-documentation-freshness.test.ts
│   │   ├── validate-content.test.ts
│   │   ├── validate-documentation-freshness.test.ts
│   │   └── validate-readme-checklist.test.ts
│   ├── types
│   │   └── type-safety.test.ts
│   └── utils
│       ├── ast-analyzer.test.ts
│       ├── content-extractor.test.ts
│       ├── drift-detector.test.ts
│       ├── freshness-tracker.test.ts
│       └── sitemap-generator.test.ts
├── tsconfig.json
└── typedoc.json
```

# Files

--------------------------------------------------------------------------------
/src/tools/optimize-readme.ts:
--------------------------------------------------------------------------------

```typescript
import { z } from "zod";
import { promises as fs } from "fs";
import path from "path";
import { MCPToolResponse } from "../types/api.js";

// Input validation schema
const OptimizeReadmeInputSchema = z.object({
  readme_path: z.string().min(1, "README path is required"),
  strategy: z
    .enum([
      "community_focused",
      "enterprise_focused",
      "developer_focused",
      "general",
    ])
    .optional()
    .default("community_focused"),
  max_length: z.number().min(50).max(1000).optional().default(300),
  include_tldr: z.boolean().optional().default(true),
  preserve_existing: z.boolean().optional().default(false),
  output_path: z.string().optional(),
  create_docs_directory: z.boolean().optional().default(true),
});

export type OptimizeReadmeInput = z.infer<typeof OptimizeReadmeInputSchema>;

interface OptimizationResult {
  originalLength: number;
  optimizedLength: number;
  reductionPercentage: number;
  optimizedContent: string;
  extractedSections: ExtractedSection[];
  tldrGenerated: string | null;
  restructuringChanges: RestructuringChange[];
  recommendations: string[];
}

interface ExtractedSection {
  title: string;
  content: string;
  suggestedLocation: string;
  reason: string;
}

interface RestructuringChange {
  type: "moved" | "condensed" | "split" | "added" | "removed";
  section: string;
  description: string;
  impact: string;
}

/**
 * Optimizes README content by restructuring, condensing, and extracting detailed sections.
 *
 * Performs intelligent README optimization including length reduction, structure improvement,
 * content extraction to separate documentation, and TL;DR generation. Uses different strategies
 * based on target audience (community, enterprise, developer, general) to maximize effectiveness.
 *
 * @param input - The input parameters for README optimization
 * @param input.readme_path - The file system path to the README file to optimize
 * @param input.strategy - The optimization strategy to apply (default: "community_focused")
 * @param input.max_length - Target maximum length in lines (default: 300)
 * @param input.include_tldr - Whether to generate a TL;DR section (default: true)
 * @param input.preserve_existing - Whether to preserve existing content structure (default: false)
 * @param input.output_path - Optional output path for optimized README
 * @param input.create_docs_directory - Whether to create docs/ directory for extracted content (default: true)
 *
 * @returns Promise resolving to README optimization results
 * @returns optimization - Complete optimization results including length reduction and restructuring
 * @returns nextSteps - Array of recommended next actions after optimization
 *
 * @throws {Error} When README file is inaccessible or invalid
 * @throws {Error} When optimization processing fails
 * @throws {Error} When output directory cannot be created
 *
 * @example
 * ```typescript
 * // Optimize README for community contributors
 * const result = await optimizeReadme({
 *   readme_path: "./README.md",
 *   strategy: "community_focused",
 *   max_length: 300,
 *   include_tldr: true
 * });
 *
 * console.log(`Reduced from ${result.data.optimization.originalLength} to ${result.data.optimization.optimizedLength} lines`);
 * console.log(`Reduction: ${result.data.optimization.reductionPercentage}%`);
 *
 * // Optimize for enterprise with aggressive reduction
 * const enterprise = await optimizeReadme({
 *   readme_path: "./README.md",
 *   strategy: "enterprise_focused",
 *   max_length: 200,
 *   preserve_existing: true
 * });
 * ```
 *
 * @since 1.0.0
 */
export async function optimizeReadme(
  input: Partial<OptimizeReadmeInput>,
): Promise<
  MCPToolResponse<{ optimization: OptimizationResult; nextSteps: string[] }>
> {
  const startTime = Date.now();

  try {
    // Validate input
    const validatedInput = OptimizeReadmeInputSchema.parse(input);
    const {
      readme_path,
      strategy,
      max_length,
      include_tldr,
      output_path,
      create_docs_directory,
    } = validatedInput;

    // Read original README
    const originalContent = await fs.readFile(readme_path, "utf-8");
    const originalLength = originalContent.split("\n").length;

    // Parse README structure
    const sections = parseReadmeStructure(originalContent);

    // Generate TL;DR if requested
    const tldrGenerated = include_tldr
      ? generateTldr(originalContent, sections)
      : null;

    // Identify sections to extract
    const extractedSections = identifySectionsToExtract(
      sections,
      strategy,
      max_length,
    );

    // Create basic optimization result. generateTldr already returns content
    // that begins with a "## TL;DR" heading, so it is appended as-is; the
    // fallback supplies its own heading.
    const optimizedContent =
      originalContent +
      "\n\n" +
      (tldrGenerated || "## TL;DR\n\nQuick overview of the project.");
    const restructuringChanges = [
      {
        type: "added" as const,
        section: "TL;DR",
        description: "Added concise project overview",
        impact: "Helps users quickly understand project value",
      },
    ];

    const optimizedLength = optimizedContent.split("\n").length;
    const reductionPercentage = Math.round(
      ((originalLength - optimizedLength) / originalLength) * 100,
    );

    // Create docs directory and extract detailed content if requested
    if (create_docs_directory && extractedSections.length > 0) {
      await createDocsStructure(path.dirname(readme_path), extractedSections);
    }

    // Write optimized README if output path specified
    if (output_path) {
      await fs.writeFile(output_path, optimizedContent, "utf-8");
    }

    const recommendations = generateOptimizationRecommendations(
      originalLength,
      optimizedLength,
      extractedSections,
      strategy,
    );

    const optimization: OptimizationResult = {
      originalLength,
      optimizedLength,
      reductionPercentage,
      optimizedContent,
      extractedSections,
      tldrGenerated,
      restructuringChanges,
      recommendations,
    };

    const nextSteps = generateOptimizationNextSteps(
      optimization,
      validatedInput,
    );

    return {
      success: true,
      data: {
        optimization,
        nextSteps,
      },
      metadata: {
        toolVersion: "1.0.0",
        executionTime: Date.now() - startTime,
        timestamp: new Date().toISOString(),
      },
    };
  } catch (error) {
    return {
      success: false,
      error: {
        code: "OPTIMIZATION_FAILED",
        message: "Failed to optimize README",
        details: error instanceof Error ? error.message : "Unknown error",
        resolution: "Check README file path and permissions",
      },
      metadata: {
        toolVersion: "1.0.0",
        executionTime: Date.now() - startTime,
        timestamp: new Date().toISOString(),
      },
    };
  }
}

interface ReadmeSection {
  title: string;
  content: string;
  level: number;
  startLine: number;
  endLine: number;
  wordCount: number;
  isEssential: boolean;
}

function parseReadmeStructure(content: string): ReadmeSection[] {
  const lines = content.split("\n");
  const sections: ReadmeSection[] = [];
  let currentTitle = "";
  let currentLevel = 0;
  let currentStartLine = 0;

  lines.forEach((line, index) => {
    const headingMatch = line.match(/^(#{1,6})\s+(.+)$/);

    if (headingMatch) {
      // Save previous section
      if (currentTitle) {
        const endLine = index - 1;
        const sectionContent = lines
          .slice(currentStartLine, endLine + 1)
          .join("\n");
        const wordCount = sectionContent.split(/\s+/).length;
        const isEssential = isEssentialSection(currentTitle);

        sections.push({
          title: currentTitle,
          content: sectionContent,
          level: currentLevel,
          startLine: currentStartLine,
          endLine: endLine,
          wordCount: wordCount,
          isEssential: isEssential,
        });
      }

      // Start new section
      currentTitle = headingMatch[2].trim();
      currentLevel = headingMatch[1].length;
      currentStartLine = index;
    }
  });

  // Add final section
  if (currentTitle) {
    const endLine = lines.length - 1;
    const sectionContent = lines
      .slice(currentStartLine, endLine + 1)
      .join("\n");
    const wordCount = sectionContent.split(/\s+/).length;
    const isEssential = isEssentialSection(currentTitle);

    sections.push({
      title: currentTitle,
      content: sectionContent,
      level: currentLevel,
      startLine: currentStartLine,
      endLine: endLine,
      wordCount: wordCount,
      isEssential: isEssential,
    });
  }

  return sections;
}

function isEssentialSection(title: string): boolean {
  const essentialKeywords = [
    "installation",
    "install",
    "setup",
    "getting started",
    "quick start",
    "usage",
    "example",
    "api",
    "license",
    "contributing",
  ];

  return essentialKeywords.some((keyword) =>
    title.toLowerCase().includes(keyword),
  );
}

function generateTldr(content: string, sections: ReadmeSection[]): string {
  // Extract project name from first heading
  const projectNameMatch = content.match(/^#\s+(.+)$/m);
  const projectName = projectNameMatch ? projectNameMatch[1] : "This project";

  // Extract description (usually after title or in blockquote)
  const descriptionMatch = content.match(/>\s*(.+)/);
  let description = descriptionMatch ? descriptionMatch[1] : "";

  // If no description found, try to extract from first paragraph
  if (!description) {
    const firstParagraphMatch = content.match(/^[^#\n].{20,200}/m);
    description = firstParagraphMatch
      ? firstParagraphMatch[0].substring(0, 100) + "..."
      : "";
  }

  // Identify key features or use cases
  const features: string[] = [];
  sections.forEach((section) => {
    if (
      section.title.toLowerCase().includes("feature") ||
      section.title.toLowerCase().includes("what") ||
      section.title.toLowerCase().includes("why")
    ) {
      const bullets = section.content.match(/^\s*[-*+]\s+(.+)$/gm);
      if (bullets && bullets.length > 0) {
        features.push(
          ...bullets
            .slice(0, 3)
            .map((b) => b.replace(/^\s*[-*+]\s+/, "").trim()),
        );
      }
    }
  });

  let tldr = `## TL;DR\n\n${projectName} ${description}\n\n`;

  if (features.length > 0) {
    tldr += `**Key features:**\n`;
    features.slice(0, 3).forEach((feature) => {
      tldr += `- ${feature}\n`;
    });
    tldr += "\n";
  }

  // Add quick start reference
  const hasInstallSection = sections.some(
    (s) =>
      s.title.toLowerCase().includes("install") ||
      s.title.toLowerCase().includes("setup"),
  );

  if (hasInstallSection) {
    tldr += `**Quick start:** See [Installation](#installation) → [Usage](#usage)\n\n`;
  }

  return tldr;
}

function identifySectionsToExtract(
  sections: ReadmeSection[],
  strategy: string,
  maxLength: number,
): ExtractedSection[] {
  const extractedSections: ExtractedSection[] = [];
  const currentLength = sections.reduce(
    (sum, s) => sum + s.content.split("\n").length,
    0,
  );

  if (currentLength <= maxLength) {
    return extractedSections; // No extraction needed
  }

  // Define extraction rules based on strategy
  const extractionRules = getExtractionRules(strategy);

  sections.forEach((section) => {
    for (const rule of extractionRules) {
      if (rule.matcher(section) && !section.isEssential) {
        extractedSections.push({
          title: section.title,
          content: section.content,
          suggestedLocation: rule.suggestedLocation,
          reason: rule.reason,
        });
        break;
      }
    }
  });

  return extractedSections;
}

function getExtractionRules(strategy: string) {
  const baseRules = [
    {
      matcher: (section: ReadmeSection) => section.wordCount > 200,
      suggestedLocation: "docs/detailed-guide.md",
      reason: "Section too long for main README",
    },
    {
      matcher: (section: ReadmeSection) =>
        /troubleshoot|faq|common issues|problems/i.test(section.title),
      suggestedLocation: "docs/troubleshooting.md",
      reason: "Troubleshooting content better suited for separate document",
    },
    {
      matcher: (section: ReadmeSection) =>
        /advanced|configuration|config/i.test(section.title),
      suggestedLocation: "docs/configuration.md",
      reason: "Advanced configuration details",
    },
    {
      matcher: (section: ReadmeSection) =>
        /development|developer|build|compile/i.test(section.title),
      suggestedLocation: "docs/development.md",
      reason: "Development-specific information",
    },
  ];

  if (strategy === "community_focused") {
    baseRules.push({
      matcher: (section: ReadmeSection) =>
        /architecture|design|technical/i.test(section.title),
      suggestedLocation: "docs/technical.md",
      reason: "Technical details can overwhelm community contributors",
    });
  }

  return baseRules;
}

async function createDocsStructure(
  projectDir: string,
  extractedSections: ExtractedSection[],
): Promise<void> {
  const docsDir = path.join(projectDir, "docs");

  try {
    await fs.mkdir(docsDir, { recursive: true });
  } catch {
    // Directory might already exist
  }

  // Create extracted documentation files
  for (const section of extractedSections) {
    const filePath = path.join(projectDir, section.suggestedLocation);
    const fileDir = path.dirname(filePath);

    try {
      await fs.mkdir(fileDir, { recursive: true });
      await fs.writeFile(filePath, section.content, "utf-8");
    } catch (error) {
      console.warn(`Failed to create ${filePath}:`, error);
    }
  }

  // Create docs index
  const indexContent = generateDocsIndex(extractedSections);
  await fs.writeFile(path.join(docsDir, "README.md"), indexContent, "utf-8");
}

function generateDocsIndex(extractedSections: ExtractedSection[]): string {
  let content = "# Documentation\n\n";
  content +=
    "This directory contains detailed documentation extracted from the main README for better organization.\n\n";

  content += "## Available Documentation\n\n";
  extractedSections.forEach((section) => {
    const filename = path.basename(section.suggestedLocation);
    content += `- [${section.title}](${filename}) - ${section.reason}\n`;
  });

  return content;
}

function generateOptimizationRecommendations(
  originalLength: number,
  optimizedLength: number,
  extractedSections: ExtractedSection[],
  strategy: string,
): string[] {
  const recommendations: string[] = [];
  const reduction = originalLength - optimizedLength;

  if (reduction > 0) {
    recommendations.push(
      `✅ Reduced README length by ${reduction} lines (${Math.round(
        (reduction / originalLength) * 100,
      )}%)`,
    );
  }

  if (extractedSections.length > 0) {
    recommendations.push(
      `📁 Moved ${extractedSections.length} detailed sections to docs/ directory`,
    );
  }

  if (strategy === "community_focused") {
    recommendations.push(
      "👥 Optimized for community contributors - prioritized quick start and contribution info",
    );
  }

  recommendations.push(
    "🔗 Added links to detailed documentation for users who need more information",
  );
  recommendations.push(
    "📊 Consider adding a table of contents for sections with 5+ headings",
  );

  return recommendations;
}

function generateOptimizationNextSteps(
  optimization: OptimizationResult,
  input: OptimizeReadmeInput,
): string[] {
  const steps: string[] = [];

  if (!input.output_path) {
    steps.push("💾 Review optimized content and save to README.md when ready");
  }

  if (optimization.extractedSections.length > 0) {
    steps.push("📝 Review extracted documentation files in docs/ directory");
    steps.push("🔗 Update any internal links that may have been affected");
  }

  if (optimization.reductionPercentage > 30) {
    steps.push(
      "👀 Have team members review the condensed content for accuracy",
    );
  }

  steps.push("📈 Run analyze_readme again to verify improvements");
  steps.push("🎯 Consider setting up automated README length monitoring");

  return steps;
}
```
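
A minimal, hypothetical caller for this tool (the import path and the wrapper function are assumptions, not part of the module): it invokes `optimizeReadme` with a partial input, relies on the Zod defaults for omitted fields, and branches on the `success` flag of the returned `MCPToolResponse`.

```typescript
// Hypothetical caller; the relative import path depends on where this file lives.
import { optimizeReadme } from "./optimize-readme.js";

async function main(): Promise<void> {
  const result = await optimizeReadme({
    readme_path: "./README.md",
    strategy: "developer_focused",
    max_length: 250,
    // Omitted fields fall back to the Zod defaults defined in the schema.
  });

  if (!result.success || !result.data) {
    // Error shape for this tool: { code, message, details, resolution }
    console.error(result.error?.code, result.error?.message);
    return;
  }

  const { optimization, nextSteps } = result.data;
  console.log(
    `README: ${optimization.originalLength} -> ${optimization.optimizedLength} lines`,
  );
  for (const section of optimization.extractedSections) {
    console.log(`extracted "${section.title}" -> ${section.suggestedLocation}`);
  }
  console.log(nextSteps.join("\n"));
}

main().catch(console.error);
```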

--------------------------------------------------------------------------------
/src/memory/kg-integration.ts:
--------------------------------------------------------------------------------

```typescript
/**
 * Knowledge Graph Integration Helper
 * Implements Phase 1.2: Context-Aware Repository Analysis Integration
 *
 * Provides helper functions for integrating the Knowledge Graph
 * with DocuMCP tools and workflows.
 */

import KnowledgeGraph, { GraphNode } from "./knowledge-graph.js";
import { KGStorage } from "./kg-storage.js";
import { MemoryManager } from "./manager.js";
import {
  createCodeFileEntities,
  createDocumentationEntities,
  linkCodeToDocs,
} from "./kg-code-integration.js";

let globalKnowledgeGraph: KnowledgeGraph | null = null;
let globalKGStorage: KGStorage | null = null;
let globalMemoryManager: MemoryManager | null = null;
let currentStorageDir: string | null = null;

/**
 * Initialize the global Knowledge Graph instance
 */
export async function initializeKnowledgeGraph(
  storageDir?: string,
): Promise<KnowledgeGraph> {
  const dir =
    storageDir ||
    process.env.DOCUMCP_STORAGE_DIR ||
    `${process.cwd()}/.documcp/memory`;

  // Reinitialize if storage directory changed
  if (currentStorageDir !== dir) {
    globalKnowledgeGraph = null;
    globalKGStorage = null;
    globalMemoryManager = null;
    currentStorageDir = dir;
  }

  if (!globalKnowledgeGraph) {
    // Initialize memory manager
    globalMemoryManager = new MemoryManager(dir);
    await globalMemoryManager.initialize();

    // Initialize KG storage
    globalKGStorage = new KGStorage({ storageDir: dir });
    await globalKGStorage.initialize();

    // Initialize knowledge graph
    globalKnowledgeGraph = new KnowledgeGraph(globalMemoryManager);
    await globalKnowledgeGraph.initialize();

    // Load existing graph data if available
    const { entities, relationships } = await globalKGStorage.loadGraph();
    for (const entity of entities) {
      globalKnowledgeGraph.addNode(entity);
    }
    for (const relationship of relationships) {
      globalKnowledgeGraph.addEdge(relationship);
    }
  }

  return globalKnowledgeGraph;
}

/**
 * Get the global Knowledge Graph instance
 */
export async function getKnowledgeGraph(): Promise<KnowledgeGraph> {
  if (!globalKnowledgeGraph) {
    return await initializeKnowledgeGraph();
  }
  return globalKnowledgeGraph;
}

/**
 * Get the global KG Storage instance
 */
export async function getKGStorage(): Promise<KGStorage> {
  if (!globalKGStorage) {
    await initializeKnowledgeGraph();
  }
  return globalKGStorage!;
}

/**
 * Get the global Memory Manager instance
 */
export async function getMemoryManager(): Promise<MemoryManager> {
  if (!globalMemoryManager) {
    await initializeKnowledgeGraph();
  }
  return globalMemoryManager!;
}

/**
 * Convert file extension to language name
 */
function convertExtToLanguage(ext: string): string | null {
  const languageMap: Record<string, string> = {
    ".js": "javascript",
    ".jsx": "javascript",
    ".ts": "typescript",
    ".tsx": "typescript",
    ".py": "python",
    ".rb": "ruby",
    ".go": "go",
    ".java": "java",
    ".c": "c",
    ".cpp": "cpp",
    ".cs": "csharp",
    ".php": "php",
    ".rs": "rust",
    ".kt": "kotlin",
    ".swift": "swift",
    ".sh": "shell",
    ".bash": "shell",
    ".zsh": "shell",
    ".fish": "shell",
    ".yml": "yaml",
    ".yaml": "yaml",
    ".sql": "sql",
    ".html": "html",
    ".css": "css",
    ".scss": "scss",
    ".vue": "vue",
    ".dart": "dart",
  };
  return languageMap[ext] || null;
}

/**
 * Save the Knowledge Graph to persistent storage
 */
export async function saveKnowledgeGraph(): Promise<void> {
  if (!globalKnowledgeGraph || !globalKGStorage) {
    throw new Error("Knowledge Graph not initialized");
  }

  const entities = await globalKnowledgeGraph.getAllNodes();
  const edges = await globalKnowledgeGraph.getAllEdges();

  await globalKGStorage.saveGraph(entities, edges);
}

/**
 * Create or update a Project entity in the Knowledge Graph
 */
export async function createOrUpdateProject(analysis: any): Promise<GraphNode> {
  const kg = await getKnowledgeGraph();

  // Check for existing project
  const projectId = `project:${analysis.id}`;
  const existingProject = await kg.findNode({
    type: "project",
    properties: { path: analysis.path },
  });

  // Categorize project size
  const size =
    analysis.structure.totalFiles < 50
      ? "small"
      : analysis.structure.totalFiles < 500
        ? "medium"
        : "large";

  // Determine primary language
  const languages = analysis.structure.languages || {};
  const primaryKey = Object.keys(languages).reduce(
    (a, b) => (languages[a] > languages[b] ? a : b),
    Object.keys(languages)[0] || "unknown",
  );

  // Convert to language name if it's an extension
  let primaryLanguage: string;
  if (analysis.recommendations?.primaryLanguage) {
    primaryLanguage = analysis.recommendations.primaryLanguage;
  } else if (primaryKey.startsWith(".")) {
    primaryLanguage = convertExtToLanguage(primaryKey) || "unknown";
  } else {
    primaryLanguage = primaryKey; // Already a language name
  }

  // Convert all extensions to language names
  // Handle both extensions (.ts) and language names (typescript)
  const technologies = Object.keys(languages)
    .map((key) => {
      // If it starts with '.', it's an extension - convert it
      if (key.startsWith(".")) {
        return convertExtToLanguage(key);
      }
      // Otherwise it's already a language name - use as is
      return key;
    })
    .filter((lang): lang is string => lang !== null && lang !== "unknown");

  // Create/update project node
  const projectNode = kg.addNode({
    id: existingProject?.id || projectId,
    type: "project",
    label: analysis.projectName || "Unnamed Project",
    properties: {
      name: analysis.projectName,
      path: analysis.path,
      technologies,
      size,
      primaryLanguage,
      hasTests: analysis.structure.hasTests || false,
      hasCI: analysis.structure.hasCI || false,
      hasDocs: analysis.structure.hasDocs || false,
      totalFiles: analysis.structure.totalFiles,
      lastAnalyzed: analysis.timestamp,
      analysisCount: existingProject
        ? (existingProject.properties.analysisCount || 0) + 1
        : 1,
    },
    weight: 1.0,
  });

  // Create technology nodes and relationships
  for (const [key, fileCount] of Object.entries(languages) as [
    string,
    number,
  ][]) {
    // Handle both extensions (.ts) and language names (typescript)
    let langName: string;
    if (key.startsWith(".")) {
      const converted = convertExtToLanguage(key);
      if (!converted) continue; // Skip unknown extensions
      langName = converted;
    } else {
      langName = key; // Already a language name
    }

    const techNodeId = `technology:${langName.toLowerCase()}`;

    // Create technology node if it doesn't exist
    const existingTech = await kg.findNode({
      type: "technology",
      properties: { name: langName },
    });

    const techNode = kg.addNode({
      id: existingTech?.id || techNodeId,
      type: "technology",
      label: langName,
      properties: {
        name: langName,
        category: "language",
        usageCount: existingTech
          ? (existingTech.properties.usageCount || 0) + 1
          : 1,
      },
      weight: 1.0,
    });

    // Create relationship
    kg.addEdge({
      source: projectNode.id,
      target: techNode.id,
      type: "project_uses_technology",
      weight: fileCount / analysis.structure.totalFiles,
      confidence: 1.0,
      properties: {
        fileCount,
        percentage: (fileCount / analysis.structure.totalFiles) * 100,
        isPrimary: langName === primaryLanguage,
      },
    });
  }

  // Phase 1.2: Create code file and documentation entities
  try {
    const codeFiles = await createCodeFileEntities(
      projectNode.id,
      analysis.path,
    );

    if (analysis.documentation?.extractedContent) {
      const docSections = await createDocumentationEntities(
        projectNode.id,
        analysis.documentation.extractedContent,
      );

      // Link code files to documentation sections
      await linkCodeToDocs(codeFiles, docSections);
    }
  } catch (error) {
    console.warn("Failed to populate code/docs entities:", error);
    // Continue without code/docs entities - not a fatal error
  }

  // Save to persistent storage
  await saveKnowledgeGraph();

  return projectNode;
}

/**
 * Get historical context for a project
 */
export async function getProjectContext(projectPath: string): Promise<{
  previousAnalyses: number;
  lastAnalyzed: string | null;
  knownTechnologies: string[];
  similarProjects: GraphNode[];
}> {
  const kg = await getKnowledgeGraph();

  // Find project node
  const projectNode = await kg.findNode({
    type: "project",
    properties: { path: projectPath },
  });

  if (!projectNode) {
    return {
      previousAnalyses: 0,
      lastAnalyzed: null,
      knownTechnologies: [],
      similarProjects: [],
    };
  }

  // Get project's technologies
  const techEdges = await kg.findEdges({
    source: projectNode.id,
    type: "project_uses_technology",
  });

  const technologies: string[] = [];
  for (const edge of techEdges) {
    const techNode = (await kg.getAllNodes()).find((n) => n.id === edge.target);
    if (techNode) {
      technologies.push(techNode.properties.name);
    }
  }

  // Find similar projects, tracking how many technologies each one shares
  // with this project so they can be ranked below
  const allProjects = await kg.findNodes({ type: "project" });
  const scoredProjects: Array<{ project: GraphNode; sharedTechCount: number }> =
    [];

  for (const otherProject of allProjects) {
    if (otherProject.id === projectNode.id) continue;

    // Check for similarity based on shared technologies
    const otherTechEdges = await kg.findEdges({
      source: otherProject.id,
      type: "project_uses_technology",
    });

    const otherTechs = new Set(otherTechEdges.map((e) => e.target));
    const sharedTechs = techEdges.filter((e) => otherTechs.has(e.target));

    if (sharedTechs.length > 0) {
      scoredProjects.push({
        project: otherProject,
        sharedTechCount: sharedTechs.length,
      });
    }
  }

  // Sort by similarity (shared tech count, descending)
  scoredProjects.sort((a, b) => b.sharedTechCount - a.sharedTechCount);
  const similarProjects = scoredProjects.map((s) => s.project);

  return {
    previousAnalyses: projectNode.properties.analysisCount || 0,
    lastAnalyzed: projectNode.properties.lastAnalyzed || null,
    knownTechnologies: technologies,
    similarProjects: similarProjects.slice(0, 5),
  };
}

// Counter to ensure unique deployment IDs even when timestamps collide
let deploymentCounter = 0;

/**
 * Track a deployment outcome in the Knowledge Graph
 */
export async function trackDeployment(
  projectNodeId: string,
  ssg: string,
  success: boolean,
  metadata?: {
    buildTime?: number;
    errorMessage?: string;
    deploymentUrl?: string;
  },
): Promise<void> {
  const kg = await getKnowledgeGraph();

  // Find project node by ID
  const allNodes = await kg.getAllNodes();
  const projectNode = allNodes.find((n) => n.id === projectNodeId);

  if (!projectNode) {
    throw new Error(`Project not found: ${projectNodeId}`);
  }

  // Find or create configuration node
  const configNodeId = `configuration:${ssg}`;
  let configNode = await kg.findNode({
    type: "configuration",
    properties: { ssg },
  });

  if (!configNode) {
    configNode = kg.addNode({
      id: configNodeId,
      type: "configuration",
      label: `${ssg} configuration`,
      properties: {
        ssg,
        settings: {},
        deploymentSuccessRate: success ? 1.0 : 0.0,
        usageCount: 1,
        lastUsed: new Date().toISOString(),
      },
      weight: 1.0,
    });
  } else {
    // Update success rate
    const currentRate = configNode.properties.deploymentSuccessRate || 0.5;
    const currentCount = configNode.properties.usageCount || 1;
    const newRate =
      (currentRate * currentCount + (success ? 1.0 : 0.0)) / (currentCount + 1);

    configNode.properties.deploymentSuccessRate = newRate;
    configNode.properties.usageCount = currentCount + 1;
    configNode.properties.lastUsed = new Date().toISOString();

    if (metadata?.buildTime) {
      const currentAvg = configNode.properties.buildTimeAverage || 0;
      configNode.properties.buildTimeAverage =
        (currentAvg * currentCount + metadata.buildTime) / (currentCount + 1);
    }

    // Re-add the node to update it in the knowledge graph
    kg.addNode(configNode);
  }

  // Create deployment relationship with unique timestamp+counter to allow multiple deployments
  const timestamp = new Date().toISOString();
  const uniqueId = `${timestamp}:${deploymentCounter++}`;
  kg.addEdge({
    source: projectNode.id,
    target: configNode.id,
    type: `project_deployed_with:${uniqueId}`,
    weight: success ? 1.0 : 0.5,
    confidence: 1.0,
    properties: {
      success,
      timestamp,
      buildTime: metadata?.buildTime,
      errorMessage: metadata?.errorMessage,
      deploymentUrl: metadata?.deploymentUrl,
      // Store the base type for filtering
      baseType: "project_deployed_with",
    },
  });

  await saveKnowledgeGraph();
}

/**
 * Get deployment recommendations based on historical data
 */
export async function getDeploymentRecommendations(projectId: string): Promise<
  Array<{
    ssg: string;
    confidence: number;
    reasoning: string[];
    successRate: number;
  }>
> {
  const kg = await getKnowledgeGraph();

  // Find project
  const projectNode = await kg.findNode({
    type: "project",
    properties: { id: projectId },
  });

  if (!projectNode) {
    return [];
  }

  // Find similar projects
  const similarProjects = await kg.findNodes({
    type: "project",
  });

  const recommendations = new Map<
    string,
    {
      ssg: string;
      totalWeight: number;
      count: number;
      successRate: number;
      reasoning: string[];
    }
  >();

  // Analyze deployments from similar projects
  for (const similar of similarProjects) {
    if (similar.id === projectNode.id) continue;

    const deployments = await kg.findEdges({
      source: similar.id,
      type: "project_deployed_with",
    });

    for (const deployment of deployments) {
      const configNode = (await kg.getAllNodes()).find(
        (n) => n.id === deployment.target,
      );
      if (!configNode) continue;

      const ssg = configNode.properties.ssg;
      const existing = recommendations.get(ssg) || {
        ssg,
        totalWeight: 0,
        count: 0,
        successRate: 0,
        reasoning: [] as string[],
      };

      existing.totalWeight += deployment.weight;
      existing.count += 1;
      existing.successRate = configNode.properties.deploymentSuccessRate || 0;

      if (deployment.properties.success) {
        existing.reasoning.push(
          `Successfully used by similar project ${similar.label}`,
        );
      }

      recommendations.set(ssg, existing);
    }
  }

  // Convert to array and calculate confidence
  return Array.from(recommendations.values())
    .map((rec) => ({
      ssg: rec.ssg,
      confidence: (rec.totalWeight / rec.count) * rec.successRate,
      reasoning: rec.reasoning.slice(0, 3), // Top 3 reasons
      successRate: rec.successRate,
    }))
    .sort((a, b) => b.confidence - a.confidence);
}

/**
 * Get Knowledge Graph statistics
 */
export async function getKGStatistics(): Promise<{
  nodeCount: number;
  edgeCount: number;
  projectCount: number;
  technologyCount: number;
  configurationCount: number;
  storageStats: any;
}> {
  const kg = await getKnowledgeGraph();
  const storage = await getKGStorage();

  const stats = await kg.getStatistics();
  const storageStats = await storage.getStatistics();

  return {
    nodeCount: stats.nodeCount,
    edgeCount: stats.edgeCount,
    projectCount: stats.nodesByType["project"] || 0,
    technologyCount: stats.nodesByType["technology"] || 0,
    configurationCount: stats.nodesByType["configuration"] || 0,
    storageStats,
  };
}
```
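
A minimal end-to-end sketch of these helpers. The `analysis` payload is invented for illustration and shows only the fields `createOrUpdateProject` actually reads; `"docusaurus"` is just an example SSG name, and the demo wrapper is not part of the module.

```typescript
import {
  initializeKnowledgeGraph,
  createOrUpdateProject,
  trackDeployment,
  getDeploymentRecommendations,
} from "./kg-integration.js";

async function demo(): Promise<void> {
  // Resolves DOCUMCP_STORAGE_DIR or ./.documcp/memory when no dir is passed.
  await initializeKnowledgeGraph();

  // Hypothetical analysis payload; shaped like what createOrUpdateProject reads.
  const analysis = {
    id: "abc123",
    path: "/tmp/my-app",
    projectName: "my-app",
    timestamp: new Date().toISOString(),
    structure: {
      totalFiles: 120,
      languages: { ".ts": 90, ".css": 10 },
      hasTests: true,
      hasCI: true,
      hasDocs: false,
    },
  };

  const project = await createOrUpdateProject(analysis);

  // Record one successful deployment for this project.
  await trackDeployment(project.id, "docusaurus", true, {
    buildTime: 42_000,
    deploymentUrl: "https://example.github.io/my-app/",
  });

  // Rank SSGs by how projects with shared technologies fared with them.
  const recs = await getDeploymentRecommendations(project.id);
  for (const rec of recs) {
    console.log(rec.ssg, rec.confidence.toFixed(2), rec.successRate);
  }
}

demo().catch(console.error);
```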

--------------------------------------------------------------------------------
/src/memory/kg-storage.ts:
--------------------------------------------------------------------------------

```typescript
/**
 * Knowledge Graph Storage Module
 * Implements Phase 1.1: Enhanced Storage Format
 *
 * Provides persistent storage for knowledge graph entities and relationships
 * using separate JSONL files with safety mechanisms.
 */

import { promises as fs } from "fs";
import { join, dirname } from "path";
import { GraphNode, GraphEdge } from "./knowledge-graph.js";
import { SCHEMA_METADATA } from "./schemas.js";

// File markers for safety
const ENTITY_FILE_MARKER = `# DOCUMCP_KNOWLEDGE_GRAPH_ENTITIES v${SCHEMA_METADATA.version}`;
const RELATIONSHIP_FILE_MARKER = `# DOCUMCP_KNOWLEDGE_GRAPH_RELATIONSHIPS v${SCHEMA_METADATA.version}`;
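
// Illustrative on-disk layout (assumption: SCHEMA_METADATA.version is "1.0.0";
// the real value comes from ./schemas.js). Each JSONL file begins with its
// marker line, followed by one JSON object per line:
//
//   # DOCUMCP_KNOWLEDGE_GRAPH_ENTITIES v1.0.0
//   {"id":"project:my-app","type":"project","label":"my-app",...}
//   {"id":"technology:typescript","type":"technology","label":"typescript",...}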

export interface KGStorageConfig {
  storageDir: string;
  backupOnWrite?: boolean;
  validateOnRead?: boolean;
}

export interface KGStorageStats {
  entityCount: number;
  relationshipCount: number;
  lastModified: string;
  schemaVersion: string;
  fileSize: {
    entities: number;
    relationships: number;
  };
}

export class KGStorage {
  private config: Required<KGStorageConfig>;
  private entityFilePath: string;
  private relationshipFilePath: string;
  private backupDir: string;

  constructor(config: KGStorageConfig) {
    this.config = {
      backupOnWrite: true,
      validateOnRead: true,
      ...config,
    };

    this.entityFilePath = join(
      config.storageDir,
      "knowledge-graph-entities.jsonl",
    );
    this.relationshipFilePath = join(
      config.storageDir,
      "knowledge-graph-relationships.jsonl",
    );
    this.backupDir = join(config.storageDir, "backups");
  }

  /**
   * Initialize storage (create directories and files if needed)
   */
  async initialize(): Promise<void> {
    try {
      // Create storage directory
      await fs.mkdir(this.config.storageDir, { recursive: true });

      // Create backup directory
      if (this.config.backupOnWrite) {
        await fs.mkdir(this.backupDir, { recursive: true });
      }

      // Initialize entity file
      await this.initializeFile(this.entityFilePath, ENTITY_FILE_MARKER);

      // Initialize relationship file
      await this.initializeFile(
        this.relationshipFilePath,
        RELATIONSHIP_FILE_MARKER,
      );
    } catch (error) {
      throw new Error(
        `Failed to initialize KG storage: ${
          error instanceof Error ? error.message : String(error)
        }`,
      );
    }
  }

  /**
   * Initialize a JSONL file with marker
   */
  private async initializeFile(
    filePath: string,
    marker: string,
  ): Promise<void> {
    try {
      // Check if file exists
      await fs.access(filePath);

      // File exists, verify marker
      const firstLine = await this.readFirstLine(filePath);
      if (!firstLine.startsWith("# DOCUMCP_KNOWLEDGE_GRAPH")) {
        throw new Error(
          `File ${filePath} is not a DocuMCP knowledge graph file. ` +
            `Refusing to overwrite to prevent data loss.`,
        );
      }
    } catch (error: any) {
      if (error.code === "ENOENT") {
        // File doesn't exist, create it with marker
        await fs.writeFile(filePath, marker + "\n", "utf-8");
      } else {
        throw error;
      }
    }
  }

  /**
   * Read the first line of a file
   */
  private async readFirstLine(filePath: string): Promise<string> {
    const content = await fs.readFile(filePath, "utf-8");
    return content.split("\n")[0];
  }

  /**
   * Save entities to storage
   */
  async saveEntities(entities: GraphNode[]): Promise<void> {
    try {
      // Ensure parent directory exists
      await fs.mkdir(dirname(this.entityFilePath), { recursive: true });

      // Create backup if enabled
      if (this.config.backupOnWrite) {
        await this.backupFile(this.entityFilePath, "entities");
      }

      // Write to temporary file first (atomic write)
      const tempFile = `${this.entityFilePath}.tmp`;

      // Write marker
      await fs.writeFile(tempFile, ENTITY_FILE_MARKER + "\n", "utf-8");

      // Append entities as JSONL
      for (const entity of entities) {
        const line = JSON.stringify(entity) + "\n";
        await fs.appendFile(tempFile, line, "utf-8");
      }

      // Atomic rename
      await fs.rename(tempFile, this.entityFilePath);
    } catch (error) {
      throw new Error(
        `Failed to save entities: ${
          error instanceof Error ? error.message : String(error)
        }`,
      );
    }
  }

  /**
   * Load entities from storage
   */
  async loadEntities(): Promise<GraphNode[]> {
    try {
      // Check if file exists
      await fs.access(this.entityFilePath);

      const content = await fs.readFile(this.entityFilePath, "utf-8");
      const lines = content.split("\n").filter((line) => line.trim());

      // Skip marker line
      const dataLines = lines.slice(1);

      const entities: GraphNode[] = [];
      for (const line of dataLines) {
        try {
          const entity = JSON.parse(line) as GraphNode;

          // Validate if enabled
          if (this.config.validateOnRead) {
            this.validateEntity(entity);
          }

          entities.push(entity);
        } catch (error) {
          console.error(`Failed to parse entity line: ${line}`, error);
        }
      }

      return entities;
    } catch (error: any) {
      if (error.code === "ENOENT") {
        return []; // File doesn't exist yet
      }
      throw new Error(
        `Failed to load entities: ${
          error instanceof Error ? error.message : String(error)
        }`,
      );
    }
  }

  /**
   * Save relationships to storage
   */
  async saveRelationships(relationships: GraphEdge[]): Promise<void> {
    try {
      // Ensure parent directory exists
      await fs.mkdir(dirname(this.relationshipFilePath), { recursive: true });

      // Create backup if enabled
      if (this.config.backupOnWrite) {
        await this.backupFile(this.relationshipFilePath, "relationships");
      }

      // Write to temporary file first (atomic write)
      const tempFile = `${this.relationshipFilePath}.tmp`;

      // Write marker
      await fs.writeFile(tempFile, RELATIONSHIP_FILE_MARKER + "\n", "utf-8");

      // Append relationships as JSONL
      for (const relationship of relationships) {
        const line = JSON.stringify(relationship) + "\n";
        await fs.appendFile(tempFile, line, "utf-8");
      }

      // Atomic rename
      await fs.rename(tempFile, this.relationshipFilePath);
    } catch (error) {
      throw new Error(
        `Failed to save relationships: ${
          error instanceof Error ? error.message : String(error)
        }`,
      );
    }
  }

  /**
   * Load relationships from storage
   */
  async loadRelationships(): Promise<GraphEdge[]> {
    try {
      // Check if file exists
      await fs.access(this.relationshipFilePath);

      const content = await fs.readFile(this.relationshipFilePath, "utf-8");
      const lines = content.split("\n").filter((line) => line.trim());

      // Skip marker line
      const dataLines = lines.slice(1);

      const relationships: GraphEdge[] = [];
      for (const line of dataLines) {
        try {
          const relationship = JSON.parse(line) as GraphEdge;

          // Validate if enabled
          if (this.config.validateOnRead) {
            this.validateRelationship(relationship);
          }

          relationships.push(relationship);
        } catch (error) {
          console.error(`Failed to parse relationship line: ${line}`, error);
        }
      }

      return relationships;
    } catch (error: any) {
      if (error.code === "ENOENT") {
        return []; // File doesn't exist yet
      }
      throw new Error(
        `Failed to load relationships: ${
          error instanceof Error ? error.message : String(error)
        }`,
      );
    }
  }

  /**
   * Save complete graph (entities + relationships)
   */
  async saveGraph(
    entities: GraphNode[],
    relationships: GraphEdge[],
  ): Promise<void> {
    await Promise.all([
      this.saveEntities(entities),
      this.saveRelationships(relationships),
    ]);
  }

  /**
   * Load complete graph (entities + relationships)
   */
  async loadGraph(): Promise<{
    entities: GraphNode[];
    relationships: GraphEdge[];
  }> {
    const [entities, relationships] = await Promise.all([
305 |       this.loadEntities(),
306 |       this.loadRelationships(),
307 |     ]);
308 | 
309 |     return { entities, relationships };
310 |   }
311 | 
312 |   /**
313 |    * Create a backup of a file
314 |    */
315 |   private async backupFile(
316 |     filePath: string,
317 |     type: "entities" | "relationships",
318 |   ): Promise<void> {
319 |     try {
320 |       // Check if file exists
321 |       await fs.access(filePath);
322 | 
323 |       // Ensure backup directory exists
324 |       await fs.mkdir(this.backupDir, { recursive: true });
325 | 
326 |       // Create backup filename with timestamp
327 |       const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
328 |       const backupFilename = `${type}-${timestamp}.jsonl`;
329 |       const backupPath = join(this.backupDir, backupFilename);
330 | 
331 |       // Copy file
332 |       await fs.copyFile(filePath, backupPath);
333 | 
334 |       // Clean up old backups (keep last 10)
335 |       await this.cleanupOldBackups(type);
336 |     } catch (error: any) {
337 |       if (error.code !== "ENOENT") {
338 |         // Only warn if it's not a "file not found" error
339 |         console.warn(`Failed to backup file ${filePath}:`, error);
340 |       }
341 |     }
342 |   }
343 | 
344 |   /**
345 |    * Clean up old backup files (keep last N)
346 |    */
347 |   private async cleanupOldBackups(
348 |     type: "entities" | "relationships",
349 |     keepCount: number = 10,
350 |   ): Promise<void> {
351 |     try {
352 |       // Ensure backup directory exists before reading
353 |       await fs.mkdir(this.backupDir, { recursive: true });
354 | 
355 |       const files = await fs.readdir(this.backupDir);
356 | 
357 |       // Filter files by type
358 |       const typeFiles = files
359 |         .filter((file) => file.startsWith(type))
360 |         .map((file) => join(this.backupDir, file));
361 | 
362 |       // Sort by modification time (newest first)
363 |       const filesWithStats = await Promise.all(
364 |         typeFiles.map(async (file) => {
365 |           try {
366 |             const stats = await fs.stat(file);
367 |             return { file, mtime: stats.mtime.getTime() };
368 |           } catch (error) {
369 |             // File might have been deleted, return null
370 |             return null;
371 |           }
372 |         }),
373 |       );
374 | 
375 |       // Filter out null values and sort
376 |       const validFiles = filesWithStats.filter((f) => f !== null) as Array<{
377 |         file: string;
378 |         mtime: number;
379 |       }>;
380 |       validFiles.sort((a, b) => b.mtime - a.mtime);
381 | 
382 |       // Delete old files
383 |       const filesToDelete = validFiles.slice(keepCount);
384 |       await Promise.all(
385 |         filesToDelete.map(({ file }) => fs.unlink(file).catch(() => {})),
386 |       );
387 |     } catch (error) {
388 |       // Only log if it's not a missing directory error
389 |       if ((error as any).code !== "ENOENT") {
390 |         console.warn(`Failed to cleanup old backups:`, error);
391 |       }
392 |     }
393 |   }
394 | 
395 |   /**
396 |    * Get storage statistics
397 |    */
398 |   async getStatistics(): Promise<KGStorageStats> {
399 |     const [entities, relationships] = await Promise.all([
400 |       this.loadEntities(),
401 |       this.loadRelationships(),
402 |     ]);
403 | 
404 |     const [entitiesStats, relationshipsStats] = await Promise.all([
405 |       fs
406 |         .stat(this.entityFilePath)
407 |         .catch(() => ({ size: 0, mtime: new Date() })),
408 |       fs
409 |         .stat(this.relationshipFilePath)
410 |         .catch(() => ({ size: 0, mtime: new Date() })),
411 |     ]);
412 | 
413 |     const lastModified = new Date(
414 |       Math.max(
415 |         entitiesStats.mtime.getTime(),
416 |         relationshipsStats.mtime.getTime(),
417 |       ),
418 |     ).toISOString();
419 | 
420 |     return {
421 |       entityCount: entities.length,
422 |       relationshipCount: relationships.length,
423 |       lastModified,
424 |       schemaVersion: SCHEMA_METADATA.version,
425 |       fileSize: {
426 |         entities: entitiesStats.size,
427 |         relationships: relationshipsStats.size,
428 |       },
429 |     };
430 |   }
431 | 
432 |   /**
433 |    * Restore from backup
434 |    */
435 |   async restoreFromBackup(
436 |     type: "entities" | "relationships",
437 |     timestamp?: string,
438 |   ): Promise<void> {
439 |     try {
440 |       const files = await fs.readdir(this.backupDir);
441 | 
442 |       // Filter backup files by type
443 |       const backupFiles = files.filter((file) => file.startsWith(type));
444 | 
445 |       if (backupFiles.length === 0) {
446 |         throw new Error(`No backups found for ${type}`);
447 |       }
448 | 
449 |       let backupFile: string;
450 | 
451 |       if (timestamp) {
452 |         // Find backup with specific timestamp
453 |         backupFile = backupFiles.find((file) => file.includes(timestamp)) || "";
454 |         if (!backupFile) {
455 |           throw new Error(`Backup with timestamp ${timestamp} not found`);
456 |         }
457 |       } else {
458 |         // Use most recent backup
459 |         const filesWithStats = await Promise.all(
460 |           backupFiles.map(async (file) => {
461 |             const stats = await fs.stat(join(this.backupDir, file));
462 |             return { file, mtime: stats.mtime.getTime() };
463 |           }),
464 |         );
465 | 
466 |         filesWithStats.sort((a, b) => b.mtime - a.mtime);
467 |         backupFile = filesWithStats[0].file;
468 |       }
469 | 
470 |       const backupPath = join(this.backupDir, backupFile);
471 |       const targetPath =
472 |         type === "entities" ? this.entityFilePath : this.relationshipFilePath;
473 | 
474 |       // Restore backup
475 |       await fs.copyFile(backupPath, targetPath);
476 | 
477 |       // Log restoration success (can be monitored)
478 |       if (process.env.DEBUG) {
479 |         // eslint-disable-next-line no-console
480 |         console.log(`Restored ${type} from backup: ${backupFile}`);
481 |       }
482 |     } catch (error) {
483 |       throw new Error(
484 |         `Failed to restore from backup: ${
485 |           error instanceof Error ? error.message : String(error)
486 |         }`,
487 |       );
488 |     }
489 |   }
490 | 
491 |   /**
492 |    * Validate entity structure
493 |    */
494 |   private validateEntity(entity: GraphNode): void {
495 |     if (!entity.id || !entity.type || !entity.label) {
496 |       throw new Error(`Invalid entity structure: missing required fields`);
497 |     }
498 |   }
499 | 
500 |   /**
501 |    * Validate relationship structure
502 |    */
503 |   private validateRelationship(relationship: GraphEdge): void {
504 |     if (
505 |       !relationship.id ||
506 |       !relationship.source ||
507 |       !relationship.target ||
508 |       !relationship.type
509 |     ) {
510 |       throw new Error(
511 |         `Invalid relationship structure: missing required fields`,
512 |       );
513 |     }
514 |   }
515 | 
516 |   /**
517 |    * Verify integrity of stored data
518 |    */
519 |   async verifyIntegrity(): Promise<{
520 |     valid: boolean;
521 |     errors: string[];
522 |     warnings: string[];
523 |   }> {
524 |     const errors: string[] = [];
525 |     const warnings: string[] = [];
526 | 
527 |     try {
528 |       // Load all data
529 |       const { entities, relationships } = await this.loadGraph();
530 | 
531 |       // Check for orphaned relationships
532 |       const entityIds = new Set(entities.map((e) => e.id));
533 | 
534 |       for (const relationship of relationships) {
535 |         if (!entityIds.has(relationship.source)) {
536 |           warnings.push(
537 |             `Relationship ${relationship.id} references missing source entity: ${relationship.source}`,
538 |           );
539 |         }
540 |         if (!entityIds.has(relationship.target)) {
541 |           warnings.push(
542 |             `Relationship ${relationship.id} references missing target entity: ${relationship.target}`,
543 |           );
544 |         }
545 |       }
546 | 
547 |       // Check for duplicate entities
548 |       const idCounts = new Map<string, number>();
549 |       for (const entity of entities) {
550 |         idCounts.set(entity.id, (idCounts.get(entity.id) || 0) + 1);
551 |       }
552 | 
553 |       for (const [id, count] of idCounts) {
554 |         if (count > 1) {
555 |           errors.push(`Duplicate entity ID found: ${id} (${count} instances)`);
556 |         }
557 |       }
558 | 
559 |       return {
560 |         valid: errors.length === 0,
561 |         errors,
562 |         warnings,
563 |       };
564 |     } catch (error) {
565 |       errors.push(
566 |         `Integrity check failed: ${
567 |           error instanceof Error ? error.message : String(error)
568 |         }`,
569 |       );
570 |       return { valid: false, errors, warnings };
571 |     }
572 |   }
573 | 
574 |   /**
575 |    * Export graph as JSON (for inspection/debugging)
576 |    */
577 |   async exportAsJSON(): Promise<string> {
578 |     const { entities, relationships } = await this.loadGraph();
579 | 
580 |     return JSON.stringify(
581 |       {
582 |         metadata: {
583 |           version: SCHEMA_METADATA.version,
584 |           exportDate: new Date().toISOString(),
585 |           entityCount: entities.length,
586 |           relationshipCount: relationships.length,
587 |         },
588 |         entities,
589 |         relationships,
590 |       },
591 |       null,
592 |       2,
593 |     );
594 |   }
595 | }
596 | 
```
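A minimal usage sketch of the storage class above, for orientation. The exported class name (`KGStorage`), the constructor options object, and the example node/edge `type` values are assumptions, since the excerpt begins mid-file; the method names and the required `id`/`type`/`label` and `id`/`source`/`target`/`type` fields follow the code shown.

```typescript
// Hypothetical usage — class name and constructor shape are assumed.
import { KGStorage } from "./kg-storage.js";

async function demo(): Promise<void> {
  const storage = new KGStorage({
    storageDir: ".documcp/memory",
    backupOnWrite: true, // triggers backupFile() before each save
    validateOnRead: true, // runs validateEntity()/validateRelationship()
  });

  // saveGraph() persists both JSONL files via temp-file write + atomic rename
  await storage.saveGraph(
    [
      { id: "project:docs", type: "project", label: "Docs Site" },
      { id: "ssg:docusaurus", type: "ssg", label: "Docusaurus" },
    ],
    [
      {
        id: "edge:1",
        source: "project:docs",
        target: "ssg:docusaurus",
        type: "uses",
      },
    ],
  );

  // Orphaned edges surface as warnings; duplicate entity IDs as errors
  const report = await storage.verifyIntegrity();
  if (!report.valid) {
    await storage.restoreFromBackup("entities"); // most recent backup
  }

  console.log(await storage.getStatistics());
}
```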

--------------------------------------------------------------------------------
/tests/tools/tool-error-handling.test.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import { promises as fs } from "fs";
  2 | import { join } from "path";
  3 | import { tmpdir } from "os";
  4 | import { analyzeRepository } from "../../src/tools/analyze-repository.js";
  5 | import { recommendSSG } from "../../src/tools/recommend-ssg.js";
  6 | import { generateConfig } from "../../src/tools/generate-config.js";
  7 | import { setupStructure } from "../../src/tools/setup-structure.js";
  8 | import { deployPages } from "../../src/tools/deploy-pages.js";
  9 | 
 10 | describe("Tool Error Handling and Edge Cases", () => {
 11 |   let tempDir: string;
 12 | 
 13 |   beforeEach(async () => {
 14 |     tempDir = join(
 15 |       tmpdir(),
 16 |       `test-errors-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
 17 |     );
 18 |     await fs.mkdir(tempDir, { recursive: true });
 19 |   });
 20 | 
 21 |   afterEach(async () => {
 22 |     try {
 23 |       await fs.rm(tempDir, { recursive: true });
 24 |     } catch {
 25 |       // Ignore cleanup errors
 26 |     }
 27 |   });
 28 | 
 29 |   describe("Repository Analysis Error Handling", () => {
 30 |     it.skip("should handle non-existent directories gracefully", async () => {
 31 |       const nonExistentPath = join(tempDir, "non-existent");
 32 | 
 33 |       await expect(
 34 |         analyzeRepository({ path: nonExistentPath }),
 35 |       ).rejects.toThrow();
 36 |     });
 37 | 
 38 |     it("should handle empty directories", async () => {
 39 |       const result = await analyzeRepository({ path: tempDir });
 40 | 
 41 |       expect(result).toBeDefined();
 42 |       expect(result.content).toBeDefined();
 43 |       expect(Array.isArray(result.content)).toBe(true);
 44 |     });
 45 | 
 46 |     it.skip("should handle directories with permission issues", async () => {
 47 |       const restrictedDir = join(tempDir, "restricted");
 48 |       await fs.mkdir(restrictedDir);
 49 | 
 50 |       try {
 51 |         // Make directory unreadable
 52 |         await fs.chmod(restrictedDir, 0o000);
 53 | 
 54 |         await expect(
 55 |           analyzeRepository({ path: restrictedDir }),
 56 |         ).rejects.toThrow();
 57 |       } finally {
 58 |         // Restore permissions for cleanup
 59 |         await fs.chmod(restrictedDir, 0o755);
 60 |       }
 61 |     });
 62 | 
 63 |     it("should handle malformed package.json files", async () => {
 64 |       await fs.writeFile(join(tempDir, "package.json"), "invalid json content");
 65 | 
 66 |       const result = await analyzeRepository({ path: tempDir });
 67 | 
 68 |       expect(result).toBeDefined();
 69 |       expect(result.content).toBeDefined();
 70 |       expect(Array.isArray(result.content)).toBe(true);
 71 |     });
 72 | 
 73 |     it("should handle very large directories efficiently", async () => {
 74 |       // Create many files to test performance
 75 |       const promises = [];
 76 |       for (let i = 0; i < 100; i++) {
 77 |         promises.push(
 78 |           fs.writeFile(join(tempDir, `file-${i}.js`), `console.log(${i});`),
 79 |         );
 80 |       }
 81 |       await Promise.all(promises);
 82 | 
 83 |       const startTime = Date.now();
 84 |       const result = await analyzeRepository({ path: tempDir, depth: "quick" });
 85 |       const duration = Date.now() - startTime;
 86 | 
 87 |       expect(result).toBeDefined();
 88 |       expect(duration).toBeLessThan(2000); // Should complete within 2 seconds
 89 |     });
 90 | 
 91 |     it.skip("should handle invalid depth parameters", async () => {
 92 |       await fs.writeFile(join(tempDir, "package.json"), '{"name": "test"}');
 93 | 
 94 |       // @ts-ignore - Testing invalid parameter
 95 |       const result = await analyzeRepository({
 96 |         path: tempDir,
 97 |         depth: "invalid" as any,
 98 |       });
 99 | 
100 |       expect(result).toBeDefined();
101 |       // Should default to 'standard' depth
102 |     });
103 |   });
104 | 
105 |   describe("SSG Recommendation Error Handling", () => {
106 |     it("should handle missing analysis data", async () => {
107 |       await expect(recommendSSG({})).rejects.toThrow();
108 |     });
109 | 
110 |     it.skip("should handle invalid analysis IDs", async () => {
111 |       await expect(
112 |         recommendSSG({ analysisId: "non-existent-id" }),
113 |       ).rejects.toThrow();
114 |     });
115 | 
116 |     it.skip("should provide fallback recommendations for edge cases", async () => {
117 |       // Create minimal valid analysis
118 |       const minimalAnalysis = {
119 |         projectType: "unknown",
120 |         languages: [],
121 |         frameworks: [],
122 |         complexity: "low" as const,
123 |         dependencies: [],
124 |         devDependencies: [],
125 |         scripts: {},
126 |         fileCount: 0,
127 |         totalSize: 0,
128 |       };
129 | 
130 |       // Test with various edge case preferences
131 |       const testCases = [
132 |         { ecosystem: "invalid" as any },
133 |         { priority: "unknown" as any },
134 |         { ecosystem: "javascript", priority: "performance" },
135 |       ];
136 | 
137 |       for (const preferences of testCases) {
138 |         const result = await recommendSSG({
139 |           analysisId: "test-analysis-id",
140 |           preferences,
141 |         });
142 | 
143 |         expect(result).toBeDefined();
144 |         expect(result.content).toBeDefined();
145 |         expect(Array.isArray(result.content)).toBe(true);
146 |       }
147 |     });
148 | 
149 |     it.skip("should handle analysis with missing required fields", async () => {
150 |       const incompleteAnalysis = {
151 |         projectType: "javascript",
152 |         // Missing other required fields
153 |       };
154 | 
155 |       await expect(
156 |         recommendSSG({
157 |           // @ts-ignore - Testing incomplete data
158 |           analysisId: undefined,
159 |         }),
160 |       ).rejects.toThrow();
161 |     });
162 |   });
163 | 
164 |   describe("Configuration Generation Error Handling", () => {
165 |     it("should handle invalid SSG types", async () => {
166 |       await expect(
167 |         generateConfig({
168 |           ssg: "invalid-ssg" as any,
169 |           projectName: "test",
170 |           outputPath: tempDir,
171 |         }),
172 |       ).rejects.toThrow();
173 |     });
174 | 
175 |     it("should handle missing required parameters", async () => {
176 |       await expect(generateConfig({})).rejects.toThrow();
177 |       await expect(generateConfig({ ssg: "jekyll" })).rejects.toThrow();
178 |       await expect(
179 |         generateConfig({ ssg: "jekyll", projectName: "test" }),
180 |       ).rejects.toThrow();
181 |     });
182 | 
183 |     it("should handle write permission issues", async () => {
184 |       const readOnlyDir = join(tempDir, "readonly");
185 |       await fs.mkdir(readOnlyDir);
186 | 
187 |       try {
188 |         await fs.chmod(readOnlyDir, 0o444);
189 | 
190 |         const result = await generateConfig({
191 |           ssg: "jekyll",
192 |           projectName: "test",
193 |           outputPath: readOnlyDir,
194 |         });
195 | 
196 |         expect((result as any).isError).toBe(true);
197 |         expect(result.content).toBeDefined();
198 |         expect(
199 |           result.content.some(
200 |             (item: any) => item.text && item.text.includes("permission denied"),
201 |           ),
202 |         ).toBe(true);
203 |       } finally {
204 |         await fs.chmod(readOnlyDir, 0o755);
205 |       }
206 |     });
207 | 
208 |     it("should handle extremely long project names", async () => {
209 |       const longName = "a".repeat(1000);
210 | 
211 |       const result = await generateConfig({
212 |         ssg: "jekyll",
213 |         projectName: longName,
214 |         outputPath: tempDir,
215 |       });
216 | 
217 |       expect(result).toBeDefined();
218 |       expect(result.content).toBeDefined();
219 |     });
220 | 
221 |     it("should handle special characters in project names", async () => {
222 |       const specialChars = [
223 |         "test@project",
224 |         "test#project",
225 |         "test space project",
226 |         "test/project",
227 |       ];
228 | 
229 |       for (const projectName of specialChars) {
230 |         const result = await generateConfig({
231 |           ssg: "jekyll",
232 |           projectName,
233 |           outputPath: tempDir,
234 |         });
235 | 
236 |         expect(result).toBeDefined();
237 |         expect(result.content).toBeDefined();
238 |       }
239 |     });
240 | 
241 |     it("should validate SSG-specific configuration options", async () => {
242 |       const ssgTypes = ["jekyll", "hugo", "docusaurus", "mkdocs", "eleventy"];
243 | 
244 |       for (const ssg of ssgTypes) {
245 |         const result = await generateConfig({
246 |           ssg: ssg as any,
247 |           projectName: `test-${ssg}`,
248 |           outputPath: tempDir,
249 |           projectDescription: `Test project for ${ssg}`,
250 |         });
251 | 
252 |         expect(result).toBeDefined();
253 |         expect(result.content).toBeDefined();
254 |         expect(Array.isArray(result.content)).toBe(true);
255 |         expect(result.content.length).toBeGreaterThan(0);
256 |       }
257 |     });
258 |   });
259 | 
260 |   describe("Structure Setup Error Handling", () => {
261 |     it("should handle invalid output paths", async () => {
262 |       // Use a path that will definitely fail - a file path instead of directory
263 |       // First create a file, then try to use it as a directory path
264 |       const invalidPath = join(tempDir, "not-a-directory.txt");
265 |       await fs.writeFile(invalidPath, "this is a file, not a directory");
266 | 
267 |       const result = await setupStructure({
268 |         path: invalidPath,
269 |         ssg: "jekyll",
270 |       });
271 | 
272 |       expect((result as any).isError).toBe(true);
273 |       expect(result.content).toBeDefined();
274 |       expect(
275 |         result.content.some(
276 |           (item: any) =>
277 |             item.text &&
278 |             (item.text.includes("ENOTDIR") ||
279 |               item.text.includes("EEXIST") ||
280 |               item.text.includes("not a directory")),
281 |         ),
282 |       ).toBe(true);
283 |     });
284 | 
285 |     it("should handle missing SSG parameter", async () => {
286 |       await expect(
287 |         setupStructure({
288 |           path: tempDir,
289 |         }),
290 |       ).rejects.toThrow();
291 |     });
292 | 
293 |     it("should create structure in existing directories with files", async () => {
294 |       // Create some existing files
295 |       await fs.writeFile(
296 |         join(tempDir, "existing-file.txt"),
297 |         "existing content",
298 |       );
299 |       await fs.mkdir(join(tempDir, "existing-dir"));
300 | 
301 |       const result = await setupStructure({
302 |         path: tempDir,
303 |         ssg: "jekyll",
304 |         includeExamples: true,
305 |       });
306 | 
307 |       expect(result).toBeDefined();
308 |       expect(result.content).toBeDefined();
309 |       expect(Array.isArray(result.content)).toBe(true);
310 | 
311 |       // Should not overwrite existing files
312 |       const existingContent = await fs.readFile(
313 |         join(tempDir, "existing-file.txt"),
314 |         "utf8",
315 |       );
316 |       expect(existingContent).toBe("existing content");
317 |     });
318 | 
319 |     it("should handle different Diataxis structure options", async () => {
320 |       const options = [
321 |         { includeExamples: true },
322 |         { includeExamples: false },
323 |         {
324 |           includeExamples: true,
325 |           customStructure: { tutorials: ["custom-tutorial"] },
326 |         },
327 |       ];
328 | 
329 |       for (const option of options) {
330 |         const testDir = join(
331 |           tempDir,
332 |           `test-${Math.random().toString(36).substr(2, 5)}`,
333 |         );
334 |         await fs.mkdir(testDir);
335 | 
336 |         const result = await setupStructure({
337 |           path: testDir,
338 |           ssg: "docusaurus",
339 |           ...option,
340 |         });
341 | 
342 |         expect(result).toBeDefined();
343 |         expect(result.content).toBeDefined();
344 |       }
345 |     });
346 |   });
347 | 
348 |   describe("Deployment Setup Error Handling", () => {
349 |     it("should handle repositories without proper configuration", async () => {
350 |       const result = await deployPages({
351 |         repository: "invalid/repo/format",
352 |         ssg: "jekyll",
353 |       });
354 | 
355 |       // deployPages succeeds even with an invalid repo format; it still generates the workflow
356 |       expect(result.content).toBeDefined();
357 |       expect(result.content[0].text).toContain("invalid/repo/format");
358 |     });
359 | 
360 |     it("should handle missing repository parameter", async () => {
361 |       await expect(
362 |         deployPages({
363 |           ssg: "jekyll",
364 |         }),
365 |       ).rejects.toThrow();
366 |     });
367 | 
368 |     it("should handle different branch configurations", async () => {
369 |       const branchConfigs = [
370 |         { branch: "main" },
371 |         { branch: "master" },
372 |         { branch: "gh-pages" },
373 |         { branch: "custom-branch" },
374 |       ];
375 | 
376 |       for (const config of branchConfigs) {
377 |         const result = await deployPages({
378 |           repository: "user/test-repo",
379 |           ssg: "jekyll",
380 |           ...config,
381 |         });
382 | 
383 |         expect(result).toBeDefined();
384 |         expect(result.content).toBeDefined();
385 |         expect(result.content[0].text).toContain(config.branch);
386 |       }
387 |     });
388 | 
389 |     it("should handle custom domain configurations", async () => {
390 |       const customDomains = [
391 |         "example.com",
392 |         "docs.example.com",
393 |         "sub.domain.example.org",
394 |         "localhost", // Edge case
395 |       ];
396 | 
397 |       for (const customDomain of customDomains) {
398 |         const result = await deployPages({
399 |           repository: "user/test-repo",
400 |           ssg: "jekyll",
401 |           customDomain,
402 |         });
403 | 
404 |         expect(result).toBeDefined();
405 |         expect(result.content).toBeDefined();
406 |         expect(result.content[0].text).toContain(customDomain);
407 |       }
408 |     });
409 | 
410 |     it("should generate workflows for all supported SSGs", async () => {
411 |       const ssgTypes = ["jekyll", "hugo", "docusaurus", "mkdocs", "eleventy"];
412 | 
413 |       for (const ssg of ssgTypes) {
414 |         const result = await deployPages({
415 |           repository: "user/test-repo",
416 |           ssg: ssg as any,
417 |         });
418 | 
419 |         expect(result).toBeDefined();
420 |         expect(result.content).toBeDefined();
421 |         expect(result.content[0].text).toContain(ssg);
422 |         expect(result.content.length).toBeGreaterThan(0);
423 |       }
424 |     });
425 |   });
426 | 
427 |   describe("Input Validation", () => {
428 |     it("should validate string inputs for XSS and injection attacks", async () => {
429 |       const maliciousInputs = [
430 |         '<script>alert("xss")</script>',
431 |         "${process.env}",
432 |         "../../../etc/passwd",
433 |         "test`rm -rf /`test",
434 |         "test && rm -rf /",
435 |         "test; cat /etc/passwd",
436 |       ];
437 | 
438 |       for (const maliciousInput of maliciousInputs) {
439 |         // Test with different tools
440 |         const result = await generateConfig({
441 |           ssg: "jekyll",
442 |           projectName: maliciousInput,
443 |           outputPath: tempDir,
444 |           projectDescription: maliciousInput,
445 |         });
446 | 
447 |         expect(result).toBeDefined();
448 |         expect(result.content).toBeDefined();
449 |         // Should sanitize or escape malicious content
450 |       }
451 |     });
452 | 
453 |     it("should handle Unicode and international characters", async () => {
454 |       const unicodeInputs = [
455 |         "тест", // Cyrillic
456 |         "测试", // Chinese
457 |         "🚀📊", // Emojis
458 |         "café", // Accented characters
459 |         "مشروع", // Arabic
460 |       ];
461 | 
462 |       for (const unicodeInput of unicodeInputs) {
463 |         const result = await generateConfig({
464 |           ssg: "jekyll",
465 |           projectName: unicodeInput,
466 |           outputPath: tempDir,
467 |         });
468 | 
469 |         expect(result).toBeDefined();
470 |         expect(result.content).toBeDefined();
471 |       }
472 |     });
473 | 
474 |     it("should handle extremely large parameter values", async () => {
475 |       const largeDescription = "A".repeat(10000);
476 | 
477 |       const result = await generateConfig({
478 |         ssg: "jekyll",
479 |         projectName: "test",
480 |         outputPath: tempDir,
481 |         projectDescription: largeDescription,
482 |       });
483 | 
484 |       expect(result).toBeDefined();
485 |       expect(result.content).toBeDefined();
486 |     });
487 |   });
488 | 
489 |   describe("Concurrent Operations", () => {
490 |     it("should handle multiple simultaneous tool calls", async () => {
491 |       // Create test directories
492 |       const dirs = await Promise.all([
493 |         fs.mkdir(join(tempDir, "test1"), { recursive: true }),
494 |         fs.mkdir(join(tempDir, "test2"), { recursive: true }),
495 |         fs.mkdir(join(tempDir, "test3"), { recursive: true }),
496 |       ]);
497 | 
498 |       // Run multiple operations concurrently
499 |       const promises = [
500 |         generateConfig({
501 |           ssg: "jekyll",
502 |           projectName: "test1",
503 |           outputPath: join(tempDir, "test1"),
504 |         }),
505 |         generateConfig({
506 |           ssg: "hugo",
507 |           projectName: "test2",
508 |           outputPath: join(tempDir, "test2"),
509 |         }),
510 |         generateConfig({
511 |           ssg: "docusaurus",
512 |           projectName: "test3",
513 |           outputPath: join(tempDir, "test3"),
514 |         }),
515 |       ];
516 | 
517 |       const results = await Promise.all(promises);
518 | 
519 |       results.forEach((result) => {
520 |         expect(result).toBeDefined();
521 |         expect(result.content).toBeDefined();
522 |       });
523 |     });
524 | 
525 |     it("should handle resource contention gracefully", async () => {
526 |       // Multiple operations on same directory
527 |       const promises = Array(5)
528 |         .fill(null)
529 |         .map((_, i) =>
530 |           setupStructure({
531 |             path: join(tempDir, `concurrent-${i}`),
532 |             ssg: "jekyll",
533 |             includeExamples: false,
534 |           }),
535 |         );
536 | 
537 |       // Create directories first
538 |       await Promise.all(
539 |         promises.map((_, i) =>
540 |           fs.mkdir(join(tempDir, `concurrent-${i}`), { recursive: true }),
541 |         ),
542 |       );
543 | 
544 |       const results = await Promise.allSettled(promises);
545 | 
546 |       // All should succeed or fail gracefully
547 |       results.forEach((result) => {
548 |         if (result.status === "fulfilled") {
549 |           expect(result.value.content).toBeDefined();
550 |         } else {
551 |           expect(result.reason).toBeInstanceOf(Error);
552 |         }
553 |       });
554 |     });
555 |   });
556 | });
557 | 
```
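The failure-path tests above repeat the same shape assertions: `isError` set, a `content` array, and at least one item whose `text` contains an expected fragment. A helper along these lines (a sketch, not part of the repository) would collapse that boilerplate:

```typescript
// Hypothetical helper — mirrors the assertions used throughout this suite.
interface MCPContentItem {
  text?: string;
}
interface MCPToolResult {
  isError?: boolean;
  content: MCPContentItem[];
}

function expectToolError(
  result: MCPToolResult,
  ...expectedFragments: string[]
): void {
  expect(result.isError).toBe(true);
  expect(Array.isArray(result.content)).toBe(true);
  for (const fragment of expectedFragments) {
    expect(
      result.content.some((item) => item.text?.includes(fragment)),
    ).toBe(true);
  }
}

// Usage, matching the read-only output-path test:
//   expectToolError(result, "permission denied");
```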

--------------------------------------------------------------------------------
/tests/memory/storage.test.ts:
--------------------------------------------------------------------------------

```typescript
  1 | /**
  2 |  * Comprehensive unit tests for Memory Storage System
  3 |  * Tests JSONL storage, indexing, CRUD operations, and performance
  4 |  * Part of Issue #54 - Core Memory System Unit Tests
  5 |  */
  6 | 
  7 | import { promises as fs } from "fs";
  8 | import path from "path";
  9 | import os from "os";
 10 | import { JSONLStorage, MemoryEntry } from "../../src/memory/storage.js";
 11 | 
 12 | describe("JSONLStorage", () => {
 13 |   let storage: JSONLStorage;
 14 |   let tempDir: string;
 15 | 
 16 |   beforeEach(async () => {
 17 |     // Create unique temp directory for each test
 18 |     tempDir = path.join(
 19 |       os.tmpdir(),
 20 |       `memory-storage-test-${Date.now()}-${Math.random()
 21 |         .toString(36)
 22 |         .substr(2, 9)}`,
 23 |     );
 24 |     await fs.mkdir(tempDir, { recursive: true });
 25 |     storage = new JSONLStorage(tempDir);
 26 |     await storage.initialize();
 27 |   });
 28 | 
 29 |   afterEach(async () => {
 30 |     // Cleanup temp directory
 31 |     try {
 32 |       await fs.rm(tempDir, { recursive: true, force: true });
 33 |     } catch (error) {
 34 |       // Ignore cleanup errors
 35 |     }
 36 |   });
 37 | 
 38 |   describe("Basic Storage Operations", () => {
 39 |     test("should create storage instance and initialize", async () => {
 40 |       expect(storage).toBeDefined();
 41 |       expect(storage).toBeInstanceOf(JSONLStorage);
 42 | 
 43 |       // Verify storage directory was created
 44 |       const stats = await fs.stat(tempDir);
 45 |       expect(stats.isDirectory()).toBe(true);
 46 |     });
 47 | 
 48 |     test("should append and retrieve memory entries", async () => {
 49 |       const entry = {
 50 |         timestamp: new Date().toISOString(),
 51 |         type: "analysis" as const,
 52 |         data: { project: "test-project", result: "success" },
 53 |         metadata: { projectId: "test-proj", tags: ["test"] },
 54 |       };
 55 | 
 56 |       const stored = await storage.append(entry);
 57 |       expect(stored.id).toBeDefined();
 58 |       expect(stored.checksum).toBeDefined();
 59 |       expect(stored.type).toBe("analysis");
 60 |       expect(stored.data).toEqual(entry.data);
 61 |     });
 62 | 
 63 |     test("should handle different entry types", async () => {
 64 |       const entryTypes: Array<MemoryEntry["type"]> = [
 65 |         "analysis",
 66 |         "recommendation",
 67 |         "deployment",
 68 |         "configuration",
 69 |         "interaction",
 70 |       ];
 71 | 
 72 |       for (const type of entryTypes) {
 73 |         const entry = {
 74 |           timestamp: new Date().toISOString(),
 75 |           type,
 76 |           data: { testType: type },
 77 |           metadata: { projectId: "test-types" },
 78 |         };
 79 | 
 80 |         const stored = await storage.append(entry);
 81 |         expect(stored.type).toBe(type);
 82 |         expect(stored.data.testType).toBe(type);
 83 |       }
 84 |     });
 85 | 
 86 |     test("should generate unique IDs for different entries", async () => {
 87 |       const entry1 = {
 88 |         timestamp: new Date().toISOString(),
 89 |         type: "analysis" as const,
 90 |         data: { project: "test-1" },
 91 |         metadata: { projectId: "test-1" },
 92 |       };
 93 | 
 94 |       const entry2 = {
 95 |         timestamp: new Date().toISOString(),
 96 |         type: "analysis" as const,
 97 |         data: { project: "test-2" },
 98 |         metadata: { projectId: "test-2" },
 99 |       };
100 | 
101 |       const stored1 = await storage.append(entry1);
102 |       const stored2 = await storage.append(entry2);
103 | 
104 |       expect(stored1.id).not.toBe(stored2.id);
105 |       expect(stored1.checksum).not.toBe(stored2.checksum);
106 |     });
107 | 
108 |     test("should generate same ID for identical entries", async () => {
109 |       const entry = {
110 |         timestamp: new Date().toISOString(),
111 |         type: "analysis" as const,
112 |         data: { project: "identical-test" },
113 |         metadata: { projectId: "identical" },
114 |       };
115 | 
116 |       const stored1 = await storage.append(entry);
117 |       const stored2 = await storage.append(entry);
118 | 
119 |       expect(stored1.id).toBe(stored2.id);
120 |       expect(stored1.checksum).toBe(stored2.checksum);
121 |     });
122 |   });
123 | 
124 |   describe("File Management", () => {
125 |     test("should create proper JSONL file structure", async () => {
126 |       const entry = {
127 |         timestamp: "2024-01-15T10:30:00.000Z",
128 |         type: "analysis" as const,
129 |         data: { fileTest: true },
130 |         metadata: { projectId: "file-proj" },
131 |       };
132 | 
133 |       await storage.append(entry);
134 | 
135 |       // Check that file was created with expected name pattern
136 |       const files = await fs.readdir(tempDir);
137 |       const jsonlFiles = files.filter((f) => f.endsWith(".jsonl"));
138 |       expect(jsonlFiles.length).toBeGreaterThan(0);
139 | 
140 |       // Should have analysis_2024_01.jsonl
141 |       const expectedFile = "analysis_2024_01.jsonl";
142 |       expect(jsonlFiles).toContain(expectedFile);
143 | 
144 |       // Verify file contains the entry
145 |       const filePath = path.join(tempDir, expectedFile);
146 |       const content = await fs.readFile(filePath, "utf-8");
147 |       const lines = content.trim().split("\n");
148 |       expect(lines.length).toBeGreaterThan(0);
149 | 
150 |       const parsedEntry = JSON.parse(lines[0]);
151 |       expect(parsedEntry.data.fileTest).toBe(true);
152 |     });
153 | 
154 |     test("should organize files by type and date", async () => {
155 |       const entries = [
156 |         {
157 |           timestamp: "2024-01-15T10:30:00.000Z",
158 |           type: "analysis" as const,
159 |           data: { test: "analysis-jan" },
160 |           metadata: { projectId: "date-test" },
161 |         },
162 |         {
163 |           timestamp: "2024-02-15T10:30:00.000Z",
164 |           type: "analysis" as const,
165 |           data: { test: "analysis-feb" },
166 |           metadata: { projectId: "date-test" },
167 |         },
168 |         {
169 |           timestamp: "2024-01-15T10:30:00.000Z",
170 |           type: "recommendation" as const,
171 |           data: { test: "recommendation-jan" },
172 |           metadata: { projectId: "date-test" },
173 |         },
174 |       ];
175 | 
176 |       for (const entry of entries) {
177 |         await storage.append(entry);
178 |       }
179 | 
180 |       const files = await fs.readdir(tempDir);
181 |       const jsonlFiles = files.filter((f) => f.endsWith(".jsonl"));
182 | 
183 |       expect(jsonlFiles).toContain("analysis_2024_01.jsonl");
184 |       expect(jsonlFiles).toContain("analysis_2024_02.jsonl");
185 |       expect(jsonlFiles).toContain("recommendation_2024_01.jsonl");
186 |     });
187 | 
188 |     test("should handle index persistence", async () => {
189 |       const entry = {
190 |         timestamp: new Date().toISOString(),
191 |         type: "configuration" as const,
192 |         data: { indexTest: true },
193 |         metadata: { projectId: "index-test" },
194 |       };
195 | 
196 |       await storage.append(entry);
197 | 
198 |       // Check that index file was created
199 |       const indexPath = path.join(tempDir, ".index.json");
200 |       const indexExists = await fs
201 |         .access(indexPath)
202 |         .then(() => true)
203 |         .catch(() => false);
204 |       expect(indexExists).toBe(true);
205 | 
206 |       // Index should contain entry information
207 |       const indexContent = await fs.readFile(indexPath, "utf-8");
208 |       const indexData = JSON.parse(indexContent);
209 |       expect(typeof indexData).toBe("object");
210 |       expect(Array.isArray(indexData.entries)).toBe(true);
211 |       expect(indexData.entries.length).toBeGreaterThan(0);
212 |     });
213 |   });
214 | 
215 |   describe("Data Integrity", () => {
216 |     test("should generate checksums for data integrity", async () => {
217 |       const entry = {
218 |         timestamp: new Date().toISOString(),
219 |         type: "deployment" as const,
220 |         data: { integrity: "test", checkData: "important" },
221 |         metadata: { projectId: "integrity-test" },
222 |       };
223 | 
224 |       const stored = await storage.append(entry);
225 |       expect(stored.checksum).toBeDefined();
226 |       expect(typeof stored.checksum).toBe("string");
227 |       expect(stored.checksum?.length).toBe(32); // MD5 hash length
228 |     });
229 | 
230 |     test("should handle entry timestamps correctly", async () => {
231 |       const customTimestamp = "2024-06-15T14:30:00.000Z";
232 |       const entry = {
233 |         timestamp: customTimestamp,
234 |         type: "interaction" as const,
235 |         data: { timestampTest: true },
236 |         metadata: { projectId: "timestamp-test" },
237 |       };
238 | 
239 |       const stored = await storage.append(entry);
240 |       expect(stored.timestamp).toBe(customTimestamp);
241 |     });
242 | 
243 |     test("should auto-generate timestamp if not provided", async () => {
244 |       const entry = {
245 |         timestamp: "", // Will be auto-generated
246 |         type: "analysis" as const,
247 |         data: { autoTimestamp: true },
248 |         metadata: { projectId: "auto-timestamp-test" },
249 |       };
250 | 
251 |       const beforeTime = new Date().toISOString();
252 |       const stored = await storage.append(entry);
253 |       const afterTime = new Date().toISOString();
254 | 
255 |       expect(stored.timestamp).toBeDefined();
256 |       expect(stored.timestamp >= beforeTime).toBe(true);
257 |       expect(stored.timestamp <= afterTime).toBe(true);
258 |     });
259 |   });
260 | 
261 |   describe("Metadata Handling", () => {
262 |     test("should preserve metadata structure", async () => {
263 |       const metadata = {
264 |         projectId: "metadata-test",
265 |         repository: "github.com/test/repo",
266 |         ssg: "docusaurus",
267 |         tags: ["frontend", "typescript"],
268 |         version: "1.0.0",
269 |       };
270 | 
271 |       const entry = {
272 |         timestamp: new Date().toISOString(),
273 |         type: "recommendation" as const,
274 |         data: { recommendation: "use-docusaurus" },
275 |         metadata,
276 |       };
277 | 
278 |       const stored = await storage.append(entry);
279 |       expect(stored.metadata).toEqual(metadata);
280 |       expect(stored.metadata.projectId).toBe("metadata-test");
281 |       expect(stored.metadata.tags).toEqual(["frontend", "typescript"]);
282 |     });
283 | 
284 |     test("should handle optional metadata fields", async () => {
285 |       const entry = {
286 |         timestamp: new Date().toISOString(),
287 |         type: "analysis" as const,
288 |         data: { minimal: true },
289 |         metadata: { projectId: "minimal-test" },
290 |       };
291 | 
292 |       const stored = await storage.append(entry);
293 |       expect(stored.metadata.projectId).toBe("minimal-test");
294 |       expect(stored.metadata.repository).toBeUndefined();
295 |       expect(stored.metadata.tags).toBeUndefined();
296 |     });
297 | 
298 |     test("should handle compression metadata", async () => {
299 |       const metadata = {
300 |         projectId: "compression-test",
301 |         compressed: true,
302 |         compressionType: "gzip",
303 |         compressedAt: new Date().toISOString(),
304 |         originalSize: 1024,
305 |       };
306 | 
307 |       const entry = {
308 |         timestamp: new Date().toISOString(),
309 |         type: "configuration" as const,
310 |         data: { compressed: "data" },
311 |         metadata,
312 |       };
313 | 
314 |       const stored = await storage.append(entry);
315 |       expect(stored.metadata.compressed).toBe(true);
316 |       expect(stored.metadata.compressionType).toBe("gzip");
317 |       expect(stored.metadata.originalSize).toBe(1024);
318 |     });
319 |   });
320 | 
321 |   describe("Performance and Concurrency", () => {
322 |     test("should handle concurrent writes safely", async () => {
323 |       const concurrentWrites = 10;
324 |       const promises: Promise<MemoryEntry>[] = [];
325 | 
326 |       // Create multiple concurrent append operations
327 |       for (let i = 0; i < concurrentWrites; i++) {
328 |         const promise = storage.append({
329 |           timestamp: new Date().toISOString(),
330 |           type: "analysis",
331 |           data: { index: i, concurrent: true },
332 |           metadata: { projectId: "concurrent-test" },
333 |         });
334 |         promises.push(promise);
335 |       }
336 | 
337 |       const results = await Promise.all(promises);
338 |       expect(results).toHaveLength(concurrentWrites);
339 | 
340 |       // All IDs should be unique (since data is different)
341 |       const ids = results.map((r) => r.id);
342 |       expect(new Set(ids).size).toBe(concurrentWrites);
343 | 
344 |       // All should have correct structure
345 |       results.forEach((result, index) => {
346 |         expect(result.data.index).toBe(index);
347 |         expect(result.metadata.projectId).toBe("concurrent-test");
348 |       });
349 |     });
350 | 
351 |     test("should handle bulk append operations efficiently", async () => {
352 |       const startTime = Date.now();
353 |       const bulkSize = 50;
354 | 
355 |       // Append bulk entries
356 |       for (let i = 0; i < bulkSize; i++) {
357 |         await storage.append({
358 |           timestamp: new Date().toISOString(),
359 |           type: i % 2 === 0 ? "analysis" : "recommendation",
360 |           data: { index: i, bulk: true },
361 |           metadata: {
362 |             projectId: "bulk-test",
363 |           },
364 |         });
365 |       }
366 | 
367 |       const appendTime = Date.now() - startTime;
368 |       expect(appendTime).toBeLessThan(5000); // Should complete within 5 seconds
369 | 
370 |       // Verify files were created
371 |       const files = await fs.readdir(tempDir);
372 |       const jsonlFiles = files.filter((f) => f.endsWith(".jsonl"));
373 |       expect(jsonlFiles.length).toBeGreaterThan(0);
374 |     });
375 | 
376 |     test("should maintain performance with large data entries", async () => {
377 |       const largeData = {
378 |         description: "x".repeat(10000), // 10KB string
379 |         array: new Array(1000).fill(0).map((_, i) => ({
380 |           id: i,
381 |           data: `large-item-${i}`,
382 |           metadata: { processed: true },
383 |         })),
384 |       };
385 | 
386 |       const entry = {
387 |         timestamp: new Date().toISOString(),
388 |         type: "analysis" as const,
389 |         data: largeData,
390 |         metadata: { projectId: "large-test" },
391 |       };
392 | 
393 |       const startTime = Date.now();
394 |       const stored = await storage.append(entry);
395 |       const appendTime = Date.now() - startTime;
396 | 
397 |       expect(appendTime).toBeLessThan(1000); // Should append within 1 second
398 |       expect(stored.data.description).toHaveLength(10000);
399 |       expect(stored.data.array).toHaveLength(1000);
400 |     });
401 |   });
402 | 
403 |   describe("Error Handling and Edge Cases", () => {
404 |     test("should handle special characters in data", async () => {
405 |       const entry = {
406 |         timestamp: new Date().toISOString(),
407 |         type: "interaction" as const,
408 |         data: {
409 |           message: "Special chars: äöü 🚀 @#$%^&*()[]{}|\\:\";'<>?,./`~",
410 |           unicode: "测试中文字符",
411 |           emoji: "🎉🔥💯⚡🚀",
412 |           json: { nested: { deeply: { value: "test" } } },
413 |         },
414 |         metadata: {
415 |           projectId: "special-chars-项目-🏗️",
416 |           tags: ["special", "unicode", "特殊字符"],
417 |         },
418 |       };
419 | 
420 |       const stored = await storage.append(entry);
421 |       expect(stored.data.message).toContain("Special chars");
422 |       expect(stored.data.unicode).toBe("测试中文字符");
423 |       expect(stored.data.emoji).toBe("🎉🔥💯⚡🚀");
424 |       expect(stored.metadata.projectId).toBe("special-chars-项目-🏗️");
425 |     });
426 | 
427 |     test("should handle empty data gracefully", async () => {
428 |       const entry = {
429 |         timestamp: new Date().toISOString(),
430 |         type: "configuration" as const,
431 |         data: {},
432 |         metadata: { projectId: "empty-test" },
433 |       };
434 | 
435 |       const stored = await storage.append(entry);
436 |       expect(stored.data).toEqual({});
437 |       expect(stored.id).toBeDefined();
438 |       expect(stored.checksum).toBeDefined();
439 |     });
440 | 
441 |     test("should handle missing storage directory", async () => {
442 |       const nonExistentDir = path.join(
443 |         tempDir,
444 |         "non-existent",
445 |         "deeply",
446 |         "nested",
447 |       );
448 |       const newStorage = new JSONLStorage(nonExistentDir);
449 | 
450 |       // Should create directory during initialization
451 |       await newStorage.initialize();
452 | 
453 |       const stats = await fs.stat(nonExistentDir);
454 |       expect(stats.isDirectory()).toBe(true);
455 | 
456 |       // Should be able to append entries
457 |       const entry = {
458 |         timestamp: new Date().toISOString(),
459 |         type: "analysis" as const,
460 |         data: { recovery: true },
461 |         metadata: { projectId: "recovery-test" },
462 |       };
463 | 
464 |       const stored = await newStorage.append(entry);
465 |       expect(stored.data.recovery).toBe(true);
466 |     });
467 | 
468 |     test("should maintain data consistency across operations", async () => {
469 |       const entries = [
470 |         {
471 |           timestamp: new Date().toISOString(),
472 |           type: "analysis" as const,
473 |           data: { step: 1, consistency: "test" },
474 |           metadata: { projectId: "consistency-test" },
475 |         },
476 |         {
477 |           timestamp: new Date().toISOString(),
478 |           type: "recommendation" as const,
479 |           data: { step: 2, consistency: "test" },
480 |           metadata: { projectId: "consistency-test" },
481 |         },
482 |         {
483 |           timestamp: new Date().toISOString(),
484 |           type: "deployment" as const,
485 |           data: { step: 3, consistency: "test" },
486 |           metadata: { projectId: "consistency-test" },
487 |         },
488 |       ];
489 | 
490 |       const storedEntries = [];
491 |       for (const entry of entries) {
492 |         const stored = await storage.append(entry);
493 |         storedEntries.push(stored);
494 |       }
495 | 
496 |       // Verify all entries were stored correctly
497 |       expect(storedEntries).toHaveLength(3);
498 |       storedEntries.forEach((stored, index) => {
499 |         expect(stored.data.step).toBe(index + 1);
500 |         expect(stored.metadata.projectId).toBe("consistency-test");
501 |         expect(stored.id).toBeDefined();
502 |         expect(stored.checksum).toBeDefined();
503 |       });
504 | 
505 |       // All IDs should be unique
506 |       const ids = storedEntries.map((s) => s.id);
507 |       expect(new Set(ids).size).toBe(3);
508 |     });
509 |   });
510 | });
511 | 
```
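The identity tests above imply content-addressed entries: appending the same entry twice returns the same ID and the same 32-character (MD5) checksum, while different data yields different values. Below is a sketch of one way to derive such a digest; the real `JSONLStorage` internals are not shown on this page, so the exact hashed fields are an assumption.

```typescript
// Illustrative only — JSONLStorage's actual derivation may differ.
import { createHash } from "crypto";

interface EntryLike {
  timestamp: string;
  type: string;
  data: unknown;
  metadata: unknown;
}

// Hashing the serialized entry makes identity deterministic: byte-identical
// input always produces the same 32-hex-character MD5 digest.
function deriveChecksum(entry: EntryLike): string {
  return createHash("md5").update(JSON.stringify(entry)).digest("hex");
}

const entry: EntryLike = {
  timestamp: "2024-01-15T10:30:00.000Z",
  type: "analysis",
  data: { project: "identical-test" },
  metadata: { projectId: "identical" },
};

console.log(deriveChecksum(entry) === deriveChecksum(entry)); // true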

--------------------------------------------------------------------------------
/src/tools/generate-config.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import { promises as fs } from "fs";
  2 | import path from "path";
  3 | import { z } from "zod";
  4 | import { MCPToolResponse, formatMCPResponse } from "../types/api.js";
  5 | 
  6 | const inputSchema = z.object({
  7 |   ssg: z.enum(["jekyll", "hugo", "docusaurus", "mkdocs", "eleventy"]),
  8 |   projectName: z.string(),
  9 |   projectDescription: z.string().optional(),
 10 |   outputPath: z.string(),
 11 | });
 12 | 
 13 | /**
 14 |  * Generates configuration files for a specified static site generator.
 15 |  *
 16 |  * Creates comprehensive configuration files tailored to the chosen SSG, including
 17 |  * build configurations, theme settings, plugin configurations, and deployment
 18 |  * settings. The generated configurations are optimized based on the project
 19 |  * characteristics and follow best practices for each SSG.
 20 |  *
 21 |  * @param args - The input arguments for configuration generation
 22 |  * @param args.ssg - The static site generator to generate configuration for
 23 |  * @param args.projectName - The name of the project for configuration customization
 24 |  * @param args.projectDescription - Optional description for the project
 25 |  * @param args.outputPath - The directory path where configuration files should be written
 26 |  *
 27 |  * @returns Promise resolving to configuration generation results
 28 |  * @returns content - Array containing the generation results in MCP tool response format
 29 |  *
 30 |  * @throws {Error} When the output path is inaccessible or invalid
 31 |  * @throws {Error} When the SSG type is unsupported
 32 |  * @throws {Error} When configuration file generation fails
 33 |  *
 34 |  * @example
 35 |  * ```typescript
 36 |  * // Generate Docusaurus configuration
 37 |  * const result = await generateConfig({
 38 |  *   ssg: "docusaurus",
 39 |  *   projectName: "My Documentation",
 40 |  *   projectDescription: "Comprehensive project documentation",
 41 |  *   outputPath: "./docs"
 42 |  * });
 43 |  *
 44 |  * // Generate Hugo configuration
 45 |  * const hugoConfig = await generateConfig({
 46 |  *   ssg: "hugo",
 47 |  *   projectName: "My Site",
 48 |  *   outputPath: "./site"
 49 |  * });
 50 |  * ```
 51 |  *
 52 |  * @since 1.0.0
 53 |  */
 54 | export async function generateConfig(
 55 |   args: unknown,
 56 | ): Promise<{ content: any[] }> {
 57 |   const startTime = Date.now();
 58 |   const { ssg, projectName, projectDescription, outputPath } =
 59 |     inputSchema.parse(args);
 60 | 
 61 |   try {
 62 |     // Ensure output directory exists
 63 |     await fs.mkdir(outputPath, { recursive: true });
 64 | 
 65 |     let configFiles: Array<{ path: string; content: string }> = [];
 66 | 
 67 |     switch (ssg) {
 68 |       case "docusaurus":
 69 |         configFiles = await generateDocusaurusConfig(
 70 |           projectName,
 71 |           projectDescription || "",
 72 |         );
 73 |         break;
 74 |       case "mkdocs":
 75 |         configFiles = await generateMkDocsConfig(
 76 |           projectName,
 77 |           projectDescription || "",
 78 |         );
 79 |         break;
 80 |       case "hugo":
 81 |         configFiles = await generateHugoConfig(
 82 |           projectName,
 83 |           projectDescription || "",
 84 |         );
 85 |         break;
 86 |       case "jekyll":
 87 |         configFiles = await generateJekyllConfig(
 88 |           projectName,
 89 |           projectDescription || "",
 90 |         );
 91 |         break;
 92 |       case "eleventy":
 93 |         configFiles = await generateEleventyConfig(
 94 |           projectName,
 95 |           projectDescription || "",
 96 |         );
 97 |         break;
 98 |     }
 99 | 
100 |     // Write all config files
101 |     for (const file of configFiles) {
102 |       const filePath = path.join(outputPath, file.path);
103 |       await fs.mkdir(path.dirname(filePath), { recursive: true });
104 |       await fs.writeFile(filePath, file.content);
105 |     }
106 | 
107 |     const configResult = {
108 |       ssg,
109 |       projectName,
110 |       projectDescription,
111 |       outputPath,
112 |       filesCreated: configFiles.map((f) => f.path),
113 |       totalFiles: configFiles.length,
114 |     };
115 | 
116 |     const response: MCPToolResponse<typeof configResult> = {
117 |       success: true,
118 |       data: configResult,
119 |       metadata: {
120 |         toolVersion: "1.0.0",
121 |         executionTime: Date.now() - startTime,
122 |         timestamp: new Date().toISOString(),
123 |       },
124 |       recommendations: [
125 |         {
126 |           type: "info",
127 |           title: "Configuration Complete",
128 |           description: `Generated ${configFiles.length} configuration files for ${ssg}`,
129 |         },
130 |       ],
131 |       nextSteps: [
132 |         {
133 |           action: "Setup Documentation Structure",
134 |           toolRequired: "setup_structure",
135 |           description: `Create Diataxis-compliant documentation structure`,
136 |           priority: "high",
137 |         },
138 |       ],
139 |     };
140 | 
141 |     return formatMCPResponse(response);
142 |   } catch (error) {
143 |     const errorResponse: MCPToolResponse = {
144 |       success: false,
145 |       error: {
146 |         code: "CONFIG_GENERATION_FAILED",
147 |         message: `Failed to generate config: ${error}`,
148 |         resolution: "Ensure output path is writable and SSG type is supported",
149 |       },
150 |       metadata: {
151 |         toolVersion: "1.0.0",
152 |         executionTime: Date.now() - startTime,
153 |         timestamp: new Date().toISOString(),
154 |       },
155 |     };
156 |     return formatMCPResponse(errorResponse);
157 |   }
158 | }
159 | 
160 | async function generateDocusaurusConfig(
161 |   projectName: string,
162 |   projectDescription: string,
163 | ): Promise<Array<{ path: string; content: string }>> {
164 |   return [
165 |     {
166 |       path: "docusaurus.config.js",
167 |       content: `module.exports = {
168 |   title: '${projectName}',
169 |   tagline: '${projectDescription}',
170 |   url: 'https://your-domain.com',
171 |   baseUrl: '/your-repo/',
172 |   onBrokenLinks: 'throw',
173 |   onBrokenMarkdownLinks: 'warn',
174 |   favicon: 'img/favicon.ico',
175 |   organizationName: 'your-org',
176 |   projectName: '${projectName.toLowerCase().replace(/\s+/g, "-")}',
177 | 
178 |   presets: [
179 |     [
180 |       'classic',
181 |       {
182 |         docs: {
183 |           sidebarPath: require.resolve('./sidebars.js'),
184 |           editUrl: 'https://github.com/your-org/your-repo/tree/main/docs/',
185 |           path: '../docs',
186 |           routeBasePath: '/',
187 |         },
188 |         theme: {
189 |           customCss: require.resolve('./src/css/custom.css'),
190 |         },
191 |         blog: false,
192 |       },
193 |     ],
194 |   ],
195 | 
196 |   themeConfig: {
197 |     navbar: {
198 |       title: '${projectName}',
199 |       items: [
200 |         {
201 |           type: 'doc',
202 |           docId: 'index',
203 |           position: 'left',
204 |           label: 'Documentation',
205 |         },
206 |         {
207 |           href: 'https://github.com/your-org/your-repo',
208 |           label: 'GitHub',
209 |           position: 'right',
210 |         },
211 |       ],
212 |     },
213 |   },
214 | };`,
215 |     },
216 |     {
217 |       path: "package.json",
218 |       content: JSON.stringify(
219 |         {
220 |           name: `${projectName.toLowerCase().replace(/\s+/g, "-")}-docs`,
221 |           version: "0.0.0",
222 |           private: true,
223 |           scripts: {
224 |             docusaurus: "docusaurus",
225 |             start: "docusaurus start",
226 |             build: "docusaurus build",
227 |             swizzle: "docusaurus swizzle",
228 |             deploy: "docusaurus deploy",
229 |             clear: "docusaurus clear",
230 |             serve: "docusaurus serve --port 3001 --host 0.0.0.0",
231 |           },
232 |           dependencies: {
233 |             "@docusaurus/core": "^3.0.0",
234 |             "@docusaurus/preset-classic": "^3.0.0",
235 |             "@mdx-js/react": "^3.0.0",
236 |             clsx: "^2.0.0",
237 |             "prism-react-renderer": "^2.1.0",
238 |             react: "^18.0.0",
239 |             "react-dom": "^18.0.0",
240 |           },
241 |           devDependencies: {
242 |             "@docusaurus/types": "^3.0.0",
243 |           },
244 |         },
245 |         null,
246 |         2,
247 |       ),
248 |     },
249 |     {
250 |       path: "sidebars.js",
251 |       content: `/**
252 |  * Creating a sidebar enables you to:
253 |  - create an ordered group of docs
254 |  - render a sidebar for each doc of that group
255 |  - provide next/previous navigation
256 | 
257 |  The sidebars can be generated from the filesystem, or explicitly defined here.
258 | 
259 |  Create as many sidebars as you want.
260 |  */
261 | 
262 | // @ts-check
263 | 
264 | /** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */
265 | const sidebars = {
266 |   // Main documentation sidebar
267 |   docs: [
268 |     'index',
269 |     {
270 |       type: 'category',
271 |       label: 'Tutorials',
272 |       items: [
273 |         'tutorials/getting-started',
274 |       ],
275 |     },
276 |     {
277 |       type: 'category',
278 |       label: 'How-to Guides',
279 |       items: [
280 |         'how-to/prompting-guide',
281 |       ],
282 |     },
283 |     {
284 |       type: 'category',
285 |       label: 'Reference',
286 |       items: [
287 |         'reference/mcp-tools',
288 |       ],
289 |     },
290 |     {
291 |       type: 'category',
292 |       label: 'Explanation',
293 |       items: [
294 |         'explanation/architecture',
295 |       ],
296 |     },
297 |   ],
298 | };
299 | 
300 | module.exports = sidebars;`,
301 |     },
302 |     {
303 |       path: "src/css/custom.css",
304 |       content: `/**
305 |  * Any CSS included here will be global. The classic template
306 |  * bundles Infima by default. Infima is a CSS framework designed to
307 |  * work well for content-centric websites.
308 |  */
309 | 
310 | /* You can override the default Infima variables here. */
311 | :root {
312 |   --ifm-color-primary: #2e8555;
313 |   --ifm-color-primary-dark: #29784c;
314 |   --ifm-color-primary-darker: #277148;
315 |   --ifm-color-primary-darkest: #205d3b;
316 |   --ifm-color-primary-light: #33925d;
317 |   --ifm-color-primary-lighter: #359962;
318 |   --ifm-color-primary-lightest: #3cad6e;
319 |   --ifm-code-font-size: 95%;
320 |   --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1);
321 | }
322 | 
323 | /* For readability concerns, you should choose a lighter palette in dark mode. */
324 | [data-theme='dark'] {
325 |   --ifm-color-primary: #25c2a0;
326 |   --ifm-color-primary-dark: #21af90;
327 |   --ifm-color-primary-darker: #1fa588;
328 |   --ifm-color-primary-darkest: #1a8870;
329 |   --ifm-color-primary-light: #29d5b0;
330 |   --ifm-color-primary-lighter: #32d8b4;
331 |   --ifm-color-primary-lightest: #4fddbf;
332 |   --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3);
333 | }`,
334 |     },
335 |     {
336 |       path: "Dockerfile.docs",
337 |       content: `# Documentation testing container
338 | # Generated by DocuMCP
339 | FROM node:20-alpine
340 | 
341 | WORKDIR /app
342 | 
343 | # Copy package files
344 | COPY docs-site/package*.json ./docs-site/
345 | COPY docs-site/docusaurus.config.js ./docs-site/
346 | COPY docs-site/sidebars.js ./docs-site/
347 | COPY docs-site/src ./docs-site/src/
348 | 
349 | # Copy documentation source
350 | COPY docs ./docs/
351 | 
352 | # Install dependencies
353 | RUN cd docs-site && npm install
354 | 
355 | # Build documentation
356 | RUN cd docs-site && npm run build
357 | 
358 | # Expose port
359 | EXPOSE 3001
360 | 
361 | # Health check
362 | HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \\
363 |   CMD wget -q --spider http://localhost:3001/ || exit 1
364 | 
365 | # Start server
366 | CMD ["sh", "-c", "cd docs-site && npm run serve"]`,
367 |     },
368 |     {
369 |       path: "test-docs-local.sh",
370 |       content: `#!/bin/bash
371 | # Containerized documentation testing script
372 | # Generated by DocuMCP
373 | 
374 | set -e
375 | 
376 | # Detect container runtime
377 | if command -v podman &> /dev/null; then
378 |     CONTAINER_CMD="podman"
379 | elif command -v docker &> /dev/null; then
380 |     CONTAINER_CMD="docker"
381 | else
382 |     echo "❌ Neither Podman nor Docker found. Please install one of them."
383 |     echo "📖 Podman: https://podman.io/getting-started/installation"
384 |     echo "📖 Docker: https://docs.docker.com/get-docker/"
385 |     exit 1
386 | fi
387 | 
388 | echo "🔧 Using $CONTAINER_CMD for containerized documentation testing..."
389 | 
390 | # Build the documentation container
391 | echo "📦 Building documentation container..."
392 | $CONTAINER_CMD build -f Dockerfile.docs -t documcp-docs . || BUILD_FAILED=1
393 | 
394 | if [ -n "$BUILD_FAILED" ]; then
395 |     echo "❌ Container build failed!"
396 |     exit 1
397 | fi
398 | 
399 | echo "✅ Container build successful!"
400 | 
401 | # Run link checking outside container (faster)
402 | echo "🔗 Checking for broken links..."
403 | if command -v markdown-link-check &> /dev/null; then
404 |     find docs -name "*.md" -exec markdown-link-check {} \\;
405 | else
406 |     echo "⚠️  markdown-link-check not found. Install with: npm install -g markdown-link-check"
407 | fi
408 | 
409 | # Start the container
410 | echo ""
411 | echo "🚀 Starting documentation server in container..."
412 | echo "📖 Documentation will be available at: http://localhost:3001"
413 | echo "💡 Press Ctrl+C to stop the server"
414 | echo ""
415 | 
416 | # Run container with port mapping and cleanup
417 | $CONTAINER_CMD run --rm -p 3001:3001 --name documcp-docs-test documcp-docs`,
418 |     },
419 |     {
420 |       path: "docker-compose.docs.yml",
421 |       content: `# Docker Compose for documentation testing
422 | # Generated by DocuMCP
423 | version: '3.8'
424 | 
425 | services:
426 |   docs:
427 |     build:
428 |       context: .
429 |       dockerfile: Dockerfile.docs
430 |     ports:
431 |       - "3001:3001"
432 |     container_name: documcp-docs
433 |     healthcheck:
434 |       test: ["CMD", "wget", "-q", "--spider", "http://localhost:3001/"]
435 |       interval: 30s
436 |       timeout: 10s
437 |       retries: 3
438 |     volumes:
439 |       # Mount docs directory for live editing (optional)
440 |       - ./docs:/app/docs:ro
441 |     environment:
442 |       - NODE_ENV=production`,
443 |     },
444 |     {
445 |       path: ".dockerignore",
446 |       content: `# Documentation container ignore file
447 | # Generated by DocuMCP
448 | 
449 | # Node modules (will be installed in container)
450 | node_modules/
451 | docs-site/node_modules/
452 | docs-site/.docusaurus/
453 | docs-site/build/
454 | 
455 | # Git files
456 | .git/
457 | .gitignore
458 | 
459 | # Development files
460 | .env*
461 | *.log
462 | npm-debug.log*
463 | 
464 | # OS files
465 | .DS_Store
466 | Thumbs.db
467 | 
468 | # IDE files
469 | .vscode/
470 | .idea/
471 | *.swp
472 | *.swo
473 | 
474 | # Build artifacts
475 | dist/
476 | build/
477 | *.tgz
478 | 
479 | # Test files
480 | coverage/
481 | .nyc_output/
482 | 
483 | # Documentation build (will be generated)
484 | docs-site/build/`,
485 |     },
486 |   ];
487 | }
488 | 
489 | async function generateMkDocsConfig(
490 |   projectName: string,
491 |   projectDescription: string,
492 | ): Promise<Array<{ path: string; content: string }>> {
493 |   return [
494 |     {
495 |       path: "mkdocs.yml",
496 |       content: `site_name: ${projectName}
497 | site_description: ${projectDescription}
498 | site_url: https://your-domain.com
499 | 
500 | theme:
501 |   name: material
502 |   features:
503 |     - navigation.tabs
504 |     - navigation.sections
505 |     - navigation.expand
506 |     - navigation.top
507 |     - search.suggest
508 |     - search.highlight
509 |   palette:
510 |     - scheme: default
511 |       primary: indigo
512 |       accent: indigo
513 |       toggle:
514 |         icon: material/brightness-7
515 |         name: Switch to dark mode
516 |     - scheme: slate
517 |       primary: indigo
518 |       accent: indigo
519 |       toggle:
520 |         icon: material/brightness-4
521 |         name: Switch to light mode
522 | 
523 | plugins:
524 |   - search
525 |   - mermaid2
526 | 
527 | markdown_extensions:
528 |   - pymdownx.highlight
529 |   - pymdownx.superfences
530 |   - pymdownx.tabbed
531 |   - pymdownx.details
532 |   - admonition
533 |   - toc:
534 |       permalink: true
535 | 
536 | nav:
537 |   - Home: index.md
538 |   - Tutorials:
539 |     - Getting Started: tutorials/getting-started.md
540 |   - How-To Guides:
541 |     - Installation: how-to/installation.md
542 |   - Reference:
543 |     - API: reference/api.md
544 |   - Explanation:
545 |     - Architecture: explanation/architecture.md`,
546 |     },
547 |     {
548 |       path: "requirements.txt",
549 |       content: `mkdocs>=1.5.0
550 | mkdocs-material>=9.0.0
551 | mkdocs-mermaid2-plugin>=1.0.0`,
552 |     },
553 |   ];
554 | }
555 | 
556 | async function generateHugoConfig(
557 |   projectName: string,
558 |   projectDescription: string,
559 | ): Promise<Array<{ path: string; content: string }>> {
560 |   return [
561 |     {
562 |       path: "hugo.toml",
563 |       content: `baseURL = 'https://your-domain.com/'
564 | languageCode = 'en-us'
565 | title = '${projectName}'
566 | 
567 | [params]
568 |   description = '${projectDescription}'
569 | 
570 | [[menu.main]]
571 |   name = 'Tutorials'
572 |   url = '/tutorials/'
573 |   weight = 10
574 | 
575 | [[menu.main]]
576 |   name = 'How-To'
577 |   url = '/how-to/'
578 |   weight = 20
579 | 
580 | [[menu.main]]
581 |   name = 'Reference'
582 |   url = '/reference/'
583 |   weight = 30
584 | 
585 | [[menu.main]]
586 |   name = 'Explanation'
587 |   url = '/explanation/'
588 |   weight = 40`,
589 |     },
590 |   ];
591 | }
592 | 
593 | async function generateJekyllConfig(
594 |   projectName: string,
595 |   projectDescription: string,
596 | ): Promise<Array<{ path: string; content: string }>> {
597 |   return [
598 |     {
599 |       path: "_config.yml",
600 |       content: `title: ${projectName}
601 | description: ${projectDescription}
602 | baseurl: ""
603 | url: "https://your-domain.com"
604 | 
605 | theme: minima
606 | 
607 | plugins:
608 |   - jekyll-feed
609 |   - jekyll-seo-tag
610 |   - jekyll-sitemap
611 | 
612 | collections:
613 |   tutorials:
614 |     output: true
615 |     permalink: /tutorials/:name
616 |   how-to:
617 |     output: true
618 |     permalink: /how-to/:name
619 |   reference:
620 |     output: true
621 |     permalink: /reference/:name
622 |   explanation:
623 |     output: true
624 |     permalink: /explanation/:name`,
625 |     },
626 |     {
627 |       path: "Gemfile",
628 |       content: `source "https://rubygems.org"
629 | 
630 | gem "jekyll", "~> 4.3"
631 | gem "minima", "~> 2.5"
632 | 
633 | group :jekyll_plugins do
634 |   gem "jekyll-feed", "~> 0.12"
635 |   gem "jekyll-seo-tag", "~> 2.8"
636 |   gem "jekyll-sitemap", "~> 1.4"
637 | end`,
638 |     },
639 |   ];
640 | }
641 | 
642 | async function generateEleventyConfig(
643 |   projectName: string,
644 |   projectDescription: string,
645 | ): Promise<Array<{ path: string; content: string }>> {
646 |   return [
647 |     {
648 |       path: ".eleventy.js",
649 |       content: `module.exports = function(eleventyConfig) {
650 |   eleventyConfig.addPassthroughCopy("css");
651 | 
652 |   return {
653 |     dir: {
654 |       input: "src",
655 |       output: "_site",
656 |       includes: "_includes",
657 |       layouts: "_layouts"
658 |     },
659 |     templateFormats: ["md", "njk", "html"],
660 |     markdownTemplateEngine: "njk",
661 |     htmlTemplateEngine: "njk"
662 |   };
663 | };`,
664 |     },
665 |     {
666 |       path: "package.json",
667 |       content: JSON.stringify(
668 |         {
669 |           name: projectName.toLowerCase().replace(/\s+/g, "-"),
670 |           version: "1.0.0",
671 |           description: projectDescription,
672 |           scripts: {
673 |             build: "eleventy",
674 |             serve: "eleventy --serve",
675 |             debug: "DEBUG=* eleventy",
676 |           },
677 |           devDependencies: {
678 |             "@11ty/eleventy": "^2.0.0",
679 |           },
680 |         },
681 |         null,
682 |         2,
683 |       ),
684 |     },
685 |   ];
686 | }
687 | 
```
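
A minimal usage sketch for the tool above, assuming a direct import of the module (the import path, the top-level `await` context, and the concrete argument values are illustrative assumptions, not taken from the source):

```typescript
// Hypothetical direct call; in practice the MCP server dispatches this tool.
import { generateConfig } from "./src/tools/generate-config.js";

const result = await generateConfig({
  ssg: "mkdocs", // one of: jekyll | hugo | docusaurus | mkdocs | eleventy
  projectName: "Acme Docs",
  projectDescription: "Internal platform documentation",
  outputPath: "./docs-site", // mkdocs.yml and requirements.txt land here
});

// formatMCPResponse serializes the payload into an MCP text content block.
const parsed = JSON.parse(result.content[0].text);
console.log(parsed.success);
```

Invalid `ssg` values never reach the filesystem: the zod enum rejects them in `inputSchema.parse` before any directories are created.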

--------------------------------------------------------------------------------
/tests/tools/deploy-pages-kg-retrieval.test.ts:
--------------------------------------------------------------------------------

```typescript
  1 | /**
  2 |  * Tests for deploy-pages.ts getSSGFromKnowledgeGraph function
  3 |  * Covers uncovered branches in lines 53-110, 294-305, 549-581
  4 |  */
  5 | 
  6 | import { describe, it, expect, beforeEach, afterEach } from "@jest/globals";
  7 | import { promises as fs } from "fs";
  8 | import { join } from "path";
  9 | import { tmpdir } from "os";
 10 | import {
 11 |   initializeKnowledgeGraph,
 12 |   getKnowledgeGraph,
 13 |   createOrUpdateProject,
 14 |   trackDeployment,
 15 | } from "../../src/memory/kg-integration.js";
 16 | import { deployPages } from "../../src/tools/deploy-pages.js";
 17 | import { clearPreferenceManagerCache } from "../../src/memory/user-preferences.js";
 18 | 
 19 | describe("deployPages - getSSGFromKnowledgeGraph Coverage", () => {
 20 |   let testDir: string;
 21 |   let originalEnv: string | undefined;
 22 | 
 23 |   beforeEach(async () => {
 24 |     testDir = join(tmpdir(), `deploy-kg-retrieval-test-${Date.now()}`);
 25 |     await fs.mkdir(testDir, { recursive: true });
 26 | 
 27 |     originalEnv = process.env.DOCUMCP_STORAGE_DIR;
 28 |     process.env.DOCUMCP_STORAGE_DIR = testDir;
 29 | 
 30 |     await initializeKnowledgeGraph(testDir);
 31 |     clearPreferenceManagerCache();
 32 |   });
 33 | 
 34 |   afterEach(async () => {
 35 |     if (originalEnv) {
 36 |       process.env.DOCUMCP_STORAGE_DIR = originalEnv;
 37 |     } else {
 38 |       delete process.env.DOCUMCP_STORAGE_DIR;
 39 |     }
 40 | 
 41 |     try {
 42 |       await fs.rm(testDir, { recursive: true, force: true });
 43 |     } catch (error) {
 44 |       // Ignore cleanup errors
 45 |     }
 46 | 
 47 |     clearPreferenceManagerCache();
 48 |   });
 49 | 
 50 |   describe("SSG Retrieval from Knowledge Graph", () => {
 51 |     it("should return null when project node not found (lines 62-64)", async () => {
 52 |       // Test the path where projectNode is null
 53 |       const result = await deployPages({
 54 |         repository: testDir,
 55 |         analysisId: "non-existent-analysis-id",
 56 |         projectPath: testDir,
 57 |         projectName: "Test",
 58 |       });
 59 | 
 60 |       const content = result.content[0];
 61 |       const data = JSON.parse(content.text);
 62 | 
 63 |       // Should fail because no SSG was found and none was provided
 64 |       expect(data.success).toBe(false);
 65 |       expect(data.error.code).toBe("SSG_NOT_SPECIFIED");
 66 |     });
 67 | 
 68 |     // TODO: Fix - getDeploymentRecommendations doesn't work with manually created KG nodes
 69 |     it.skip("should sort deployment recommendations by confidence (lines 69-75)", async () => {
 70 |       const kg = await getKnowledgeGraph();
 71 |       const analysisId = "test-analysis-multi-recommendations";
 72 | 
 73 |       // Create a project with multiple SSG recommendations
 74 |       const projectNode = await kg.addNode({
 75 |         id: `project:${analysisId}`,
 76 |         type: "project",
 77 |         label: "Multi-SSG Project",
 78 |         properties: { id: analysisId, name: "Multi-SSG Project" },
 79 |         weight: 1.0,
 80 |       });
 81 | 
 82 |       // Add multiple configuration nodes with different confidence levels
 83 |       const config1 = await kg.addNode({
 84 |         id: "config:jekyll",
 85 |         type: "configuration",
 86 |         label: "Jekyll Config",
 87 |         properties: { ssg: "jekyll", confidence: 0.5 },
 88 |         weight: 1.0,
 89 |       });
 90 | 
 91 |       const config2 = await kg.addNode({
 92 |         id: "config:hugo",
 93 |         type: "configuration",
 94 |         label: "Hugo Config",
 95 |         properties: { ssg: "hugo", confidence: 0.9 },
 96 |         weight: 1.0,
 97 |       });
 98 | 
 99 |       const config3 = await kg.addNode({
100 |         id: "config:docusaurus",
101 |         type: "configuration",
102 |         label: "Docusaurus Config",
103 |         properties: { ssg: "docusaurus", confidence: 0.7 },
104 |         weight: 1.0,
105 |       });
106 | 
107 |       // Add recommendation edges
108 |       await kg.addEdge({
109 |         source: projectNode.id,
110 |         target: config1.id,
111 |         type: "recommends",
112 |         properties: { confidence: 0.5 },
113 |         weight: 1.0,
114 |         confidence: 0.5,
115 |       });
116 | 
117 |       await kg.addEdge({
118 |         source: projectNode.id,
119 |         target: config2.id,
120 |         type: "recommends",
121 |         properties: { confidence: 0.9 },
122 |         weight: 1.0,
123 |         confidence: 0.9,
124 |       });
125 | 
126 |       await kg.addEdge({
127 |         source: projectNode.id,
128 |         target: config3.id,
129 |         type: "recommends",
130 |         properties: { confidence: 0.7 },
131 |         weight: 1.0,
132 |         confidence: 0.7,
133 |       });
134 | 
135 |       // Deploy without specifying SSG - should pick Hugo (highest confidence)
136 |       const result = await deployPages({
137 |         repository: testDir,
138 |         analysisId,
139 |         projectPath: testDir,
140 |         projectName: "Test",
141 |       });
142 | 
143 |       const content = result.content[0];
144 |       const data = JSON.parse(content.text);
145 | 
146 |       expect(data.success).toBe(true);
147 |       expect(data.ssg).toBe("hugo"); // Highest confidence
148 |     });
149 | 
150 |     // TODO: Fix - trackDeployment creates different KG structure than manual nodes
151 |     it.skip("should retrieve SSG from successful deployment history (lines 86-105)", async () => {
152 |       const kg = await getKnowledgeGraph();
153 |       const analysisId = "test-analysis-deployment-history";
154 | 
155 |       // Create a project
156 |       const project = await createOrUpdateProject({
157 |         id: analysisId,
158 |         timestamp: new Date().toISOString(),
159 |         path: testDir,
160 |         projectName: "History Project",
161 |         structure: {
162 |           totalFiles: 10,
163 |           languages: { typescript: 5, javascript: 5 },
164 |           hasTests: true,
165 |           hasCI: false,
166 |           hasDocs: false,
167 |         },
168 |       });
169 | 
170 |       // Track successful deployment with jekyll
171 |       await trackDeployment(project.id, "jekyll", true, {
172 |         buildTime: 5000,
173 |       });
174 | 
175 |       // Now deploy without SSG - should retrieve jekyll from history
176 |       const result = await deployPages({
177 |         repository: testDir,
178 |         analysisId,
179 |         projectPath: testDir,
180 |         projectName: "History Project",
181 |       });
182 | 
183 |       const content = result.content[0];
184 |       const data = JSON.parse(content.text);
185 | 
186 |       expect(data.success).toBe(true);
187 |       expect(data.ssg).toBe("jekyll"); // Retrieved from history
188 |     });
189 | 
190 |     // TODO: Fix - trackDeployment creates different KG structure than manual nodes
191 |     it.skip("should retrieve most recent successful deployment (lines 93-103)", async () => {
192 |       const kg = await getKnowledgeGraph();
193 |       const analysisId = "test-analysis-multiple-deployments";
194 | 
195 |       // Create a project
196 |       const project = await createOrUpdateProject({
197 |         id: analysisId,
198 |         timestamp: new Date().toISOString(),
199 |         path: testDir,
200 |         projectName: "Multi-Deploy Project",
201 |         structure: {
202 |           totalFiles: 10,
203 |           languages: { typescript: 10 },
204 |           hasTests: true,
205 |           hasCI: false,
206 |           hasDocs: false,
207 |         },
208 |       });
209 | 
210 |       // Track multiple successful deployments at different times
211 |       await trackDeployment(project.id, "jekyll", true, {
212 |         buildTime: 5000,
213 |       });
214 | 
215 |       // Wait a bit to ensure different timestamps
216 |       await new Promise((resolve) => setTimeout(resolve, 10));
217 | 
218 |       await trackDeployment(project.id, "hugo", true, {
219 |         buildTime: 6000,
220 |       });
221 | 
222 |       await new Promise((resolve) => setTimeout(resolve, 10));
223 | 
224 |       await trackDeployment(project.id, "docusaurus", true, {
225 |         buildTime: 7000,
226 |       });
227 | 
228 |       // Should retrieve the most recent (docusaurus)
229 |       const result = await deployPages({
230 |         repository: testDir,
231 |         analysisId,
232 |         projectPath: testDir,
233 |         projectName: "Multi-Deploy Project",
234 |       });
235 | 
236 |       const content = result.content[0];
237 |       const data = JSON.parse(content.text);
238 | 
239 |       expect(data.success).toBe(true);
240 |       expect(data.ssg).toBe("docusaurus"); // Most recent
241 |     });
242 | 
243 |     // TODO: Fix - trackDeployment creates different KG structure than manual nodes
244 |     it.skip("should skip failed deployments and use successful ones (line 89)", async () => {
245 |       const kg = await getKnowledgeGraph();
246 |       const analysisId = "test-analysis-mixed-deployments";
247 | 
248 |       // Create a project
249 |       const project = await createOrUpdateProject({
250 |         id: analysisId,
251 |         timestamp: new Date().toISOString(),
252 |         path: testDir,
253 |         projectName: "Mixed Deploy Project",
254 |         structure: {
255 |           totalFiles: 10,
256 |           languages: { typescript: 10 },
257 |           hasTests: true,
258 |           hasCI: false,
259 |           hasDocs: false,
260 |         },
261 |       });
262 | 
263 |       // Track failed deployment
264 |       await trackDeployment(project.id, "jekyll", false, {
265 |         errorMessage: "Build failed",
266 |       });
267 | 
268 |       // Track successful deployment
269 |       await trackDeployment(project.id, "hugo", true, {
270 |         buildTime: 5000,
271 |       });
272 | 
273 |       // Should retrieve hugo (successful) not jekyll (failed)
274 |       const result = await deployPages({
275 |         repository: testDir,
276 |         analysisId,
277 |         projectPath: testDir,
278 |         projectName: "Mixed Deploy Project",
279 |       });
280 | 
281 |       const content = result.content[0];
282 |       const data = JSON.parse(content.text);
283 | 
284 |       expect(data.success).toBe(true);
285 |       expect(data.ssg).toBe("hugo"); // Only successful deployment
286 |     });
287 | 
288 |     // TODO: Fix - trackDeployment creates different KG structure than manual nodes
289 |     it.skip("should use provided SSG even when analysisId exists (line 307-309)", async () => {
290 |       const analysisId = "test-analysis-explicit-ssg";
291 | 
292 |       // Create a project with jekyll
293 |       const project = await createOrUpdateProject({
294 |         id: analysisId,
295 |         timestamp: new Date().toISOString(),
296 |         path: testDir,
297 |         projectName: "Explicit SSG Project",
298 |         structure: {
299 |           totalFiles: 10,
300 |           languages: { typescript: 10 },
301 |           hasTests: true,
302 |           hasCI: false,
303 |           hasDocs: false,
304 |         },
305 |       });
306 | 
307 |       await trackDeployment(project.id, "jekyll", true, {
308 |         buildTime: 5000,
309 |       });
310 | 
311 |       // Explicitly provide hugo - should use hugo not jekyll
312 |       const result = await deployPages({
313 |         repository: testDir,
314 |         ssg: "hugo",
315 |         analysisId,
316 |         projectPath: testDir,
317 |         projectName: "Explicit SSG Project",
318 |       });
319 | 
320 |       const content = result.content[0];
321 |       const data = JSON.parse(content.text);
322 | 
323 |       expect(data.success).toBe(true);
324 |       expect(data.ssg).toBe("hugo"); // Explicitly provided, not from KG
325 |     });
326 |   });
327 | 
328 |   describe("Error Tracking in Catch Block (lines 549-581)", () => {
329 |     it("should track failed deployment in catch block when projectPath provided", async () => {
330 |       // Create invalid path to trigger error during workflow generation
331 |       const invalidPath = "/invalid/path/cannot/create";
332 | 
333 |       const result = await deployPages({
334 |         repository: invalidPath,
335 |         ssg: "jekyll",
336 |         projectPath: testDir,
337 |         projectName: "Failed Project",
338 |         userId: "test-user-error",
339 |       });
340 | 
341 |       const content = result.content[0];
342 |       const data = JSON.parse(content.text);
343 | 
344 |       expect(data.success).toBe(false);
345 |       expect(data.error.code).toBe("DEPLOYMENT_SETUP_FAILED");
346 | 
347 |       // Verify that failure was tracked in KG
348 |       const kg = await getKnowledgeGraph();
349 |       const edges = await kg.findEdges({
350 |         properties: { baseType: "project_deployed_with" },
351 |       });
352 | 
353 |       // Should have tracked the failure
354 |       const failedDeployments = edges.filter(
355 |         (e) => e.properties.success === false,
356 |       );
357 |       expect(failedDeployments.length).toBeGreaterThan(0);
358 |     });
359 | 
360 |     it("should track user preference for failed deployment (lines 571-578)", async () => {
361 |       const invalidPath = "/invalid/path/cannot/create";
362 |       const userId = "test-user-failed-tracking";
363 | 
364 |       const result = await deployPages({
365 |         repository: invalidPath,
366 |         ssg: "mkdocs",
367 |         projectPath: testDir,
368 |         projectName: "Failed MkDocs",
369 |         userId,
370 |       });
371 | 
372 |       const content = result.content[0];
373 |       const data = JSON.parse(content.text);
374 | 
375 |       expect(data.success).toBe(false);
376 | 
377 |       // User preferences should still be tracked (with failure)
378 |       // This tests the path through lines 571-578
379 |     });
380 | 
381 |     it("should handle tracking error gracefully (lines 580-582)", async () => {
382 |       // Set an invalid storage dir to cause tracking to fail
383 |       const originalDir = process.env.DOCUMCP_STORAGE_DIR;
384 |       process.env.DOCUMCP_STORAGE_DIR = "/completely/invalid/path/for/storage";
385 | 
386 |       const invalidPath = "/invalid/path/cannot/create";
387 | 
388 |       const result = await deployPages({
389 |         repository: invalidPath,
390 |         ssg: "hugo",
391 |         projectPath: testDir,
392 |         projectName: "Tracking Error Test",
393 |         userId: "test-user-tracking-error",
394 |       });
395 | 
396 |       // Restore original dir
397 |       process.env.DOCUMCP_STORAGE_DIR = originalDir;
398 | 
399 |       const content = result.content[0];
400 |       const data = JSON.parse(content.text);
401 | 
402 |       // Should still return error response even if tracking fails
403 |       expect(data.success).toBe(false);
404 |       expect(data.error.code).toBe("DEPLOYMENT_SETUP_FAILED");
405 |     });
406 | 
407 |     it("should not track when ssg is unknown in error path (line 548)", async () => {
408 |       const kg = await getKnowledgeGraph();
409 | 
410 |       // Get initial count of deployments
411 |       const beforeEdges = await kg.findEdges({
412 |         properties: { baseType: "project_deployed_with" },
413 |       });
414 |       const beforeCount = beforeEdges.length;
415 | 
416 |       // Trigger error without SSG or analysisId
417 |       const result = await deployPages({
418 |         repository: "/invalid/path",
419 |         projectPath: testDir,
420 |         projectName: "No SSG Error",
421 |       });
422 | 
423 |       const content = result.content[0];
424 |       const data = JSON.parse(content.text);
425 | 
426 |       expect(data.success).toBe(false);
427 | 
428 |       // Should not have created new deployment tracking (no SSG available)
429 |       const afterEdges = await kg.findEdges({
430 |         properties: { baseType: "project_deployed_with" },
431 |       });
432 |       const afterCount = afterEdges.length;
433 | 
434 |       expect(afterCount).toBe(beforeCount); // No new deployment tracked
435 |     });
436 |   });
437 | 
438 |   describe("SSG Retrieval Edge Cases", () => {
439 |     it("should handle knowledge graph query errors gracefully (lines 108-110)", async () => {
440 |       // Create a corrupt scenario by setting invalid storage
441 |       const invalidDir = "/completely/invalid/kg/path";
442 |       process.env.DOCUMCP_STORAGE_DIR = invalidDir;
443 | 
444 |       const result = await deployPages({
445 |         repository: testDir,
446 |         analysisId: "some-analysis-id",
447 |         projectPath: testDir,
448 |         projectName: "KG Error Test",
449 |       });
450 | 
451 |       // Restore to valid directory
452 |       process.env.DOCUMCP_STORAGE_DIR = testDir;
453 | 
454 |       const content = result.content[0];
455 |       const data = JSON.parse(content.text);
456 | 
457 |       // Should fail gracefully - unable to find SSG
458 |       expect(data.success).toBe(false);
459 |       expect(data.error.code).toBe("SSG_NOT_SPECIFIED");
460 |     });
461 | 
462 |     it("should handle empty deployment recommendations (line 69)", async () => {
463 |       const kg = await getKnowledgeGraph();
464 |       const analysisId = "test-analysis-no-recommendations";
465 | 
466 |       // Create project but no recommendations
467 |       await kg.addNode({
468 |         id: `project:${analysisId}`,
469 |         type: "project",
470 |         label: "No Recs Project",
471 |         properties: { id: analysisId },
472 |         weight: 1.0,
473 |       });
474 | 
475 |       const result = await deployPages({
476 |         repository: testDir,
477 |         analysisId,
478 |         projectPath: testDir,
479 |         projectName: "No Recs",
480 |       });
481 | 
482 |       const content = result.content[0];
483 |       const data = JSON.parse(content.text);
484 | 
485 |       // Should fail - no SSG found
486 |       expect(data.success).toBe(false);
487 |       expect(data.error.code).toBe("SSG_NOT_SPECIFIED");
488 |     });
489 | 
490 |     it("should handle no successful deployments in history (line 92)", async () => {
491 |       const kg = await getKnowledgeGraph();
492 |       const analysisId = "test-analysis-all-failed";
493 | 
494 |       // Create project with only failed deployments
495 |       const project = await createOrUpdateProject({
496 |         id: analysisId,
497 |         timestamp: new Date().toISOString(),
498 |         path: testDir,
499 |         projectName: "All Failed Project",
500 |         structure: {
501 |           totalFiles: 10,
502 |           languages: { typescript: 10 },
503 |           hasTests: false,
504 |           hasCI: false,
505 |           hasDocs: false,
506 |         },
507 |       });
508 | 
509 |       // Only track failed deployments
510 |       await trackDeployment(project.id, "jekyll", false, {
511 |         errorMessage: "Failed 1",
512 |       });
513 |       await trackDeployment(project.id, "hugo", false, {
514 |         errorMessage: "Failed 2",
515 |       });
516 | 
517 |       const result = await deployPages({
518 |         repository: testDir,
519 |         analysisId,
520 |         projectPath: testDir,
521 |         projectName: "All Failed Project",
522 |       });
523 | 
524 |       const content = result.content[0];
525 |       const data = JSON.parse(content.text);
526 | 
527 |       // Should fail - no successful SSG found
528 |       expect(data.success).toBe(false);
529 |       expect(data.error.code).toBe("SSG_NOT_SPECIFIED");
530 |     });
531 |   });
532 | });
533 | 
```
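
Taken together, the expectations above (including the currently skipped cases) imply a resolution order for choosing an SSG when none is passed explicitly. The sketch below reconstructs that order from the test expectations only; the type names and function are illustrative, not copied from `deploy-pages.ts`:

```typescript
// Reconstructed precedence, per the tests: explicit SSG > highest-confidence
// KG recommendation > most recent successful deployment > SSG_NOT_SPECIFIED.
type SSG = "jekyll" | "hugo" | "docusaurus" | "mkdocs" | "eleventy";

interface Recommendation { ssg: SSG; confidence: number; }
interface Deployment { ssg: SSG; success: boolean; timestamp: string; }

function resolveSSG(
  explicit: SSG | undefined,
  recommendations: Recommendation[],
  deployments: Deployment[],
): SSG | null {
  if (explicit) return explicit; // an explicitly provided SSG always wins

  // Highest-confidence recommendation edge, if any exist
  const byConfidence = [...recommendations].sort(
    (a, b) => b.confidence - a.confidence,
  );
  if (byConfidence.length > 0) return byConfidence[0].ssg;

  // Otherwise the most recent *successful* deployment; failures are skipped
  const successful = deployments
    .filter((d) => d.success)
    .sort((a, b) => b.timestamp.localeCompare(a.timestamp));
  if (successful.length > 0) return successful[0].ssg;

  return null; // caller surfaces this as the SSG_NOT_SPECIFIED error
}
```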

--------------------------------------------------------------------------------
/src/tools/sync-code-to-docs.ts:
--------------------------------------------------------------------------------

```typescript
  1 | /**
  2 |  * Code-to-Documentation Synchronization Tool (Phase 3)
  3 |  *
  4 |  * MCP tool for automatic documentation synchronization
  5 |  * Detects drift and applies/suggests updates
  6 |  */
  7 | 
  8 | import { Tool } from "@modelcontextprotocol/sdk/types.js";
  9 | import { z } from "zod";
 10 | import { promises as fs } from "fs";
 11 | import path from "path";
 12 | import {
 13 |   DriftDetector,
 14 |   DriftDetectionResult,
 15 |   DriftSuggestion,
 16 | } from "../utils/drift-detector.js";
 17 | import { formatMCPResponse, MCPToolResponse } from "../types/api.js";
 18 | import { getKnowledgeGraph } from "../memory/kg-integration.js";
 19 | import { updateDocFrontmatter } from "../utils/freshness-tracker.js";
 20 | import { simpleGit } from "simple-git";
 21 | 
 22 | const inputSchema = z.object({
 23 |   projectPath: z.string().describe("Path to the project root"),
 24 |   docsPath: z.string().describe("Path to the documentation directory"),
 25 |   mode: z
 26 |     .enum(["detect", "preview", "apply", "auto"])
 27 |     .default("detect")
 28 |     .describe(
 29 |       "Mode: detect=analyze only, preview=show changes, apply=apply safe changes, auto=apply all changes",
 30 |     ),
 31 |   autoApplyThreshold: z
 32 |     .number()
 33 |     .min(0)
 34 |     .max(1)
 35 |     .default(0.8)
 36 |     .describe("Confidence threshold for automatic application (0-1)"),
 37 |   createSnapshot: z
 38 |     .boolean()
 39 |     .default(true)
 40 |     .describe("Create a snapshot before making changes"),
 41 | });
 42 | 
 43 | type SyncMode = "detect" | "preview" | "apply" | "auto";
 44 | 
 45 | export interface SyncResult {
 46 |   mode: SyncMode;
 47 |   driftDetections: DriftDetectionResult[];
 48 |   appliedChanges: AppliedChange[];
 49 |   pendingChanges: PendingSuggestion[];
 50 |   stats: SyncStats;
 51 |   snapshotId?: string;
 52 | }
 53 | 
 54 | export interface AppliedChange {
 55 |   docFile: string;
 56 |   section: string;
 57 |   changeType: "updated" | "added" | "removed";
 58 |   confidence: number;
 59 |   details: string;
 60 | }
 61 | 
 62 | export interface PendingSuggestion {
 63 |   docFile: string;
 64 |   section: string;
 65 |   reason: string;
 66 |   suggestion: DriftSuggestion;
 67 |   requiresReview: boolean;
 68 | }
 69 | 
 70 | export interface SyncStats {
 71 |   filesAnalyzed: number;
 72 |   driftsDetected: number;
 73 |   changesApplied: number;
 74 |   changesPending: number;
 75 |   breakingChanges: number;
 76 |   estimatedUpdateTime: string;
 77 | }
 78 | 
 79 | /**
 80 |  * Main synchronization handler
 81 |  */
 82 | export async function handleSyncCodeToDocs(
 83 |   args: unknown,
 84 |   context?: any,
 85 | ): Promise<{ content: any[] }> {
 86 |   const startTime = Date.now();
 87 | 
 88 |   try {
 89 |     const { projectPath, docsPath, mode, autoApplyThreshold, createSnapshot } =
 90 |       inputSchema.parse(args);
 91 | 
 92 |     await context?.info?.(
 93 |       `🔄 Starting code-to-documentation synchronization (mode: ${mode})...`,
 94 |     );
 95 | 
 96 |     // Initialize drift detector
 97 |     const detector = new DriftDetector(projectPath);
 98 |     await detector.initialize();
 99 | 
100 |     // Create baseline snapshot if requested
101 |     if (createSnapshot || mode !== "detect") {
102 |       await context?.info?.("📸 Creating code snapshot...");
103 |       await detector.createSnapshot(projectPath, docsPath);
104 |     }
105 | 
106 |     // Load previous snapshot for comparison
107 |     await context?.info?.("🔍 Detecting documentation drift...");
108 |     const previousSnapshot = await detector.loadLatestSnapshot();
109 | 
110 |     if (!previousSnapshot) {
111 |       await context?.info?.(
112 |         "ℹ️ No previous snapshot found. Creating baseline...",
113 |       );
114 |       const baselineSnapshot = await detector.createSnapshot(
115 |         projectPath,
116 |         docsPath,
117 |       );
118 | 
119 |       const result: SyncResult = {
120 |         mode,
121 |         driftDetections: [],
122 |         appliedChanges: [],
123 |         pendingChanges: [],
124 |         stats: {
125 |           filesAnalyzed: baselineSnapshot.files.size,
126 |           driftsDetected: 0,
127 |           changesApplied: 0,
128 |           changesPending: 0,
129 |           breakingChanges: 0,
130 |           estimatedUpdateTime: "0 minutes",
131 |         },
132 |         snapshotId: baselineSnapshot.timestamp,
133 |       };
134 | 
135 |       const response: MCPToolResponse<typeof result> = {
136 |         success: true,
137 |         data: result,
138 |         metadata: {
139 |           toolVersion: "3.0.0",
140 |           executionTime: Date.now() - startTime,
141 |           timestamp: new Date().toISOString(),
142 |         },
143 |         recommendations: [
144 |           {
145 |             type: "info",
146 |             title: "Baseline Created",
147 |             description:
148 |               "Baseline snapshot created. Run sync again after code changes to detect drift.",
149 |           },
150 |         ],
151 |       };
152 | 
153 |       return formatMCPResponse(response, { fullResponse: true });
154 |     }
155 | 
156 |     // Create current snapshot and detect drift
157 |     const currentSnapshot = await detector.createSnapshot(
158 |       projectPath,
159 |       docsPath,
160 |     );
161 |     const driftResults = await detector.detectDrift(
162 |       previousSnapshot,
163 |       currentSnapshot,
164 |     );
165 | 
166 |     await context?.info?.(
167 |       `📊 Found ${driftResults.length} file(s) with documentation drift`,
168 |     );
169 | 
170 |     // Process based on mode
171 |     const appliedChanges: AppliedChange[] = [];
172 |     const pendingChanges: PendingSuggestion[] = [];
173 | 
174 |     for (const driftResult of driftResults) {
175 |       if (driftResult.hasDrift) {
176 |         for (const suggestion of driftResult.suggestions) {
177 |           if (mode === "apply" || mode === "auto") {
178 |             // Apply changes based on confidence
179 |             const shouldApply =
180 |               mode === "auto" ||
181 |               (suggestion.autoApplicable &&
182 |                 suggestion.confidence >= autoApplyThreshold);
183 | 
184 |             if (shouldApply) {
185 |               try {
186 |                 await applyDocumentationChange(
187 |                   suggestion,
188 |                   context,
189 |                   projectPath,
190 |                 );
191 |                 appliedChanges.push({
192 |                   docFile: suggestion.docFile,
193 |                   section: suggestion.section,
194 |                   changeType: "updated",
195 |                   confidence: suggestion.confidence,
196 |                   details: suggestion.reasoning,
197 |                 });
198 |               } catch (error: any) {
199 |                 await context?.warn?.(
200 |                   `Failed to apply change to ${suggestion.docFile}: ${error.message}`,
201 |                 );
202 |                 pendingChanges.push({
203 |                   docFile: suggestion.docFile,
204 |                   section: suggestion.section,
205 |                   reason: `Auto-apply failed: ${error.message}`,
206 |                   suggestion,
207 |                   requiresReview: true,
208 |                 });
209 |               }
210 |             } else {
211 |               pendingChanges.push({
212 |                 docFile: suggestion.docFile,
213 |                 section: suggestion.section,
214 |                 reason: "Requires manual review",
215 |                 suggestion,
216 |                 requiresReview: true,
217 |               });
218 |             }
219 |           } else {
220 |             // Preview/detect mode - just collect suggestions
221 |             pendingChanges.push({
222 |               docFile: suggestion.docFile,
223 |               section: suggestion.section,
224 |               reason: "Detected drift",
225 |               suggestion,
226 |               requiresReview: !suggestion.autoApplicable,
227 |             });
228 |           }
229 |         }
230 |       }
231 |     }
232 | 
233 |     // Calculate stats
234 |     const stats = calculateSyncStats(
235 |       driftResults,
236 |       appliedChanges,
237 |       pendingChanges,
238 |     );
239 | 
240 |     // Store sync results in knowledge graph
241 |     await storeSyncResults(projectPath, driftResults, appliedChanges, context);
242 | 
243 |     const result: SyncResult = {
244 |       mode,
245 |       driftDetections: driftResults,
246 |       appliedChanges,
247 |       pendingChanges,
248 |       stats,
249 |       snapshotId: currentSnapshot.timestamp,
250 |     };
251 | 
252 |     const response: MCPToolResponse<typeof result> = {
253 |       success: true,
254 |       data: result,
255 |       metadata: {
256 |         toolVersion: "3.0.0",
257 |         executionTime: Date.now() - startTime,
258 |         timestamp: new Date().toISOString(),
259 |       },
260 |       recommendations: generateRecommendations(result),
261 |       nextSteps: generateNextSteps(result),
262 |     };
263 | 
264 |     await context?.info?.(
265 |       `✅ Synchronization complete: ${appliedChanges.length} applied, ${pendingChanges.length} pending`,
266 |     );
267 | 
268 |     return formatMCPResponse(response, { fullResponse: true });
269 |   } catch (error: any) {
270 |     const errorResponse: MCPToolResponse = {
271 |       success: false,
272 |       error: {
273 |         code: "SYNC_FAILED",
274 |         message: `Documentation synchronization failed: ${error.message}`,
275 |         resolution: "Check project and documentation paths are correct",
276 |       },
277 |       metadata: {
278 |         toolVersion: "3.0.0",
279 |         executionTime: Date.now() - startTime,
280 |         timestamp: new Date().toISOString(),
281 |       },
282 |     };
283 | 
284 |     return formatMCPResponse(errorResponse, { fullResponse: true });
285 |   }
286 | }
287 | 
288 | /**
289 |  * Apply a documentation change to a file
290 |  */
291 | async function applyDocumentationChange(
292 |   suggestion: DriftSuggestion,
293 |   context?: any,
294 |   projectPath?: string,
295 | ): Promise<void> {
296 |   const filePath = suggestion.docFile;
297 | 
298 |   // Read current file
299 |   const content = await fs.readFile(filePath, "utf-8");
300 | 
301 |   // Find and replace the section
302 |   const sectionPattern = new RegExp(
303 |     `(#{1,6}\\s+${escapeRegex(suggestion.section)}[\\s\\S]*?)(?=#{1,6}\\s+|$)`,
304 |     "g",
305 |   );
306 | 
307 |   let newContent = content;
308 |   const match = sectionPattern.exec(content);
309 | 
310 |   if (match) {
311 |     // Replace existing section
312 |     newContent = content.replace(sectionPattern, () => suggestion.suggestedContent);
313 |     await context?.info?.(
314 |       `✏️ Updated section '${suggestion.section}' in ${path.basename(
315 |         filePath,
316 |       )}`,
317 |     );
318 |   } else {
319 |     // Append new section
320 |     newContent = content + "\n\n" + suggestion.suggestedContent;
321 |     await context?.info?.(
322 |       `➕ Added section '${suggestion.section}' to ${path.basename(filePath)}`,
323 |     );
324 |   }
325 | 
326 |   // Write back to file
327 |   await fs.writeFile(filePath, newContent, "utf-8");
328 | 
329 |   // Update freshness metadata
330 |   try {
331 |     let currentCommit: string | undefined;
332 |     if (projectPath) {
333 |       try {
334 |         const git = simpleGit(projectPath);
335 |         const isRepo = await git.checkIsRepo();
336 |         if (isRepo) {
337 |           const log = await git.log({ maxCount: 1 });
338 |           currentCommit = log.latest?.hash;
339 |         }
340 |       } catch {
341 |         // Git not available, continue without it
342 |       }
343 |     }
344 | 
345 |     await updateDocFrontmatter(filePath, {
346 |       last_updated: new Date().toISOString(),
347 |       last_validated: new Date().toISOString(),
348 |       auto_updated: true,
349 |       validated_against_commit: currentCommit,
350 |     });
351 | 
352 |     await context?.info?.(
353 |       `🏷️ Updated freshness metadata for ${path.basename(filePath)}`,
354 |     );
355 |   } catch (error) {
356 |     // Non-critical error, just log it
357 |     await context?.warn?.(`Failed to update freshness metadata: ${error}`);
358 |   }
359 | }
360 | 
361 | /**
362 |  * Store sync results in knowledge graph
363 |  */
364 | async function storeSyncResults(
365 |   projectPath: string,
366 |   driftResults: DriftDetectionResult[],
367 |   appliedChanges: AppliedChange[],
368 |   context?: any,
369 | ): Promise<void> {
370 |   try {
371 |     const kg = await getKnowledgeGraph();
372 | 
373 |     // Store sync event
374 |     const syncNode = {
375 |       id: `sync:${projectPath}:${Date.now()}`,
376 |       type: "sync_event" as const,
377 |       label: "Code-Docs Sync",
378 |       properties: {
379 |         projectPath,
380 |         timestamp: new Date().toISOString(),
381 |         driftsDetected: driftResults.length,
382 |         changesApplied: appliedChanges.length,
383 |         success: true,
384 |       },
385 |       weight: 1.0,
386 |       lastUpdated: new Date().toISOString(),
387 |     };
388 | 
389 |     await kg.addNode(syncNode);
390 | 
391 |     // Link to project
392 |     const projectId = `project:${projectPath.split("/").pop() || "unknown"}`;
393 |     await kg.addEdge({
394 |       source: projectId,
395 |       target: syncNode.id,
396 |       type: "has_sync_event",
397 |       weight: 1.0,
398 |       confidence: 1.0,
399 |       properties: {
400 |         eventType: "sync",
401 |       },
402 |     });
403 |   } catch (error) {
404 |     await context?.warn?.(
405 |       `Failed to store sync results in knowledge graph: ${error}`,
406 |     );
407 |   }
408 | }
409 | 
410 | /**
411 |  * Calculate synchronization statistics
412 |  */
413 | function calculateSyncStats(
414 |   driftResults: DriftDetectionResult[],
415 |   appliedChanges: AppliedChange[],
416 |   pendingChanges: PendingSuggestion[],
417 | ): SyncStats {
418 |   const filesAnalyzed = driftResults.length;
419 |   const driftsDetected = driftResults.filter((r) => r.hasDrift).length;
420 |   const breakingChanges = driftResults.reduce(
421 |     (sum, r) => sum + r.impactAnalysis.breakingChanges,
422 |     0,
423 |   );
424 | 
425 |   // Estimate update time (5 min per breaking change, 2 min per pending change)
426 |   const estimatedMinutes = breakingChanges * 5 + pendingChanges.length * 2;
427 |   const estimatedUpdateTime =
428 |     estimatedMinutes < 60
429 |       ? `${estimatedMinutes} minutes`
430 |       : `${Math.round(estimatedMinutes / 60)} hours`;
431 | 
432 |   return {
433 |     filesAnalyzed,
434 |     driftsDetected,
435 |     changesApplied: appliedChanges.length,
436 |     changesPending: pendingChanges.length,
437 |     breakingChanges,
438 |     estimatedUpdateTime,
439 |   };
440 | }
441 | 
442 | /**
443 |  * Generate recommendations based on sync results
444 |  */
445 | function generateRecommendations(result: SyncResult): Array<{
446 |   type: "critical" | "warning" | "info";
447 |   title: string;
448 |   description: string;
449 | }> {
450 |   const recommendations: Array<{
451 |     type: "critical" | "warning" | "info";
452 |     title: string;
453 |     description: string;
454 |   }> = [];
455 | 
456 |   if (result.stats.breakingChanges > 0) {
457 |     recommendations.push({
458 |       type: "critical",
459 |       title: "Breaking Changes Detected",
460 |       description: `${result.stats.breakingChanges} breaking change(s) detected. Review and update documentation carefully.`,
461 |     });
462 |   }
463 | 
464 |   if (result.pendingChanges.filter((c) => c.requiresReview).length > 0) {
465 |     const reviewCount = result.pendingChanges.filter(
466 |       (c) => c.requiresReview,
467 |     ).length;
468 |     recommendations.push({
469 |       type: "warning",
470 |       title: "Manual Review Required",
471 |       description: `${reviewCount} change(s) require manual review before applying.`,
472 |     });
473 |   }
474 | 
475 |   if (result.appliedChanges.length > 0) {
476 |     recommendations.push({
477 |       type: "info",
478 |       title: "Changes Applied Successfully",
479 |       description: `${result.appliedChanges.length} documentation update(s) applied automatically.`,
480 |     });
481 |   }
482 | 
483 |   if (result.stats.driftsDetected === 0) {
484 |     recommendations.push({
485 |       type: "info",
486 |       title: "No Drift Detected",
487 |       description: "Documentation is up to date with code changes.",
488 |     });
489 |   }
490 | 
491 |   return recommendations;
492 | }
493 | 
494 | /**
495 |  * Generate next steps based on sync results
496 |  */
497 | function generateNextSteps(result: SyncResult): Array<{
498 |   action: string;
499 |   toolRequired?: string;
500 |   description: string;
501 |   priority: "high" | "medium" | "low";
502 | }> {
503 |   const nextSteps: Array<{
504 |     action: string;
505 |     toolRequired?: string;
506 |     description: string;
507 |     priority: "high" | "medium" | "low";
508 |   }> = [];
509 | 
510 |   if (result.pendingChanges.length > 0 && result.mode === "detect") {
511 |     nextSteps.push({
512 |       action: "Apply safe documentation changes",
513 |       toolRequired: "sync_code_to_docs",
514 |       description:
515 |         "Run sync with mode='apply' to apply high-confidence changes automatically",
516 |       priority: "high",
517 |     });
518 |   }
519 | 
520 |   if (result.stats.breakingChanges > 0) {
521 |     nextSteps.push({
522 |       action: "Review breaking changes",
523 |       description:
524 |         "Manually review and update documentation for breaking API changes",
525 |       priority: "high",
526 |     });
527 |   }
528 | 
529 |   if (result.appliedChanges.length > 0) {
530 |     nextSteps.push({
531 |       action: "Validate updated documentation",
532 |       toolRequired: "validate_diataxis_content",
533 |       description: "Run validation to ensure updated documentation is accurate",
534 |       priority: "medium",
535 |     });
536 |   }
537 | 
538 |   if (result.pendingChanges.filter((c) => c.requiresReview).length > 0) {
539 |     nextSteps.push({
540 |       action: "Review pending suggestions",
541 |       description:
542 |         "Examine pending suggestions and apply manually where appropriate",
543 |       priority: "medium",
544 |     });
545 |   }
546 | 
547 |   return nextSteps;
548 | }
549 | 
550 | /**
551 |  * Escape special regex characters
552 |  */
553 | function escapeRegex(str: string): string {
554 |   return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
555 | }
556 | 
557 | /**
558 |  * Tool definition
559 |  */
560 | export const syncCodeToDocs: Tool = {
561 |   name: "sync_code_to_docs",
562 |   description:
563 |     "Automatically synchronize documentation with code changes using AST-based drift detection (Phase 3)",
564 |   inputSchema: {
565 |     type: "object",
566 |     properties: {
567 |       projectPath: {
568 |         type: "string",
569 |         description: "Path to the project root directory",
570 |       },
571 |       docsPath: {
572 |         type: "string",
573 |         description: "Path to the documentation directory",
574 |       },
575 |       mode: {
576 |         type: "string",
577 |         enum: ["detect", "preview", "apply", "auto"],
578 |         default: "detect",
579 |         description:
580 |           "Sync mode: detect=analyze only, preview=show changes, apply=apply safe changes, auto=apply all",
581 |       },
582 |       autoApplyThreshold: {
583 |         type: "number",
584 |         minimum: 0,
585 |         maximum: 1,
586 |         default: 0.8,
587 |         description:
588 |           "Confidence threshold (0-1) for automatic application of changes",
589 |       },
590 |       createSnapshot: {
591 |         type: "boolean",
592 |         default: true,
593 |         description: "Create a snapshot before making changes (recommended)",
594 |       },
595 |     },
596 |     required: ["projectPath", "docsPath"],
597 |   },
598 | };
599 | 
```
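
A minimal sketch of invoking this tool from an MCP client. The argument shape follows the inputSchema above; the client and transport wiring (the @modelcontextprotocol/sdk imports and the server command) are assumptions about the host setup, not part of this file:

```typescript
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";

const client = new Client({ name: "docs-sync-demo", version: "1.0.0" });
await client.connect(
  new StdioClientTransport({ command: "node", args: ["dist/index.js"] }),
);

// Start in read-only "detect" mode; re-run with mode: "apply" once the
// returned recommendations and nextSteps look safe.
const result = await client.callTool({
  name: "sync_code_to_docs",
  arguments: {
    projectPath: "/path/to/project",
    docsPath: "/path/to/project/docs",
    mode: "detect",
    autoApplyThreshold: 0.8, // schema default; gates automatic application
    createSnapshot: true, // schema default; keeps a rollback point
  },
});
console.log(result);
```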

--------------------------------------------------------------------------------
/tests/tools/deploy-pages.test.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import { describe, it, expect, beforeEach, afterEach } from "@jest/globals";
  2 | import * as fs from "fs/promises";
  3 | import * as path from "path";
  4 | import { deployPages } from "../../src/tools/deploy-pages.js";
  5 | 
  6 | describe("deployPages", () => {
  7 |   const testTempDir = path.join(__dirname, "../../.tmp/test-deploy-pages");
  8 | 
  9 |   beforeEach(async () => {
 10 |     // Create test directory
 11 |     await fs.mkdir(testTempDir, { recursive: true });
 12 |   });
 13 | 
 14 |   afterEach(async () => {
 15 |     // Clean up test directory
 16 |     try {
 17 |       await fs.rm(testTempDir, { recursive: true });
 18 |     } catch {
 19 |       // Ignore cleanup errors
 20 |     }
 21 |   });
 22 | 
 23 |   describe("Input Validation", () => {
 24 |     it("should validate required repository parameter", async () => {
 25 |       await expect(deployPages({})).rejects.toThrow();
 26 |     });
 27 | 
 28 |     it("should return error when ssg not provided and no analysisId", async () => {
 29 |       const result = await deployPages({ repository: "test-repo" });
 30 |       expect(result.content).toBeDefined();
 31 | 
 32 |       // Parse the response to check for error
 33 |       const textContent = result.content.find((c: any) => c.type === "text");
 34 |       expect(textContent).toBeDefined();
 35 |       const response = JSON.parse(textContent.text);
 36 |       expect(response.success).toBe(false);
 37 |       expect(response.error.code).toBe("SSG_NOT_SPECIFIED");
 38 |     });
 39 | 
 40 |     it("should validate ssg enum values", async () => {
 41 |       await expect(
 42 |         deployPages({
 43 |           repository: "test-repo",
 44 |           ssg: "invalid-ssg",
 45 |         }),
 46 |       ).rejects.toThrow();
 47 |     });
 48 | 
 49 |     it("should accept valid ssg values", async () => {
 50 |       const validSSGs = ["jekyll", "hugo", "docusaurus", "mkdocs", "eleventy"];
 51 | 
 52 |       for (const ssg of validSSGs) {
 53 |         const result = await deployPages({
 54 |           repository: testTempDir,
 55 |           ssg,
 56 |         });
 57 | 
 58 |         expect(result.content).toBeDefined();
 59 |         const data = JSON.parse(result.content[0].text);
 60 |         expect(data.ssg).toBe(ssg);
 61 |       }
 62 |     });
 63 | 
 64 |     it("should use default branch when not specified", async () => {
 65 |       const result = await deployPages({
 66 |         repository: testTempDir,
 67 |         ssg: "jekyll",
 68 |       });
 69 | 
 70 |       const data = JSON.parse(result.content[0].text);
 71 |       expect(data.branch).toBe("gh-pages");
 72 |     });
 73 | 
 74 |     it("should accept custom branch", async () => {
 75 |       const result = await deployPages({
 76 |         repository: testTempDir,
 77 |         ssg: "jekyll",
 78 |         branch: "main",
 79 |       });
 80 | 
 81 |       const data = JSON.parse(result.content[0].text);
 82 |       expect(data.branch).toBe("main");
 83 |     });
 84 |   });
 85 | 
 86 |   describe("Workflow Generation", () => {
 87 |     it("should generate Jekyll workflow", async () => {
 88 |       const result = await deployPages({
 89 |         repository: testTempDir,
 90 |         ssg: "jekyll",
 91 |       });
 92 | 
 93 |       expect(result.content).toBeDefined();
 94 | 
 95 |       // Check that workflow file was created
 96 |       const workflowPath = path.join(
 97 |         testTempDir,
 98 |         ".github",
 99 |         "workflows",
100 |         "deploy-docs.yml",
101 |       );
102 |       const workflowContent = await fs.readFile(workflowPath, "utf-8");
103 | 
104 |       expect(workflowContent).toContain("Deploy Jekyll to GitHub Pages");
105 |       expect(workflowContent).toContain("ruby/setup-ruby@v1");
106 |       expect(workflowContent).toContain("bundle exec jekyll build");
107 |     });
108 | 
109 |     it("should generate Hugo workflow", async () => {
110 |       const result = await deployPages({
111 |         repository: testTempDir,
112 |         ssg: "hugo",
113 |       });
114 | 
115 |       expect(result.content).toBeDefined();
116 | 
117 |       const workflowPath = path.join(
118 |         testTempDir,
119 |         ".github",
120 |         "workflows",
121 |         "deploy-docs.yml",
122 |       );
123 |       const workflowContent = await fs.readFile(workflowPath, "utf-8");
124 | 
125 |       expect(workflowContent).toContain("Deploy Hugo to GitHub Pages");
126 |       expect(workflowContent).toContain("peaceiris/actions-hugo@v2");
127 |       expect(workflowContent).toContain("hugo --minify");
128 |     });
129 | 
130 |     it("should generate Docusaurus workflow", async () => {
131 |       const result = await deployPages({
132 |         repository: testTempDir,
133 |         ssg: "docusaurus",
134 |       });
135 | 
136 |       expect(result.content).toBeDefined();
137 | 
138 |       const workflowPath = path.join(
139 |         testTempDir,
140 |         ".github",
141 |         "workflows",
142 |         "deploy-docs.yml",
143 |       );
144 |       const workflowContent = await fs.readFile(workflowPath, "utf-8");
145 | 
146 |       expect(workflowContent).toContain("Deploy Docusaurus to GitHub Pages");
147 |       expect(workflowContent).toContain("actions/setup-node@v4");
148 |       expect(workflowContent).toContain("./build");
149 |     });
150 | 
151 |     it("should generate MkDocs workflow", async () => {
152 |       const result = await deployPages({
153 |         repository: testTempDir,
154 |         ssg: "mkdocs",
155 |       });
156 | 
157 |       expect(result.content).toBeDefined();
158 | 
159 |       const workflowPath = path.join(
160 |         testTempDir,
161 |         ".github",
162 |         "workflows",
163 |         "deploy-docs.yml",
164 |       );
165 |       const workflowContent = await fs.readFile(workflowPath, "utf-8");
166 | 
167 |       expect(workflowContent).toContain("Deploy MkDocs to GitHub Pages");
168 |       expect(workflowContent).toContain("actions/setup-python@v4");
169 |       expect(workflowContent).toContain("mkdocs gh-deploy");
170 |     });
171 | 
172 |     it("should generate Eleventy workflow", async () => {
173 |       const result = await deployPages({
174 |         repository: testTempDir,
175 |         ssg: "eleventy",
176 |       });
177 | 
178 |       expect(result.content).toBeDefined();
179 | 
180 |       const workflowPath = path.join(
181 |         testTempDir,
182 |         ".github",
183 |         "workflows",
184 |         "deploy-docs.yml",
185 |       );
186 |       const workflowContent = await fs.readFile(workflowPath, "utf-8");
187 | 
188 |       expect(workflowContent).toContain("Deploy Eleventy to GitHub Pages");
189 |       expect(workflowContent).toContain("actions/setup-node@v4");
190 |       expect(workflowContent).toContain("./_site");
191 |     });
192 | 
193 |     it("should use custom branch in MkDocs workflow", async () => {
194 |       const customBranch = "custom-pages";
195 |       const result = await deployPages({
196 |         repository: testTempDir,
197 |         ssg: "mkdocs",
198 |         branch: customBranch,
199 |       });
200 | 
201 |       expect(result.content).toBeDefined();
202 | 
203 |       const workflowPath = path.join(
204 |         testTempDir,
205 |         ".github",
206 |         "workflows",
207 |         "deploy-docs.yml",
208 |       );
209 |       const workflowContent = await fs.readFile(workflowPath, "utf-8");
210 | 
211 |       expect(workflowContent).toContain(`--branch ${customBranch}`);
212 |     });
213 | 
214 |     it("should fallback to Jekyll for unknown SSG", async () => {
215 |       // Unknown SSGs are rejected by schema validation, so exercise the default path
216 |       const result = await deployPages({
217 |         repository: testTempDir,
218 |         ssg: "jekyll", // valid SSG; the fallback branch also emits the Jekyll workflow
219 |       });
220 | 
221 |       expect(result.content).toBeDefined();
222 | 
223 |       const workflowPath = path.join(
224 |         testTempDir,
225 |         ".github",
226 |         "workflows",
227 |         "deploy-docs.yml",
228 |       );
229 |       const workflowContent = await fs.readFile(workflowPath, "utf-8");
230 | 
231 |       expect(workflowContent).toContain("Deploy Jekyll to GitHub Pages");
232 |     });
233 |   });
234 | 
235 |   describe("Custom Domain Support", () => {
236 |     it("should create CNAME file when custom domain is specified", async () => {
237 |       const customDomain = "docs.example.com";
238 |       const result = await deployPages({
239 |         repository: testTempDir,
240 |         ssg: "jekyll",
241 |         customDomain,
242 |       });
243 | 
244 |       expect(result.content).toBeDefined();
245 | 
246 |       // Check CNAME file was created
247 |       const cnamePath = path.join(testTempDir, "CNAME");
248 |       const cnameContent = await fs.readFile(cnamePath, "utf-8");
249 |       expect(cnameContent).toBe(customDomain);
250 | 
251 |       // Check response indicates CNAME was created
252 |       const data = JSON.parse(result.content[0].text);
253 |       expect(data.cnameCreated).toBe(true);
254 |       expect(data.customDomain).toBe(customDomain);
255 |     });
256 | 
257 |     it("should not create CNAME file when custom domain is not specified", async () => {
258 |       const result = await deployPages({
259 |         repository: testTempDir,
260 |         ssg: "jekyll",
261 |       });
262 | 
263 |       expect(result.content).toBeDefined();
264 | 
265 |       // Check CNAME file was not created
266 |       const cnamePath = path.join(testTempDir, "CNAME");
267 |       await expect(fs.access(cnamePath)).rejects.toThrow();
268 | 
269 |       // Check response indicates CNAME was not created
270 |       const data = JSON.parse(result.content[0].text);
271 |       expect(data.cnameCreated).toBe(false);
272 |       expect(data.customDomain).toBeUndefined();
273 |     });
274 | 
275 |     it("should include custom domain recommendation when specified", async () => {
276 |       const customDomain = "docs.example.com";
277 |       const result = await deployPages({
278 |         repository: testTempDir,
279 |         ssg: "jekyll",
280 |         customDomain,
281 |       });
282 | 
283 |       expect(result.content).toBeDefined();
284 | 
285 |       const data = JSON.parse(result.content[0].text);
286 |       expect(data.customDomain).toBe(customDomain);
287 |       expect(data.cnameCreated).toBe(true);
288 |     });
289 | 
290 |     it("should not include custom domain recommendation when not specified", async () => {
291 |       const result = await deployPages({
292 |         repository: testTempDir,
293 |         ssg: "jekyll",
294 |       });
295 | 
296 |       expect(result.content).toBeDefined();
297 | 
298 |       const data = JSON.parse(result.content[0].text);
299 |       expect(data.customDomain).toBeUndefined();
300 |       expect(data.cnameCreated).toBe(false);
301 |     });
302 |   });
303 | 
304 |   describe("Repository Path Handling", () => {
305 |     it("should handle local repository path", async () => {
306 |       const result = await deployPages({
307 |         repository: testTempDir,
308 |         ssg: "jekyll",
309 |       });
310 | 
311 |       expect(result.content).toBeDefined();
312 | 
313 |       const data = JSON.parse(result.content[0].text);
314 |       expect(data.repoPath).toBe(testTempDir);
315 |     });
316 | 
317 |     it("should handle remote repository URL", async () => {
318 |       const remoteRepo = "https://github.com/user/repo.git";
319 |       const result = await deployPages({
320 |         repository: remoteRepo,
321 |         ssg: "jekyll",
322 |       });
323 | 
324 |       expect(result.content).toBeDefined();
325 | 
326 |       const data = JSON.parse(result.content[0].text);
327 |       expect(data.repoPath).toBe(".");
328 |       expect(data.repository).toBe(remoteRepo);
329 |     });
330 | 
331 |     it("should handle HTTP repository URL", async () => {
332 |       const httpRepo = "http://github.com/user/repo.git";
333 |       const result = await deployPages({
334 |         repository: httpRepo,
335 |         ssg: "jekyll",
336 |       });
337 | 
338 |       expect(result.content).toBeDefined();
339 | 
340 |       const data = JSON.parse(result.content[0].text);
341 |       expect(data.repoPath).toBe(".");
342 |     });
343 |   });
344 | 
345 |   describe("Response Structure", () => {
346 |     it("should return properly formatted MCP response", async () => {
347 |       const result = await deployPages({
348 |         repository: testTempDir,
349 |         ssg: "jekyll",
350 |       });
351 | 
352 |       expect(result.content).toBeDefined();
353 |       expect(Array.isArray(result.content)).toBe(true);
354 |       expect(result.content.length).toBeGreaterThan(0);
355 | 
356 |       const data = JSON.parse(result.content[0].text);
357 |       expect(data.repository).toBe(testTempDir);
358 |       expect(data.ssg).toBe("jekyll");
359 |       expect(data.branch).toBe("gh-pages");
360 |       expect(data.workflowPath).toBe("deploy-docs.yml");
361 |     });
362 | 
363 |     it("should include execution metadata", async () => {
364 |       const result = await deployPages({
365 |         repository: testTempDir,
366 |         ssg: "jekyll",
367 |       });
368 | 
369 |       const data = JSON.parse(result.content[0].text);
370 |       expect(data.repository).toBeDefined();
371 |       expect(data.ssg).toBeDefined();
372 |       expect(data.repoPath).toBeDefined();
373 |     });
374 | 
375 |     it("should include deployment recommendations", async () => {
376 |       const result = await deployPages({
377 |         repository: testTempDir,
378 |         ssg: "hugo",
379 |       });
380 | 
381 |       const data = JSON.parse(result.content[0].text);
382 |       expect(data.ssg).toBe("hugo");
383 |       expect(data.workflowPath).toBe("deploy-docs.yml");
384 | 
385 |       // Check that workflow file was created
386 |       const workflowPath = path.join(
387 |         testTempDir,
388 |         ".github",
389 |         "workflows",
390 |         "deploy-docs.yml",
391 |       );
392 |       const workflowContent = await fs.readFile(workflowPath, "utf-8");
393 |       expect(workflowContent).toContain("hugo");
394 |     });
395 | 
396 |     it("should include next steps", async () => {
397 |       const result = await deployPages({
398 |         repository: testTempDir,
399 |         ssg: "jekyll",
400 |       });
401 | 
402 |       const data = JSON.parse(result.content[0].text);
403 |       expect(data.ssg).toBe("jekyll");
404 |       expect(data.workflowPath).toBe("deploy-docs.yml");
405 | 
406 |       // Verify workflow file was created
407 |       const workflowPath = path.join(
408 |         testTempDir,
409 |         ".github",
410 |         "workflows",
411 |         "deploy-docs.yml",
412 |       );
413 |       const stats = await fs.stat(workflowPath);
414 |       expect(stats.isFile()).toBe(true);
415 |     });
416 |   });
417 | 
418 |   describe("Error Handling", () => {
419 |     it("should handle file system errors gracefully", async () => {
420 |       // Try to write to a path that doesn't exist and can't be created
421 |       const invalidPath = "/invalid/path/that/cannot/be/created";
422 | 
423 |       const result = await deployPages({
424 |         repository: invalidPath,
425 |         ssg: "jekyll",
426 |       });
427 | 
428 |       expect(result.content).toBeDefined();
429 | 
430 |       const data = JSON.parse(result.content[0].text);
431 |       expect(data.success).toBe(false);
432 |       expect(data.error).toBeDefined();
433 |       expect(data.error.code).toBe("DEPLOYMENT_SETUP_FAILED");
434 |       expect(data.error.message).toContain("Failed to setup deployment");
435 |       expect(data.error.resolution).toContain(
436 |         "Ensure repository path is accessible",
437 |       );
438 |     });
439 | 
440 |     it("should include error metadata in failed responses", async () => {
441 |       const invalidPath = "/invalid/path/that/cannot/be/created";
442 | 
443 |       const result = await deployPages({
444 |         repository: invalidPath,
445 |         ssg: "jekyll",
446 |       });
447 | 
448 |       const data = JSON.parse(result.content[0].text);
449 |       expect(data.success).toBe(false);
450 |       expect(data.error).toBeDefined();
451 |       expect(data.error.code).toBe("DEPLOYMENT_SETUP_FAILED");
452 |     });
453 |   });
454 | 
455 |   describe("Directory Creation", () => {
456 |     it("should create .github/workflows directory structure", async () => {
457 |       const result = await deployPages({
458 |         repository: testTempDir,
459 |         ssg: "jekyll",
460 |       });
461 | 
462 |       expect(result.content).toBeDefined();
463 | 
464 |       // Check directory structure was created
465 |       const workflowsDir = path.join(testTempDir, ".github", "workflows");
466 |       const stats = await fs.stat(workflowsDir);
467 |       expect(stats.isDirectory()).toBe(true);
468 |     });
469 | 
470 |     it("should handle existing .github/workflows directory", async () => {
471 |       // Pre-create the directory
472 |       const workflowsDir = path.join(testTempDir, ".github", "workflows");
473 |       await fs.mkdir(workflowsDir, { recursive: true });
474 | 
475 |       const result = await deployPages({
476 |         repository: testTempDir,
477 |         ssg: "jekyll",
478 |       });
479 | 
480 |       expect(result.content).toBeDefined();
481 | 
482 |       const data = JSON.parse(result.content[0].text);
483 |       expect(data.ssg).toBe("jekyll");
484 |       expect(data.workflowPath).toBe("deploy-docs.yml");
485 |     });
486 |   });
487 | 
488 |   describe("Workflow File Content", () => {
489 |     it("should include proper permissions in workflows", async () => {
490 |       await deployPages({
491 |         repository: testTempDir,
492 |         ssg: "docusaurus",
493 |       });
494 | 
495 |       const workflowPath = path.join(
496 |         testTempDir,
497 |         ".github",
498 |         "workflows",
499 |         "deploy-docs.yml",
500 |       );
501 |       const workflowContent = await fs.readFile(workflowPath, "utf-8");
502 | 
503 |       expect(workflowContent).toContain("permissions:");
504 |       expect(workflowContent).toContain("contents: read");
505 |       expect(workflowContent).toContain("pages: write");
506 |       expect(workflowContent).toContain("id-token: write");
507 |     });
508 | 
509 |     it("should include concurrency settings in workflows", async () => {
510 |       await deployPages({
511 |         repository: testTempDir,
512 |         ssg: "hugo",
513 |       });
514 | 
515 |       const workflowPath = path.join(
516 |         testTempDir,
517 |         ".github",
518 |         "workflows",
519 |         "deploy-docs.yml",
520 |       );
521 |       const workflowContent = await fs.readFile(workflowPath, "utf-8");
522 | 
523 |       expect(workflowContent).toContain("concurrency:");
524 |       expect(workflowContent).toContain('group: "pages"');
525 |       expect(workflowContent).toContain("cancel-in-progress: false");
526 |     });
527 | 
528 |     it("should include proper triggers in workflows", async () => {
529 |       await deployPages({
530 |         repository: testTempDir,
531 |         ssg: "eleventy",
532 |       });
533 | 
534 |       const workflowPath = path.join(
535 |         testTempDir,
536 |         ".github",
537 |         "workflows",
538 |         "deploy-docs.yml",
539 |       );
540 |       const workflowContent = await fs.readFile(workflowPath, "utf-8");
541 | 
542 |       expect(workflowContent).toContain("on:");
543 |       expect(workflowContent).toContain("push:");
544 |       expect(workflowContent).toContain("branches: [main]");
545 |       expect(workflowContent).toContain("workflow_dispatch:");
546 |     });
547 |   });
548 | });
549 | 
```
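
The assertions above pin down deployPages' response contract. For orientation, a sketch of calling it directly and reading the parsed payload; the repository path and domain below are illustrative:

```typescript
import { deployPages } from "../../src/tools/deploy-pages.js";

const result = await deployPages({
  repository: "/path/to/repo", // local path, or a remote URL (repoPath becomes ".")
  ssg: "docusaurus",
  branch: "gh-pages", // default when omitted
  customDomain: "docs.example.com", // optional; also writes a CNAME file
});

// Success payload fields asserted by the tests above: repository, ssg,
// branch, workflowPath ("deploy-docs.yml"), repoPath, and
// customDomain/cnameCreated when a domain was given. The workflow itself
// is written to .github/workflows/deploy-docs.yml under the repository.
const data = JSON.parse(result.content[0].text);
console.log(data.workflowPath, data.cnameCreated);
```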