This is page 17 of 20. Use http://codebase.md/tosin2013/documcp?lines=false&page={x} to view the full context.
# Directory Structure
```
├── .dockerignore
├── .eslintignore
├── .eslintrc.json
├── .github
│ ├── agents
│ │ ├── documcp-ast.md
│ │ ├── documcp-deploy.md
│ │ ├── documcp-memory.md
│ │ ├── documcp-test.md
│ │ └── documcp-tool.md
│ ├── copilot-instructions.md
│ ├── dependabot.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── automated-changelog.md
│ │ ├── bug_report.md
│ │ ├── bug_report.yml
│ │ ├── documentation_issue.md
│ │ ├── feature_request.md
│ │ ├── feature_request.yml
│ │ ├── npm-publishing-fix.md
│ │ └── release_improvements.md
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── release-drafter.yml
│ └── workflows
│ ├── auto-merge.yml
│ ├── ci.yml
│ ├── codeql.yml
│ ├── dependency-review.yml
│ ├── deploy-docs.yml
│ ├── README.md
│ ├── release-drafter.yml
│ └── release.yml
├── .gitignore
├── .husky
│ ├── commit-msg
│ └── pre-commit
├── .linkcheck.config.json
├── .markdown-link-check.json
├── .nvmrc
├── .pre-commit-config.yaml
├── .versionrc.json
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── commitlint.config.js
├── CONTRIBUTING.md
├── docker-compose.docs.yml
├── Dockerfile.docs
├── docs
│ ├── .docusaurus
│ │ ├── docusaurus-plugin-content-docs
│ │ │ └── default
│ │ │ └── __mdx-loader-dependency.json
│ │ └── docusaurus-plugin-content-pages
│ │ └── default
│ │ └── __plugin.json
│ ├── adrs
│ │ ├── 001-mcp-server-architecture.md
│ │ ├── 002-repository-analysis-engine.md
│ │ ├── 003-static-site-generator-recommendation-engine.md
│ │ ├── 004-diataxis-framework-integration.md
│ │ ├── 005-github-pages-deployment-automation.md
│ │ ├── 006-mcp-tools-api-design.md
│ │ ├── 007-mcp-prompts-and-resources-integration.md
│ │ ├── 008-intelligent-content-population-engine.md
│ │ ├── 009-content-accuracy-validation-framework.md
│ │ ├── 010-mcp-resource-pattern-redesign.md
│ │ └── README.md
│ ├── api
│ │ ├── .nojekyll
│ │ ├── assets
│ │ │ ├── hierarchy.js
│ │ │ ├── highlight.css
│ │ │ ├── icons.js
│ │ │ ├── icons.svg
│ │ │ ├── main.js
│ │ │ ├── navigation.js
│ │ │ ├── search.js
│ │ │ └── style.css
│ │ ├── hierarchy.html
│ │ ├── index.html
│ │ ├── modules.html
│ │ └── variables
│ │ └── TOOLS.html
│ ├── assets
│ │ └── logo.svg
│ ├── development
│ │ └── MCP_INSPECTOR_TESTING.md
│ ├── docusaurus.config.js
│ ├── explanation
│ │ ├── architecture.md
│ │ └── index.md
│ ├── guides
│ │ ├── link-validation.md
│ │ ├── playwright-integration.md
│ │ └── playwright-testing-workflow.md
│ ├── how-to
│ │ ├── analytics-setup.md
│ │ ├── custom-domains.md
│ │ ├── documentation-freshness-tracking.md
│ │ ├── github-pages-deployment.md
│ │ ├── index.md
│ │ ├── local-testing.md
│ │ ├── performance-optimization.md
│ │ ├── prompting-guide.md
│ │ ├── repository-analysis.md
│ │ ├── seo-optimization.md
│ │ ├── site-monitoring.md
│ │ ├── troubleshooting.md
│ │ └── usage-examples.md
│ ├── index.md
│ ├── knowledge-graph.md
│ ├── package-lock.json
│ ├── package.json
│ ├── phase-2-intelligence.md
│ ├── reference
│ │ ├── api-overview.md
│ │ ├── cli.md
│ │ ├── configuration.md
│ │ ├── deploy-pages.md
│ │ ├── index.md
│ │ ├── mcp-tools.md
│ │ └── prompt-templates.md
│ ├── research
│ │ ├── cross-domain-integration
│ │ │ └── README.md
│ │ ├── domain-1-mcp-architecture
│ │ │ ├── index.md
│ │ │ └── mcp-performance-research.md
│ │ ├── domain-2-repository-analysis
│ │ │ └── README.md
│ │ ├── domain-3-ssg-recommendation
│ │ │ ├── index.md
│ │ │ └── ssg-performance-analysis.md
│ │ ├── domain-4-diataxis-integration
│ │ │ └── README.md
│ │ ├── domain-5-github-deployment
│ │ │ ├── github-pages-security-analysis.md
│ │ │ └── index.md
│ │ ├── domain-6-api-design
│ │ │ └── README.md
│ │ ├── README.md
│ │ ├── research-integration-summary-2025-01-14.md
│ │ ├── research-progress-template.md
│ │ └── research-questions-2025-01-14.md
│ ├── robots.txt
│ ├── sidebars.js
│ ├── sitemap.xml
│ ├── src
│ │ └── css
│ │ └── custom.css
│ └── tutorials
│ ├── development-setup.md
│ ├── environment-setup.md
│ ├── first-deployment.md
│ ├── getting-started.md
│ ├── index.md
│ ├── memory-workflows.md
│ └── user-onboarding.md
├── jest.config.js
├── LICENSE
├── Makefile
├── MCP_PHASE2_IMPLEMENTATION.md
├── mcp-config-example.json
├── mcp.json
├── package-lock.json
├── package.json
├── README.md
├── release.sh
├── scripts
│ └── check-package-structure.cjs
├── SECURITY.md
├── setup-precommit.sh
├── src
│ ├── benchmarks
│ │ └── performance.ts
│ ├── index.ts
│ ├── memory
│ │ ├── contextual-retrieval.ts
│ │ ├── deployment-analytics.ts
│ │ ├── enhanced-manager.ts
│ │ ├── export-import.ts
│ │ ├── freshness-kg-integration.ts
│ │ ├── index.ts
│ │ ├── integration.ts
│ │ ├── kg-code-integration.ts
│ │ ├── kg-health.ts
│ │ ├── kg-integration.ts
│ │ ├── kg-link-validator.ts
│ │ ├── kg-storage.ts
│ │ ├── knowledge-graph.ts
│ │ ├── learning.ts
│ │ ├── manager.ts
│ │ ├── multi-agent-sharing.ts
│ │ ├── pruning.ts
│ │ ├── schemas.ts
│ │ ├── storage.ts
│ │ ├── temporal-analysis.ts
│ │ ├── user-preferences.ts
│ │ └── visualization.ts
│ ├── prompts
│ │ └── technical-writer-prompts.ts
│ ├── scripts
│ │ └── benchmark.ts
│ ├── templates
│ │ └── playwright
│ │ ├── accessibility.spec.template.ts
│ │ ├── Dockerfile.template
│ │ ├── docs-e2e.workflow.template.yml
│ │ ├── link-validation.spec.template.ts
│ │ └── playwright.config.template.ts
│ ├── tools
│ │ ├── analyze-deployments.ts
│ │ ├── analyze-readme.ts
│ │ ├── analyze-repository.ts
│ │ ├── check-documentation-links.ts
│ │ ├── deploy-pages.ts
│ │ ├── detect-gaps.ts
│ │ ├── evaluate-readme-health.ts
│ │ ├── generate-config.ts
│ │ ├── generate-contextual-content.ts
│ │ ├── generate-llm-context.ts
│ │ ├── generate-readme-template.ts
│ │ ├── generate-technical-writer-prompts.ts
│ │ ├── kg-health-check.ts
│ │ ├── manage-preferences.ts
│ │ ├── manage-sitemap.ts
│ │ ├── optimize-readme.ts
│ │ ├── populate-content.ts
│ │ ├── readme-best-practices.ts
│ │ ├── recommend-ssg.ts
│ │ ├── setup-playwright-tests.ts
│ │ ├── setup-structure.ts
│ │ ├── sync-code-to-docs.ts
│ │ ├── test-local-deployment.ts
│ │ ├── track-documentation-freshness.ts
│ │ ├── update-existing-documentation.ts
│ │ ├── validate-content.ts
│ │ ├── validate-documentation-freshness.ts
│ │ ├── validate-readme-checklist.ts
│ │ └── verify-deployment.ts
│ ├── types
│ │ └── api.ts
│ ├── utils
│ │ ├── ast-analyzer.ts
│ │ ├── code-scanner.ts
│ │ ├── content-extractor.ts
│ │ ├── drift-detector.ts
│ │ ├── freshness-tracker.ts
│ │ ├── language-parsers-simple.ts
│ │ ├── permission-checker.ts
│ │ └── sitemap-generator.ts
│ └── workflows
│ └── documentation-workflow.ts
├── test-docs-local.sh
├── tests
│ ├── api
│ │ └── mcp-responses.test.ts
│ ├── benchmarks
│ │ └── performance.test.ts
│ ├── edge-cases
│ │ └── error-handling.test.ts
│ ├── functional
│ │ └── tools.test.ts
│ ├── integration
│ │ ├── kg-documentation-workflow.test.ts
│ │ ├── knowledge-graph-workflow.test.ts
│ │ ├── mcp-readme-tools.test.ts
│ │ ├── memory-mcp-tools.test.ts
│ │ ├── readme-technical-writer.test.ts
│ │ └── workflow.test.ts
│ ├── memory
│ │ ├── contextual-retrieval.test.ts
│ │ ├── enhanced-manager.test.ts
│ │ ├── export-import.test.ts
│ │ ├── freshness-kg-integration.test.ts
│ │ ├── kg-code-integration.test.ts
│ │ ├── kg-health.test.ts
│ │ ├── kg-link-validator.test.ts
│ │ ├── kg-storage-validation.test.ts
│ │ ├── kg-storage.test.ts
│ │ ├── knowledge-graph-enhanced.test.ts
│ │ ├── knowledge-graph.test.ts
│ │ ├── learning.test.ts
│ │ ├── manager-advanced.test.ts
│ │ ├── manager.test.ts
│ │ ├── mcp-resource-integration.test.ts
│ │ ├── mcp-tool-persistence.test.ts
│ │ ├── schemas.test.ts
│ │ ├── storage.test.ts
│ │ ├── temporal-analysis.test.ts
│ │ └── user-preferences.test.ts
│ ├── performance
│ │ ├── memory-load-testing.test.ts
│ │ └── memory-stress-testing.test.ts
│ ├── prompts
│ │ ├── guided-workflow-prompts.test.ts
│ │ └── technical-writer-prompts.test.ts
│ ├── server.test.ts
│ ├── setup.ts
│ ├── tools
│ │ ├── all-tools.test.ts
│ │ ├── analyze-coverage.test.ts
│ │ ├── analyze-deployments.test.ts
│ │ ├── analyze-readme.test.ts
│ │ ├── analyze-repository.test.ts
│ │ ├── check-documentation-links.test.ts
│ │ ├── deploy-pages-kg-retrieval.test.ts
│ │ ├── deploy-pages-tracking.test.ts
│ │ ├── deploy-pages.test.ts
│ │ ├── detect-gaps.test.ts
│ │ ├── evaluate-readme-health.test.ts
│ │ ├── generate-contextual-content.test.ts
│ │ ├── generate-llm-context.test.ts
│ │ ├── generate-readme-template.test.ts
│ │ ├── generate-technical-writer-prompts.test.ts
│ │ ├── kg-health-check.test.ts
│ │ ├── manage-sitemap.test.ts
│ │ ├── optimize-readme.test.ts
│ │ ├── readme-best-practices.test.ts
│ │ ├── recommend-ssg-historical.test.ts
│ │ ├── recommend-ssg-preferences.test.ts
│ │ ├── recommend-ssg.test.ts
│ │ ├── simple-coverage.test.ts
│ │ ├── sync-code-to-docs.test.ts
│ │ ├── test-local-deployment.test.ts
│ │ ├── tool-error-handling.test.ts
│ │ ├── track-documentation-freshness.test.ts
│ │ ├── validate-content.test.ts
│ │ ├── validate-documentation-freshness.test.ts
│ │ └── validate-readme-checklist.test.ts
│ ├── types
│ │ └── type-safety.test.ts
│ └── utils
│ ├── ast-analyzer.test.ts
│ ├── content-extractor.test.ts
│ ├── drift-detector.test.ts
│ ├── freshness-tracker.test.ts
│ └── sitemap-generator.test.ts
├── tsconfig.json
└── typedoc.json
```
# Files
--------------------------------------------------------------------------------
/tests/tools/sync-code-to-docs.test.ts:
--------------------------------------------------------------------------------
```typescript
/**
* Sync Code to Docs Tool Tests (Phase 3)
*/
import { handleSyncCodeToDocs } from "../../src/tools/sync-code-to-docs.js";
import { promises as fs } from "fs";
import { tmpdir } from "os";
import { join } from "path";
import { mkdtemp, rm } from "fs/promises";
import { DriftDetector } from "../../src/utils/drift-detector.js";
describe("sync_code_to_docs tool", () => {
let tempDir: string;
let projectPath: string;
let docsPath: string;
beforeEach(async () => {
tempDir = await mkdtemp(join(tmpdir(), "sync-test-"));
projectPath = join(tempDir, "project");
docsPath = join(tempDir, "docs");
await fs.mkdir(join(projectPath, "src"), { recursive: true });
await fs.mkdir(docsPath, { recursive: true });
});
afterEach(async () => {
await rm(tempDir, { recursive: true, force: true });
});
describe("Detect Mode", () => {
test("should detect drift without making changes", async () => {
// Create source file
const sourceCode = `
export function calculate(x: number): number {
return x * 2;
}
`.trim();
await fs.writeFile(join(projectPath, "src", "calc.ts"), sourceCode);
// Create documentation
const docContent = `
# Calculator
## calculate(x: number): number
Doubles the input.
`.trim();
await fs.writeFile(join(docsPath, "calc.md"), docContent);
// Run in detect mode
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
createSnapshot: true,
});
expect(result).toBeDefined();
expect(result.content).toBeDefined();
expect(result.content[0]).toBeDefined();
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.data.mode).toBe("detect");
// Verify no changes were made
const docAfter = await fs.readFile(join(docsPath, "calc.md"), "utf-8");
expect(docAfter).toBe(docContent);
});
test("should create baseline snapshot on first run", async () => {
const sourceCode = `export function test(): void {}`;
await fs.writeFile(join(projectPath, "src", "test.ts"), sourceCode);
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
createSnapshot: true,
});
expect(result).toBeDefined();
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.data.snapshotId).toBeTruthy();
// Check snapshot was created
const snapshotDir = join(tempDir, "project", ".documcp", "snapshots");
const files = await fs.readdir(snapshotDir);
expect(files.length).toBeGreaterThan(0);
});
test("should report drift statistics", async () => {
// Create initial snapshot
const oldCode = `
export function oldFunction(): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "changes.ts"), oldCode);
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
createSnapshot: true,
});
// Make changes
const newCode = `
export function newFunction(): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "changes.ts"), newCode);
// Detect drift
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
createSnapshot: true,
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.data.stats).toBeDefined();
expect(data.data.stats.filesAnalyzed).toBeGreaterThanOrEqual(0);
});
});
describe("Apply Mode", () => {
test("should apply high-confidence changes automatically", async () => {
// Create code with JSDoc
const sourceCode = `
/**
* Calculates the sum of two numbers
* @param a First number
* @param b Second number
* @returns The sum
*/
export function add(a: number, b: number): number {
return a + b;
}
`.trim();
await fs.writeFile(join(projectPath, "src", "math.ts"), sourceCode);
// Create minimal documentation
const docContent = `
# Math Module
Documentation needed.
`.trim();
await fs.writeFile(join(docsPath, "math.md"), docContent);
// Create baseline
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
createSnapshot: true,
});
// Run in apply mode with high threshold
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "apply",
autoApplyThreshold: 0.9,
createSnapshot: true,
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.data.mode).toBe("apply");
// Stats should show applied or pending changes
const stats = data.data.stats;
expect(
stats.changesApplied + stats.changesPending,
).toBeGreaterThanOrEqual(0);
});
test("should respect confidence threshold", async () => {
// Setup code and docs
const sourceCode = `export function test(): void {}`;
await fs.writeFile(join(projectPath, "src", "test.ts"), sourceCode);
const docContent = `# Test`;
await fs.writeFile(join(docsPath, "test.md"), docContent);
// Create baseline
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
// Apply with very high threshold (most changes won't meet it)
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "apply",
autoApplyThreshold: 0.99,
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
// With high threshold, most changes should be pending
if (data.data.stats.driftsDetected > 0) {
expect(data.data.pendingChanges.length).toBeGreaterThanOrEqual(0);
}
});
test("should create snapshot before applying changes", async () => {
const sourceCode = `export function test(): void {}`;
await fs.writeFile(
join(projectPath, "src", "snapshot-test.ts"),
sourceCode,
);
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "apply",
createSnapshot: true,
});
// Verify snapshot exists
const snapshotDir = join(projectPath, ".documcp", "snapshots");
const files = await fs.readdir(snapshotDir);
expect(files.length).toBeGreaterThan(0);
});
});
describe("Auto Mode", () => {
test("should apply all changes in auto mode", async () => {
const sourceCode = `
export function autoFunction(param: string): string {
return param.toUpperCase();
}
`.trim();
await fs.writeFile(join(projectPath, "src", "auto.ts"), sourceCode);
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "auto",
createSnapshot: true,
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.data.mode).toBe("auto");
});
});
describe("Error Handling", () => {
test("should handle invalid project path", async () => {
const result = await handleSyncCodeToDocs({
projectPath: "/nonexistent/path",
docsPath,
mode: "detect",
});
expect(result).toBeDefined();
expect(result.content).toBeDefined();
const data = JSON.parse(result.content[0].text);
// Should either fail gracefully or handle missing path
expect(data).toBeDefined();
});
test("should handle invalid docs path", async () => {
const sourceCode = `export function test(): void {}`;
await fs.writeFile(join(projectPath, "src", "test.ts"), sourceCode);
const result = await handleSyncCodeToDocs({
projectPath,
docsPath: "/nonexistent/docs",
mode: "detect",
});
expect(result).toBeDefined();
const data = JSON.parse(result.content[0].text);
expect(data).toBeDefined();
});
test("should handle empty project", async () => {
// Empty project directory
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
expect(result).toBeDefined();
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.data.stats.filesAnalyzed).toBe(0);
});
});
describe("Recommendations and Next Steps", () => {
test("should provide recommendations based on results", async () => {
const sourceCode = `
export function critical(param: number): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "critical.ts"), sourceCode);
// Create baseline
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
// Make breaking change
const newCode = `
export function critical(param: string, extra: boolean): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "critical.ts"), newCode);
// Detect changes
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.recommendations).toBeDefined();
expect(Array.isArray(data.recommendations)).toBe(true);
});
test("should provide next steps", async () => {
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.nextSteps).toBeDefined();
expect(Array.isArray(data.nextSteps)).toBe(true);
});
});
describe("Integration with Knowledge Graph", () => {
test("should store sync events", async () => {
const sourceCode = `export function kgTest(): void {}`;
await fs.writeFile(join(projectPath, "src", "kg-test.ts"), sourceCode);
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
// Sync event should be created (even if storage fails, shouldn't error)
expect(data.data).toBeDefined();
});
});
describe("Preview Mode", () => {
test("should show changes in preview mode without applying", async () => {
const sourceCode = `
export function previewFunc(x: number): number {
return x * 3;
}
`.trim();
await fs.writeFile(join(projectPath, "src", "preview.ts"), sourceCode);
const docContent = `
# Preview
Old documentation.
`.trim();
await fs.writeFile(join(docsPath, "preview.md"), docContent);
// Create baseline
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
// Change code
const newCode = `
export function previewFunc(x: number, y: number): number {
return x * y;
}
`.trim();
await fs.writeFile(join(projectPath, "src", "preview.ts"), newCode);
// Preview changes
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "preview",
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.data.mode).toBe("preview");
// Verify documentation wasn't changed
const docAfter = await fs.readFile(join(docsPath, "preview.md"), "utf-8");
expect(docAfter).toBe(docContent);
});
});
describe("Documentation Change Application", () => {
test("should apply changes when low-confidence changes exist in auto mode", async () => {
// Create a source file with documentation
const sourceCode = `
/**
* Multiplies two numbers together
* @param x First number
* @param y Second number
*/
export function multiply(x: number, y: number): number {
return x * y;
}
`.trim();
await fs.writeFile(join(projectPath, "src", "math.ts"), sourceCode);
// Create outdated documentation
const docContent = `
# Math Module
## multiply
Adds two numbers.
`.trim();
await fs.writeFile(join(docsPath, "math.md"), docContent);
// Create baseline
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
// Run in auto mode (applies all changes)
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "auto",
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.data.mode).toBe("auto");
});
test("should handle apply errors gracefully", async () => {
// Create source file
const sourceCode = `export function testFunc(): void {}`;
await fs.writeFile(join(projectPath, "src", "test.ts"), sourceCode);
// Writing documentation into a read-only parent directory would fail,
// but for this test we just verify that the error-handling path exists
const docContent = `# Test`;
await fs.writeFile(join(docsPath, "test.md"), docContent);
// Create baseline
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
// Modify code
const newCode = `export function testFunc(param: string): void {}`;
await fs.writeFile(join(projectPath, "src", "test.ts"), newCode);
// Try to apply changes
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "apply",
autoApplyThreshold: 0.0, // Very low threshold
});
// Should complete without crashing
expect(result).toBeDefined();
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
});
});
describe("Recommendation Edge Cases", () => {
test("should recommend review for breaking changes", async () => {
// Create initial code
const oldCode = `
export function oldApi(x: number): string {
return x.toString();
}
`.trim();
await fs.writeFile(join(projectPath, "src", "api.ts"), oldCode);
// Create baseline
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
// Make breaking change
const newCode = `
export function newApi(x: number, y: string): boolean {
return x > 0;
}
`.trim();
await fs.writeFile(join(projectPath, "src", "api.ts"), newCode);
// Detect changes
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
// Should have recommendations
expect(data.recommendations).toBeDefined();
expect(Array.isArray(data.recommendations)).toBe(true);
});
test("should show info when no drift detected", async () => {
// Create code
const sourceCode = `export function stable(): void {}`;
await fs.writeFile(join(projectPath, "src", "stable.ts"), sourceCode);
// Create baseline
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
// Run again without changes
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.recommendations).toBeDefined();
// Should have "No Drift Detected" recommendation
const noDriftRec = data.recommendations.find(
(r: any) => r.title?.includes("No Drift"),
);
expect(noDriftRec).toBeDefined();
});
test("should recommend validation after applying changes", async () => {
const sourceCode = `
/**
* Test function
*/
export function test(): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "validated.ts"), sourceCode);
// Create baseline
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
// Modify code
const newCode = `
/**
* Modified test function
*/
export function test(param: string): void {}
`.trim();
await fs.writeFile(join(projectPath, "src", "validated.ts"), newCode);
// Apply changes
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "auto",
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
// Should have next steps
expect(data.nextSteps).toBeDefined();
expect(Array.isArray(data.nextSteps)).toBe(true);
});
});
describe("Next Steps Generation", () => {
test("should suggest apply mode when in detect mode with pending changes", async () => {
const sourceCode = `export function needsSync(): void {}`;
await fs.writeFile(join(projectPath, "src", "sync.ts"), sourceCode);
// Create baseline
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
// Change code
const newCode = `export function needsSync(param: number): void {}`;
await fs.writeFile(join(projectPath, "src", "sync.ts"), newCode);
// Detect in detect mode
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.nextSteps).toBeDefined();
// If there are pending changes, should suggest apply mode
if (data.data.pendingChanges?.length > 0) {
const applyStep = data.nextSteps.find(
(s: any) => s.action?.includes("Apply"),
);
expect(applyStep).toBeDefined();
}
});
test("should suggest review for pending manual changes", async () => {
const sourceCode = `export function complex(): void {}`;
await fs.writeFile(join(projectPath, "src", "complex.ts"), sourceCode);
// Create baseline
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
// Change code
const newCode = `export function complex(a: number, b: string): boolean { return true; }`;
await fs.writeFile(join(projectPath, "src", "complex.ts"), newCode);
// Detect with very high threshold (forces manual review)
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "apply",
autoApplyThreshold: 0.99,
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.nextSteps).toBeDefined();
});
});
describe("Snapshot Management", () => {
test("should not create snapshot when createSnapshot is false in detect mode", async () => {
const sourceCode = `export function noSnapshot(): void {}`;
await fs.writeFile(join(projectPath, "src", "nosnapshot.ts"), sourceCode);
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
createSnapshot: false,
});
// Should still work even without snapshot
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
createSnapshot: false,
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
});
});
describe("Error Path Coverage", () => {
test("should handle KG storage failures gracefully", async () => {
const sourceCode = `export function kgError(): void {}`;
await fs.writeFile(join(projectPath, "src", "kg-error.ts"), sourceCode);
// The tool should complete even if KG storage fails
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
});
test("should handle zero drift detections", async () => {
const sourceCode = `export function stable(): void {}`;
await fs.writeFile(join(projectPath, "src", "stable.ts"), sourceCode);
// Create baseline
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
// Run again with no changes
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.data.stats.driftsDetected).toBe(0);
});
test("should handle files with no drift suggestions", async () => {
const sourceCode = `export function noDrift(): void {}`;
await fs.writeFile(join(projectPath, "src", "nodrift.ts"), sourceCode);
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "apply",
autoApplyThreshold: 0.5,
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
// Should handle case with no drift gracefully
expect(data.data.appliedChanges).toBeDefined();
expect(data.data.pendingChanges).toBeDefined();
});
test("should handle recommendations with zero breaking changes", async () => {
const sourceCode = `export function minor(): void {}`;
await fs.writeFile(join(projectPath, "src", "minor.ts"), sourceCode);
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
// Should not have critical recommendations for no breaking changes
const criticalRecs = data.recommendations?.filter(
(r: any) => r.type === "critical",
);
expect(criticalRecs || []).toHaveLength(0);
});
test("should handle pending changes without manual review", async () => {
const sourceCode = `export function autoApply(): void {}`;
await fs.writeFile(join(projectPath, "src", "autoapply.ts"), sourceCode);
// Create baseline
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
// Modify
const newCode = `export function autoApply(x: number): number { return x; }`;
await fs.writeFile(join(projectPath, "src", "autoapply.ts"), newCode);
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "auto", // Auto applies all
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
});
test("should handle next steps when no breaking changes exist", async () => {
const sourceCode = `export function noBreaking(): void {}`;
await fs.writeFile(join(projectPath, "src", "nobreaking.ts"), sourceCode);
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
// Should not suggest reviewing breaking changes when there are none
const breakingStep = data.nextSteps?.find(
(s: any) => s.action?.toLowerCase().includes("breaking"),
);
expect(breakingStep).toBeUndefined();
});
test("should handle next steps when no changes were applied", async () => {
const sourceCode = `export function noApplied(): void {}`;
await fs.writeFile(join(projectPath, "src", "noapplied.ts"), sourceCode);
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect", // Detect mode doesn't apply
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.data.appliedChanges).toHaveLength(0);
});
test("should handle next steps when no pending changes require review", async () => {
const sourceCode = `export function noPending(): void {}`;
await fs.writeFile(join(projectPath, "src", "nopending.ts"), sourceCode);
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "auto", // Auto applies everything
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
});
test("should handle apply mode with suggestions below threshold", async () => {
const sourceCode = `export function lowConfidence(): void {}`;
await fs.writeFile(
join(projectPath, "src", "lowconfidence.ts"),
sourceCode,
);
// Create baseline
await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
// Modify
const newCode = `export function lowConfidence(param: string): void {}`;
await fs.writeFile(join(projectPath, "src", "lowconfidence.ts"), newCode);
// Very high threshold - suggestions won't meet it
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "apply",
autoApplyThreshold: 1.0,
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
});
test("should handle context parameter with info logging", async () => {
const sourceCode = `export function withContext(): void {}`;
await fs.writeFile(join(projectPath, "src", "context.ts"), sourceCode);
const mockContext = {
info: jest.fn(),
warn: jest.fn(),
};
const result = await handleSyncCodeToDocs(
{
projectPath,
docsPath,
mode: "detect",
},
mockContext,
);
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
// Context info should have been called
expect(mockContext.info).toHaveBeenCalled();
});
test("should handle snapshot creation in non-detect modes", async () => {
const sourceCode = `export function modeSnapshot(): void {}`;
await fs.writeFile(
join(projectPath, "src", "modesnapshot.ts"),
sourceCode,
);
// Apply mode should create a snapshot even when createSnapshot is false
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "apply",
createSnapshot: false, // But mode !== "detect" overrides this
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
});
});
describe("Mocked Drift Detector Tests", () => {
let mockDetector: jest.Mocked<DriftDetector>;
beforeEach(() => {
// Create a real detector instance but spy on its methods
mockDetector = new DriftDetector(
projectPath,
) as jest.Mocked<DriftDetector>;
});
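// The tests below stub DriftDetector.prototype methods (initialize, createSnapshot,
// loadLatestSnapshot, detectDrift) with jest.spyOn, so the handler's apply/auto paths
// run against controlled drift results instead of real AST analysis.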
test("should apply high-confidence suggestions automatically", async () => {
// Create real documentation file
const docPath = join(docsPath, "api.md");
const originalDoc = `# API
## oldFunction
This is outdated.`;
await fs.writeFile(docPath, originalDoc);
// Mock drift detector to return suggestions
jest.spyOn(DriftDetector.prototype, "initialize").mockResolvedValue();
jest.spyOn(DriftDetector.prototype, "createSnapshot").mockResolvedValue({
timestamp: "2025-01-01T00:00:00.000Z",
projectPath,
files: new Map([
[
"src/api.ts",
{
filePath: "src/api.ts",
language: "typescript",
functions: [],
classes: [],
interfaces: [],
types: [],
imports: [],
exports: [],
contentHash: "abc123",
lastModified: "2025-01-01T00:00:00.000Z",
linesOfCode: 10,
complexity: 1,
},
],
]),
documentation: new Map(),
});
jest
.spyOn(DriftDetector.prototype, "loadLatestSnapshot")
.mockResolvedValue({
timestamp: "2025-01-01T00:00:00.000Z",
projectPath,
files: new Map(),
documentation: new Map(),
});
jest.spyOn(DriftDetector.prototype, "detectDrift").mockResolvedValue([
{
filePath: "src/api.ts",
hasDrift: true,
severity: "medium",
drifts: [
{
type: "outdated",
affectedDocs: [docPath],
codeChanges: [
{
type: "modified",
category: "function",
name: "oldFunction",
details: "Function renamed to newFunction",
impactLevel: "major",
},
],
description: "Function signature changed",
detectedAt: "2025-01-01T00:00:00.000Z",
severity: "medium",
},
],
impactAnalysis: {
breakingChanges: 0,
majorChanges: 1,
minorChanges: 0,
affectedDocFiles: [docPath],
estimatedUpdateEffort: "medium",
requiresManualReview: false,
},
suggestions: [
{
docFile: docPath,
section: "oldFunction",
currentContent: "This is outdated.",
suggestedContent: `## newFunction
Updated documentation for new function.`,
reasoning: "Function was renamed from oldFunction to newFunction",
confidence: 0.95,
autoApplicable: true,
},
],
},
]);
// Run in apply mode
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "apply",
autoApplyThreshold: 0.8,
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.data.appliedChanges.length).toBeGreaterThan(0);
// Verify the file was actually modified
const updatedDoc = await fs.readFile(docPath, "utf-8");
expect(updatedDoc).toContain("newFunction");
});
test("should not apply low-confidence suggestions", async () => {
const docPath = join(docsPath, "lowconf.md");
const originalDoc = `# Low Confidence
## someFunction
Original content.`;
await fs.writeFile(docPath, originalDoc);
jest.spyOn(DriftDetector.prototype, "initialize").mockResolvedValue();
jest.spyOn(DriftDetector.prototype, "createSnapshot").mockResolvedValue({
timestamp: "2025-01-01T00:00:00.000Z",
projectPath,
files: new Map(),
documentation: new Map(),
});
jest
.spyOn(DriftDetector.prototype, "loadLatestSnapshot")
.mockResolvedValue({
timestamp: "2025-01-01T00:00:00.000Z",
projectPath,
files: new Map(),
documentation: new Map(),
});
jest.spyOn(DriftDetector.prototype, "detectDrift").mockResolvedValue([
{
filePath: "src/lowconf.ts",
hasDrift: true,
severity: "low",
drifts: [
{
type: "outdated",
affectedDocs: [docPath],
codeChanges: [
{
type: "modified",
category: "function",
name: "someFunction",
details: "Minor change",
impactLevel: "minor",
},
],
description: "Minor change",
detectedAt: "2025-01-01T00:00:00.000Z",
severity: "low",
},
],
impactAnalysis: {
breakingChanges: 0,
majorChanges: 0,
minorChanges: 1,
affectedDocFiles: [docPath],
estimatedUpdateEffort: "low",
requiresManualReview: false,
},
suggestions: [
{
docFile: docPath,
section: "someFunction",
currentContent: "Original content.",
suggestedContent: "Suggested content.",
reasoning: "Minor update needed",
confidence: 0.5, // Low confidence
autoApplicable: true,
},
],
},
]);
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "apply",
autoApplyThreshold: 0.8, // Higher than suggestion confidence
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.data.pendingChanges.length).toBeGreaterThan(0);
expect(data.data.appliedChanges.length).toBe(0);
// Verify file was NOT modified
const unchangedDoc = await fs.readFile(docPath, "utf-8");
expect(unchangedDoc).toBe(originalDoc);
});
test("should apply all suggestions in auto mode regardless of confidence", async () => {
const docPath = join(docsPath, "auto.md");
const originalDoc = `# Auto Mode
## function1
Old content.`;
await fs.writeFile(docPath, originalDoc);
jest.spyOn(DriftDetector.prototype, "initialize").mockResolvedValue();
jest.spyOn(DriftDetector.prototype, "createSnapshot").mockResolvedValue({
timestamp: "2025-01-01T00:00:00.000Z",
projectPath,
files: new Map(),
documentation: new Map(),
});
jest
.spyOn(DriftDetector.prototype, "loadLatestSnapshot")
.mockResolvedValue({
timestamp: "2025-01-01T00:00:00.000Z",
projectPath,
files: new Map(),
documentation: new Map(),
});
jest.spyOn(DriftDetector.prototype, "detectDrift").mockResolvedValue([
{
filePath: "src/auto.ts",
hasDrift: true,
severity: "low",
drifts: [
{
type: "outdated",
affectedDocs: [docPath],
codeChanges: [
{
type: "modified",
category: "function",
name: "function1",
details: "Change",
impactLevel: "minor",
},
],
description: "Change",
detectedAt: "2025-01-01T00:00:00.000Z",
severity: "low",
},
],
impactAnalysis: {
breakingChanges: 0,
majorChanges: 0,
minorChanges: 1,
affectedDocFiles: [docPath],
estimatedUpdateEffort: "low",
requiresManualReview: false,
},
suggestions: [
{
docFile: docPath,
section: "function1",
currentContent: "Old content.",
suggestedContent: `## function1
New content from auto mode.`,
reasoning: "Auto-applied update",
confidence: 0.3, // Very low confidence
autoApplicable: false, // Not auto-applicable
},
],
},
]);
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "auto", // Auto mode applies everything
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.data.appliedChanges.length).toBeGreaterThan(0);
// Verify file was modified
const updatedDoc = await fs.readFile(docPath, "utf-8");
expect(updatedDoc).toContain("New content from auto mode");
});
test("should handle apply errors and mark as pending", async () => {
const docPath = join(docsPath, "error.md");
// Don't create the file - this will cause an error
jest.spyOn(DriftDetector.prototype, "initialize").mockResolvedValue();
jest.spyOn(DriftDetector.prototype, "createSnapshot").mockResolvedValue({
timestamp: "2025-01-01T00:00:00.000Z",
projectPath,
files: new Map(),
documentation: new Map(),
});
jest
.spyOn(DriftDetector.prototype, "loadLatestSnapshot")
.mockResolvedValue({
timestamp: "2025-01-01T00:00:00.000Z",
projectPath,
files: new Map(),
documentation: new Map(),
});
jest.spyOn(DriftDetector.prototype, "detectDrift").mockResolvedValue([
{
filePath: "src/error.ts",
hasDrift: true,
severity: "medium",
drifts: [
{
type: "outdated",
affectedDocs: [docPath],
codeChanges: [
{
type: "modified",
category: "function",
name: "errorFunction",
details: "Change",
impactLevel: "minor",
},
],
description: "Change",
detectedAt: "2025-01-01T00:00:00.000Z",
severity: "medium",
},
],
impactAnalysis: {
breakingChanges: 0,
majorChanges: 1,
minorChanges: 0,
affectedDocFiles: [docPath],
estimatedUpdateEffort: "medium",
requiresManualReview: false,
},
suggestions: [
{
docFile: docPath, // File doesn't exist
section: "errorSection",
currentContent: "N/A",
suggestedContent: "New content",
reasoning: "Should fail",
confidence: 0.95,
autoApplicable: true,
},
],
},
]);
const mockContext = {
info: jest.fn(),
warn: jest.fn(),
};
const result = await handleSyncCodeToDocs(
{
projectPath,
docsPath,
mode: "apply",
autoApplyThreshold: 0.8,
},
mockContext,
);
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
// Failed applies should be added to pending changes
expect(data.data.pendingChanges.length).toBeGreaterThan(0);
expect(mockContext.warn).toHaveBeenCalled();
});
test("should add new section when section doesn't exist", async () => {
const docPath = join(docsPath, "newsection.md");
const originalDoc = `# New Section Test
## existingSection
Existing content.`;
await fs.writeFile(docPath, originalDoc);
jest.spyOn(DriftDetector.prototype, "initialize").mockResolvedValue();
jest.spyOn(DriftDetector.prototype, "createSnapshot").mockResolvedValue({
timestamp: "2025-01-01T00:00:00.000Z",
projectPath,
files: new Map(),
documentation: new Map(),
});
jest
.spyOn(DriftDetector.prototype, "loadLatestSnapshot")
.mockResolvedValue({
timestamp: "2025-01-01T00:00:00.000Z",
projectPath,
files: new Map(),
documentation: new Map(),
});
jest.spyOn(DriftDetector.prototype, "detectDrift").mockResolvedValue([
{
filePath: "src/new.ts",
hasDrift: true,
severity: "low",
drifts: [
{
type: "missing",
affectedDocs: [docPath],
codeChanges: [
{
type: "added",
category: "function",
name: "newFunction",
details: "New function added",
impactLevel: "minor",
},
],
description: "New function",
detectedAt: "2025-01-01T00:00:00.000Z",
severity: "low",
},
],
impactAnalysis: {
breakingChanges: 0,
majorChanges: 0,
minorChanges: 1,
affectedDocFiles: [docPath],
estimatedUpdateEffort: "low",
requiresManualReview: false,
},
suggestions: [
{
docFile: docPath,
section: "newSection", // This section doesn't exist
currentContent: "",
suggestedContent: `## newSection
This is a brand new section.`,
reasoning: "New function added",
confidence: 0.9,
autoApplicable: true,
},
],
},
]);
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "apply",
autoApplyThreshold: 0.8,
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.data.appliedChanges.length).toBeGreaterThan(0);
// Verify new section was appended
const updatedDoc = await fs.readFile(docPath, "utf-8");
expect(updatedDoc).toContain("newSection");
expect(updatedDoc).toContain("brand new section");
});
test("should handle breaking changes recommendation", async () => {
const docPath = join(docsPath, "breaking.md");
await fs.writeFile(docPath, "# Breaking");
jest.spyOn(DriftDetector.prototype, "initialize").mockResolvedValue();
jest.spyOn(DriftDetector.prototype, "createSnapshot").mockResolvedValue({
timestamp: "2025-01-01T00:00:00.000Z",
projectPath,
files: new Map(),
documentation: new Map(),
});
jest
.spyOn(DriftDetector.prototype, "loadLatestSnapshot")
.mockResolvedValue({
timestamp: "2025-01-01T00:00:00.000Z",
projectPath,
files: new Map(),
documentation: new Map(),
});
jest.spyOn(DriftDetector.prototype, "detectDrift").mockResolvedValue([
{
filePath: "src/breaking.ts",
hasDrift: true,
severity: "critical",
drifts: [
{
type: "breaking",
affectedDocs: [docPath],
codeChanges: [
{
type: "removed",
category: "function",
name: "oldAPI",
details: "Breaking change",
impactLevel: "breaking",
},
],
description: "Breaking change",
detectedAt: "2025-01-01T00:00:00.000Z",
severity: "critical",
},
],
impactAnalysis: {
breakingChanges: 2, // Multiple breaking changes
majorChanges: 0,
minorChanges: 0,
affectedDocFiles: [docPath],
estimatedUpdateEffort: "high",
requiresManualReview: true,
},
suggestions: [
{
docFile: docPath,
section: "API",
currentContent: "Old API",
suggestedContent: "New API",
reasoning: "Breaking change",
confidence: 0.9,
autoApplicable: true,
},
],
},
]);
const result = await handleSyncCodeToDocs({
projectPath,
docsPath,
mode: "detect",
});
const data = JSON.parse(result.content[0].text);
expect(data.success).toBe(true);
expect(data.data.stats.breakingChanges).toBe(2);
// Should have critical recommendation
const criticalRec = data.recommendations.find(
(r: any) => r.type === "critical",
);
expect(criticalRec).toBeDefined();
expect(criticalRec.title).toContain("Breaking");
});
afterEach(() => {
jest.restoreAllMocks();
});
});
});
```
--------------------------------------------------------------------------------
/docs/api/assets/style.css:
--------------------------------------------------------------------------------
```css
@layer typedoc {
:root {
--dim-toolbar-contents-height: 2.5rem;
--dim-toolbar-border-bottom-width: 1px;
--dim-header-height: calc(
var(--dim-toolbar-border-bottom-width) +
var(--dim-toolbar-contents-height)
);
/* 0rem for mobile; a unit is required for the calculation in `calc` */
--dim-container-main-margin-y: 0rem;
--dim-footer-height: 3.5rem;
--modal-animation-duration: 0.2s;
}
:root {
/* Light */
--light-color-background: #f2f4f8;
--light-color-background-secondary: #eff0f1;
/* Not to be confused with [:active](https://developer.mozilla.org/en-US/docs/Web/CSS/:active) */
--light-color-background-active: #d6d8da;
--light-color-background-warning: #e6e600;
--light-color-warning-text: #222;
--light-color-accent: #c5c7c9;
--light-color-active-menu-item: var(--light-color-background-active);
--light-color-text: #222;
--light-color-contrast-text: #000;
--light-color-text-aside: #5e5e5e;
--light-color-icon-background: var(--light-color-background);
--light-color-icon-text: var(--light-color-text);
--light-color-comment-tag-text: var(--light-color-text);
--light-color-comment-tag: var(--light-color-background);
--light-color-link: #1f70c2;
--light-color-focus-outline: #3584e4;
--light-color-ts-keyword: #056bd6;
--light-color-ts-project: #b111c9;
--light-color-ts-module: var(--light-color-ts-project);
--light-color-ts-namespace: var(--light-color-ts-project);
--light-color-ts-enum: #7e6f15;
--light-color-ts-enum-member: var(--light-color-ts-enum);
--light-color-ts-variable: #4760ec;
--light-color-ts-function: #572be7;
--light-color-ts-class: #1f70c2;
--light-color-ts-interface: #108024;
--light-color-ts-constructor: var(--light-color-ts-class);
--light-color-ts-property: #9f5f30;
--light-color-ts-method: #be3989;
--light-color-ts-reference: #ff4d82;
--light-color-ts-call-signature: var(--light-color-ts-method);
--light-color-ts-index-signature: var(--light-color-ts-property);
--light-color-ts-constructor-signature: var(
--light-color-ts-constructor
);
--light-color-ts-parameter: var(--light-color-ts-variable);
/* type literal not included as links will never be generated to it */
--light-color-ts-type-parameter: #a55c0e;
--light-color-ts-accessor: #c73c3c;
--light-color-ts-get-signature: var(--light-color-ts-accessor);
--light-color-ts-set-signature: var(--light-color-ts-accessor);
--light-color-ts-type-alias: #d51270;
/* reference not included as links will be colored with the kind that it points to */
--light-color-document: #000000;
--light-color-alert-note: #0969d9;
--light-color-alert-tip: #1a7f37;
--light-color-alert-important: #8250df;
--light-color-alert-warning: #9a6700;
--light-color-alert-caution: #cf222e;
--light-external-icon: url("data:image/svg+xml;utf8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100' width='10' height='10'><path fill-opacity='0' stroke='%23000' stroke-width='10' d='m43,35H5v60h60V57M45,5v10l10,10-30,30 20,20 30-30 10,10h10V5z'/></svg>");
--light-color-scheme: light;
}
:root {
/* Dark */
--dark-color-background: #2b2e33;
--dark-color-background-secondary: #1e2024;
/* Not to be confused with [:active](https://developer.mozilla.org/en-US/docs/Web/CSS/:active) */
--dark-color-background-active: #5d5d6a;
--dark-color-background-warning: #bebe00;
--dark-color-warning-text: #222;
--dark-color-accent: #9096a2;
--dark-color-active-menu-item: var(--dark-color-background-active);
--dark-color-text: #f5f5f5;
--dark-color-contrast-text: #ffffff;
--dark-color-text-aside: #dddddd;
--dark-color-icon-background: var(--dark-color-background-secondary);
--dark-color-icon-text: var(--dark-color-text);
--dark-color-comment-tag-text: var(--dark-color-text);
--dark-color-comment-tag: var(--dark-color-background);
--dark-color-link: #00aff4;
--dark-color-focus-outline: #4c97f2;
--dark-color-ts-keyword: #3399ff;
--dark-color-ts-project: #e358ff;
--dark-color-ts-module: var(--dark-color-ts-project);
--dark-color-ts-namespace: var(--dark-color-ts-project);
--dark-color-ts-enum: #f4d93e;
--dark-color-ts-enum-member: var(--dark-color-ts-enum);
--dark-color-ts-variable: #798dff;
--dark-color-ts-function: #a280ff;
--dark-color-ts-class: #8ac4ff;
--dark-color-ts-interface: #6cff87;
--dark-color-ts-constructor: var(--dark-color-ts-class);
--dark-color-ts-property: #ff984d;
--dark-color-ts-method: #ff4db8;
--dark-color-ts-reference: #ff4d82;
--dark-color-ts-call-signature: var(--dark-color-ts-method);
--dark-color-ts-index-signature: var(--dark-color-ts-property);
--dark-color-ts-constructor-signature: var(--dark-color-ts-constructor);
--dark-color-ts-parameter: var(--dark-color-ts-variable);
/* type literal not included as links will never be generated to it */
--dark-color-ts-type-parameter: #e07d13;
--dark-color-ts-accessor: #ff6060;
--dark-color-ts-get-signature: var(--dark-color-ts-accessor);
--dark-color-ts-set-signature: var(--dark-color-ts-accessor);
--dark-color-ts-type-alias: #ff6492;
/* reference not included as links will be colored with the kind that it points to */
--dark-color-document: #ffffff;
--dark-color-alert-note: #0969d9;
--dark-color-alert-tip: #1a7f37;
--dark-color-alert-important: #8250df;
--dark-color-alert-warning: #9a6700;
--dark-color-alert-caution: #cf222e;
--dark-external-icon: url("data:image/svg+xml;utf8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100' width='10' height='10'><path fill-opacity='0' stroke='%23fff' stroke-width='10' d='m43,35H5v60h60V57M45,5v10l10,10-30,30 20,20 30-30 10,10h10V5z'/></svg>");
--dark-color-scheme: dark;
}
@media (prefers-color-scheme: light) {
:root {
--color-background: var(--light-color-background);
--color-background-secondary: var(
--light-color-background-secondary
);
--color-background-active: var(--light-color-background-active);
--color-background-warning: var(--light-color-background-warning);
--color-warning-text: var(--light-color-warning-text);
--color-accent: var(--light-color-accent);
--color-active-menu-item: var(--light-color-active-menu-item);
--color-text: var(--light-color-text);
--color-contrast-text: var(--light-color-contrast-text);
--color-text-aside: var(--light-color-text-aside);
--color-icon-background: var(--light-color-icon-background);
--color-icon-text: var(--light-color-icon-text);
--color-comment-tag-text: var(--light-color-text);
--color-comment-tag: var(--light-color-background);
--color-link: var(--light-color-link);
--color-focus-outline: var(--light-color-focus-outline);
--color-ts-keyword: var(--light-color-ts-keyword);
--color-ts-project: var(--light-color-ts-project);
--color-ts-module: var(--light-color-ts-module);
--color-ts-namespace: var(--light-color-ts-namespace);
--color-ts-enum: var(--light-color-ts-enum);
--color-ts-enum-member: var(--light-color-ts-enum-member);
--color-ts-variable: var(--light-color-ts-variable);
--color-ts-function: var(--light-color-ts-function);
--color-ts-class: var(--light-color-ts-class);
--color-ts-interface: var(--light-color-ts-interface);
--color-ts-constructor: var(--light-color-ts-constructor);
--color-ts-property: var(--light-color-ts-property);
--color-ts-method: var(--light-color-ts-method);
--color-ts-reference: var(--light-color-ts-reference);
--color-ts-call-signature: var(--light-color-ts-call-signature);
--color-ts-index-signature: var(--light-color-ts-index-signature);
--color-ts-constructor-signature: var(
--light-color-ts-constructor-signature
);
--color-ts-parameter: var(--light-color-ts-parameter);
--color-ts-type-parameter: var(--light-color-ts-type-parameter);
--color-ts-accessor: var(--light-color-ts-accessor);
--color-ts-get-signature: var(--light-color-ts-get-signature);
--color-ts-set-signature: var(--light-color-ts-set-signature);
--color-ts-type-alias: var(--light-color-ts-type-alias);
--color-document: var(--light-color-document);
--color-alert-note: var(--light-color-alert-note);
--color-alert-tip: var(--light-color-alert-tip);
--color-alert-important: var(--light-color-alert-important);
--color-alert-warning: var(--light-color-alert-warning);
--color-alert-caution: var(--light-color-alert-caution);
--external-icon: var(--light-external-icon);
--color-scheme: var(--light-color-scheme);
}
}
@media (prefers-color-scheme: dark) {
:root {
--color-background: var(--dark-color-background);
--color-background-secondary: var(
--dark-color-background-secondary
);
--color-background-active: var(--dark-color-background-active);
--color-background-warning: var(--dark-color-background-warning);
--color-warning-text: var(--dark-color-warning-text);
--color-accent: var(--dark-color-accent);
--color-active-menu-item: var(--dark-color-active-menu-item);
--color-text: var(--dark-color-text);
--color-contrast-text: var(--dark-color-contrast-text);
--color-text-aside: var(--dark-color-text-aside);
--color-icon-background: var(--dark-color-icon-background);
--color-icon-text: var(--dark-color-icon-text);
--color-comment-tag-text: var(--dark-color-text);
--color-comment-tag: var(--dark-color-background);
--color-link: var(--dark-color-link);
--color-focus-outline: var(--dark-color-focus-outline);
--color-ts-keyword: var(--dark-color-ts-keyword);
--color-ts-project: var(--dark-color-ts-project);
--color-ts-module: var(--dark-color-ts-module);
--color-ts-namespace: var(--dark-color-ts-namespace);
--color-ts-enum: var(--dark-color-ts-enum);
--color-ts-enum-member: var(--dark-color-ts-enum-member);
--color-ts-variable: var(--dark-color-ts-variable);
--color-ts-function: var(--dark-color-ts-function);
--color-ts-class: var(--dark-color-ts-class);
--color-ts-interface: var(--dark-color-ts-interface);
--color-ts-constructor: var(--dark-color-ts-constructor);
--color-ts-property: var(--dark-color-ts-property);
--color-ts-method: var(--dark-color-ts-method);
--color-ts-reference: var(--dark-color-ts-reference);
--color-ts-call-signature: var(--dark-color-ts-call-signature);
--color-ts-index-signature: var(--dark-color-ts-index-signature);
--color-ts-constructor-signature: var(
--dark-color-ts-constructor-signature
);
--color-ts-parameter: var(--dark-color-ts-parameter);
--color-ts-type-parameter: var(--dark-color-ts-type-parameter);
--color-ts-accessor: var(--dark-color-ts-accessor);
--color-ts-get-signature: var(--dark-color-ts-get-signature);
--color-ts-set-signature: var(--dark-color-ts-set-signature);
--color-ts-type-alias: var(--dark-color-ts-type-alias);
--color-document: var(--dark-color-document);
--color-alert-note: var(--dark-color-alert-note);
--color-alert-tip: var(--dark-color-alert-tip);
--color-alert-important: var(--dark-color-alert-important);
--color-alert-warning: var(--dark-color-alert-warning);
--color-alert-caution: var(--dark-color-alert-caution);
--external-icon: var(--dark-external-icon);
--color-scheme: var(--dark-color-scheme);
}
}
:root[data-theme="light"] {
--color-background: var(--light-color-background);
--color-background-secondary: var(--light-color-background-secondary);
--color-background-active: var(--light-color-background-active);
--color-background-warning: var(--light-color-background-warning);
--color-warning-text: var(--light-color-warning-text);
--color-icon-background: var(--light-color-icon-background);
--color-accent: var(--light-color-accent);
--color-active-menu-item: var(--light-color-active-menu-item);
--color-text: var(--light-color-text);
--color-contrast-text: var(--light-color-contrast-text);
--color-text-aside: var(--light-color-text-aside);
--color-icon-text: var(--light-color-icon-text);
--color-comment-tag-text: var(--light-color-text);
--color-comment-tag: var(--light-color-background);
--color-link: var(--light-color-link);
--color-focus-outline: var(--light-color-focus-outline);
--color-ts-keyword: var(--light-color-ts-keyword);
--color-ts-project: var(--light-color-ts-project);
--color-ts-module: var(--light-color-ts-module);
--color-ts-namespace: var(--light-color-ts-namespace);
--color-ts-enum: var(--light-color-ts-enum);
--color-ts-enum-member: var(--light-color-ts-enum-member);
--color-ts-variable: var(--light-color-ts-variable);
--color-ts-function: var(--light-color-ts-function);
--color-ts-class: var(--light-color-ts-class);
--color-ts-interface: var(--light-color-ts-interface);
--color-ts-constructor: var(--light-color-ts-constructor);
--color-ts-property: var(--light-color-ts-property);
--color-ts-method: var(--light-color-ts-method);
--color-ts-reference: var(--light-color-ts-reference);
--color-ts-call-signature: var(--light-color-ts-call-signature);
--color-ts-index-signature: var(--light-color-ts-index-signature);
--color-ts-constructor-signature: var(
--light-color-ts-constructor-signature
);
--color-ts-parameter: var(--light-color-ts-parameter);
--color-ts-type-parameter: var(--light-color-ts-type-parameter);
--color-ts-accessor: var(--light-color-ts-accessor);
--color-ts-get-signature: var(--light-color-ts-get-signature);
--color-ts-set-signature: var(--light-color-ts-set-signature);
--color-ts-type-alias: var(--light-color-ts-type-alias);
--color-document: var(--light-color-document);
--color-note: var(--light-color-note);
--color-tip: var(--light-color-tip);
--color-important: var(--light-color-important);
--color-warning: var(--light-color-warning);
--color-caution: var(--light-color-caution);
--external-icon: var(--light-external-icon);
--color-scheme: var(--light-color-scheme);
}
:root[data-theme="dark"] {
--color-background: var(--dark-color-background);
--color-background-secondary: var(--dark-color-background-secondary);
--color-background-active: var(--dark-color-background-active);
--color-background-warning: var(--dark-color-background-warning);
--color-warning-text: var(--dark-color-warning-text);
--color-icon-background: var(--dark-color-icon-background);
--color-accent: var(--dark-color-accent);
--color-active-menu-item: var(--dark-color-active-menu-item);
--color-text: var(--dark-color-text);
--color-contrast-text: var(--dark-color-contrast-text);
--color-text-aside: var(--dark-color-text-aside);
--color-icon-text: var(--dark-color-icon-text);
--color-comment-tag-text: var(--dark-color-text);
--color-comment-tag: var(--dark-color-background);
--color-link: var(--dark-color-link);
--color-focus-outline: var(--dark-color-focus-outline);
--color-ts-keyword: var(--dark-color-ts-keyword);
--color-ts-project: var(--dark-color-ts-project);
--color-ts-module: var(--dark-color-ts-module);
--color-ts-namespace: var(--dark-color-ts-namespace);
--color-ts-enum: var(--dark-color-ts-enum);
--color-ts-enum-member: var(--dark-color-ts-enum-member);
--color-ts-variable: var(--dark-color-ts-variable);
--color-ts-function: var(--dark-color-ts-function);
--color-ts-class: var(--dark-color-ts-class);
--color-ts-interface: var(--dark-color-ts-interface);
--color-ts-constructor: var(--dark-color-ts-constructor);
--color-ts-property: var(--dark-color-ts-property);
--color-ts-method: var(--dark-color-ts-method);
--color-ts-reference: var(--dark-color-ts-reference);
--color-ts-call-signature: var(--dark-color-ts-call-signature);
--color-ts-index-signature: var(--dark-color-ts-index-signature);
--color-ts-constructor-signature: var(
--dark-color-ts-constructor-signature
);
--color-ts-parameter: var(--dark-color-ts-parameter);
--color-ts-type-parameter: var(--dark-color-ts-type-parameter);
--color-ts-accessor: var(--dark-color-ts-accessor);
--color-ts-get-signature: var(--dark-color-ts-get-signature);
--color-ts-set-signature: var(--dark-color-ts-set-signature);
--color-ts-type-alias: var(--dark-color-ts-type-alias);
--color-document: var(--dark-color-document);
--color-note: var(--dark-color-note);
--color-tip: var(--dark-color-tip);
--color-important: var(--dark-color-important);
--color-warning: var(--dark-color-warning);
--color-caution: var(--dark-color-caution);
--external-icon: var(--dark-external-icon);
--color-scheme: var(--dark-color-scheme);
}
html {
color-scheme: var(--color-scheme);
@media (prefers-reduced-motion: no-preference) {
scroll-behavior: smooth;
}
}
*:focus-visible,
.tsd-accordion-summary:focus-visible svg {
outline: 2px solid var(--color-focus-outline);
}
.always-visible,
.always-visible .tsd-signatures {
display: inherit !important;
}
h1,
h2,
h3,
h4,
h5,
h6 {
line-height: 1.2;
}
h1 {
font-size: 1.875rem;
margin: 0.67rem 0;
}
h2 {
font-size: 1.5rem;
margin: 0.83rem 0;
}
h3 {
font-size: 1.25rem;
margin: 1rem 0;
}
h4 {
font-size: 1.05rem;
margin: 1.33rem 0;
}
h5 {
font-size: 1rem;
margin: 1.5rem 0;
}
h6 {
font-size: 0.875rem;
margin: 2.33rem 0;
}
dl,
menu,
ol,
ul {
margin: 1em 0;
}
dd {
margin: 0 0 0 34px;
}
.container {
max-width: 1700px;
padding: 0 2rem;
}
/* Footer */
footer {
border-top: 1px solid var(--color-accent);
padding-top: 1rem;
padding-bottom: 1rem;
max-height: var(--dim-footer-height);
}
footer > p {
margin: 0 1em;
}
.container-main {
margin: var(--dim-container-main-margin-y) auto;
/* toolbar, footer, margin */
min-height: calc(
100svh - var(--dim-header-height) - var(--dim-footer-height) -
2 * var(--dim-container-main-margin-y)
);
}
@keyframes fade-in {
from {
opacity: 0;
}
to {
opacity: 1;
}
}
@keyframes fade-out {
from {
opacity: 1;
visibility: visible;
}
to {
opacity: 0;
}
}
@keyframes pop-in-from-right {
from {
transform: translate(100%, 0);
}
to {
transform: translate(0, 0);
}
}
@keyframes pop-out-to-right {
from {
transform: translate(0, 0);
visibility: visible;
}
to {
transform: translate(100%, 0);
}
}
body {
background: var(--color-background);
font-family:
-apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans",
Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji";
font-size: 16px;
color: var(--color-text);
margin: 0;
}
a {
color: var(--color-link);
text-decoration: none;
}
a:hover {
text-decoration: underline;
}
a.external[target="_blank"] {
background-image: var(--external-icon);
background-position: top 3px right;
background-repeat: no-repeat;
padding-right: 13px;
}
a.tsd-anchor-link {
color: var(--color-text);
}
:target {
scroll-margin-block: calc(var(--dim-header-height) + 0.5rem);
}
code,
pre {
font-family: Menlo, Monaco, Consolas, "Courier New", monospace;
padding: 0.2em;
margin: 0;
font-size: 0.875rem;
border-radius: 0.8em;
}
pre {
position: relative;
white-space: pre-wrap;
word-wrap: break-word;
padding: 10px;
border: 1px solid var(--color-accent);
margin-bottom: 8px;
}
pre code {
padding: 0;
font-size: 100%;
}
pre > button {
position: absolute;
top: 10px;
right: 10px;
opacity: 0;
transition: opacity 0.1s;
box-sizing: border-box;
}
pre:hover > button,
pre > button.visible,
pre > button:focus-visible {
opacity: 1;
}
blockquote {
margin: 1em 0;
padding-left: 1em;
border-left: 4px solid gray;
}
img {
max-width: 100%;
}
* {
scrollbar-width: thin;
scrollbar-color: var(--color-accent) var(--color-icon-background);
}
*::-webkit-scrollbar {
width: 0.75rem;
}
*::-webkit-scrollbar-track {
background: var(--color-icon-background);
}
*::-webkit-scrollbar-thumb {
background-color: var(--color-accent);
border-radius: 999rem;
border: 0.25rem solid var(--color-icon-background);
}
dialog {
border: none;
outline: none;
padding: 0;
background-color: var(--color-background);
}
dialog::backdrop {
display: none;
}
#tsd-overlay {
background-color: rgba(0, 0, 0, 0.5);
position: fixed;
z-index: 9999;
top: 0;
left: 0;
right: 0;
bottom: 0;
animation: fade-in var(--modal-animation-duration) forwards;
}
#tsd-overlay.closing {
animation-name: fade-out;
}
.tsd-typography {
line-height: 1.333em;
}
.tsd-typography ul {
list-style: square;
padding: 0 0 0 20px;
margin: 0;
}
.tsd-typography .tsd-index-panel h3,
.tsd-index-panel .tsd-typography h3,
.tsd-typography h4,
.tsd-typography h5,
.tsd-typography h6 {
font-size: 1em;
}
.tsd-typography h5,
.tsd-typography h6 {
font-weight: normal;
}
.tsd-typography p,
.tsd-typography ul,
.tsd-typography ol {
margin: 1em 0;
}
.tsd-typography table {
border-collapse: collapse;
border: none;
}
.tsd-typography td,
.tsd-typography th {
padding: 6px 13px;
border: 1px solid var(--color-accent);
}
.tsd-typography thead,
.tsd-typography tr:nth-child(even) {
background-color: var(--color-background-secondary);
}
.tsd-alert {
padding: 8px 16px;
margin-bottom: 16px;
border-left: 0.25em solid var(--alert-color);
}
.tsd-alert blockquote > :last-child,
.tsd-alert > :last-child {
margin-bottom: 0;
}
.tsd-alert-title {
color: var(--alert-color);
display: inline-flex;
align-items: center;
}
.tsd-alert-title span {
margin-left: 4px;
}
.tsd-alert-note {
--alert-color: var(--color-alert-note);
}
.tsd-alert-tip {
--alert-color: var(--color-alert-tip);
}
.tsd-alert-important {
--alert-color: var(--color-alert-important);
}
.tsd-alert-warning {
--alert-color: var(--color-alert-warning);
}
.tsd-alert-caution {
--alert-color: var(--color-alert-caution);
}
.tsd-breadcrumb {
margin: 0;
margin-top: 1rem;
padding: 0;
color: var(--color-text-aside);
}
.tsd-breadcrumb a {
color: var(--color-text-aside);
text-decoration: none;
}
.tsd-breadcrumb a:hover {
text-decoration: underline;
}
.tsd-breadcrumb li {
display: inline;
}
.tsd-breadcrumb li:after {
content: " / ";
}
.tsd-comment-tags {
display: flex;
flex-direction: column;
}
dl.tsd-comment-tag-group {
display: flex;
align-items: center;
overflow: hidden;
margin: 0.5em 0;
}
dl.tsd-comment-tag-group dt {
display: flex;
margin-right: 0.5em;
font-size: 0.875em;
font-weight: normal;
}
dl.tsd-comment-tag-group dd {
margin: 0;
}
code.tsd-tag {
padding: 0.25em 0.4em;
border: 0.1em solid var(--color-accent);
margin-right: 0.25em;
font-size: 70%;
}
h1 code.tsd-tag:first-of-type {
margin-left: 0.25em;
}
dl.tsd-comment-tag-group dd:before,
dl.tsd-comment-tag-group dd:after {
content: " ";
}
dl.tsd-comment-tag-group dd pre,
dl.tsd-comment-tag-group dd:after {
clear: both;
}
dl.tsd-comment-tag-group p {
margin: 0;
}
.tsd-panel.tsd-comment .lead {
font-size: 1.1em;
line-height: 1.333em;
margin-bottom: 2em;
}
.tsd-panel.tsd-comment .lead:last-child {
margin-bottom: 0;
}
.tsd-filter-visibility h4 {
font-size: 1rem;
padding-top: 0.75rem;
padding-bottom: 0.5rem;
margin: 0;
}
.tsd-filter-item:not(:last-child) {
margin-bottom: 0.5rem;
}
.tsd-filter-input {
display: flex;
width: -moz-fit-content;
width: fit-content;
align-items: center;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
cursor: pointer;
}
.tsd-filter-input input[type="checkbox"] {
cursor: pointer;
position: absolute;
width: 1.5em;
height: 1.5em;
opacity: 0;
}
.tsd-filter-input input[type="checkbox"]:disabled {
pointer-events: none;
}
.tsd-filter-input svg {
cursor: pointer;
width: 1.5em;
height: 1.5em;
margin-right: 0.5em;
border-radius: 0.33em;
/* Leaving this at full opacity breaks event listeners on Firefox.
Don't remove unless you know what you're doing. */
opacity: 0.99;
}
.tsd-filter-input input[type="checkbox"]:focus-visible + svg {
outline: 2px solid var(--color-focus-outline);
}
.tsd-checkbox-background {
fill: var(--color-accent);
}
input[type="checkbox"]:checked ~ svg .tsd-checkbox-checkmark {
stroke: var(--color-text);
}
.tsd-filter-input input:disabled ~ svg > .tsd-checkbox-background {
fill: var(--color-background);
stroke: var(--color-accent);
stroke-width: 0.25rem;
}
.tsd-filter-input input:disabled ~ svg > .tsd-checkbox-checkmark {
stroke: var(--color-accent);
}
.settings-label {
font-weight: bold;
text-transform: uppercase;
display: inline-block;
}
.tsd-filter-visibility .settings-label {
margin: 0.75rem 0 0.5rem 0;
}
.tsd-theme-toggle .settings-label {
margin: 0.75rem 0.75rem 0 0;
}
.tsd-hierarchy h4 label:hover span {
text-decoration: underline;
}
.tsd-hierarchy {
list-style: square;
margin: 0;
}
.tsd-hierarchy-target {
font-weight: bold;
}
.tsd-hierarchy-toggle {
color: var(--color-link);
cursor: pointer;
}
.tsd-full-hierarchy:not(:last-child) {
margin-bottom: 1em;
padding-bottom: 1em;
border-bottom: 1px solid var(--color-accent);
}
.tsd-full-hierarchy,
.tsd-full-hierarchy ul {
list-style: none;
margin: 0;
padding: 0;
}
.tsd-full-hierarchy ul {
padding-left: 1.5rem;
}
.tsd-full-hierarchy a {
padding: 0.25rem 0 !important;
font-size: 1rem;
display: inline-flex;
align-items: center;
color: var(--color-text);
}
.tsd-full-hierarchy svg[data-dropdown] {
cursor: pointer;
}
.tsd-full-hierarchy svg[data-dropdown="false"] {
transform: rotate(-90deg);
}
.tsd-full-hierarchy svg[data-dropdown="false"] ~ ul {
display: none;
}
.tsd-panel-group.tsd-index-group {
margin-bottom: 0;
}
.tsd-index-panel .tsd-index-list {
list-style: none;
line-height: 1.333em;
margin: 0;
padding: 0.25rem 0 0 0;
overflow: hidden;
display: grid;
grid-template-columns: repeat(3, 1fr);
column-gap: 1rem;
grid-template-rows: auto;
}
@media (max-width: 1024px) {
.tsd-index-panel .tsd-index-list {
grid-template-columns: repeat(2, 1fr);
}
}
@media (max-width: 768px) {
.tsd-index-panel .tsd-index-list {
grid-template-columns: repeat(1, 1fr);
}
}
.tsd-index-panel .tsd-index-list li {
-webkit-page-break-inside: avoid;
-moz-page-break-inside: avoid;
-ms-page-break-inside: avoid;
-o-page-break-inside: avoid;
page-break-inside: avoid;
}
.tsd-flag {
display: inline-block;
padding: 0.25em 0.4em;
border-radius: 4px;
color: var(--color-comment-tag-text);
background-color: var(--color-comment-tag);
text-indent: 0;
font-size: 75%;
line-height: 1;
font-weight: normal;
}
.tsd-anchor {
position: relative;
top: -100px;
}
.tsd-member {
position: relative;
}
.tsd-member .tsd-anchor + h3 {
display: flex;
align-items: center;
margin-top: 0;
margin-bottom: 0;
border-bottom: none;
}
.tsd-navigation.settings {
margin: 0;
margin-bottom: 1rem;
}
.tsd-navigation > a,
.tsd-navigation .tsd-accordion-summary {
width: calc(100% - 0.25rem);
display: flex;
align-items: center;
}
.tsd-navigation a,
.tsd-navigation summary > span,
.tsd-page-navigation a {
display: flex;
width: calc(100% - 0.25rem);
align-items: center;
padding: 0.25rem;
color: var(--color-text);
text-decoration: none;
box-sizing: border-box;
}
.tsd-navigation a.current,
.tsd-page-navigation a.current {
background: var(--color-active-menu-item);
color: var(--color-contrast-text);
}
.tsd-navigation a:hover,
.tsd-page-navigation a:hover {
text-decoration: underline;
}
.tsd-navigation ul,
.tsd-page-navigation ul {
margin-top: 0;
margin-bottom: 0;
padding: 0;
list-style: none;
}
.tsd-navigation li,
.tsd-page-navigation li {
padding: 0;
max-width: 100%;
}
.tsd-navigation .tsd-nav-link {
display: none;
}
.tsd-nested-navigation {
margin-left: 3rem;
}
.tsd-nested-navigation > li > details {
margin-left: -1.5rem;
}
.tsd-small-nested-navigation {
margin-left: 1.5rem;
}
.tsd-small-nested-navigation > li > details {
margin-left: -1.5rem;
}
.tsd-page-navigation-section > summary {
padding: 0.25rem;
}
.tsd-page-navigation-section > summary > svg {
margin-right: 0.25rem;
}
.tsd-page-navigation-section > div {
margin-left: 30px;
}
.tsd-page-navigation ul {
padding-left: 1.75rem;
}
#tsd-sidebar-links a {
margin-top: 0;
margin-bottom: 0.5rem;
line-height: 1.25rem;
}
#tsd-sidebar-links a:last-of-type {
margin-bottom: 0;
}
a.tsd-index-link {
padding: 0.25rem 0 !important;
font-size: 1rem;
line-height: 1.25rem;
display: inline-flex;
align-items: center;
color: var(--color-text);
}
.tsd-accordion-summary {
list-style-type: none; /* hide marker on non-safari */
outline: none; /* broken on safari, so just hide it */
display: flex;
align-items: center;
gap: 0.25rem;
box-sizing: border-box;
}
.tsd-accordion-summary::-webkit-details-marker {
display: none; /* hide marker on safari */
}
.tsd-accordion-summary,
.tsd-accordion-summary a {
-moz-user-select: none;
-webkit-user-select: none;
-ms-user-select: none;
user-select: none;
cursor: pointer;
}
.tsd-accordion-summary a {
width: calc(100% - 1.5rem);
}
.tsd-accordion-summary > * {
margin-top: 0;
margin-bottom: 0;
padding-top: 0;
padding-bottom: 0;
}
/*
* We need to be careful to target the arrow indicating whether the accordion
* is open, but not any other SVGs included in the details element.
*/
.tsd-accordion:not([open]) > .tsd-accordion-summary > svg:first-child {
transform: rotate(-90deg);
}
.tsd-index-content > :not(:first-child) {
margin-top: 0.75rem;
}
.tsd-index-summary {
margin-top: 1.5rem;
margin-bottom: 0.75rem;
display: flex;
align-content: center;
}
.tsd-no-select {
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
}
.tsd-kind-icon {
margin-right: 0.5rem;
width: 1.25rem;
height: 1.25rem;
min-width: 1.25rem;
min-height: 1.25rem;
}
.tsd-signature > .tsd-kind-icon {
margin-right: 0.8rem;
}
.tsd-panel {
margin-bottom: 2.5rem;
}
.tsd-panel.tsd-member {
margin-bottom: 4rem;
}
.tsd-panel:empty {
display: none;
}
.tsd-panel > h1,
.tsd-panel > h2,
.tsd-panel > h3 {
margin: 1.5rem -1.5rem 0.75rem -1.5rem;
padding: 0 1.5rem 0.75rem 1.5rem;
}
.tsd-panel > h1.tsd-before-signature,
.tsd-panel > h2.tsd-before-signature,
.tsd-panel > h3.tsd-before-signature {
margin-bottom: 0;
border-bottom: none;
}
.tsd-panel-group {
margin: 2rem 0;
}
.tsd-panel-group.tsd-index-group {
margin: 2rem 0;
}
.tsd-panel-group.tsd-index-group details {
margin: 2rem 0;
}
.tsd-panel-group > .tsd-accordion-summary {
margin-bottom: 1rem;
}
#tsd-search[open] {
animation: fade-in var(--modal-animation-duration) ease-out forwards;
}
#tsd-search[open].closing {
animation-name: fade-out;
}
/* Avoid setting `display` on closed dialog */
#tsd-search[open] {
display: flex;
flex-direction: column;
padding: 1rem;
width: 32rem;
max-width: 90vw;
max-height: calc(100vh - env(keyboard-inset-height, 0px) - 25vh);
/* Anchor dialog to top */
margin-top: 10vh;
border-radius: 6px;
will-change: max-height;
}
#tsd-search-input {
box-sizing: border-box;
width: 100%;
padding: 0 0.625rem; /* 10px */
outline: 0;
border: 2px solid var(--color-accent);
background-color: transparent;
color: var(--color-text);
border-radius: 4px;
height: 2.5rem;
flex: 0 0 auto;
font-size: 0.875rem;
transition: border-color 0.2s, background-color 0.2s;
}
#tsd-search-input:focus-visible {
background-color: var(--color-background-active);
border-color: transparent;
color: var(--color-contrast-text);
}
#tsd-search-input::placeholder {
color: inherit;
opacity: 0.8;
}
#tsd-search-results {
margin: 0;
padding: 0;
list-style: none;
flex: 1 1 auto;
display: flex;
flex-direction: column;
overflow-y: auto;
}
#tsd-search-results:not(:empty) {
margin-top: 0.5rem;
}
#tsd-search-results > li {
background-color: var(--color-background);
line-height: 1.5;
box-sizing: border-box;
border-radius: 4px;
}
#tsd-search-results > li:nth-child(even) {
background-color: var(--color-background-secondary);
}
#tsd-search-results > li:is(:hover, [aria-selected="true"]) {
background-color: var(--color-background-active);
color: var(--color-contrast-text);
}
/* It's important that this takes full size of parent `li`, to capture a click on `li` */
#tsd-search-results > li > a {
display: flex;
align-items: center;
padding: 0.5rem 0.25rem;
box-sizing: border-box;
width: 100%;
}
#tsd-search-results > li > a > .text {
flex: 1 1 auto;
min-width: 0;
overflow-wrap: anywhere;
}
#tsd-search-results > li > a .parent {
color: var(--color-text-aside);
}
#tsd-search-results > li > a mark {
color: inherit;
background-color: inherit;
font-weight: bold;
}
#tsd-search-status {
flex: 1;
display: grid;
place-content: center;
text-align: center;
overflow-wrap: anywhere;
}
#tsd-search-status:not(:empty) {
min-height: 6rem;
}
.tsd-signature {
margin: 0 0 1rem 0;
padding: 1rem 0.5rem;
border: 1px solid var(--color-accent);
font-family: Menlo, Monaco, Consolas, "Courier New", monospace;
font-size: 14px;
overflow-x: auto;
}
.tsd-signature-keyword {
color: var(--color-ts-keyword);
font-weight: normal;
}
.tsd-signature-symbol {
color: var(--color-text-aside);
font-weight: normal;
}
.tsd-signature-type {
font-style: italic;
font-weight: normal;
}
.tsd-signatures {
padding: 0;
margin: 0 0 1em 0;
list-style-type: none;
}
.tsd-signatures .tsd-signature {
margin: 0;
border-color: var(--color-accent);
border-width: 1px 0;
transition: background-color 0.1s;
}
.tsd-signatures .tsd-index-signature:not(:last-child) {
margin-bottom: 1em;
}
.tsd-signatures .tsd-index-signature .tsd-signature {
border-width: 1px;
}
.tsd-description .tsd-signatures .tsd-signature {
border-width: 1px;
}
ul.tsd-parameter-list,
ul.tsd-type-parameter-list {
list-style: square;
margin: 0;
padding-left: 20px;
}
ul.tsd-parameter-list > li.tsd-parameter-signature,
ul.tsd-type-parameter-list > li.tsd-parameter-signature {
list-style: none;
margin-left: -20px;
}
ul.tsd-parameter-list h5,
ul.tsd-type-parameter-list h5 {
font-size: 16px;
margin: 1em 0 0.5em 0;
}
.tsd-sources {
margin-top: 1rem;
font-size: 0.875em;
}
.tsd-sources a {
color: var(--color-text-aside);
text-decoration: underline;
}
.tsd-sources ul {
list-style: none;
padding: 0;
}
.tsd-page-toolbar {
position: sticky;
z-index: 1;
top: 0;
left: 0;
width: 100%;
color: var(--color-text);
background: var(--color-background-secondary);
border-bottom: var(--dim-toolbar-border-bottom-width)
var(--color-accent) solid;
transition: transform 0.3s ease-in-out;
}
.tsd-page-toolbar a {
color: var(--color-text);
}
.tsd-toolbar-contents {
display: flex;
align-items: center;
height: var(--dim-toolbar-contents-height);
margin: 0 auto;
}
.tsd-toolbar-contents > .title {
font-weight: bold;
margin-right: auto;
}
#tsd-toolbar-links {
display: flex;
align-items: center;
gap: 1.5rem;
margin-right: 1rem;
}
.tsd-widget {
box-sizing: border-box;
display: inline-block;
opacity: 0.8;
height: 2.5rem;
width: 2.5rem;
transition: opacity 0.1s, background-color 0.1s;
text-align: center;
cursor: pointer;
border: none;
background-color: transparent;
}
.tsd-widget:hover {
opacity: 0.9;
}
.tsd-widget:active {
opacity: 1;
background-color: var(--color-accent);
}
#tsd-toolbar-menu-trigger {
display: none;
}
.tsd-member-summary-name {
display: inline-flex;
align-items: center;
padding: 0.25rem;
text-decoration: none;
}
.tsd-anchor-icon {
display: inline-flex;
align-items: center;
margin-left: 0.5rem;
color: var(--color-text);
vertical-align: middle;
}
.tsd-anchor-icon svg {
width: 1em;
height: 1em;
visibility: hidden;
}
.tsd-member-summary-name:hover > .tsd-anchor-icon svg,
.tsd-anchor-link:hover > .tsd-anchor-icon svg,
.tsd-anchor-icon:focus-visible svg {
visibility: visible;
}
.deprecated {
text-decoration: line-through !important;
}
.warning {
padding: 1rem;
color: var(--color-warning-text);
background: var(--color-background-warning);
}
.tsd-kind-project {
color: var(--color-ts-project);
}
.tsd-kind-module {
color: var(--color-ts-module);
}
.tsd-kind-namespace {
color: var(--color-ts-namespace);
}
.tsd-kind-enum {
color: var(--color-ts-enum);
}
.tsd-kind-enum-member {
color: var(--color-ts-enum-member);
}
.tsd-kind-variable {
color: var(--color-ts-variable);
}
.tsd-kind-function {
color: var(--color-ts-function);
}
.tsd-kind-class {
color: var(--color-ts-class);
}
.tsd-kind-interface {
color: var(--color-ts-interface);
}
.tsd-kind-constructor {
color: var(--color-ts-constructor);
}
.tsd-kind-property {
color: var(--color-ts-property);
}
.tsd-kind-method {
color: var(--color-ts-method);
}
.tsd-kind-reference {
color: var(--color-ts-reference);
}
.tsd-kind-call-signature {
color: var(--color-ts-call-signature);
}
.tsd-kind-index-signature {
color: var(--color-ts-index-signature);
}
.tsd-kind-constructor-signature {
color: var(--color-ts-constructor-signature);
}
.tsd-kind-parameter {
color: var(--color-ts-parameter);
}
.tsd-kind-type-parameter {
color: var(--color-ts-type-parameter);
}
.tsd-kind-accessor {
color: var(--color-ts-accessor);
}
.tsd-kind-get-signature {
color: var(--color-ts-get-signature);
}
.tsd-kind-set-signature {
color: var(--color-ts-set-signature);
}
.tsd-kind-type-alias {
color: var(--color-ts-type-alias);
}
/* if we have a kind icon, don't color the text by kind */
.tsd-kind-icon ~ span {
color: var(--color-text);
}
/* mobile */
@media (max-width: 769px) {
#tsd-toolbar-menu-trigger {
display: inline-block;
/* temporary fix to vertically align, for compatibility */
line-height: 2.5;
}
#tsd-toolbar-links {
display: none;
}
.container-main {
display: flex;
}
.col-content {
float: none;
max-width: 100%;
width: 100%;
}
.col-sidebar {
position: fixed !important;
overflow-y: auto;
-webkit-overflow-scrolling: touch;
z-index: 1024;
top: 0 !important;
bottom: 0 !important;
left: auto !important;
right: 0 !important;
padding: 1.5rem 1.5rem 0 0;
width: 75vw;
visibility: hidden;
background-color: var(--color-background);
transform: translate(100%, 0);
}
.col-sidebar > *:last-child {
padding-bottom: 20px;
}
.overlay {
content: "";
display: block;
position: fixed;
z-index: 1023;
top: 0;
left: 0;
right: 0;
bottom: 0;
background-color: rgba(0, 0, 0, 0.75);
visibility: hidden;
}
.to-has-menu .overlay {
animation: fade-in 0.4s;
}
.to-has-menu .col-sidebar {
animation: pop-in-from-right 0.4s;
}
.from-has-menu .overlay {
animation: fade-out 0.4s;
}
.from-has-menu .col-sidebar {
animation: pop-out-to-right 0.4s;
}
.has-menu body {
overflow: hidden;
}
.has-menu .overlay {
visibility: visible;
}
.has-menu .col-sidebar {
visibility: visible;
transform: translate(0, 0);
display: flex;
flex-direction: column;
gap: 1.5rem;
max-height: 100vh;
padding: 1rem 2rem;
}
.has-menu .tsd-navigation {
max-height: 100%;
}
.tsd-navigation .tsd-nav-link {
display: flex;
}
}
/* one sidebar */
@media (min-width: 770px) {
.container-main {
display: grid;
grid-template-columns: minmax(0, 1fr) minmax(0, 2fr);
grid-template-areas: "sidebar content";
--dim-container-main-margin-y: 2rem;
}
.tsd-breadcrumb {
margin-top: 0;
}
.col-sidebar {
grid-area: sidebar;
}
.col-content {
grid-area: content;
padding: 0 1rem;
}
}
@media (min-width: 770px) and (max-width: 1399px) {
.col-sidebar {
max-height: calc(
100vh - var(--dim-header-height) - var(--dim-footer-height) -
2 * var(--dim-container-main-margin-y)
);
overflow: auto;
position: sticky;
top: calc(
var(--dim-header-height) + var(--dim-container-main-margin-y)
);
}
.site-menu {
margin-top: 1rem;
}
}
/* two sidebars */
@media (min-width: 1200px) {
.container-main {
grid-template-columns:
minmax(0, 1fr) minmax(0, 2.5fr) minmax(
0,
20rem
);
grid-template-areas: "sidebar content toc";
}
.col-sidebar {
display: contents;
}
.page-menu {
grid-area: toc;
padding-left: 1rem;
}
.site-menu {
grid-area: sidebar;
}
.site-menu {
margin-top: 0rem;
}
.page-menu,
.site-menu {
max-height: calc(
100vh - var(--dim-header-height) - var(--dim-footer-height) -
2 * var(--dim-container-main-margin-y)
);
overflow: auto;
position: sticky;
top: calc(
var(--dim-header-height) + var(--dim-container-main-margin-y)
);
}
}
}
```
--------------------------------------------------------------------------------
/docs/adrs/008-intelligent-content-population-engine.md:
--------------------------------------------------------------------------------
```markdown
---
id: 008-intelligent-content-population-engine
title: "ADR-008: Intelligent Content Population Engine"
sidebar_label: "ADR-8: Intelligent Content Population Engine"
sidebar_position: 8
documcp:
last_updated: "2025-11-20T00:46:21.942Z"
last_validated: "2025-11-20T00:46:21.942Z"
auto_updated: false
update_frequency: monthly
---
# ADR-008: Intelligent Content Population Engine for Diataxis Documentation
## Status
Accepted
## Context
DocuMCP currently creates excellent Diataxis-compliant documentation structures through ADR-004 and ADR-006, but produces only skeleton content with placeholder text. This creates a significant gap between the framework's potential and delivered value, requiring users to manually populate all documentation content despite having comprehensive repository analysis data available.
The current `setup-structure` tool (from ADR-006) provides:
- ✅ Professional Diataxis directory structure
- ✅ SSG-specific configuration and frontmatter
- ✅ Basic template content explaining Diataxis categories
- ❌ **Missing**: Project-specific content analysis and intelligent population
- ❌ **Missing**: Repository analysis integration for content suggestions
- ❌ **Missing**: Technology-specific documentation generation
**Current User Journey:**
1. Repository analysis identifies TypeScript project with Express.js, PostgreSQL, Jest tests
2. Diataxis structure created with generic placeholder content
3. User must manually research and write all tutorials, how-to guides, reference docs, and explanations
4. **Result**: 8-20 hours of manual documentation work despite intelligent analysis
**Target User Journey:**
1. Repository analysis identifies project characteristics and technology stack
2. Intelligent content population generates project-specific documentation
3. User reviews and refines 60-80% pre-populated, contextually relevant content
4. **Result**: 1-2 hours of refinement work with professional-quality starting point
Key gaps identified:
- Repository analysis data (125 files, TypeScript/JavaScript ecosystem, test infrastructure) not leveraged for content generation
- Extensive technology detection capabilities underutilized for creating relevant examples
- Diataxis framework implementation incomplete without intelligent content planning (ADR-004, lines 153-192)
- Competitive disadvantage: users get empty templates instead of intelligent assistance
## Decision
We will implement an Intelligent Content Population Engine that bridges repository analysis with Diataxis content generation, creating the missing layer between structural generation and user-ready documentation.
### Architecture Overview:
#### 1. Content Intelligence Engine
**Purpose**: Transform repository analysis into structured content plans
**Core Capabilities**:
- Project characteristic analysis (technology stack, architecture patterns, API surfaces)
- User journey mapping to appropriate Diataxis categories
- Content gap identification and priority assignment
- Technology-specific example and code snippet generation
#### 2. Project-Aware Content Generators
**Purpose**: Create contextually relevant content for each Diataxis category
**Scope**: Four specialized generators aligned with the Diataxis framework:
##### Tutorial Content Generator
- **Getting Started**: Framework-specific installation, setup, and first success
- **Feature Tutorials**: Based on detected APIs, key dependencies, and project complexity
- **Integration Tutorials**: For detected services, databases, and external dependencies
##### How-To Guide Generator
- **Common Tasks**: Derived from project type and technology stack
- **Troubleshooting**: Based on detected tools, frameworks, and common pain points
- **Deployment Guides**: Technology-specific deployment patterns and best practices
##### Reference Documentation Generator
- **API Documentation**: Auto-generate from detected API surfaces and endpoints
- **Configuration Reference**: Based on identified config files and environment variables
- **CLI Reference**: For detected command-line tools and scripts
##### Explanation Content Generator
- **Architecture Overview**: Based on detected patterns, dependencies, and project structure
- **Design Decisions**: Technology choices and their implications
- **Concept Explanations**: Framework and domain-specific concepts
#### 3. Repository Analysis Integration Layer
**Purpose**: Bridge analysis data with content generation
**Integration Points**:
- Language ecosystem analysis → Technology-specific content
- Dependency analysis → Framework integration guides
- Project structure analysis → Architecture documentation
- Complexity assessment → Content depth and sophistication level
### Implementation Architecture:
```typescript
interface ContentPopulationEngine {
// Core engine interface
populateContent(
analysisId: string,
docsPath: string,
options: PopulationOptions,
): Promise<PopulationResult>;
// Content planning
generateContentPlan(analysis: RepositoryAnalysis): ContentPlan;
identifyContentGaps(
existing: ExistingContent,
plan: ContentPlan,
): ContentGap[];
// Content generation
generateTutorialContent(
plan: TutorialPlan,
context: ProjectContext,
): TutorialContent;
generateHowToContent(plan: HowToPlan, context: ProjectContext): HowToContent;
generateReferenceContent(
plan: ReferencePlan,
context: ProjectContext,
): ReferenceContent;
generateExplanationContent(
plan: ExplanationPlan,
context: ProjectContext,
): ExplanationContent;
}
interface PopulationOptions {
level: "basic" | "comprehensive" | "intelligent";
includeCodeExamples: boolean;
projectSpecific: boolean;
preserveExisting: boolean;
customizationProfile?: CustomizationProfile;
}
interface ContentPlan {
tutorials: TutorialSuggestion[];
howToGuides: HowToSuggestion[];
reference: ReferenceSuggestion[];
explanation: ExplanationSuggestion[];
crossReferences: ContentRelationship[];
estimatedEffort: EffortEstimate;
}
interface ProjectContext {
primaryLanguage: string;
frameworks: Framework[];
architecture: ArchitecturePattern;
apiSurfaces: APIAnalysis[];
deploymentTargets: DeploymentTarget[];
testingFrameworks: TestingFramework[];
dependencies: DependencyAnalysis;
}
```
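For orientation, a minimal usage sketch of this interface is shown below; the `engine` instance, analysis ID, and docs path are hypothetical placeholders rather than part of the specified API.
```typescript
// Hypothetical usage sketch of the ContentPopulationEngine interface above.
// The engine instance, analysis ID, and docs path are illustrative only.
async function populateDocsExample(
  engine: ContentPopulationEngine,
  analysis: RepositoryAnalysis,
): Promise<PopulationResult> {
  // Plan first to preview suggested content before writing anything.
  const plan = engine.generateContentPlan(analysis);
  console.log(`Suggested tutorials: ${plan.tutorials.length}`);
  // Then populate the documentation tree with project-specific content.
  return engine.populateContent("analysis-123", "./docs", {
    level: "comprehensive",
    includeCodeExamples: true,
    projectSpecific: true,
    preserveExisting: true,
  });
}
```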
### Content Generation Algorithms:
#### Tutorial Generation Algorithm
```typescript
function generateTutorials(analysis: RepositoryAnalysis): TutorialSuggestion[] {
const suggestions: TutorialSuggestion[] = [];
// Always include getting started
suggestions.push({
title: `Getting Started with ${analysis.metadata.projectName}`,
description: `Learn ${analysis.recommendations.primaryLanguage} development with ${analysis.metadata.projectName}`,
priority: "high",
sections: generateGettingStartedSections(analysis),
codeExamples: generateTechnologySpecificExamples(
analysis.dependencies.ecosystem,
),
});
// Framework-specific tutorials
if (analysis.dependencies.packages.includes("express")) {
suggestions.push({
title: "Building REST APIs with Express.js",
description: "Complete guide to creating RESTful services",
priority: "high",
sections: generateExpressTutorialSections(analysis),
});
}
// Database integration tutorials
const dbDeps = detectDatabaseDependencies(analysis.dependencies.packages);
dbDeps.forEach((db) => {
suggestions.push({
title: `Database Integration with ${db.name}`,
description: `Connect and interact with ${db.name} databases`,
priority: "medium",
sections: generateDatabaseTutorialSections(db, analysis),
});
});
return suggestions;
}
```
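The `detectDatabaseDependencies` helper referenced above is not specified in this ADR. A minimal sketch, assuming detection is a lookup over well-known driver package names (the mapping and the `DatabaseDependency` shape are illustrative assumptions):
```typescript
// Minimal sketch of the detectDatabaseDependencies helper used above.
// The package-to-database mapping is an illustrative assumption, not a spec.
interface DatabaseDependency {
  name: string; // e.g. "PostgreSQL"
  package: string; // the npm package that triggered detection
}
const KNOWN_DB_PACKAGES: Record<string, string> = {
  pg: "PostgreSQL",
  mysql2: "MySQL",
  mongodb: "MongoDB",
  mongoose: "MongoDB",
  ioredis: "Redis",
  sqlite3: "SQLite",
};
function detectDatabaseDependencies(packages: string[]): DatabaseDependency[] {
  return packages
    .filter((pkg) => pkg in KNOWN_DB_PACKAGES)
    .map((pkg) => ({ name: KNOWN_DB_PACKAGES[pkg], package: pkg }));
}
```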
#### Reference Generation Algorithm
```typescript
function generateReference(
analysis: RepositoryAnalysis,
): ReferenceSuggestion[] {
const suggestions: ReferenceSuggestion[] = [];
// API documentation from detected endpoints
const apiSurfaces = detectAPIEndpoints(analysis);
if (apiSurfaces.length > 0) {
suggestions.push({
title: "API Reference",
description: "Complete API endpoint documentation",
content: generateAPIDocumentation(apiSurfaces),
format: "openapi-spec",
});
}
// Configuration reference from detected config files
const configFiles = detectConfigurationFiles(analysis);
configFiles.forEach((config) => {
suggestions.push({
title: `${config.type} Configuration`,
description: `Configuration options for ${config.name}`,
content: generateConfigurationReference(config),
format: "configuration-table",
});
});
// CLI reference from detected scripts
const cliCommands = detectCLICommands(analysis);
if (cliCommands.length > 0) {
suggestions.push({
title: "Command Line Interface",
description: "Available commands and options",
content: generateCLIReference(cliCommands),
format: "cli-documentation",
});
}
return suggestions;
}
```
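Similarly, `detectCLICommands` is left unspecified; one plausible sketch derives CLI entries from a package manifest's `bin` and `scripts` fields (the `CLICommand` shape and the manifest subset are assumptions):
```typescript
// Illustrative sketch: derive CLI commands from package.json metadata.
// The CLICommand shape and the PackageJsonSubset type are assumptions.
interface CLICommand {
  name: string;
  invocation: string;
  source: "bin" | "script";
}
interface PackageJsonSubset {
  bin?: Record<string, string>;
  scripts?: Record<string, string>;
}
function detectCLICommandsFromPackageJson(pkg: PackageJsonSubset): CLICommand[] {
  const commands: CLICommand[] = [];
  for (const [name, path] of Object.entries(pkg.bin ?? {})) {
    commands.push({ name, invocation: path, source: "bin" });
  }
  for (const [name, script] of Object.entries(pkg.scripts ?? {})) {
    commands.push({ name: `npm run ${name}`, invocation: script, source: "script" });
  }
  return commands;
}
```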
### Technology-Specific Content Templates:
#### JavaScript/TypeScript Ecosystem
```typescript
const JAVASCRIPT_TEMPLATES = {
gettingStarted: {
prerequisites: ["Node.js 20.0.0+", "npm or yarn", "Git"],
installationSteps: [
"Clone the repository",
"Install dependencies with npm install",
"Copy environment variables",
"Run development server",
],
verificationSteps: [
"Check server starts successfully",
"Access application in browser",
"Run test suite to verify setup",
],
},
expressAPI: {
sections: [
"Project Structure Overview",
"Creating Your First Route",
"Middleware Configuration",
"Database Integration",
"Error Handling",
"Testing Your API",
],
codeExamples: generateExpressCodeExamples,
},
testingGuides: {
jest: generateJestHowToGuides,
cypress: generateCypressHowToGuides,
playwright: generatePlaywrightHowToGuides,
},
};
```
#### Multi-Language Framework Support
##### JavaScript/TypeScript Ecosystem
```typescript
const JAVASCRIPT_TEMPLATES = {
gettingStarted: {
prerequisites: ["Node.js 20.0.0+", "npm or yarn", "Git"],
installationSteps: [
"Clone the repository",
"Install dependencies with npm install",
"Copy environment variables",
"Run development server",
],
verificationSteps: [
"Check server starts successfully",
"Access application in browser",
"Run test suite to verify setup",
],
},
frameworks: {
express: {
tutorials: [
"REST API Development",
"Middleware Configuration",
"Database Integration",
],
howToGuides: [
"Performance Optimization",
"Error Handling",
"Authentication Setup",
],
reference: [
"Route Configuration",
"Middleware Reference",
"Configuration Options",
],
explanation: [
"Express Architecture",
"Middleware Pattern",
"Async Handling",
],
},
react: {
tutorials: ["Component Development", "State Management", "React Router"],
howToGuides: [
"Performance Optimization",
"Testing Components",
"Deployment",
],
reference: ["Component API", "Hooks Reference", "Build Configuration"],
explanation: [
"Component Architecture",
"State Flow",
"Rendering Lifecycle",
],
},
nestjs: {
tutorials: [
"Dependency Injection",
"Controllers and Services",
"Database Integration",
],
howToGuides: [
"Custom Decorators",
"Microservices",
"GraphQL Integration",
],
reference: ["Decorator Reference", "Module System", "Configuration"],
explanation: ["DI Architecture", "Module Design", "Enterprise Patterns"],
},
},
};
```
##### Python Ecosystem Support
```typescript
const PYTHON_TEMPLATES = {
gettingStarted: {
prerequisites: ["Python 3.8+", "pip or poetry", "Virtual environment"],
installationSteps: [
"Create virtual environment",
"Activate virtual environment",
"Install dependencies from requirements.txt/pyproject.toml",
"Set up environment variables",
"Run development server",
],
verificationSteps: [
"Check application starts successfully",
"Run test suite with pytest",
"Verify API endpoints respond correctly",
],
},
frameworks: {
django: {
tutorials: [
"Django Project Setup and Configuration",
"Models and Database Integration",
"Views and URL Routing",
"Django REST Framework APIs",
"User Authentication and Permissions",
],
howToGuides: [
"Deploy Django to Production",
"Optimize Database Queries",
"Implement Caching Strategies",
"Handle File Uploads",
"Configure CORS and Security",
],
reference: [
"Django Settings Reference",
"Model Field Types",
"URL Configuration Patterns",
"Middleware Reference",
"Management Commands",
],
explanation: [
"Django MTV Architecture",
"ORM Design Decisions",
"Security Model",
"Scalability Patterns",
],
},
fastapi: {
tutorials: [
"FastAPI Application Structure",
"Pydantic Models and Validation",
"Dependency Injection System",
"Database Integration with SQLAlchemy",
"Authentication and Security",
],
howToGuides: [
"Optimize FastAPI Performance",
"Implement Background Tasks",
"Handle File Processing",
"Set up Monitoring and Logging",
"Deploy with Docker and Kubernetes",
],
reference: [
"FastAPI Decorators Reference",
"Pydantic Model Configuration",
"Dependency System Reference",
"Security Utilities",
"Testing Utilities",
],
explanation: [
"ASGI vs WSGI Architecture",
"Type Hints and Validation",
"Dependency Injection Benefits",
"Performance Characteristics",
],
},
flask: {
tutorials: [
"Flask Application Factory Pattern",
"Blueprint Organization",
"Database Integration with SQLAlchemy",
"User Session Management",
"RESTful API Development",
],
howToGuides: [
"Structure Large Flask Applications",
"Implement Rate Limiting",
"Handle Background Jobs",
"Configure Production Deployment",
"Debug Flask Applications",
],
reference: [
"Flask Configuration Reference",
"Request and Response Objects",
"Template Engine Reference",
"Extension Integration",
"CLI Commands",
],
explanation: [
"Flask Philosophy and Design",
"WSGI Application Structure",
"Extension Ecosystem",
"Microframework Benefits",
],
},
},
};
class PythonContentGenerator implements FrameworkContentGenerator {
detectFramework(analysis: RepositoryAnalysis): Framework[] {
const frameworks: Framework[] = [];
// Django detection
if (
this.hasDependency(analysis, "django") ||
this.hasFile(analysis, "manage.py") ||
this.hasFile(analysis, "settings.py")
) {
frameworks.push({
name: "django",
version: this.extractVersion(analysis, "django"),
configFiles: ["settings.py", "urls.py", "wsgi.py"],
appStructure: this.analyzeDjangoApps(analysis),
});
}
// FastAPI detection
if (
this.hasDependency(analysis, "fastapi") ||
this.hasImport(analysis, "from fastapi import")
) {
frameworks.push({
name: "fastapi",
version: this.extractVersion(analysis, "fastapi"),
configFiles: this.getFastAPIConfigFiles(analysis),
routerStructure: this.analyzeFastAPIRouters(analysis),
});
}
// Flask detection
if (
this.hasDependency(analysis, "flask") ||
this.hasImport(analysis, "from flask import")
) {
frameworks.push({
name: "flask",
version: this.extractVersion(analysis, "flask"),
configFiles: this.getFlaskConfigFiles(analysis),
blueprintStructure: this.analyzeFlaskBlueprints(analysis),
});
}
return frameworks;
}
generateFrameworkContent(
framework: Framework,
context: ProjectContext,
): FrameworkContent {
const templates = PYTHON_TEMPLATES.frameworks[framework.name];
return {
tutorials: templates.tutorials.map((title) => ({
title: `${title} for ${context.projectName}`,
content: this.generatePythonTutorialContent(framework, title, context),
codeExamples: this.generatePythonCodeExamples(
framework,
title,
context,
),
})),
howToGuides: templates.howToGuides.map((title) => ({
title,
content: this.generatePythonHowToContent(framework, title, context),
tasks: this.generatePythonTasks(framework, title, context),
})),
reference: templates.reference.map((title) => ({
title,
content: this.generatePythonReferenceContent(framework, title, context),
})),
explanation: templates.explanation.map((title) => ({
title,
content: this.generatePythonExplanationContent(
framework,
title,
context,
),
})),
};
}
}
```
#### Framework-Specific Content Generation
```typescript
interface FrameworkContentGenerator {
detectFramework(dependencies: string[]): Framework | null;
generateFrameworkContent(
framework: Framework,
context: ProjectContext,
): FrameworkContent;
}
const FRAMEWORK_GENERATORS: Record<string, FrameworkContentGenerator> = {
// JavaScript/TypeScript frameworks
express: new ExpressContentGenerator(),
react: new ReactContentGenerator(),
vue: new VueContentGenerator(),
angular: new AngularContentGenerator(),
nestjs: new NestJSContentGenerator(),
fastify: new FastifyContentGenerator(),
// Python frameworks
django: new DjangoContentGenerator(),
fastapi: new FastAPIContentGenerator(),
flask: new FlaskContentGenerator(),
pyramid: new PyramidContentGenerator(),
// Future language support
"spring-boot": new SpringBootContentGenerator(), // Java
gin: new GinContentGenerator(), // Go
"actix-web": new ActixContentGenerator(), // Rust
};
```
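A short sketch of how this registry might be consulted at generation time, assuming each generator's `detectFramework` returns `null` when its framework is absent from the dependency list:
```typescript
// Illustrative dispatch over the FRAMEWORK_GENERATORS registry above.
// Assumes detectFramework returns null when the framework is not present.
function selectFrameworkContent(
  dependencies: string[],
  context: ProjectContext,
): FrameworkContent[] {
  const results: FrameworkContent[] = [];
  for (const generator of Object.values(FRAMEWORK_GENERATORS)) {
    const framework = generator.detectFramework(dependencies);
    if (framework) {
      results.push(generator.generateFrameworkContent(framework, context));
    }
  }
  return results;
}
```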
## Alternatives Considered
### Manual Content Creation Only
- **Pros**: Simple implementation, full user control, no AI dependency
- **Cons**: Massive user effort, inconsistent quality, underutilizes analysis capabilities
- **Decision**: Rejected - provides minimal value over generic templates
### AI-Generated Content via External APIs
- **Pros**: Advanced content generation, natural language processing
- **Cons**: External dependencies, costs, inconsistent quality, latency issues
- **Decision**: Rejected for initial version - adds complexity without guaranteed quality
### Community-Contributed Content Templates
- **Pros**: Diverse perspectives, battle-tested content, community engagement
- **Cons**: Quality control challenges, maintenance overhead, incomplete coverage
- **Decision**: Considered for future enhancement - focus on algorithmic generation first
### Generic Template Expansion
- **Pros**: Easier implementation, consistent structure
- **Cons**: Still requires significant manual work, doesn't leverage analysis intelligence
- **Decision**: Rejected - doesn't address core value proposition gap
## Consequences
### Positive
- **Dramatic User Value Increase**: 60-80% content pre-population vs. empty templates
- **Competitive Differentiation**: Only documentation tool with intelligent content generation
- **Analysis ROI**: Comprehensive repository analysis finally delivers proportional value
- **Framework Completion**: Fulfills ADR-004 vision for content planning intelligence
- **User Experience**: Transform from "structure generator" to "documentation assistant"
### Negative
- **Implementation Complexity**: Significant engineering effort for content generation algorithms
- **Content Quality Risk**: Generated content may require refinement for accuracy
- **Technology Coverage**: Initial version limited to well-known frameworks and patterns
- **Maintenance Overhead**: Content templates require updates as technologies evolve
### Risks and Mitigations
- **Quality Control**: Implement content validation and user review workflows
- **Technology Coverage**: Start with most common frameworks, expand based on usage
- **Algorithm Accuracy**: Validate generated content against project reality
- **User Expectations**: Clear communication about generated vs. curated content
## Implementation Details
### MCP Tool Interface
```typescript
// New tool: populate_diataxis_content
interface PopulateDiataxisContentTool {
name: "populate_diataxis_content";
description: "Intelligently populate Diataxis documentation with project-specific content";
inputSchema: {
type: "object";
properties: {
analysisId: {
type: "string";
description: "Repository analysis ID from analyze_repository tool";
};
docsPath: {
type: "string";
description: "Path to documentation directory";
};
populationLevel: {
type: "string";
enum: ["basic", "comprehensive", "intelligent"];
default: "comprehensive";
description: "Level of content generation detail";
};
includeProjectSpecific: {
type: "boolean";
default: true;
description: "Generate project-specific examples and code";
};
preserveExisting: {
type: "boolean";
default: true;
description: "Preserve any existing content";
};
technologyFocus: {
type: "array";
items: { type: "string" };
description: "Specific technologies to emphasize in content";
};
};
required: ["analysisId", "docsPath"];
};
}
```
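For illustration, an invocation of this tool might look like the following; the analysis ID, docs path, and technology focus values are placeholders:
```typescript
// Hypothetical invocation payload for populate_diataxis_content.
// Argument names follow the schema above; values are placeholders.
const exampleToolCall = {
  name: "populate_diataxis_content",
  arguments: {
    analysisId: "analysis-123", // returned by analyze_repository
    docsPath: "./docs",
    populationLevel: "comprehensive",
    includeProjectSpecific: true,
    preserveExisting: true,
    technologyFocus: ["express", "postgresql"],
  },
};
```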
### Content Generation Pipeline
```typescript
class ContentPopulationEngine {
async populateContent(args: PopulationArgs): Promise<PopulationResult> {
try {
// 1. Retrieve and validate repository analysis
const analysis = await this.getRepositoryAnalysis(args.analysisId);
this.validateAnalysis(analysis);
// 2. Generate content plan based on project characteristics
const contentPlan = await this.generateContentPlan(
analysis,
args.populationLevel,
);
// 3. Generate content for each Diataxis category
const [tutorials, howTos, reference, explanation] = await Promise.all([
this.generateTutorialContent(contentPlan.tutorials, analysis),
this.generateHowToContent(contentPlan.howToGuides, analysis),
this.generateReferenceContent(contentPlan.reference, analysis),
this.generateExplanationContent(contentPlan.explanation, analysis),
]);
// 4. Write content to documentation structure
const filesCreated = await this.writeContentToStructure(
args.docsPath,
{ tutorials, howTos, reference, explanation },
args.preserveExisting,
);
// 5. Generate cross-references and navigation updates
await this.updateNavigationAndCrossReferences(args.docsPath, contentPlan);
return {
success: true,
filesCreated,
contentPlan,
populationMetrics: this.calculatePopulationMetrics(filesCreated),
nextSteps: this.generateNextSteps(analysis, contentPlan),
};
} catch (error) {
console.error("Content population failed:", error);
return {
success: false,
error: {
code: "CONTENT_POPULATION_FAILED",
message: `Failed to populate content: ${
error instanceof Error ? error.message : "Unknown error"
}`,
resolution:
"Check repository analysis and documentation path accessibility",
},
filesCreated: [],
populationMetrics: { totalFiles: 0, totalWords: 0, totalSections: 0 },
};
}
}
}
```
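The pipeline above calls `calculatePopulationMetrics` without defining it. A minimal sketch, assuming each created file carries word and section counts (the `CreatedFile` shape is an assumption):
```typescript
// Minimal sketch of calculatePopulationMetrics as used in the pipeline above.
// The CreatedFile shape is an assumption made for illustration.
interface CreatedFile {
  path: string;
  wordCount: number;
  sectionCount: number;
}
interface PopulationMetrics {
  totalFiles: number;
  totalWords: number;
  totalSections: number;
}
function calculatePopulationMetrics(files: CreatedFile[]): PopulationMetrics {
  return files.reduce(
    (metrics, file) => ({
      totalFiles: metrics.totalFiles + 1,
      totalWords: metrics.totalWords + file.wordCount,
      totalSections: metrics.totalSections + file.sectionCount,
    }),
    { totalFiles: 0, totalWords: 0, totalSections: 0 },
  );
}
```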
### Technology Detection and Content Mapping
```typescript
interface TechnologyMapper {
detectTechnologies(analysis: RepositoryAnalysis): TechnologyProfile;
mapToContentTemplates(technologies: TechnologyProfile): ContentTemplateSet;
generateTechnologySpecificExamples(
technology: Technology,
context: ProjectContext,
): CodeExample[];
}
class JavaScriptTechnologyMapper implements TechnologyMapper {
detectTechnologies(analysis: RepositoryAnalysis): TechnologyProfile {
const profile: TechnologyProfile = {
runtime: this.detectRuntime(analysis), // Node.js, Deno, Bun
framework: this.detectFramework(analysis), // Express, Fastify, Koa
frontend: this.detectFrontend(analysis), // React, Vue, Angular
database: this.detectDatabase(analysis), // PostgreSQL, MongoDB, Redis
testing: this.detectTesting(analysis), // Jest, Mocha, Playwright
deployment: this.detectDeployment(analysis), // Docker, Kubernetes, Vercel
devops: this.detectDevOpsTools(analysis), // Ansible, Tekton, OpenShift, Podman
};
return profile;
}
mapToContentTemplates(technologies: TechnologyProfile): ContentTemplateSet {
return {
tutorials: this.generateTutorialTemplates(technologies),
howToGuides: this.generateHowToTemplates(technologies),
reference: this.generateReferenceTemplates(technologies),
explanation: this.generateExplanationTemplates(technologies),
};
}
}
```
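None of the mapper's `detect*` helpers are specified here. As one example, runtime detection could be sketched as a check over well-known lockfile and config-file names; for simplicity the sketch operates on a plain file list rather than the full `RepositoryAnalysis` object, and the heuristic itself is an assumption:
```typescript
// Illustrative sketch of the detectRuntime helper referenced above.
// The lockfile/config-file heuristic is an assumption, not a specification.
type JsRuntime = "node" | "deno" | "bun" | "unknown";
function detectRuntime(files: string[]): JsRuntime {
  if (files.includes("deno.json") || files.includes("deno.jsonc")) return "deno";
  if (files.includes("bun.lockb")) return "bun";
  if (files.includes("package-lock.json") || files.includes("yarn.lock")) {
    return "node";
  }
  return "unknown";
}
```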
### DevOps and Infrastructure Tooling Support
#### DevOps Tool Detection and Content Generation
```typescript
interface DevOpsToolMapper {
detectDevOpsTools(analysis: RepositoryAnalysis): DevOpsToolProfile;
generateDevOpsContent(
tools: DevOpsToolProfile,
context: ProjectContext,
): DevOpsContent;
createInfrastructureDocumentation(
infrastructure: InfrastructureProfile,
deploymentPattern: DeploymentPattern,
): InfrastructureDocumentation;
}
interface DevOpsToolProfile {
containerization: ContainerTechnology[]; // Docker, Podman, Buildah
orchestration: OrchestrationTechnology[]; // Kubernetes, OpenShift, Nomad
cicd: CICDTechnology[]; // Tekton, GitHub Actions, Jenkins, GitLab CI
configuration: ConfigManagementTechnology[]; // Ansible, Terraform, Helm
monitoring: MonitoringTechnology[]; // Prometheus, Grafana, Jaeger
security: SecurityTechnology[]; // Falco, OPA, Vault
}
class DevOpsContentGenerator implements DevOpsToolMapper {
detectDevOpsTools(analysis: RepositoryAnalysis): DevOpsToolProfile {
return {
containerization: this.detectContainerization(analysis),
orchestration: this.detectOrchestration(analysis),
cicd: this.detectCICD(analysis),
configuration: this.detectConfigManagement(analysis),
monitoring: this.detectMonitoring(analysis),
security: this.detectSecurity(analysis),
};
}
private detectContainerization(
analysis: RepositoryAnalysis,
): ContainerTechnology[] {
const detected: ContainerTechnology[] = [];
// Docker detection
if (
this.hasFile(analysis, "Dockerfile") ||
this.hasFile(analysis, "docker-compose.yml") ||
this.hasFile(analysis, "docker-compose.yaml")
) {
detected.push({
name: "docker",
version: this.extractDockerVersion(analysis),
configFiles: this.getDockerFiles(analysis),
usage: this.analyzeDockerUsage(analysis),
});
}
// Podman detection
if (
this.hasFile(analysis, "Containerfile") ||
this.hasReference(analysis, "podman") ||
this.hasFile(analysis, "podman-compose.yml")
) {
detected.push({
name: "podman",
version: this.extractPodmanVersion(analysis),
configFiles: this.getPodmanFiles(analysis),
usage: this.analyzePodmanUsage(analysis),
});
}
return detected;
}
private detectOrchestration(
analysis: RepositoryAnalysis,
): OrchestrationTechnology[] {
const detected: OrchestrationTechnology[] = [];
// Kubernetes detection
if (
this.hasDirectory(analysis, "k8s/") ||
this.hasDirectory(analysis, "kubernetes/") ||
this.hasFilePattern(analysis, "*.yaml", "apiVersion: apps/v1") ||
this.hasFilePattern(analysis, "*.yml", "kind: Deployment")
) {
detected.push({
name: "kubernetes",
manifests: this.getKubernetesManifests(analysis),
resources: this.analyzeKubernetesResources(analysis),
namespaces: this.extractNamespaces(analysis),
});
}
// OpenShift detection
if (
this.hasDirectory(analysis, ".s2i/") ||
this.hasReference(analysis, "openshift") ||
this.hasFileContent(analysis, "kind: DeploymentConfig") ||
this.hasFileContent(analysis, "kind: Route")
) {
detected.push({
name: "openshift",
templates: this.getOpenShiftTemplates(analysis),
buildConfigs: this.getBuildConfigs(analysis),
routes: this.getRoutes(analysis),
});
}
return detected;
}
private detectCICD(analysis: RepositoryAnalysis): CICDTechnology[] {
const detected: CICDTechnology[] = [];
// Tekton detection
if (
this.hasDirectory(analysis, ".tekton/") ||
this.hasFileContent(analysis, "apiVersion: tekton.dev") ||
this.hasFilePattern(analysis, "*.yaml", "kind: Pipeline")
) {
detected.push({
name: "tekton",
pipelines: this.getTektonPipelines(analysis),
tasks: this.getTektonTasks(analysis),
triggers: this.getTektonTriggers(analysis),
});
}
return detected;
}
private detectConfigManagement(
analysis: RepositoryAnalysis,
): ConfigManagementTechnology[] {
const detected: ConfigManagementTechnology[] = [];
// Ansible detection
if (
this.hasFile(analysis, "ansible.cfg") ||
this.hasDirectory(analysis, "playbooks/") ||
this.hasDirectory(analysis, "roles/") ||
this.hasFile(analysis, "inventory") ||
this.hasFilePattern(analysis, "*.yml", "hosts:") ||
this.hasFilePattern(analysis, "*.yaml", "tasks:")
) {
detected.push({
name: "ansible",
playbooks: this.getAnsiblePlaybooks(analysis),
roles: this.getAnsibleRoles(analysis),
inventory: this.getAnsibleInventory(analysis),
vaultFiles: this.getAnsibleVault(analysis),
});
}
return detected;
}
}
```
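The detectors above lean on file-based predicates such as `hasFile` and `hasDirectory`, which this ADR does not define. Minimal sketches, assuming the analysis exposes a flat list of repository-relative paths (the actual `RepositoryAnalysis` schema is owned by the analyze_repository tool):
```typescript
// Illustrative sketches of the file-based predicates used by the detectors.
// The flat file-list shape below is an assumption for illustration only.
interface AnalyzedFileList {
  files: string[]; // repository-relative paths, e.g. "deploy/k8s/app.yaml"
}
function hasFile(analysis: AnalyzedFileList, fileName: string): boolean {
  return analysis.files.some((f) => f === fileName || f.endsWith(`/${fileName}`));
}
function hasDirectory(analysis: AnalyzedFileList, dirPath: string): boolean {
  return analysis.files.some((f) => f.startsWith(dirPath));
}
```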
#### DevOps-Specific Content Templates and Generation
**Key DevOps Documentation Patterns**:
- **Container Tutorials**: Project-specific Dockerfile optimization, multi-stage builds
- **Orchestration Guides**: Kubernetes/OpenShift deployment strategies
- **Infrastructure as Code**: Ansible playbooks for application deployment
- **CI/CD Pipelines**: Tekton pipeline configuration and best practices
```typescript
const DEVOPS_CONTENT_TEMPLATES = {
docker: {
tutorial: "Containerizing {projectName} with Docker",
howto: ["Optimize Docker Images", "Debug Container Issues"],
reference: "Dockerfile Configuration Reference",
explanation: "Container Architecture Decisions",
},
kubernetes: {
tutorial: "Deploying {projectName} to Kubernetes",
howto: ["Scale Applications", "Troubleshoot Deployments"],
reference: "Kubernetes Manifest Specifications",
explanation: "Orchestration Strategy",
},
ansible: {
tutorial: "Infrastructure as Code with Ansible",
howto: ["Automate Deployment", "Manage Multi-Environment"],
reference: "Playbook and Role Reference",
explanation: "Configuration Management Strategy",
},
tekton: {
tutorial: "CI/CD Pipeline with Tekton",
howto: ["Build and Deploy", "Manage Secrets"],
reference: "Pipeline Specifications",
explanation: "Cloud Native CI/CD Architecture",
},
};
function generateDevOpsContent(
  devopsProfile: DevOpsToolProfile,
  projectContext: ProjectContext,
): DevOpsContentPlan {
  // Generate project-specific DevOps documentation based on detected tools
  // and project characteristics. Sketch only: the plan shape is illustrative.
  const detectedTools = [
    ...devopsProfile.containerization,
    ...devopsProfile.orchestration,
    ...devopsProfile.cicd,
    ...devopsProfile.configuration,
  ].filter((tool) => tool.name in DEVOPS_CONTENT_TEMPLATES);
  return {
    sections: detectedTools.map((tool) => ({
      tool: tool.name,
      ...DEVOPS_CONTENT_TEMPLATES[tool.name],
      // Substitute the project name into templated tutorial titles.
      tutorial: DEVOPS_CONTENT_TEMPLATES[tool.name].tutorial.replace(
        "{projectName}",
        projectContext.projectName,
      ),
    })),
  };
}
```
### Community Contribution Framework for Language and Tool Support
#### Language Extension Architecture
```typescript
interface LanguageExtension {
name: string;
ecosystem: string;
packageManagers: string[];
detectionPatterns: DetectionPattern[];
frameworks: FrameworkDefinition[];
contentTemplates: LanguageContentTemplates;
validationRules: ValidationRule[];
}
interface DetectionPattern {
type: "file" | "dependency" | "import" | "content";
pattern: string | RegExp;
weight: number; // 1-10, higher = more confident
description: string;
}
interface FrameworkDefinition {
name: string;
detectionPatterns: DetectionPattern[];
contentTemplates: FrameworkContentTemplates;
codeExamples: CodeExampleGenerator;
bestPractices: BestPractice[];
}
```
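The `weight` field implies that pattern matches are aggregated into a confidence score. A minimal sketch of such scoring follows; the normalization and the 0.5 detection threshold are illustrative assumptions, not part of this ADR:
```typescript
// Sketch of aggregating DetectionPattern matches into a confidence score.
// The normalization and threshold below are illustrative assumptions.
function scoreLanguageMatch(
  extension: LanguageExtension,
  matches: (pattern: DetectionPattern) => boolean,
): { score: number; detected: boolean } {
  const total = extension.detectionPatterns.reduce((s, p) => s + p.weight, 0);
  const matched = extension.detectionPatterns
    .filter(matches)
    .reduce((s, p) => s + p.weight, 0);
  const score = total === 0 ? 0 : matched / total;
  // Treat the language as detected once matched weights pass half the maximum.
  return { score, detected: score >= 0.5 };
}
```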
#### Contribution Guidelines for New Language Support
##### Step 1: Language Detection Implementation
```typescript
// Example: Adding Go language support
const GO_LANGUAGE_EXTENSION: LanguageExtension = {
name: "go",
ecosystem: "go",
packageManagers: ["go mod", "dep"],
detectionPatterns: [
{
type: "file",
pattern: "go.mod",
weight: 10,
description: "Go module definition file",
},
{
type: "file",
pattern: "go.sum",
weight: 8,
description: "Go module checksums",
},
{
type: "file",
pattern: /.*\.go$/,
weight: 6,
description: "Go source files",
},
{
type: "content",
pattern: /^package main$/m,
weight: 7,
description: "Go main package declaration",
},
],
frameworks: [
// Framework definitions...
],
contentTemplates: {
// Content templates...
},
};
```
##### Step 2: Framework-Specific Content Templates
```typescript
// Example: Adding Gin framework support for Go
const GIN_FRAMEWORK: FrameworkDefinition = {
name: "gin",
detectionPatterns: [
{
type: "dependency",
pattern: "github.com/gin-gonic/gin",
weight: 10,
description: "Gin framework dependency",
},
{
type: "import",
pattern: 'gin "github.com/gin-gonic/gin"',
weight: 9,
description: "Gin framework import",
},
],
contentTemplates: {
tutorials: [
{
title: "Building REST APIs with Gin",
diataxisType: "tutorial",
sections: [
"Setting up Gin Application",
"Defining Routes and Handlers",
"Middleware Configuration",
"Database Integration",
"Testing Gin Applications",
],
prerequisites: [
"Go installed (1.19+)",
"Basic Go language knowledge",
"Understanding of HTTP concepts",
],
estimatedTime: "60 minutes",
difficulty: "beginner",
},
],
howToGuides: [
{
title: "Optimize Gin Performance",
diataxisType: "how-to",
tasks: [
"Configure connection pooling",
"Implement caching strategies",
"Set up rate limiting",
"Profile and benchmark endpoints",
],
},
],
reference: [
{
title: "Gin Router Configuration",
diataxisType: "reference",
sections: [
"Route definition patterns",
"Middleware registration",
"Context object methods",
"Error handling patterns",
],
},
],
explanation: [
{
title: "Gin Architecture and Design Decisions",
diataxisType: "explanation",
topics: [
"HTTP router performance characteristics",
"Middleware pipeline design",
"Context lifecycle management",
"Comparison with other Go frameworks",
],
},
],
},
codeExamples: {
basicServer: `package main
import (
"net/http"
"github.com/gin-gonic/gin"
)
func main() {
r := gin.Default()
r.GET("/health", func(c *gin.Context) {
c.JSON(http.StatusOK, gin.H{
"status": "healthy",
})
})
r.Run(":8080")
}`,
middleware: `func LoggerMiddleware() gin.HandlerFunc {
return func(c *gin.Context) {
start := time.Now()
c.Next()
duration := time.Since(start)
log.Printf("%s %s %v", c.Request.Method, c.Request.URL.Path, duration)
}
}`,
},
};
```
##### Step 3: Content Generation Logic
```typescript
class GoContentGenerator implements FrameworkContentGenerator {
detectFramework(analysis: RepositoryAnalysis): Framework[] {
const frameworks: Framework[] = [];
// Check for Gin framework
if (this.hasGoModule(analysis, "github.com/gin-gonic/gin")) {
frameworks.push({
name: "gin",
version: this.extractGoModuleVersion(
analysis,
"github.com/gin-gonic/gin",
),
configFiles: this.getGinConfigFiles(analysis),
routeStructure: this.analyzeGinRoutes(analysis),
});
}
// Check for Echo framework
if (this.hasGoModule(analysis, "github.com/labstack/echo")) {
frameworks.push({
name: "echo",
version: this.extractGoModuleVersion(
analysis,
"github.com/labstack/echo",
),
configFiles: this.getEchoConfigFiles(analysis),
routeStructure: this.analyzeEchoRoutes(analysis),
});
}
return frameworks;
}
generateFrameworkContent(
framework: Framework,
context: ProjectContext,
): FrameworkContent {
const templates = GO_LANGUAGE_EXTENSION.frameworks.find(
(f) => f.name === framework.name,
)?.contentTemplates;
if (!templates) return this.generateGenericGoContent(framework, context);
return this.populateTemplatesWithProjectContext(
templates,
framework,
context,
);
}
private generateProjectSpecificGoDockerfile(context: ProjectContext): string {
return `# Multi-stage build for ${context.projectName}
FROM golang:1.21-alpine AS builder
WORKDIR /app
COPY go.mod go.sum ./
RUN go mod download
COPY . .
RUN CGO_ENABLED=0 GOOS=linux go build -o main .
# Final stage
FROM alpine:latest
RUN apk --no-cache add ca-certificates
WORKDIR /root/
COPY --from=builder /app/main .
EXPOSE 8080
CMD ["./main"]`;
}
}
```
#### Contribution Process and Standards
##### Community Contribution Workflow
1. **Language Proposal**: Submit GitHub issue with language/framework proposal
2. **Detection Patterns**: Define comprehensive detection patterns
3. **Content Templates**: Create Diataxis-compliant content templates
4. **Code Examples**: Provide working, project-specific code examples
5. **Testing**: Include validation tests for detection and generation (see the test sketch after this list)
6. **Documentation**: Document contribution for future maintainers
7. **Review Process**: Community and maintainer review
8. **Integration**: Merge into main extension registry
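As a hedged illustration of the testing step above, the following Jest-style sketch shows what detection tests for a contributed Go extension might look like; the `detectLanguages` helper, its module path, and the fixture repositories are illustrative assumptions rather than existing project code.
```typescript
import { detectLanguages } from "../detection"; // assumed helper exported by the contribution
describe("Go language extension detection", () => {
  it("detects Go from go.mod with high confidence", async () => {
    // Fixture path is hypothetical; any repository containing go.mod would do.
    const result = await detectLanguages("./test/fixtures/go-gin-service");
    const go = result.find((lang) => lang.name === "go");
    expect(go).toBeDefined();
    expect(go!.confidence).toBeGreaterThan(0.9);
  });
  it("avoids false positives on a JavaScript-only repository", async () => {
    const result = await detectLanguages("./test/fixtures/js-express-service");
    expect(result.find((lang) => lang.name === "go")).toBeUndefined();
  });
});
```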
##### Quality Standards for Contributions
```typescript
interface ContributionStandards {
detection: {
minimumPatterns: 3;
requiredTypes: ["file", "dependency"];
weightDistribution: "balanced"; // No single pattern > 70% weight
falsePositiveRate: "<5%";
};
content: {
diataxisCompliance: "strict";
tutorialCount: "minimum 2";
howToGuideCount: "minimum 3";
referenceCompleteness: "80%";
explanationDepth: "architectural decisions covered";
};
codeExamples: {
compilationSuccess: "100%";
projectSpecific: "true";
bestPractices: "current industry standards";
securityConsiderations: "included";
};
testing: {
detectionAccuracy: ">90%";
contentGeneration: "functional tests";
integrationTests: "with existing systems";
performanceImpact: "<10% generation time increase";
};
}
```
##### Template Contribution Format
```typescript
// Required structure for new language contributions
interface LanguageContributionTemplate {
metadata: {
contributorName: string;
contributorEmail: string;
languageName: string;
version: string;
lastUpdated: string;
maintenanceCommitment: "ongoing" | "initial-only";
};
detection: DetectionPatternSet;
frameworks: FrameworkDefinition[];
contentTemplates: ContentTemplateSet;
validation: ValidationTestSuite;
documentation: ContributionDocumentation;
}
// Example contribution file structure:
// src/languages/
// ├── go/
// │ ├── detection.ts
// │ ├── frameworks/
// │ │ ├── gin.ts
// │ │ ├── echo.ts
// │ │ └── fiber.ts
// │ ├── templates/
// │ │ ├── tutorials.ts
// │ │ ├── howto.ts
// │ │ ├── reference.ts
// │ │ └── explanation.ts
// │ ├── tests/
// │ │ ├── detection.test.ts
// │ │ └── generation.test.ts
// │ └── README.md
```
#### Community Validation and Review Process
##### Automated Validation Pipeline
```typescript
interface ContributionValidation {
// Automated checks
syntaxValidation: "TypeScript compilation success";
patternTesting: "Detection accuracy against test repositories";
contentValidation: "Diataxis compliance checking";
performanceImpact: "Generation time benchmarking";
// Community review
peerReview: "Two community developer approvals";
maintainerReview: "Core team architectural review";
expertValidation: "Language expert accuracy verification";
// Integration testing
endToEndTesting: "Full workflow validation";
regressionTesting: "No impact on existing languages";
documentationReview: "Contribution documentation completeness";
}
```
##### Long-term Maintenance Framework
```typescript
interface MaintenanceFramework {
languageUpdates: {
frameworkVersions: "automated dependency tracking";
newFrameworks: "community contribution process";
deprecatedPatterns: "automated detection and flagging";
};
communityGovernance: {
languageMaintainers: "designated community experts";
updateProcess: "structured enhancement proposals";
qualityAssurance: "continuous validation and testing";
};
toolingSupport: {
contributionCLI: "automated scaffolding for new languages";
validationTools: "automated testing and verification";
documentationGeneration: "automated API documentation";
};
}
```
## Quality Assurance
### Content Validation Framework
```typescript
interface ContentValidator {
validateAccuracy(
content: GeneratedContent,
analysis: RepositoryAnalysis,
): ValidationResult;
checkDiataxisCompliance(content: GeneratedContent): ComplianceResult;
verifyCodeExamples(
examples: CodeExample[],
projectContext: ProjectContext,
): ValidationResult;
assessContentCompleteness(
content: GeneratedContent,
plan: ContentPlan,
): CompletenessResult;
}
interface ValidationResult {
isValid: boolean;
issues: ValidationIssue[];
suggestions: ImprovementSuggestion[];
confidence: number;
}
```
### Testing Strategy
```typescript
describe("ContentPopulationEngine", () => {
describe("Tutorial Generation", () => {
it(
"should generate appropriate getting started tutorial for Express.js project",
);
it("should include technology-specific setup steps");
it("should provide working code examples");
it("should maintain Diataxis tutorial principles");
});
describe("Technology Detection", () => {
it("should correctly identify primary framework from package.json");
it("should detect database dependencies and generate appropriate content");
it("should handle multi-framework projects appropriately");
});
describe("Content Quality", () => {
it("should generate accurate code examples that match project structure");
it("should maintain consistent tone and style across content types");
it("should create appropriate cross-references between content sections");
});
});
```
### Performance Requirements
- **Content Generation Time**: < 30 seconds for comprehensive population
- **Memory Usage**: < 500MB for large repository analysis and content generation
- **Content Quality**: 80%+ accuracy for generated technical content
- **Coverage**: Support for 15+ major JavaScript/TypeScript frameworks initially
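The time budget above can be expressed directly as a test guard. This is a minimal sketch, assuming a `populateContent` entry point and a fixture repository that are named here purely for illustration.
```typescript
import { populateContent } from "../src/tools/populate-content"; // assumed entry point
it("populates comprehensive content within the 30 second budget", async () => {
  const start = Date.now();
  await populateContent("./test/fixtures/express-service", {
    level: "comprehensive", // illustrative option name
  });
  expect(Date.now() - start).toBeLessThan(30_000);
}, 35_000); // Jest timeout slightly above the budget so the assertion, not the runner, fails
```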
## Integration Points
### Repository Analysis Integration (ADR-002)
- Leverage multi-layered analysis results for informed content generation
- Use complexity assessment to determine content depth and sophistication
- Integrate dependency analysis for framework-specific content selection
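For example, the complexity assessment could drive content depth through a simple mapping; the thresholds below are illustrative assumptions, not calibrated values.
```typescript
// Hedged sketch: map a normalized complexity score (0-1) from repository
// analysis to the depth of generated content.
type ContentDepth = "starter" | "standard" | "deep";
function contentDepthFor(complexityScore: number): ContentDepth {
  if (complexityScore < 0.3) return "starter"; // small, single-purpose projects
  if (complexityScore < 0.7) return "standard";
  return "deep"; // multi-service or framework-heavy repositories
}
```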
### Diataxis Framework Integration (ADR-004)
- Implement content planning intelligence outlined in ADR-004 lines 153-192
- Generate content that strictly adheres to Diataxis category principles
- Create appropriate cross-references and user journey flows
### MCP Tools API Integration (ADR-006)
- Add populate_diataxis_content as the seventh core MCP tool
- Maintain consistent error handling and response format patterns
- Integrate with existing setup_structure tool for seamless workflow
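A tool descriptor in the generic MCP shape might look like the sketch below; the parameter names and enum values are illustrative assumptions, and the actual schema would follow the conventions established in ADR-006.
```typescript
const populateDiataxisContentTool = {
  name: "populate_diataxis_content",
  description:
    "Populate an existing Diataxis structure with project-specific content",
  inputSchema: {
    type: "object",
    properties: {
      analysisId: {
        type: "string",
        description: "ID of a prior repository analysis (assumed parameter)",
      },
      docsPath: {
        type: "string",
        description: "Target documentation directory",
      },
      populationLevel: {
        type: "string",
        enum: ["basic", "comprehensive", "intelligent"], // illustrative values
      },
    },
    required: ["analysisId", "docsPath"],
  },
};
```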
### SSG Configuration Integration (ADR-006)
- Generate content with appropriate frontmatter for target SSG
- Adapt content format and structure to SSG capabilities
- Ensure generated content renders correctly across all supported SSGs
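As a minimal sketch of SSG-aware output, a frontmatter generator could branch on the configured generator; the keys below reflect common defaults for each SSG and are not tied to a specific DocuMCP implementation.
```typescript
type SupportedSSG = "docusaurus" | "hugo" | "mkdocs" | "jekyll";
function generateFrontmatter(
  ssg: SupportedSSG,
  title: string,
  description: string,
): string {
  switch (ssg) {
    case "hugo":
      return `---\ntitle: "${title}"\ndescription: "${description}"\ndraft: false\n---\n`;
    case "docusaurus":
      return `---\ntitle: ${title}\ndescription: ${description}\nsidebar_position: 1\n---\n`;
    case "jekyll":
      return `---\nlayout: page\ntitle: ${title}\n---\n`;
    case "mkdocs":
      // MkDocs reads most metadata from mkdocs.yml; page-level frontmatter is optional.
      return `---\ntitle: ${title}\n---\n`;
  }
}
```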
## Future Enhancements
### Advanced AI Integration
- **Large Language Model Integration**: Use specialized models for content refinement
- **Code Analysis AI**: Advanced analysis of project patterns for more accurate content
- **Natural Language Generation**: Improve content quality and readability
### Extended Technology Support
#### Python Ecosystem (Priority Implementation)
- **Web Frameworks**: Django, Flask, FastAPI, Pyramid, Bottle
- **Data Science**: Jupyter, Pandas, NumPy, SciPy documentation patterns
- **ML/AI**: TensorFlow, PyTorch, Scikit-learn integration guides
- **API Development**: Django REST Framework, FastAPI advanced patterns
- **Testing**: pytest, unittest, behave testing documentation
- **Deployment**: Gunicorn, uWSGI, Celery configuration guides
#### Additional Language Ecosystems
- **Go Ecosystem**: Gin, Echo, Fiber, Buffalo framework support
- **Rust Ecosystem**: Actix-web, Warp, Rocket, Axum content generation
- **Java Ecosystem**: Spring Boot, Quarkus, Micronaut, Play Framework
- **C# Ecosystem**: ASP.NET Core, Entity Framework, Blazor
- **Ruby Ecosystem**: Rails, Sinatra, Hanami framework support
- **PHP Ecosystem**: Laravel, Symfony, CodeIgniter patterns
### DevOps and Infrastructure Expansion
- **Extended Container Support**: Buildah, Skopeo, LXC/LXD integration
- **Advanced Orchestration**: Nomad, Docker Swarm, Cloud Foundry support
- **CI/CD Platforms**: Jenkins, GitLab CI, Azure DevOps, CircleCI integration
- **Infrastructure Tools**: Terraform, Pulumi, CloudFormation content generation
- **Service Mesh**: Istio, Linkerd, Consul Connect documentation patterns
- **Monitoring Stack**: Prometheus, Grafana, ELK Stack, Jaeger integration guides
### Community and Learning Features
- **Content Quality Feedback**: User ratings and improvement suggestions
- **Template Sharing**: Community-contributed content templates
- **Usage Analytics**: Track which content types provide the most value
- **Personalization**: Adapt content style to team preferences and expertise level
### Community Ecosystem and Contributions
- **Language Extension Registry**: Centralized repository for community language support
- **Contribution Tooling**: CLI tools for scaffolding new language extensions
- **Validation Pipeline**: Automated testing and quality assurance for contributions
- **Community Governance**: Language maintainer program and review processes
- **Documentation Portal**: Comprehensive guides for extending DocuMCP capabilities
- **Template Marketplace**: Sharing and discovery of specialized content templates
### Enterprise Features
- **Custom Content Standards**: Organization-specific content templates and style guides
- **Multi-language Support**: Generate content in multiple languages
- **Integration APIs**: Connect with existing documentation management systems
- **Approval Workflows**: Review and approval processes for generated content
## Success Metrics
### User Value Metrics
- **Time to Usable Documentation**: Target < 30 minutes (vs. 8-20 hours manually)
- **Content Completeness**: 60-80% populated content out of the box
- **User Satisfaction**: 85%+ positive feedback on generated content quality
- **Adoption Rate**: 90%+ of users use content population vs. structure-only
### Technical Metrics
- **Content Accuracy**: 80%+ technical accuracy for generated code examples
- **Framework Coverage**: Support 95% of detected JavaScript/TypeScript frameworks
- **DevOps Tool Coverage**: Support 90% of detected containerization and orchestration tools
- **Performance**: Content generation completes within 30 seconds
- **Error Rate**: < 5% content generation failures
### Business Metrics
- **Competitive Differentiation**: Only tool providing intelligent content population
- **Market Position**: Establish DocuMCP as "intelligent documentation assistant"
- **User Retention**: Move users from structure-only setup to full content-workflow adoption
- **Community Growth**: Attract technical writers and documentation specialists
## References
- [ADR-002: Multi-Layered Repository Analysis Engine Design](002-repository-analysis-engine.md)
- [ADR-004: Diataxis Framework Integration](004-diataxis-framework-integration.md)
- [ADR-006: MCP Tools API Design](006-mcp-tools-api-design.md)
- [Diataxis Framework Documentation](https://diataxis.fr/)
- [Technical Writing Best Practices](https://developers.google.com/tech-writing)
- [Documentation as Code Principles](https://www.writethedocs.org/guide/docs-as-code/)
```
--------------------------------------------------------------------------------
/src/memory/export-import.ts:
--------------------------------------------------------------------------------
```typescript
/**
* Memory Export/Import System for DocuMCP
* Comprehensive data portability, backup, and migration capabilities
*/
import { EventEmitter } from "events";
import { promises as fs } from "fs";
import { createWriteStream } from "fs";
import { MemoryEntry, JSONLStorage } from "./storage.js";
import { MemoryManager } from "./manager.js";
import { IncrementalLearningSystem } from "./learning.js";
import { KnowledgeGraph } from "./knowledge-graph.js";
import { MemoryPruningSystem } from "./pruning.js";
export interface ExportOptions {
format: "json" | "jsonl" | "csv" | "xml" | "yaml" | "sqlite" | "archive";
compression?: "gzip" | "zip" | "none";
includeMetadata: boolean;
includeLearning: boolean;
includeKnowledgeGraph: boolean;
filters?: {
types?: string[];
dateRange?: { start: Date; end: Date };
projects?: string[];
tags?: string[];
outcomes?: string[];
};
anonymize?: {
enabled: boolean;
fields: string[];
method: "hash" | "remove" | "pseudonymize";
};
encryption?: {
enabled: boolean;
algorithm: "aes-256-gcm" | "aes-192-gcm" | "aes-128-gcm";
password?: string;
};
}
export interface ImportOptions {
format: "json" | "jsonl" | "csv" | "xml" | "yaml" | "sqlite" | "archive";
mode: "merge" | "replace" | "append" | "update";
validation: "strict" | "loose" | "none";
conflictResolution: "skip" | "overwrite" | "merge" | "rename";
backup: boolean;
dryRun: boolean;
mapping?: Record<string, string>; // Field mapping for different schemas
transformation?: {
enabled: boolean;
rules: Array<{
field: string;
operation: "convert" | "transform" | "validate";
params: any;
}>;
};
}
export interface ExportResult {
success: boolean;
filePath?: string;
format: string;
size: number;
entries: number;
metadata: {
exportedAt: Date;
version: string;
source: string;
includes: string[];
compression?: string;
encryption?: boolean;
};
warnings: string[];
errors: string[];
}
export interface ImportResult {
success: boolean;
processed: number;
imported: number;
skipped: number;
errors: number;
errorDetails: string[]; // Detailed error messages
conflicts: number;
validation: {
valid: number;
invalid: number;
warnings: string[];
};
summary: {
newEntries: number;
updatedEntries: number;
duplicateEntries: number;
failedEntries: number;
};
metadata: {
importedAt: Date;
source: string;
format: string;
mode: string;
};
}
export interface MigrationPlan {
sourceSystem: string;
targetSystem: string;
mapping: Record<string, string>;
transformations: Array<{
field: string;
type: "rename" | "convert" | "merge" | "split" | "calculate";
source: string | string[];
target: string;
operation?: string;
}>;
validation: Array<{
field: string;
rules: string[];
required: boolean;
}>;
postProcessing: string[];
}
export interface ArchiveMetadata {
version: string;
created: Date;
source: string;
description: string;
manifest: {
files: Array<{
name: string;
type: string;
size: number;
checksum: string;
entries?: number;
}>;
total: {
files: number;
size: number;
entries: number;
};
};
options: ExportOptions;
}
export class MemoryExportImportSystem extends EventEmitter {
private storage: JSONLStorage;
private manager: MemoryManager;
private learningSystem: IncrementalLearningSystem;
private knowledgeGraph: KnowledgeGraph;
private pruningSystem?: MemoryPruningSystem;
private readonly version = "1.0.0";
constructor(
storage: JSONLStorage,
manager: MemoryManager,
learningSystem: IncrementalLearningSystem,
knowledgeGraph: KnowledgeGraph,
pruningSystem?: MemoryPruningSystem,
) {
super();
this.storage = storage;
this.manager = manager;
this.learningSystem = learningSystem;
this.knowledgeGraph = knowledgeGraph;
this.pruningSystem = pruningSystem;
}
/**
* Export memory data to specified format
*/
async exportMemories(
outputPath: string,
options: Partial<ExportOptions> = {},
): Promise<ExportResult> {
const defaultOptions: ExportOptions = {
format: "json",
compression: "none",
includeMetadata: true,
includeLearning: true,
includeKnowledgeGraph: true,
anonymize: {
enabled: false,
fields: ["userId", "email", "personalInfo"],
method: "hash",
},
encryption: {
enabled: false,
algorithm: "aes-256-gcm",
},
};
const activeOptions = { ...defaultOptions, ...options };
const startTime = Date.now();
this.emit("export_started", { outputPath, options: activeOptions });
try {
// Get filtered entries
const entries = await this.getFilteredEntries(activeOptions.filters);
// Prepare export data
const exportData = await this.prepareExportData(entries, activeOptions);
// Apply anonymization if enabled
if (activeOptions.anonymize?.enabled) {
this.applyAnonymization(exportData, activeOptions.anonymize);
}
// Prepare output path - if compression is requested, use temp file first
let actualOutputPath = outputPath;
if (activeOptions.compression && activeOptions.compression !== "none") {
// For compressed exports, export to temp file first
if (outputPath.endsWith(".gz")) {
actualOutputPath = outputPath.slice(0, -3); // Remove .gz suffix
} else if (outputPath.endsWith(".zip")) {
actualOutputPath = outputPath.slice(0, -4); // Remove .zip suffix
}
}
// Export to specified format
let filePath: string;
let size = 0;
switch (activeOptions.format) {
case "json":
filePath = await this.exportToJSON(
actualOutputPath,
exportData,
activeOptions,
);
break;
case "jsonl":
filePath = await this.exportToJSONL(
actualOutputPath,
exportData,
activeOptions,
);
break;
case "csv":
filePath = await this.exportToCSV(
actualOutputPath,
exportData,
activeOptions,
);
break;
case "xml":
filePath = await this.exportToXML(
actualOutputPath,
exportData,
activeOptions,
);
break;
case "yaml":
filePath = await this.exportToYAML(
actualOutputPath,
exportData,
activeOptions,
);
break;
case "sqlite":
filePath = await this.exportToSQLite(
actualOutputPath,
exportData,
activeOptions,
);
break;
case "archive":
filePath = await this.exportToArchive(
actualOutputPath,
exportData,
activeOptions,
);
break;
default:
throw new Error(`Unsupported export format: ${activeOptions.format}`);
}
// Apply compression if specified
if (activeOptions.compression && activeOptions.compression !== "none") {
filePath = await this.applyCompression(
filePath,
activeOptions.compression,
outputPath,
);
}
// Apply encryption if enabled
if (activeOptions.encryption?.enabled) {
filePath = await this.applyEncryption(
filePath,
activeOptions.encryption,
);
}
// Get file size
const stats = await fs.stat(filePath);
size = stats.size;
const result: ExportResult = {
success: true,
filePath,
format: activeOptions.format,
size,
entries: entries.length,
metadata: {
exportedAt: new Date(),
version: this.version,
source: "DocuMCP Memory System",
includes: this.getIncludedComponents(activeOptions),
compression:
activeOptions.compression !== "none"
? activeOptions.compression
: undefined,
encryption: activeOptions.encryption?.enabled,
},
warnings: [],
errors: [],
};
this.emit("export_completed", {
result,
duration: Date.now() - startTime,
});
return result;
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : String(error);
this.emit("export_error", { error: errorMessage });
return {
success: false,
format: activeOptions.format,
size: 0,
entries: 0,
metadata: {
exportedAt: new Date(),
version: this.version,
source: "DocuMCP Memory System",
includes: [],
},
warnings: [],
errors: [errorMessage],
};
}
}
/**
* Import memory data from specified source
*/
async importMemories(
inputPath: string,
options: Partial<ImportOptions> = {},
): Promise<ImportResult> {
const defaultOptions: ImportOptions = {
format: "json",
mode: "merge",
validation: "strict",
conflictResolution: "skip",
backup: true,
dryRun: false,
};
const activeOptions = { ...defaultOptions, ...options };
const startTime = Date.now();
this.emit("import_started", { inputPath, options: activeOptions });
try {
// Create backup if requested
if (activeOptions.backup && !activeOptions.dryRun) {
await this.createBackup();
}
// Detect and verify format
const detectedFormat = await this.detectFormat(inputPath);
if (detectedFormat !== activeOptions.format) {
this.emit("format_mismatch", {
detected: detectedFormat,
specified: activeOptions.format,
});
}
// Load and parse import data
const importData = await this.loadImportData(inputPath, activeOptions);
// Validate import data
const validationResult = await this.validateImportData(
importData,
activeOptions,
);
if (
validationResult.invalid > 0 &&
activeOptions.validation === "strict"
) {
throw new Error(
`Validation failed: ${validationResult.invalid} invalid entries`,
);
}
// Process import data
const result = await this.processImportData(importData, activeOptions);
this.emit("import_completed", {
result,
duration: Date.now() - startTime,
});
return result;
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : String(error);
this.emit("import_error", { error: errorMessage });
return {
success: false,
processed: 0,
imported: 0,
skipped: 0,
errors: 1,
errorDetails: [errorMessage],
conflicts: 0,
validation: {
valid: 0,
invalid: 0,
warnings: [],
},
summary: {
newEntries: 0,
updatedEntries: 0,
duplicateEntries: 0,
failedEntries: 0,
},
metadata: {
importedAt: new Date(),
source: inputPath,
format: activeOptions.format,
mode: activeOptions.mode,
},
};
}
}
/**
* Create migration plan between different systems
*/
async createMigrationPlan(
sourceSchema: any,
targetSchema: any,
options?: {
autoMap?: boolean;
preserveStructure?: boolean;
customMappings?: Record<string, string>;
},
): Promise<MigrationPlan> {
const plan: MigrationPlan = {
sourceSystem: sourceSchema.system || "Unknown",
targetSystem: "DocuMCP",
mapping: {},
transformations: [],
validation: [],
postProcessing: [],
};
// Auto-generate field mappings
if (options?.autoMap !== false) {
plan.mapping = this.generateFieldMappings(sourceSchema, targetSchema);
}
// Apply custom mappings
if (options?.customMappings) {
Object.assign(plan.mapping, options.customMappings);
}
// Generate transformations
plan.transformations = this.generateTransformations(
sourceSchema,
targetSchema,
plan.mapping,
);
// Generate validation rules
plan.validation = this.generateValidationRules(targetSchema);
// Generate post-processing steps
plan.postProcessing = this.generatePostProcessingSteps(targetSchema);
return plan;
}
/**
* Execute migration plan
*/
async executeMigration(
inputPath: string,
migrationPlan: MigrationPlan,
options?: Partial<ImportOptions>,
): Promise<ImportResult> {
this.emit("migration_started", { inputPath, plan: migrationPlan });
try {
// Load source data
const sourceData = await this.loadRawData(inputPath);
// Apply transformations
const transformedData = await this.applyTransformations(
sourceData,
migrationPlan,
);
// Convert to import format
const importData = this.convertToImportFormat(
transformedData,
migrationPlan,
);
// Execute import with migration settings
const importOptions: ImportOptions = {
format: "json",
mode: "merge",
validation: "strict",
conflictResolution: "merge",
backup: true,
dryRun: false,
...options,
transformation: {
enabled: true,
rules: migrationPlan.transformations.map((t) => ({
field: t.target,
operation: t.type as any,
params: { source: t.source, operation: t.operation },
})),
},
};
const result = await this.processImportData(importData, importOptions);
// Execute post-processing
if (migrationPlan.postProcessing.length > 0) {
await this.executePostProcessing(migrationPlan.postProcessing);
}
this.emit("migration_completed", { result });
return result;
} catch (error) {
this.emit("migration_error", {
error: error instanceof Error ? error.message : String(error),
});
throw error;
}
}
/**
* Get supported formats
*/
getSupportedFormats(): {
export: string[];
import: string[];
compression: string[];
encryption: string[];
} {
return {
export: ["json", "jsonl", "csv", "xml", "yaml", "sqlite", "archive"],
import: ["json", "jsonl", "csv", "xml", "yaml", "sqlite", "archive"],
compression: ["gzip", "zip", "none"],
encryption: ["aes-256-gcm", "aes-192-gcm", "aes-128-gcm"],
};
}
/**
* Validate export/import compatibility
*/
async validateCompatibility(
sourcePath: string,
_targetSystem: string = "DocuMCP",
): Promise<{
compatible: boolean;
issues: string[];
recommendations: string[];
migrationRequired: boolean;
}> {
try {
const format = await this.detectFormat(sourcePath);
const sampleData = await this.loadSampleData(sourcePath, format);
const issues: string[] = [];
const recommendations: string[] = [];
let compatible = true;
let migrationRequired = false;
// Check format compatibility
if (!this.getSupportedFormats().import.includes(format)) {
issues.push(`Unsupported format: ${format}`);
compatible = false;
}
// Check schema compatibility
const schemaIssues = this.validateSchema(sampleData);
if (schemaIssues.length > 0) {
issues.push(...schemaIssues);
migrationRequired = true;
}
// Check data integrity
const integrityIssues = this.validateDataIntegrity(sampleData);
if (integrityIssues.length > 0) {
issues.push(...integrityIssues);
recommendations.push("Consider data cleaning before import");
}
// Generate recommendations
if (migrationRequired) {
recommendations.push("Create migration plan for schema transformation");
}
if (format === "csv") {
recommendations.push(
"Consider using JSON or JSONL for better data preservation",
);
}
return {
compatible,
issues,
recommendations,
migrationRequired,
};
} catch (error) {
return {
compatible: false,
issues: [error instanceof Error ? error.message : String(error)],
recommendations: ["Verify file format and accessibility"],
migrationRequired: false,
};
}
}
/**
* Private helper methods
*/
private async getFilteredEntries(
filters?: ExportOptions["filters"],
): Promise<MemoryEntry[]> {
let entries = await this.storage.getAll();
if (!filters) return entries;
// Apply type filter
if (filters.types && filters.types.length > 0) {
entries = entries.filter((entry) => filters.types!.includes(entry.type));
}
// Apply date range filter
if (filters.dateRange) {
entries = entries.filter((entry) => {
const entryDate = new Date(entry.timestamp);
return (
entryDate >= filters.dateRange!.start &&
entryDate <= filters.dateRange!.end
);
});
}
// Apply project filter
if (filters.projects && filters.projects.length > 0) {
entries = entries.filter((entry) =>
filters.projects!.some(
(project) =>
entry.data.projectPath?.includes(project) ||
entry.data.projectId === project,
),
);
}
// Apply tags filter
if (filters.tags && filters.tags.length > 0) {
entries = entries.filter(
(entry) => entry.tags?.some((tag) => filters.tags!.includes(tag)),
);
}
// Apply outcomes filter
if (filters.outcomes && filters.outcomes.length > 0) {
entries = entries.filter(
(entry) =>
filters.outcomes!.includes(entry.data.outcome) ||
(entry.data.success === true &&
filters.outcomes!.includes("success")) ||
(entry.data.success === false &&
filters.outcomes!.includes("failure")),
);
}
return entries;
}
private async prepareExportData(
entries: MemoryEntry[],
options: ExportOptions,
): Promise<any> {
const exportData: any = {
metadata: {
version: this.version,
exportedAt: new Date().toISOString(),
source: "DocuMCP Memory System",
entries: entries.length,
options: {
includeMetadata: options.includeMetadata,
includeLearning: options.includeLearning,
includeKnowledgeGraph: options.includeKnowledgeGraph,
},
},
memories: entries,
};
// Include learning data if requested
if (options.includeLearning) {
const patterns = await this.learningSystem.getPatterns();
exportData.learning = {
patterns,
statistics: await this.learningSystem.getStatistics(),
};
}
// Include knowledge graph if requested
if (options.includeKnowledgeGraph) {
const nodes = await this.knowledgeGraph.getAllNodes();
const edges = await this.knowledgeGraph.getAllEdges();
exportData.knowledgeGraph = {
nodes,
edges,
statistics: await this.knowledgeGraph.getStatistics(),
};
}
return exportData;
}
private applyAnonymization(
data: any,
anonymizeOptions: { fields: string[]; method: string },
): void {
const anonymizeValue = (value: any, method: string): any => {
if (typeof value !== "string") return value;
switch (method) {
case "hash":
return this.hashValue(value);
case "remove":
return null;
case "pseudonymize":
return this.pseudonymizeValue(value);
default:
return value;
}
};
const anonymizeObject = (obj: any): void => {
for (const [key, value] of Object.entries(obj)) {
if (anonymizeOptions.fields.includes(key)) {
obj[key] = anonymizeValue(value, anonymizeOptions.method);
} else if (typeof value === "object" && value !== null) {
anonymizeObject(value);
}
}
};
anonymizeObject(data);
}
private hashValue(value: string): string {
// Simple hash - in production, use a proper cryptographic hash
let hash = 0;
for (let i = 0; i < value.length; i++) {
const char = value.charCodeAt(i);
hash = (hash << 5) - hash + char;
hash = hash & hash;
}
return `hash_${Math.abs(hash).toString(36)}`;
}
private pseudonymizeValue(_value: string): string {
// Simple pseudonymization - in production, use proper techniques
const prefixes = ["user", "project", "system", "item"];
const suffix = Math.random().toString(36).substr(2, 8);
const prefix = prefixes[Math.floor(Math.random() * prefixes.length)];
return `${prefix}_${suffix}`;
}
private async exportToJSON(
outputPath: string,
data: any,
_options: ExportOptions,
): Promise<string> {
const jsonData = JSON.stringify(data, null, 2);
// Handle compression-aware file paths (e.g., file.json.gz)
let filePath = outputPath;
if (!outputPath.includes(".json")) {
filePath = `${outputPath}.json`;
}
await fs.writeFile(filePath, jsonData, "utf8");
return filePath;
}
private async exportToJSONL(
outputPath: string,
data: any,
_options: ExportOptions,
): Promise<string> {
const filePath = outputPath.endsWith(".jsonl")
? outputPath
: `${outputPath}.jsonl`;
return new Promise((resolve, reject) => {
const writeStream = createWriteStream(filePath);
writeStream.on("error", (error) => {
reject(error);
});
writeStream.on("finish", () => {
resolve(filePath);
});
// Write metadata as first line
writeStream.write(JSON.stringify(data.metadata) + "\n");
// Write each memory entry as a separate line
for (const entry of data.memories) {
writeStream.write(JSON.stringify(entry) + "\n");
}
// Write learning data if included
if (data.learning) {
writeStream.write(
JSON.stringify({ type: "learning", data: data.learning }) + "\n",
);
}
// Write knowledge graph if included
if (data.knowledgeGraph) {
writeStream.write(
JSON.stringify({
type: "knowledgeGraph",
data: data.knowledgeGraph,
}) + "\n",
);
}
writeStream.end();
});
}
private async exportToCSV(
outputPath: string,
data: any,
_options: ExportOptions,
): Promise<string> {
const filePath = outputPath.endsWith(".csv")
? outputPath
: `${outputPath}.csv`;
// Flatten memory entries for CSV format
const flattenedEntries = data.memories.map((entry: MemoryEntry) => ({
id: entry.id,
timestamp: entry.timestamp,
type: entry.type,
projectPath: entry.data.projectPath || "",
projectId: entry.data.projectId || "",
language: entry.data.language || "",
framework: entry.data.framework || "",
outcome: entry.data.outcome || "",
success: entry.data.success || false,
tags: entry.tags?.join(";") || "",
metadata: JSON.stringify(entry.metadata || {}),
}));
// Generate CSV headers
const headers = Object.keys(flattenedEntries[0] || {});
const csvLines = [headers.join(",")];
// Generate CSV rows
for (const entry of flattenedEntries) {
const row = headers.map((header) => {
const value = entry[header as keyof typeof entry];
const stringValue =
typeof value === "string" ? value : JSON.stringify(value);
return `"${stringValue.replace(/"/g, '""')}"`;
});
csvLines.push(row.join(","));
}
await fs.writeFile(filePath, csvLines.join("\n"), "utf8");
return filePath;
}
private async exportToXML(
outputPath: string,
data: any,
_options: ExportOptions,
): Promise<string> {
const filePath = outputPath.endsWith(".xml")
? outputPath
: `${outputPath}.xml`;
const xmlData = this.convertToXML(data);
await fs.writeFile(filePath, xmlData, "utf8");
return filePath;
}
private async exportToYAML(
outputPath: string,
data: any,
_options: ExportOptions,
): Promise<string> {
const filePath = outputPath.endsWith(".yaml")
? outputPath
: `${outputPath}.yaml`;
// Simple YAML conversion - in production, use a proper YAML library
const yamlData = this.convertToYAML(data);
await fs.writeFile(filePath, yamlData, "utf8");
return filePath;
}
private async exportToSQLite(
_outputPath: string,
_data: any,
_options: ExportOptions,
): Promise<string> {
// This would require a SQLite library like better-sqlite3
// For now, throw an error indicating additional dependencies needed
throw new Error(
"SQLite export requires additional dependencies (better-sqlite3)",
);
}
private async exportToArchive(
outputPath: string,
data: any,
options: ExportOptions,
): Promise<string> {
const archivePath = outputPath.endsWith(".tar")
? outputPath
: `${outputPath}.tar`;
// Create archive metadata
const metadata: ArchiveMetadata = {
version: this.version,
created: new Date(),
source: "DocuMCP Memory System",
description: "Complete memory system export archive",
manifest: {
files: [],
total: { files: 0, size: 0, entries: data.memories.length },
},
options,
};
// This would require archiving capabilities
// For now, create multiple files and reference them in metadata
const baseDir = archivePath.replace(".tar", "");
await fs.mkdir(baseDir, { recursive: true });
// Export memories as JSON
const memoriesPath = `${baseDir}/memories.json`;
await this.exportToJSON(memoriesPath, { memories: data.memories }, options);
metadata.manifest.files.push({
name: "memories.json",
type: "memories",
size: (await fs.stat(memoriesPath)).size,
checksum: "sha256-placeholder",
entries: data.memories.length,
});
// Export learning data if included
if (data.learning) {
const learningPath = `${baseDir}/learning.json`;
await this.exportToJSON(learningPath, data.learning, options);
metadata.manifest.files.push({
name: "learning.json",
type: "learning",
size: (await fs.stat(learningPath)).size,
checksum: "sha256-placeholder",
});
}
// Export knowledge graph if included
if (data.knowledgeGraph) {
const kgPath = `${baseDir}/knowledge-graph.json`;
await this.exportToJSON(kgPath, data.knowledgeGraph, options);
metadata.manifest.files.push({
name: "knowledge-graph.json",
type: "knowledge-graph",
size: (await fs.stat(kgPath)).size,
checksum: "sha256-placeholder",
});
}
// Write metadata
const metadataPath = `${baseDir}/metadata.json`;
await this.exportToJSON(metadataPath, metadata, options);
return baseDir;
}
private async applyCompression(
filePath: string,
compression: string,
targetPath?: string,
): Promise<string> {
if (compression === "gzip") {
const compressedPath = targetPath || `${filePath}.gz`;
const content = await fs.readFile(filePath, "utf8");
// Simple mock compression - just add a header and write the content
await fs.writeFile(compressedPath, `GZIP_HEADER\n${content}`, "utf8");
// Clean up temp file if we used one
if (targetPath && targetPath !== filePath) {
await fs.unlink(filePath);
}
return compressedPath;
}
// For other compression types or 'none', return original path
this.emit("compression_skipped", {
reason: "Not implemented",
compression,
});
return filePath;
}
private async applyEncryption(
filePath: string,
encryption: any,
): Promise<string> {
// This would require encryption capabilities
// For now, return the original path
this.emit("encryption_skipped", { reason: "Not implemented", encryption });
return filePath;
}
private getIncludedComponents(options: ExportOptions): string[] {
const components = ["memories"];
if (options.includeMetadata) components.push("metadata");
if (options.includeLearning) components.push("learning");
if (options.includeKnowledgeGraph) components.push("knowledge-graph");
return components;
}
private async detectFormat(filePath: string): Promise<string> {
const extension = filePath.split(".").pop()?.toLowerCase();
switch (extension) {
case "json":
return "json";
case "jsonl":
return "jsonl";
case "csv":
return "csv";
case "xml":
return "xml";
case "yaml":
case "yml":
return "yaml";
case "db":
case "sqlite":
return "sqlite";
case "tar":
case "zip":
return "archive";
default: {
// Try to detect by content
const content = await fs.readFile(filePath, "utf8");
if (content.trim().startsWith("{") || content.trim().startsWith("[")) {
return "json";
}
if (content.includes("<?xml")) {
return "xml";
}
return "unknown";
}
}
}
private async loadImportData(
filePath: string,
options: ImportOptions,
): Promise<any> {
switch (options.format) {
case "json":
return JSON.parse(await fs.readFile(filePath, "utf8"));
case "jsonl":
return this.loadJSONLData(filePath);
case "csv":
return this.loadCSVData(filePath);
case "xml":
return this.loadXMLData(filePath);
case "yaml":
return this.loadYAMLData(filePath);
default:
throw new Error(`Unsupported import format: ${options.format}`);
}
}
private async loadJSONLData(filePath: string): Promise<any> {
const content = await fs.readFile(filePath, "utf8");
const lines = content.trim().split("\n");
const data: any = { memories: [], learning: null, knowledgeGraph: null };
for (const line of lines) {
const parsed = JSON.parse(line);
if (parsed.type === "learning") {
data.learning = parsed.data;
} else if (parsed.type === "knowledgeGraph") {
data.knowledgeGraph = parsed.data;
} else if (parsed.version) {
data.metadata = parsed;
} else {
data.memories.push(parsed);
}
}
return data;
}
private async loadCSVData(filePath: string): Promise<any> {
const content = await fs.readFile(filePath, "utf8");
const lines = content.trim().split("\n");
const headers = lines[0].split(",").map((h) => h.replace(/"/g, ""));
const memories = [];
for (let i = 1; i < lines.length; i++) {
const values = this.parseCSVLine(lines[i]);
const entry: any = {};
for (let j = 0; j < headers.length; j++) {
const header = headers[j];
const value = values[j];
// Parse special fields
if (header === "tags") {
entry.tags = value ? value.split(";") : [];
} else if (header === "metadata") {
try {
entry.metadata = JSON.parse(value);
} catch {
entry.metadata = {};
}
} else if (header === "success") {
entry.data = entry.data || {};
entry.data.success = value === "true";
} else if (
[
"projectPath",
"projectId",
"language",
"framework",
"outcome",
].includes(header)
) {
entry.data = entry.data || {};
entry.data[header] = value;
} else {
entry[header] = value;
}
}
memories.push(entry);
}
return { memories };
}
private parseCSVLine(line: string): string[] {
const values: string[] = [];
let current = "";
let inQuotes = false;
for (let i = 0; i < line.length; i++) {
const char = line[i];
if (char === '"') {
if (inQuotes && line[i + 1] === '"') {
current += '"';
i++;
} else {
inQuotes = !inQuotes;
}
} else if (char === "," && !inQuotes) {
values.push(current);
current = "";
} else {
current += char;
}
}
values.push(current);
return values;
}
private async loadXMLData(_filePath: string): Promise<any> {
// This would require an XML parser
throw new Error("XML import requires additional dependencies (xml2js)");
}
private async loadYAMLData(_filePath: string): Promise<any> {
// This would require a YAML parser
throw new Error("YAML import requires additional dependencies (js-yaml)");
}
private async validateImportData(
data: any,
options: ImportOptions,
): Promise<{ valid: number; invalid: number; warnings: string[] }> {
const result = { valid: 0, invalid: 0, warnings: [] as string[] };
if (!data.memories || !Array.isArray(data.memories)) {
result.warnings.push("No memories array found in import data");
return result;
}
for (const entry of data.memories) {
if (this.validateMemoryEntry(entry, options.validation)) {
result.valid++;
} else {
result.invalid++;
}
}
return result;
}
private validateMemoryEntry(entry: any, validation: string): boolean {
// Check for completely missing or null required fields
if (
!entry.id ||
!entry.timestamp ||
entry.type === null ||
entry.type === undefined ||
entry.data === null
) {
return false; // These are invalid regardless of validation level
}
if (!entry.type) {
return validation !== "strict";
}
if (validation === "strict") {
return Boolean(entry.data && typeof entry.data === "object");
}
// For loose validation, still require data to be defined (not null)
if (validation === "loose" && entry.data === null) {
return false;
}
return true;
}
private async processImportData(
data: any,
options: ImportOptions,
): Promise<ImportResult> {
const result: ImportResult = {
success: true,
processed: 0,
imported: 0,
skipped: 0,
errors: 0,
errorDetails: [],
conflicts: 0,
validation: { valid: 0, invalid: 0, warnings: [] },
summary: {
newEntries: 0,
updatedEntries: 0,
duplicateEntries: 0,
failedEntries: 0,
},
metadata: {
importedAt: new Date(),
source: "imported data",
format: options.format,
mode: options.mode,
},
};
if (!data.memories || !Array.isArray(data.memories)) {
result.success = false;
result.errors = 1;
result.errorDetails = ["No valid memories array found in import data"];
return result;
}
for (const entry of data.memories) {
result.processed++;
try {
// Apply transformations and mappings
let transformedEntry = { ...entry };
if (options.mapping || options.transformation?.enabled) {
transformedEntry = this.applyDataTransformations(entry, options);
}
if (!this.validateMemoryEntry(transformedEntry, options.validation)) {
result.validation.invalid++;
result.errors++;
result.summary.failedEntries++;
result.errorDetails.push(
`Invalid memory entry: ${
transformedEntry.id || "unknown"
} - validation failed`,
);
continue;
}
result.validation.valid++;
// Check for conflicts
const existing = await this.storage.get(transformedEntry.id);
if (existing) {
result.conflicts++;
switch (options.conflictResolution) {
case "skip":
result.skipped++;
result.summary.duplicateEntries++;
continue;
case "overwrite":
if (!options.dryRun) {
await this.storage.update(
transformedEntry.id,
transformedEntry,
);
result.imported++;
result.summary.updatedEntries++;
}
break;
case "merge":
if (!options.dryRun) {
const merged = this.mergeEntries(existing, transformedEntry);
await this.storage.update(transformedEntry.id, merged);
result.imported++;
result.summary.updatedEntries++;
}
break;
case "rename": {
const newId = `${transformedEntry.id}_imported_${Date.now()}`;
if (!options.dryRun) {
await this.storage.store({ ...transformedEntry, id: newId });
result.imported++;
result.summary.newEntries++;
}
break;
}
}
} else {
if (!options.dryRun) {
await this.storage.store(transformedEntry);
result.imported++;
result.summary.newEntries++;
}
}
} catch (error) {
result.errors++;
result.summary.failedEntries++;
result.errorDetails.push(
error instanceof Error ? error.message : String(error),
);
}
}
// Import learning data if present
if (data.learning && !options.dryRun) {
await this.importLearningData(data.learning);
}
// Import knowledge graph if present
if (data.knowledgeGraph && !options.dryRun) {
await this.importKnowledgeGraphData(data.knowledgeGraph);
}
return result;
}
private mergeEntries(
existing: MemoryEntry,
imported: MemoryEntry,
): MemoryEntry {
return {
...existing,
...imported,
data: { ...existing.data, ...imported.data },
metadata: { ...existing.metadata, ...imported.metadata },
tags: [...new Set([...(existing.tags || []), ...(imported.tags || [])])],
timestamp: imported.timestamp || existing.timestamp,
};
}
private async importLearningData(learningData: any): Promise<void> {
if (learningData.patterns && Array.isArray(learningData.patterns)) {
for (const pattern of learningData.patterns) {
// This would require methods to import patterns into the learning system
// For now, just emit an event
this.emit("learning_pattern_imported", pattern);
}
}
}
private async importKnowledgeGraphData(kgData: any): Promise<void> {
if (kgData.nodes && Array.isArray(kgData.nodes)) {
for (const node of kgData.nodes) {
await this.knowledgeGraph.addNode(node);
}
}
if (kgData.edges && Array.isArray(kgData.edges)) {
for (const edge of kgData.edges) {
await this.knowledgeGraph.addEdge(edge);
}
}
}
private async createBackup(): Promise<string> {
const backupPath = `backup_${Date.now()}.json`;
const exportResult = await this.exportMemories(backupPath, {
format: "json",
includeMetadata: true,
includeLearning: true,
includeKnowledgeGraph: true,
});
this.emit("backup_created", { path: exportResult.filePath });
return exportResult.filePath || backupPath;
}
private convertToXML(data: any): string {
// Simple XML conversion - in production, use a proper XML library
const escapeXML = (str: string) =>
str
.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
.replace(/"/g, "&quot;")
.replace(/'/g, "&apos;");
let xml = '<?xml version="1.0" encoding="UTF-8"?>\n<export>\n';
xml += ` <metadata>\n`;
xml += ` <version>${escapeXML(data.metadata.version)}</version>\n`;
xml += ` <exportedAt>${escapeXML(
data.metadata.exportedAt,
)}</exportedAt>\n`;
xml += ` <entries>${data.metadata.entries}</entries>\n`;
xml += ` </metadata>\n`;
xml += ` <memories>\n`;
for (const memory of data.memories) {
xml += ` <memory>\n`;
xml += ` <id>${escapeXML(memory.id)}</id>\n`;
xml += ` <timestamp>${escapeXML(memory.timestamp)}</timestamp>\n`;
xml += ` <type>${escapeXML(memory.type)}</type>\n`;
xml += ` <data>${escapeXML(JSON.stringify(memory.data))}</data>\n`;
xml += ` </memory>\n`;
}
xml += ` </memories>\n`;
xml += "</export>";
return xml;
}
private convertToYAML(data: any): string {
// Simple YAML conversion - in production, use a proper YAML library
const indent = (level: number) => " ".repeat(level);
const toYAML = (obj: any, level: number = 0): string => {
if (obj === null) return "null";
if (typeof obj === "boolean") return obj.toString();
if (typeof obj === "number") return obj.toString();
if (typeof obj === "string") return `"${obj.replace(/"/g, '\\"')}"`;
if (Array.isArray(obj)) {
if (obj.length === 0) return "[]";
return (
"\n" +
obj
.map(
(item) => `${indent(level)}- ${toYAML(item, level + 1).trim()}`,
)
.join("\n")
);
}
if (typeof obj === "object") {
const keys = Object.keys(obj);
if (keys.length === 0) return "{}";
return (
"\n" +
keys
.map(
(key) =>
`${indent(level)}${key}: ${toYAML(obj[key], level + 1).trim()}`,
)
.join("\n")
);
}
return obj.toString();
};
return `# DocuMCP Memory Export\n${toYAML(data)}`;
}
// Additional helper methods for migration
private generateFieldMappings(
sourceSchema: any,
targetSchema: any,
): Record<string, string> {
const mappings: Record<string, string> = {};
// Simple field name matching - in production, use more sophisticated mapping
const sourceFields = Object.keys(sourceSchema.fields || {});
const targetFields = Object.keys(targetSchema.fields || {});
for (const sourceField of sourceFields) {
// Direct match
if (targetFields.includes(sourceField)) {
mappings[sourceField] = sourceField;
continue;
}
// Fuzzy matching
const similar = targetFields.find(
(tf) =>
tf.toLowerCase().includes(sourceField.toLowerCase()) ||
sourceField.toLowerCase().includes(tf.toLowerCase()),
);
if (similar) {
mappings[sourceField] = similar;
}
}
return mappings;
}
private generateTransformations(
sourceSchema: any,
targetSchema: any,
mapping: Record<string, string>,
): MigrationPlan["transformations"] {
const transformations: MigrationPlan["transformations"] = [];
// Generate transformations based on field mappings and type differences
for (const [sourceField, targetField] of Object.entries(mapping)) {
const sourceType = sourceSchema.fields?.[sourceField]?.type;
const targetType = targetSchema.fields?.[targetField]?.type;
if (sourceType !== targetType) {
transformations.push({
field: targetField,
type: "convert",
source: sourceField,
target: targetField,
operation: `${sourceType}_to_${targetType}`,
});
} else {
transformations.push({
field: targetField,
type: "rename",
source: sourceField,
target: targetField,
});
}
}
return transformations;
}
private generateValidationRules(
targetSchema: any,
): MigrationPlan["validation"] {
const validation: MigrationPlan["validation"] = [];
// Generate validation rules based on target schema
if (targetSchema.fields) {
for (const [field, config] of Object.entries(targetSchema.fields)) {
const rules: string[] = [];
const fieldConfig = config as any;
if (fieldConfig.required) {
rules.push("required");
}
if (fieldConfig.type) {
rules.push(`type:${fieldConfig.type}`);
}
if (fieldConfig.format) {
rules.push(`format:${fieldConfig.format}`);
}
validation.push({
field,
rules,
required: fieldConfig.required || false,
});
}
}
return validation;
}
private generatePostProcessingSteps(targetSchema: any): string[] {
const steps: string[] = [];
// Generate post-processing steps
steps.push("rebuild_indices");
steps.push("update_references");
steps.push("validate_integrity");
if (targetSchema.features?.learning) {
steps.push("retrain_models");
}
if (targetSchema.features?.knowledgeGraph) {
steps.push("rebuild_graph");
}
return steps;
}
private async loadRawData(inputPath: string): Promise<any> {
const content = await fs.readFile(inputPath, "utf8");
try {
return JSON.parse(content);
} catch {
return { raw: content };
}
}
private async applyTransformations(
data: any,
plan: MigrationPlan,
): Promise<any> {
const transformed = JSON.parse(JSON.stringify(data)); // Deep clone
for (const transformation of plan.transformations) {
// Apply transformation based on type
switch (transformation.type) {
case "rename":
this.renameField(
transformed,
transformation.source as string,
transformation.target,
);
break;
case "convert":
this.convertField(
transformed,
transformation.source as string,
transformation.target,
transformation.operation,
);
break;
// Add more transformation types as needed
}
}
return transformed;
}
private renameField(obj: any, oldName: string, newName: string): void {
if (typeof obj !== "object" || obj === null) return;
if (Array.isArray(obj)) {
obj.forEach((item) => this.renameField(item, oldName, newName));
} else {
if (oldName in obj) {
obj[newName] = obj[oldName];
delete obj[oldName];
}
Object.values(obj).forEach((value) =>
this.renameField(value, oldName, newName),
);
}
}
private convertField(
obj: any,
fieldName: string,
targetName: string,
operation?: string,
): void {
if (typeof obj !== "object" || obj === null) return;
if (Array.isArray(obj)) {
obj.forEach((item) =>
this.convertField(item, fieldName, targetName, operation),
);
} else {
if (fieldName in obj) {
const value = obj[fieldName];
// Apply conversion based on operation
switch (operation) {
case "string_to_number":
obj[targetName] = Number(value);
break;
case "number_to_string":
obj[targetName] = String(value);
break;
case "array_to_string":
obj[targetName] = Array.isArray(value) ? value.join(",") : value;
break;
case "string_to_array":
obj[targetName] =
typeof value === "string" ? value.split(",") : value;
break;
default:
obj[targetName] = value;
}
if (fieldName !== targetName) {
delete obj[fieldName];
}
}
Object.values(obj).forEach((value) =>
this.convertField(value, fieldName, targetName, operation),
);
}
}
private convertToImportFormat(data: any, plan: MigrationPlan): any {
// Convert transformed data to standard import format
const memories = Array.isArray(data) ? data : data.memories || [data];
// Convert old format to new MemoryEntry format
const convertedMemories = memories.map((entry: any) => {
// If already in new format, return as-is
if (entry.data && entry.metadata) {
return entry;
}
// Convert old flat format to new structured format
const converted: any = {
id:
entry.id ||
`migrated_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`,
type: entry.type || "analysis",
timestamp: entry.timestamp || new Date().toISOString(),
data: {},
metadata: {},
};
// Move known fields to appropriate locations
const dataFields = [
"language",
"recommendation",
"framework",
"outcome",
"success",
];
const metadataFields = [
"project",
"projectId",
"repository",
"ssg",
"tags",
];
for (const [key, value] of Object.entries(entry)) {
if (["id", "type", "timestamp"].includes(key)) {
// Already handled above
continue;
} else if (dataFields.includes(key)) {
converted.data[key] = value;
} else if (metadataFields.includes(key)) {
if (key === "project") {
converted.metadata.projectId = value; // Convert old 'project' field to 'projectId'
} else {
converted.metadata[key] = value;
}
} else {
// Put unknown fields in data
converted.data[key] = value;
}
}
return converted;
});
return {
metadata: {
version: this.version,
migrated: true,
migrationPlan: plan.sourceSystem,
importedAt: new Date().toISOString(),
},
memories: convertedMemories,
};
}
private async executePostProcessing(steps: string[]): Promise<void> {
for (const step of steps) {
try {
switch (step) {
case "rebuild_indices":
await this.storage.rebuildIndex();
break;
case "update_references":
// Update cross-references in data
break;
case "validate_integrity":
// Validate data integrity
break;
case "retrain_models":
// Trigger learning system retraining
break;
case "rebuild_graph":
// Rebuild knowledge graph
break;
}
this.emit("post_processing_step_completed", { step });
} catch (error) {
this.emit("post_processing_step_failed", {
step,
error: error instanceof Error ? error.message : String(error),
});
}
}
}
private async loadSampleData(
sourcePath: string,
format: string,
): Promise<any> {
// Load a small sample of data for validation
if (format === "json") {
const content = await fs.readFile(sourcePath, "utf8");
const data = JSON.parse(content);
if (data.memories && Array.isArray(data.memories)) {
return { memories: data.memories.slice(0, 10) }; // First 10 entries
}
return data;
}
// For other formats, return basic structure info
return { format, sampleLoaded: true };
}
private validateSchema(sampleData: any): string[] {
const issues: string[] = [];
if (!sampleData.memories && !Array.isArray(sampleData)) {
issues.push("Expected memories array not found");
}
const memories =
sampleData.memories || (Array.isArray(sampleData) ? sampleData : []);
if (memories.length > 0) {
const firstEntry = memories[0];
if (!firstEntry.id) {
issues.push("Memory entries missing required id field");
}
if (!firstEntry.timestamp) {
issues.push("Memory entries missing required timestamp field");
}
if (!firstEntry.type) {
issues.push("Memory entries missing required type field");
}
if (!firstEntry.data) {
issues.push("Memory entries missing required data field");
}
}
return issues;
}
private validateDataIntegrity(sampleData: any): string[] {
const issues: string[] = [];
const memories =
sampleData.memories || (Array.isArray(sampleData) ? sampleData : []);
// Check for duplicate IDs
const ids = new Set();
const duplicates = new Set();
for (const entry of memories) {
if (entry.id) {
if (ids.has(entry.id)) {
duplicates.add(entry.id);
} else {
ids.add(entry.id);
}
}
}
if (duplicates.size > 0) {
issues.push(`Found ${duplicates.size} duplicate IDs`);
}
// Check timestamp validity
let invalidTimestamps = 0;
for (const entry of memories) {
if (entry.timestamp && isNaN(new Date(entry.timestamp).getTime())) {
invalidTimestamps++;
}
}
if (invalidTimestamps > 0) {
issues.push(`Found ${invalidTimestamps} invalid timestamps`);
}
return issues;
}
/**
* Apply field mappings and transformations to import data
*/
private applyDataTransformations(entry: any, options: ImportOptions): any {
const transformed = JSON.parse(JSON.stringify(entry)); // Deep clone
// Apply field mappings first
if (options.mapping) {
for (const [sourcePath, targetPath] of Object.entries(options.mapping)) {
const sourceValue = this.getValueByPath(transformed, sourcePath);
if (sourceValue !== undefined) {
this.setValueByPath(transformed, targetPath, sourceValue);
this.deleteValueByPath(transformed, sourcePath);
}
}
}
// Apply transformations
if (options.transformation?.enabled && options.transformation.rules) {
for (const rule of options.transformation.rules) {
switch (rule.operation) {
case "transform":
if (rule.params?.value !== undefined) {
this.setValueByPath(transformed, rule.field, rule.params.value);
}
break;
case "convert":
// Apply conversion based on params
break;
}
}
}
return transformed;
}
/**
* Get value from object using dot notation path
*/
private getValueByPath(obj: any, path: string): any {
return path.split(".").reduce((current, key) => current?.[key], obj);
}
/**
* Set value in object using dot notation path
*/
private setValueByPath(obj: any, path: string, value: any): void {
const keys = path.split(".");
const lastKey = keys.pop()!;
const target = keys.reduce((current, key) => {
if (!(key in current)) {
current[key] = {};
}
return current[key];
}, obj);
target[lastKey] = value;
}
/**
* Delete value from object using dot notation path
*/
private deleteValueByPath(obj: any, path: string): void {
const keys = path.split(".");
const lastKey = keys.pop()!;
const target = keys.reduce((current, key) => current?.[key], obj);
if (target && typeof target === "object") {
delete target[lastKey];
}
}
}
```