#
tokens: 45640/50000 3/274 files (page 22/29)
lines: on (toggle) GitHub
raw markdown copy reset
This is page 22 of 29. Use http://codebase.md/tosin2013/documcp?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .dockerignore
├── .eslintignore
├── .eslintrc.json
├── .github
│   ├── agents
│   │   ├── documcp-ast.md
│   │   ├── documcp-deploy.md
│   │   ├── documcp-memory.md
│   │   ├── documcp-test.md
│   │   └── documcp-tool.md
│   ├── copilot-instructions.md
│   ├── dependabot.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── automated-changelog.md
│   │   ├── bug_report.md
│   │   ├── bug_report.yml
│   │   ├── documentation_issue.md
│   │   ├── feature_request.md
│   │   ├── feature_request.yml
│   │   ├── npm-publishing-fix.md
│   │   └── release_improvements.md
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── release-drafter.yml
│   └── workflows
│       ├── auto-merge.yml
│       ├── ci.yml
│       ├── codeql.yml
│       ├── dependency-review.yml
│       ├── deploy-docs.yml
│       ├── README.md
│       ├── release-drafter.yml
│       └── release.yml
├── .gitignore
├── .husky
│   ├── commit-msg
│   └── pre-commit
├── .linkcheck.config.json
├── .markdown-link-check.json
├── .nvmrc
├── .pre-commit-config.yaml
├── .versionrc.json
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── commitlint.config.js
├── CONTRIBUTING.md
├── docker-compose.docs.yml
├── Dockerfile.docs
├── docs
│   ├── .docusaurus
│   │   ├── docusaurus-plugin-content-docs
│   │   │   └── default
│   │   │       └── __mdx-loader-dependency.json
│   │   └── docusaurus-plugin-content-pages
│   │       └── default
│   │           └── __plugin.json
│   ├── adrs
│   │   ├── 001-mcp-server-architecture.md
│   │   ├── 002-repository-analysis-engine.md
│   │   ├── 003-static-site-generator-recommendation-engine.md
│   │   ├── 004-diataxis-framework-integration.md
│   │   ├── 005-github-pages-deployment-automation.md
│   │   ├── 006-mcp-tools-api-design.md
│   │   ├── 007-mcp-prompts-and-resources-integration.md
│   │   ├── 008-intelligent-content-population-engine.md
│   │   ├── 009-content-accuracy-validation-framework.md
│   │   ├── 010-mcp-resource-pattern-redesign.md
│   │   └── README.md
│   ├── api
│   │   ├── .nojekyll
│   │   ├── assets
│   │   │   ├── hierarchy.js
│   │   │   ├── highlight.css
│   │   │   ├── icons.js
│   │   │   ├── icons.svg
│   │   │   ├── main.js
│   │   │   ├── navigation.js
│   │   │   ├── search.js
│   │   │   └── style.css
│   │   ├── hierarchy.html
│   │   ├── index.html
│   │   ├── modules.html
│   │   └── variables
│   │       └── TOOLS.html
│   ├── assets
│   │   └── logo.svg
│   ├── development
│   │   └── MCP_INSPECTOR_TESTING.md
│   ├── docusaurus.config.js
│   ├── explanation
│   │   ├── architecture.md
│   │   └── index.md
│   ├── guides
│   │   ├── link-validation.md
│   │   ├── playwright-integration.md
│   │   └── playwright-testing-workflow.md
│   ├── how-to
│   │   ├── analytics-setup.md
│   │   ├── custom-domains.md
│   │   ├── documentation-freshness-tracking.md
│   │   ├── github-pages-deployment.md
│   │   ├── index.md
│   │   ├── local-testing.md
│   │   ├── performance-optimization.md
│   │   ├── prompting-guide.md
│   │   ├── repository-analysis.md
│   │   ├── seo-optimization.md
│   │   ├── site-monitoring.md
│   │   ├── troubleshooting.md
│   │   └── usage-examples.md
│   ├── index.md
│   ├── knowledge-graph.md
│   ├── package-lock.json
│   ├── package.json
│   ├── phase-2-intelligence.md
│   ├── reference
│   │   ├── api-overview.md
│   │   ├── cli.md
│   │   ├── configuration.md
│   │   ├── deploy-pages.md
│   │   ├── index.md
│   │   ├── mcp-tools.md
│   │   └── prompt-templates.md
│   ├── research
│   │   ├── cross-domain-integration
│   │   │   └── README.md
│   │   ├── domain-1-mcp-architecture
│   │   │   ├── index.md
│   │   │   └── mcp-performance-research.md
│   │   ├── domain-2-repository-analysis
│   │   │   └── README.md
│   │   ├── domain-3-ssg-recommendation
│   │   │   ├── index.md
│   │   │   └── ssg-performance-analysis.md
│   │   ├── domain-4-diataxis-integration
│   │   │   └── README.md
│   │   ├── domain-5-github-deployment
│   │   │   ├── github-pages-security-analysis.md
│   │   │   └── index.md
│   │   ├── domain-6-api-design
│   │   │   └── README.md
│   │   ├── README.md
│   │   ├── research-integration-summary-2025-01-14.md
│   │   ├── research-progress-template.md
│   │   └── research-questions-2025-01-14.md
│   ├── robots.txt
│   ├── sidebars.js
│   ├── sitemap.xml
│   ├── src
│   │   └── css
│   │       └── custom.css
│   └── tutorials
│       ├── development-setup.md
│       ├── environment-setup.md
│       ├── first-deployment.md
│       ├── getting-started.md
│       ├── index.md
│       ├── memory-workflows.md
│       └── user-onboarding.md
├── jest.config.js
├── LICENSE
├── Makefile
├── MCP_PHASE2_IMPLEMENTATION.md
├── mcp-config-example.json
├── mcp.json
├── package-lock.json
├── package.json
├── README.md
├── release.sh
├── scripts
│   └── check-package-structure.cjs
├── SECURITY.md
├── setup-precommit.sh
├── src
│   ├── benchmarks
│   │   └── performance.ts
│   ├── index.ts
│   ├── memory
│   │   ├── contextual-retrieval.ts
│   │   ├── deployment-analytics.ts
│   │   ├── enhanced-manager.ts
│   │   ├── export-import.ts
│   │   ├── freshness-kg-integration.ts
│   │   ├── index.ts
│   │   ├── integration.ts
│   │   ├── kg-code-integration.ts
│   │   ├── kg-health.ts
│   │   ├── kg-integration.ts
│   │   ├── kg-link-validator.ts
│   │   ├── kg-storage.ts
│   │   ├── knowledge-graph.ts
│   │   ├── learning.ts
│   │   ├── manager.ts
│   │   ├── multi-agent-sharing.ts
│   │   ├── pruning.ts
│   │   ├── schemas.ts
│   │   ├── storage.ts
│   │   ├── temporal-analysis.ts
│   │   ├── user-preferences.ts
│   │   └── visualization.ts
│   ├── prompts
│   │   └── technical-writer-prompts.ts
│   ├── scripts
│   │   └── benchmark.ts
│   ├── templates
│   │   └── playwright
│   │       ├── accessibility.spec.template.ts
│   │       ├── Dockerfile.template
│   │       ├── docs-e2e.workflow.template.yml
│   │       ├── link-validation.spec.template.ts
│   │       └── playwright.config.template.ts
│   ├── tools
│   │   ├── analyze-deployments.ts
│   │   ├── analyze-readme.ts
│   │   ├── analyze-repository.ts
│   │   ├── check-documentation-links.ts
│   │   ├── deploy-pages.ts
│   │   ├── detect-gaps.ts
│   │   ├── evaluate-readme-health.ts
│   │   ├── generate-config.ts
│   │   ├── generate-contextual-content.ts
│   │   ├── generate-llm-context.ts
│   │   ├── generate-readme-template.ts
│   │   ├── generate-technical-writer-prompts.ts
│   │   ├── kg-health-check.ts
│   │   ├── manage-preferences.ts
│   │   ├── manage-sitemap.ts
│   │   ├── optimize-readme.ts
│   │   ├── populate-content.ts
│   │   ├── readme-best-practices.ts
│   │   ├── recommend-ssg.ts
│   │   ├── setup-playwright-tests.ts
│   │   ├── setup-structure.ts
│   │   ├── sync-code-to-docs.ts
│   │   ├── test-local-deployment.ts
│   │   ├── track-documentation-freshness.ts
│   │   ├── update-existing-documentation.ts
│   │   ├── validate-content.ts
│   │   ├── validate-documentation-freshness.ts
│   │   ├── validate-readme-checklist.ts
│   │   └── verify-deployment.ts
│   ├── types
│   │   └── api.ts
│   ├── utils
│   │   ├── ast-analyzer.ts
│   │   ├── code-scanner.ts
│   │   ├── content-extractor.ts
│   │   ├── drift-detector.ts
│   │   ├── freshness-tracker.ts
│   │   ├── language-parsers-simple.ts
│   │   ├── permission-checker.ts
│   │   └── sitemap-generator.ts
│   └── workflows
│       └── documentation-workflow.ts
├── test-docs-local.sh
├── tests
│   ├── api
│   │   └── mcp-responses.test.ts
│   ├── benchmarks
│   │   └── performance.test.ts
│   ├── edge-cases
│   │   └── error-handling.test.ts
│   ├── functional
│   │   └── tools.test.ts
│   ├── integration
│   │   ├── kg-documentation-workflow.test.ts
│   │   ├── knowledge-graph-workflow.test.ts
│   │   ├── mcp-readme-tools.test.ts
│   │   ├── memory-mcp-tools.test.ts
│   │   ├── readme-technical-writer.test.ts
│   │   └── workflow.test.ts
│   ├── memory
│   │   ├── contextual-retrieval.test.ts
│   │   ├── enhanced-manager.test.ts
│   │   ├── export-import.test.ts
│   │   ├── freshness-kg-integration.test.ts
│   │   ├── kg-code-integration.test.ts
│   │   ├── kg-health.test.ts
│   │   ├── kg-link-validator.test.ts
│   │   ├── kg-storage-validation.test.ts
│   │   ├── kg-storage.test.ts
│   │   ├── knowledge-graph-enhanced.test.ts
│   │   ├── knowledge-graph.test.ts
│   │   ├── learning.test.ts
│   │   ├── manager-advanced.test.ts
│   │   ├── manager.test.ts
│   │   ├── mcp-resource-integration.test.ts
│   │   ├── mcp-tool-persistence.test.ts
│   │   ├── schemas.test.ts
│   │   ├── storage.test.ts
│   │   ├── temporal-analysis.test.ts
│   │   └── user-preferences.test.ts
│   ├── performance
│   │   ├── memory-load-testing.test.ts
│   │   └── memory-stress-testing.test.ts
│   ├── prompts
│   │   ├── guided-workflow-prompts.test.ts
│   │   └── technical-writer-prompts.test.ts
│   ├── server.test.ts
│   ├── setup.ts
│   ├── tools
│   │   ├── all-tools.test.ts
│   │   ├── analyze-coverage.test.ts
│   │   ├── analyze-deployments.test.ts
│   │   ├── analyze-readme.test.ts
│   │   ├── analyze-repository.test.ts
│   │   ├── check-documentation-links.test.ts
│   │   ├── deploy-pages-kg-retrieval.test.ts
│   │   ├── deploy-pages-tracking.test.ts
│   │   ├── deploy-pages.test.ts
│   │   ├── detect-gaps.test.ts
│   │   ├── evaluate-readme-health.test.ts
│   │   ├── generate-contextual-content.test.ts
│   │   ├── generate-llm-context.test.ts
│   │   ├── generate-readme-template.test.ts
│   │   ├── generate-technical-writer-prompts.test.ts
│   │   ├── kg-health-check.test.ts
│   │   ├── manage-sitemap.test.ts
│   │   ├── optimize-readme.test.ts
│   │   ├── readme-best-practices.test.ts
│   │   ├── recommend-ssg-historical.test.ts
│   │   ├── recommend-ssg-preferences.test.ts
│   │   ├── recommend-ssg.test.ts
│   │   ├── simple-coverage.test.ts
│   │   ├── sync-code-to-docs.test.ts
│   │   ├── test-local-deployment.test.ts
│   │   ├── tool-error-handling.test.ts
│   │   ├── track-documentation-freshness.test.ts
│   │   ├── validate-content.test.ts
│   │   ├── validate-documentation-freshness.test.ts
│   │   └── validate-readme-checklist.test.ts
│   ├── types
│   │   └── type-safety.test.ts
│   └── utils
│       ├── ast-analyzer.test.ts
│       ├── content-extractor.test.ts
│       ├── drift-detector.test.ts
│       ├── freshness-tracker.test.ts
│       └── sitemap-generator.test.ts
├── tsconfig.json
└── typedoc.json
```

# Files

--------------------------------------------------------------------------------
/src/memory/visualization.ts:
--------------------------------------------------------------------------------

```typescript
   1 | /**
   2 |  * Memory Visualization Interface for DocuMCP
   3 |  * Generate visual representations of memory data, patterns, and insights
   4 |  */
   5 | 
   6 | import { EventEmitter } from "events";
   7 | import { MemoryEntry, JSONLStorage } from "./storage.js";
   8 | import { MemoryManager } from "./manager.js";
   9 | import { IncrementalLearningSystem } from "./learning.js";
  10 | import { KnowledgeGraph } from "./knowledge-graph.js";
  11 | import { TemporalMemoryAnalysis } from "./temporal-analysis.js";
  12 | 
/**
 * Rendering options shared by every generated chart; merged per call
 * with chart-specific overrides.
 */
export interface VisualizationConfig {
  width: number; // canvas width in pixels
  height: number; // canvas height in pixels
  theme: "light" | "dark" | "auto";
  colorScheme: string[]; // ordered palette of CSS hex colors
  interactive: boolean; // whether the output supports user interaction
  exportFormat: "svg" | "png" | "json" | "html";
  responsive: boolean; // resize chart with its container
}

/**
 * A single generated chart: its type, payload, and provenance metadata.
 */
export interface ChartData {
  type:
    | "line"
    | "bar"
    | "scatter"
    | "heatmap"
    | "network"
    | "sankey"
    | "treemap"
    | "timeline";
  title: string;
  description: string;
  data: any; // chart-library-specific payload (shape varies by `type`)
  config: Partial<VisualizationConfig>;
  metadata: {
    generated: Date; // when this chart was produced
    dataPoints: number; // number of underlying data items
    timeRange?: { start: Date; end: Date };
    filters?: Record<string, any>; // filters applied when building the chart
  };
}

/**
 * A full dashboard: a set of charts plus summary statistics.
 */
export interface DashboardData {
  title: string;
  description: string;
  charts: ChartData[];
  summary: {
    totalEntries: number; // memory entries in the analyzed window
    timeRange: { start: Date; end: Date };
    keyInsights: string[]; // human-readable highlights
    healthScore: number; // overall system health metric
  };
  generated: Date;
}

/**
 * Node/edge payload for network (graph) charts.
 */
export interface NetworkVisualization {
  nodes: Array<{
    id: string;
    label: string;
    group: string; // grouping key (node type)
    size: number; // render size in pixels
    color: string;
    metadata: any;
  }>;
  edges: Array<{
    source: string; // source node id
    target: string; // target node id
    weight: number;
    type: string;
    color: string;
    metadata: any;
  }>;
  layout: "force" | "circular" | "hierarchical" | "grid";
  clustering: boolean; // whether to visually cluster related nodes
}

/**
 * Matrix payload for heatmap charts.
 */
export interface HeatmapVisualization {
  data: number[][]; // row-major matrix; rows align with labels.y, cols with labels.x
  labels: {
    x: string[];
    y: string[];
  };
  colorScale: {
    min: number;
    max: number;
    colors: string[]; // gradient stops from min to max
  };
  title: string;
  description: string;
}

/**
 * Event payload for timeline charts.
 */
export interface TimelineVisualization {
  events: Array<{
    id: string;
    timestamp: Date;
    title: string;
    description: string;
    type: string;
    importance: number; // relative weight for emphasis
    color: string;
    metadata: any;
  }>;
  timeRange: { start: Date; end: Date };
  granularity: "hour" | "day" | "week" | "month";
  groupBy?: string; // optional event property to group lanes by
}
 109 | 
 110 | export class MemoryVisualizationSystem extends EventEmitter {
  private storage: JSONLStorage; // JSONL-backed memory entry store
  private manager: MemoryManager; // high-level memory manager
  private learningSystem: IncrementalLearningSystem; // source of learned patterns
  private knowledgeGraph: KnowledgeGraph; // source of graph nodes/edges
  private temporalAnalysis: TemporalMemoryAnalysis; // time-series pattern analysis
  private defaultConfig: VisualizationConfig; // base config merged with per-call overrides
  private visualizationCache: Map<string, ChartData>; // chart cache (population not shown in this excerpt)
 118 | 
 119 |   constructor(
 120 |     storage: JSONLStorage,
 121 |     manager: MemoryManager,
 122 |     learningSystem: IncrementalLearningSystem,
 123 |     knowledgeGraph: KnowledgeGraph,
 124 |     temporalAnalysis: TemporalMemoryAnalysis,
 125 |   ) {
 126 |     super();
 127 |     this.storage = storage;
 128 |     this.manager = manager;
 129 |     this.learningSystem = learningSystem;
 130 |     this.knowledgeGraph = knowledgeGraph;
 131 |     this.temporalAnalysis = temporalAnalysis;
 132 |     this.visualizationCache = new Map();
 133 | 
 134 |     this.defaultConfig = {
 135 |       width: 800,
 136 |       height: 600,
 137 |       theme: "light",
 138 |       colorScheme: [
 139 |         "#3B82F6", // Blue
 140 |         "#10B981", // Green
 141 |         "#F59E0B", // Yellow
 142 |         "#EF4444", // Red
 143 |         "#8B5CF6", // Purple
 144 |         "#06B6D4", // Cyan
 145 |         "#F97316", // Orange
 146 |         "#84CC16", // Lime
 147 |       ],
 148 |       interactive: true,
 149 |       exportFormat: "svg",
 150 |       responsive: true,
 151 |     };
 152 |   }
 153 | 
 154 |   /**
 155 |    * Generate comprehensive dashboard
 156 |    */
 157 |   async generateDashboard(options?: {
 158 |     timeRange?: { start: Date; end: Date };
 159 |     includeCharts?: string[];
 160 |     config?: Partial<VisualizationConfig>;
 161 |   }): Promise<DashboardData> {
 162 |     const timeRange = options?.timeRange || this.getDefaultTimeRange();
 163 |     const config = { ...this.defaultConfig, ...options?.config };
 164 | 
 165 |     this.emit("dashboard_generation_started", { timeRange });
 166 | 
 167 |     try {
 168 |       const charts: ChartData[] = [];
 169 | 
 170 |       // Activity Timeline
 171 |       if (
 172 |         !options?.includeCharts ||
 173 |         options.includeCharts.includes("activity")
 174 |       ) {
 175 |         charts.push(await this.generateActivityTimeline(timeRange, config));
 176 |       }
 177 | 
 178 |       // Memory Type Distribution
 179 |       if (
 180 |         !options?.includeCharts ||
 181 |         options.includeCharts.includes("distribution")
 182 |       ) {
 183 |         charts.push(
 184 |           await this.generateMemoryTypeDistribution(timeRange, config),
 185 |         );
 186 |       }
 187 | 
 188 |       // Success Rate Trends
 189 |       if (
 190 |         !options?.includeCharts ||
 191 |         options.includeCharts.includes("success")
 192 |       ) {
 193 |         charts.push(await this.generateSuccessRateTrends(timeRange, config));
 194 |       }
 195 | 
 196 |       // Knowledge Graph Network
 197 |       if (
 198 |         !options?.includeCharts ||
 199 |         options.includeCharts.includes("network")
 200 |       ) {
 201 |         charts.push(await this.generateKnowledgeGraphVisualization(config));
 202 |       }
 203 | 
 204 |       // Learning Patterns Heatmap
 205 |       if (
 206 |         !options?.includeCharts ||
 207 |         options.includeCharts.includes("learning")
 208 |       ) {
 209 |         charts.push(await this.generateLearningPatternsHeatmap(config));
 210 |       }
 211 | 
 212 |       // Temporal Patterns
 213 |       if (
 214 |         !options?.includeCharts ||
 215 |         options.includeCharts.includes("temporal")
 216 |       ) {
 217 |         charts.push(
 218 |           await this.generateTemporalPatternsChart(timeRange, config),
 219 |         );
 220 |       }
 221 | 
 222 |       // Project Correlation Matrix
 223 |       if (
 224 |         !options?.includeCharts ||
 225 |         options.includeCharts.includes("correlation")
 226 |       ) {
 227 |         charts.push(
 228 |           await this.generateProjectCorrelationMatrix(timeRange, config),
 229 |         );
 230 |       }
 231 | 
 232 |       // Get summary data
 233 |       const entries = await this.getEntriesInTimeRange(timeRange);
 234 |       const keyInsights = await this.generateKeyInsights(entries, timeRange);
 235 |       const healthScore = await this.calculateSystemHealthScore(entries);
 236 | 
 237 |       const dashboard: DashboardData = {
 238 |         title: "DocuMCP Memory System Dashboard",
 239 |         description: `Comprehensive overview of memory system activity from ${timeRange.start.toLocaleDateString()} to ${timeRange.end.toLocaleDateString()}`,
 240 |         charts,
 241 |         summary: {
 242 |           totalEntries: entries.length,
 243 |           timeRange,
 244 |           keyInsights,
 245 |           healthScore,
 246 |         },
 247 |         generated: new Date(),
 248 |       };
 249 | 
 250 |       this.emit("dashboard_generated", {
 251 |         charts: charts.length,
 252 |         entries: entries.length,
 253 |         timeRange,
 254 |       });
 255 | 
 256 |       return dashboard;
 257 |     } catch (error) {
 258 |       this.emit("dashboard_error", {
 259 |         error: error instanceof Error ? error.message : String(error),
 260 |       });
 261 |       throw error;
 262 |     }
 263 |   }
 264 | 
 265 |   /**
 266 |    * Generate activity timeline chart
 267 |    */
 268 |   async generateActivityTimeline(
 269 |     timeRange: { start: Date; end: Date },
 270 |     config: Partial<VisualizationConfig>,
 271 |   ): Promise<ChartData> {
 272 |     const entries = await this.getEntriesInTimeRange(timeRange);
 273 | 
 274 |     // Group entries by day
 275 |     const dailyData = new Map<string, number>();
 276 |     const successData = new Map<string, number>();
 277 | 
 278 |     for (const entry of entries) {
 279 |       const day = entry.timestamp.slice(0, 10); // YYYY-MM-DD
 280 |       dailyData.set(day, (dailyData.get(day) || 0) + 1);
 281 | 
 282 |       if (entry.data.outcome === "success" || entry.data.success === true) {
 283 |         successData.set(day, (successData.get(day) || 0) + 1);
 284 |       }
 285 |     }
 286 | 
 287 |     // Create time series data
 288 |     const datasets = [
 289 |       {
 290 |         label: "Total Activity",
 291 |         data: Array.from(dailyData.entries()).map(([date, count]) => ({
 292 |           x: date,
 293 |           y: count,
 294 |         })),
 295 |         borderColor: config.colorScheme?.[0] || "#3B82F6",
 296 |         backgroundColor: config.colorScheme?.[0] || "#3B82F6",
 297 |         fill: false,
 298 |       },
 299 |       {
 300 |         label: "Successful Activities",
 301 |         data: Array.from(successData.entries()).map(([date, count]) => ({
 302 |           x: date,
 303 |           y: count,
 304 |         })),
 305 |         borderColor: config.colorScheme?.[1] || "#10B981",
 306 |         backgroundColor: config.colorScheme?.[1] || "#10B981",
 307 |         fill: false,
 308 |       },
 309 |     ];
 310 | 
 311 |     return {
 312 |       type: "line",
 313 |       title: "Memory Activity Timeline",
 314 |       description:
 315 |         "Daily memory system activity showing total entries and successful outcomes",
 316 |       data: {
 317 |         datasets,
 318 |         options: {
 319 |           responsive: config.responsive,
 320 |           plugins: {
 321 |             title: {
 322 |               display: true,
 323 |               text: "Memory Activity Over Time",
 324 |             },
 325 |             legend: {
 326 |               display: true,
 327 |               position: "top",
 328 |             },
 329 |           },
 330 |           scales: {
 331 |             x: {
 332 |               type: "time",
 333 |               time: {
 334 |                 unit: "day",
 335 |               },
 336 |               title: {
 337 |                 display: true,
 338 |                 text: "Date",
 339 |               },
 340 |             },
 341 |             y: {
 342 |               title: {
 343 |                 display: true,
 344 |                 text: "Number of Entries",
 345 |               },
 346 |             },
 347 |           },
 348 |         },
 349 |       },
 350 |       config,
 351 |       metadata: {
 352 |         generated: new Date(),
 353 |         dataPoints: entries.length,
 354 |         timeRange,
 355 |         filters: { type: "activity_timeline" },
 356 |       },
 357 |     };
 358 |   }
 359 | 
 360 |   /**
 361 |    * Generate memory type distribution chart
 362 |    */
 363 |   async generateMemoryTypeDistribution(
 364 |     timeRange: { start: Date; end: Date },
 365 |     config: Partial<VisualizationConfig>,
 366 |   ): Promise<ChartData> {
 367 |     const entries = await this.getEntriesInTimeRange(timeRange);
 368 | 
 369 |     // Count entries by type
 370 |     const typeCounts = new Map<string, number>();
 371 |     for (const entry of entries) {
 372 |       typeCounts.set(entry.type, (typeCounts.get(entry.type) || 0) + 1);
 373 |     }
 374 | 
 375 |     // Sort by count
 376 |     const sortedTypes = Array.from(typeCounts.entries()).sort(
 377 |       ([, a], [, b]) => b - a,
 378 |     );
 379 | 
 380 |     const data = {
 381 |       labels: sortedTypes.map(([type]) => type),
 382 |       datasets: [
 383 |         {
 384 |           data: sortedTypes.map(([, count]) => count),
 385 |           backgroundColor: config.colorScheme || this.defaultConfig.colorScheme,
 386 |           borderColor:
 387 |             config.colorScheme?.map((c) => this.darkenColor(c)) ||
 388 |             this.defaultConfig.colorScheme.map((c) => this.darkenColor(c)),
 389 |           borderWidth: 2,
 390 |         },
 391 |       ],
 392 |     };
 393 | 
 394 |     return {
 395 |       type: "bar",
 396 |       title: "Memory Type Distribution",
 397 |       description: "Distribution of memory entries by type",
 398 |       data: {
 399 |         ...data,
 400 |         options: {
 401 |           responsive: config.responsive,
 402 |           plugins: {
 403 |             title: {
 404 |               display: true,
 405 |               text: "Memory Entry Types",
 406 |             },
 407 |             legend: {
 408 |               display: false,
 409 |             },
 410 |           },
 411 |           scales: {
 412 |             y: {
 413 |               beginAtZero: true,
 414 |               title: {
 415 |                 display: true,
 416 |                 text: "Number of Entries",
 417 |               },
 418 |             },
 419 |             x: {
 420 |               title: {
 421 |                 display: true,
 422 |                 text: "Memory Type",
 423 |               },
 424 |             },
 425 |           },
 426 |         },
 427 |       },
 428 |       config,
 429 |       metadata: {
 430 |         generated: new Date(),
 431 |         dataPoints: entries.length,
 432 |         timeRange,
 433 |         filters: { type: "type_distribution" },
 434 |       },
 435 |     };
 436 |   }
 437 | 
 438 |   /**
 439 |    * Generate success rate trends chart
 440 |    */
 441 |   async generateSuccessRateTrends(
 442 |     timeRange: { start: Date; end: Date },
 443 |     config: Partial<VisualizationConfig>,
 444 |   ): Promise<ChartData> {
 445 |     const entries = await this.getEntriesInTimeRange(timeRange);
 446 | 
 447 |     // Group by week and calculate success rates
 448 |     const weeklyData = new Map<string, { total: number; successful: number }>();
 449 | 
 450 |     for (const entry of entries) {
 451 |       const week = this.getWeekKey(new Date(entry.timestamp));
 452 |       const current = weeklyData.get(week) || { total: 0, successful: 0 };
 453 | 
 454 |       current.total++;
 455 |       if (entry.data.outcome === "success" || entry.data.success === true) {
 456 |         current.successful++;
 457 |       }
 458 | 
 459 |       weeklyData.set(week, current);
 460 |     }
 461 | 
 462 |     // Calculate success rates
 463 |     const data = Array.from(weeklyData.entries())
 464 |       .map(([week, stats]) => ({
 465 |         x: week,
 466 |         y: stats.total > 0 ? (stats.successful / stats.total) * 100 : 0,
 467 |         total: stats.total,
 468 |         successful: stats.successful,
 469 |       }))
 470 |       .sort((a, b) => a.x.localeCompare(b.x));
 471 | 
 472 |     return {
 473 |       type: "line",
 474 |       title: "Success Rate Trends",
 475 |       description: "Weekly success rate trends for memory system operations",
 476 |       data: {
 477 |         datasets: [
 478 |           {
 479 |             label: "Success Rate (%)",
 480 |             data: data,
 481 |             borderColor: config.colorScheme?.[1] || "#10B981",
 482 |             backgroundColor: config.colorScheme?.[1] || "#10B981",
 483 |             fill: false,
 484 |             tension: 0.1,
 485 |           },
 486 |         ],
 487 |         options: {
 488 |           responsive: config.responsive,
 489 |           plugins: {
 490 |             title: {
 491 |               display: true,
 492 |               text: "Success Rate Over Time",
 493 |             },
 494 |             tooltip: {
 495 |               callbacks: {
 496 |                 afterBody: (context: any) => {
 497 |                   const point = data[context[0].dataIndex];
 498 |                   return `Total: ${point.total}, Successful: ${point.successful}`;
 499 |                 },
 500 |               },
 501 |             },
 502 |           },
 503 |           scales: {
 504 |             x: {
 505 |               title: {
 506 |                 display: true,
 507 |                 text: "Week",
 508 |               },
 509 |             },
 510 |             y: {
 511 |               beginAtZero: true,
 512 |               max: 100,
 513 |               title: {
 514 |                 display: true,
 515 |                 text: "Success Rate (%)",
 516 |               },
 517 |             },
 518 |           },
 519 |         },
 520 |       },
 521 |       config,
 522 |       metadata: {
 523 |         generated: new Date(),
 524 |         dataPoints: data.length,
 525 |         timeRange,
 526 |         filters: { type: "success_trends" },
 527 |       },
 528 |     };
 529 |   }
 530 | 
 531 |   /**
 532 |    * Generate knowledge graph network visualization
 533 |    */
 534 |   async generateKnowledgeGraphVisualization(
 535 |     config: Partial<VisualizationConfig>,
 536 |   ): Promise<ChartData> {
 537 |     const allNodes = await this.knowledgeGraph.getAllNodes();
 538 |     const allEdges = await this.knowledgeGraph.getAllEdges();
 539 | 
 540 |     // Prepare network data
 541 |     const networkData: NetworkVisualization = {
 542 |       nodes: allNodes.map((node) => ({
 543 |         id: node.id,
 544 |         label: node.label || node.id.slice(0, 10),
 545 |         group: node.type,
 546 |         size: Math.max(10, Math.min(30, (node.weight || 1) * 10)),
 547 |         color: this.getColorForNodeType(node.type, config.colorScheme),
 548 |         metadata: node.properties,
 549 |       })),
 550 |       edges: allEdges.map((edge) => ({
 551 |         source: edge.source,
 552 |         target: edge.target,
 553 |         weight: edge.weight,
 554 |         type: edge.type,
 555 |         color: this.getColorForEdgeType(edge.type, config.colorScheme),
 556 |         metadata: edge.properties,
 557 |       })),
 558 |       layout: "force",
 559 |       clustering: true,
 560 |     };
 561 | 
 562 |     return {
 563 |       type: "network",
 564 |       title: "Knowledge Graph Network",
 565 |       description:
 566 |         "Interactive network visualization of memory relationships and connections",
 567 |       data: networkData,
 568 |       config,
 569 |       metadata: {
 570 |         generated: new Date(),
 571 |         dataPoints: allNodes.length + allEdges.length,
 572 |         filters: { type: "knowledge_graph" },
 573 |       },
 574 |     };
 575 |   }
 576 | 
 577 |   /**
 578 |    * Generate learning patterns heatmap
 579 |    */
 580 |   async generateLearningPatternsHeatmap(
 581 |     config: Partial<VisualizationConfig>,
 582 |   ): Promise<ChartData> {
 583 |     const patterns = await this.learningSystem.getPatterns();
 584 | 
 585 |     // Create correlation matrix between different pattern dimensions
 586 |     const frameworks = [
 587 |       ...new Set(
 588 |         patterns
 589 |           .flatMap((p) => p.metadata.technologies || [])
 590 |           .filter(
 591 |             (t) =>
 592 |               t.includes("framework") ||
 593 |               t.includes("js") ||
 594 |               t.includes("react") ||
 595 |               t.includes("vue"),
 596 |           ),
 597 |       ),
 598 |     ];
 599 |     const languages = [
 600 |       ...new Set(
 601 |         patterns
 602 |           .flatMap((p) => p.metadata.technologies || [])
 603 |           .filter((t) => !t.includes("framework")),
 604 |       ),
 605 |     ];
 606 | 
 607 |     const heatmapData: number[][] = [];
 608 |     const labels = { x: frameworks, y: languages };
 609 | 
 610 |     for (const language of languages) {
 611 |       const row: number[] = [];
 612 |       for (const framework of frameworks) {
 613 |         // Calculate correlation/co-occurrence
 614 |         const langPatterns = patterns.filter(
 615 |           (p) => p.metadata.technologies?.includes(language),
 616 |         );
 617 |         const frameworkPatterns = patterns.filter(
 618 |           (p) => p.metadata.technologies?.includes(framework),
 619 |         );
 620 |         const bothPatterns = patterns.filter(
 621 |           (p) =>
 622 |             p.metadata.technologies?.includes(language) &&
 623 |             p.metadata.technologies?.includes(framework),
 624 |         );
 625 | 
 626 |         const correlation =
 627 |           langPatterns.length > 0 && frameworkPatterns.length > 0
 628 |             ? bothPatterns.length /
 629 |               Math.min(langPatterns.length, frameworkPatterns.length)
 630 |             : 0;
 631 | 
 632 |         row.push(correlation);
 633 |       }
 634 |       heatmapData.push(row);
 635 |     }
 636 | 
 637 |     const heatmap: HeatmapVisualization = {
 638 |       data: heatmapData,
 639 |       labels,
 640 |       colorScale: {
 641 |         min: 0,
 642 |         max: 1,
 643 |         colors: ["#F3F4F6", "#93C5FD", "#3B82F6", "#1D4ED8", "#1E3A8A"],
 644 |       },
 645 |       title: "Language-Framework Learning Patterns",
 646 |       description:
 647 |         "Correlation matrix showing relationships between programming languages and frameworks in learning patterns",
 648 |     };
 649 | 
 650 |     return {
 651 |       type: "heatmap",
 652 |       title: "Learning Patterns Heatmap",
 653 |       description:
 654 |         "Visualization of learning pattern correlations across languages and frameworks",
 655 |       data: heatmap,
 656 |       config,
 657 |       metadata: {
 658 |         generated: new Date(),
 659 |         dataPoints: patterns.length,
 660 |         filters: { type: "learning_patterns" },
 661 |       },
 662 |     };
 663 |   }
 664 | 
 665 |   /**
 666 |    * Generate temporal patterns chart
 667 |    */
 668 |   async generateTemporalPatternsChart(
 669 |     timeRange: { start: Date; end: Date },
 670 |     config: Partial<VisualizationConfig>,
 671 |   ): Promise<ChartData> {
 672 |     const patterns = await this.temporalAnalysis.analyzeTemporalPatterns({
 673 |       granularity: "day",
 674 |       aggregation: "count",
 675 |       timeRange: {
 676 |         start: timeRange.start,
 677 |         end: timeRange.end,
 678 |         duration: timeRange.end.getTime() - timeRange.start.getTime(),
 679 |         label: "Analysis Period",
 680 |       },
 681 |     });
 682 | 
 683 |     // Prepare data for different pattern types
 684 |     const patternData = patterns.map((pattern) => ({
 685 |       type: pattern.type,
 686 |       confidence: pattern.confidence,
 687 |       description: pattern.description,
 688 |       dataPoints: pattern.dataPoints?.length || 0,
 689 |     }));
 690 | 
 691 |     const data = {
 692 |       labels: patternData.map((p) => p.type),
 693 |       datasets: [
 694 |         {
 695 |           label: "Pattern Confidence",
 696 |           data: patternData.map((p) => p.confidence * 100),
 697 |           backgroundColor: config.colorScheme || this.defaultConfig.colorScheme,
 698 |           borderColor:
 699 |             config.colorScheme?.map((c) => this.darkenColor(c)) ||
 700 |             this.defaultConfig.colorScheme.map((c) => this.darkenColor(c)),
 701 |           borderWidth: 2,
 702 |         },
 703 |       ],
 704 |     };
 705 | 
 706 |     return {
 707 |       type: "bar",
 708 |       title: "Temporal Patterns Analysis",
 709 |       description:
 710 |         "Confidence levels of detected temporal patterns in memory activity",
 711 |       data: {
 712 |         ...data,
 713 |         options: {
 714 |           responsive: config.responsive,
 715 |           plugins: {
 716 |             title: {
 717 |               display: true,
 718 |               text: "Detected Temporal Patterns",
 719 |             },
 720 |             tooltip: {
 721 |               callbacks: {
 722 |                 afterBody: (context: any) => {
 723 |                   const pattern = patternData[context[0].dataIndex];
 724 |                   return pattern.description;
 725 |                 },
 726 |               },
 727 |             },
 728 |           },
 729 |           scales: {
 730 |             y: {
 731 |               beginAtZero: true,
 732 |               max: 100,
 733 |               title: {
 734 |                 display: true,
 735 |                 text: "Confidence (%)",
 736 |               },
 737 |             },
 738 |             x: {
 739 |               title: {
 740 |                 display: true,
 741 |                 text: "Pattern Type",
 742 |               },
 743 |             },
 744 |           },
 745 |         },
 746 |       },
 747 |       config,
 748 |       metadata: {
 749 |         generated: new Date(),
 750 |         dataPoints: patterns.length,
 751 |         timeRange,
 752 |         filters: { type: "temporal_patterns" },
 753 |       },
 754 |     };
 755 |   }
 756 | 
 757 |   /**
 758 |    * Generate project correlation matrix
 759 |    */
 760 |   async generateProjectCorrelationMatrix(
 761 |     timeRange: { start: Date; end: Date },
 762 |     config: Partial<VisualizationConfig>,
 763 |   ): Promise<ChartData> {
 764 |     const entries = await this.getEntriesInTimeRange(timeRange);
 765 | 
 766 |     // Extract unique projects
 767 |     const projects = [
 768 |       ...new Set(
 769 |         entries
 770 |           .map((e) => e.data.projectPath || e.data.projectId || "Unknown")
 771 |           .filter((p) => p !== "Unknown"),
 772 |       ),
 773 |     ].slice(0, 10); // Limit to top 10
 774 | 
 775 |     // Calculate correlation matrix
 776 |     const correlationMatrix: number[][] = [];
 777 | 
 778 |     for (const project1 of projects) {
 779 |       const row: number[] = [];
 780 |       for (const project2 of projects) {
 781 |         if (project1 === project2) {
 782 |           row.push(1.0);
 783 |         } else {
 784 |           const correlation = this.calculateProjectCorrelation(
 785 |             entries,
 786 |             project1,
 787 |             project2,
 788 |           );
 789 |           row.push(correlation);
 790 |         }
 791 |       }
 792 |       correlationMatrix.push(row);
 793 |     }
 794 | 
 795 |     const heatmap: HeatmapVisualization = {
 796 |       data: correlationMatrix,
 797 |       labels: { x: projects, y: projects },
 798 |       colorScale: {
 799 |         min: -1,
 800 |         max: 1,
 801 |         colors: ["#EF4444", "#F59E0B", "#F3F4F6", "#10B981", "#059669"],
 802 |       },
 803 |       title: "Project Correlation Matrix",
 804 |       description:
 805 |         "Correlation matrix showing relationships between different projects based on memory patterns",
 806 |     };
 807 | 
 808 |     return {
 809 |       type: "heatmap",
 810 |       title: "Project Correlations",
 811 |       description:
 812 |         "Visualization of correlations between different projects in the memory system",
 813 |       data: heatmap,
 814 |       config,
 815 |       metadata: {
 816 |         generated: new Date(),
 817 |         dataPoints: projects.length * projects.length,
 818 |         timeRange,
 819 |         filters: { type: "project_correlation" },
 820 |       },
 821 |     };
 822 |   }
 823 | 
 824 |   /**
 825 |    * Generate custom visualization
 826 |    */
 827 |   async generateCustomVisualization(
 828 |     type: ChartData["type"],
 829 |     query: {
 830 |       filters?: Record<string, any>;
 831 |       timeRange?: { start: Date; end: Date };
 832 |       aggregation?: string;
 833 |       groupBy?: string;
 834 |     },
 835 |     config?: Partial<VisualizationConfig>,
 836 |   ): Promise<ChartData> {
 837 |     const activeConfig = { ...this.defaultConfig, ...config };
 838 |     const timeRange = query.timeRange || this.getDefaultTimeRange();
 839 | 
 840 |     let entries = await this.getEntriesInTimeRange(timeRange);
 841 | 
 842 |     // Apply filters
 843 |     if (query.filters) {
 844 |       entries = this.applyFilters(entries, query.filters);
 845 |     }
 846 | 
 847 |     switch (type) {
 848 |       case "timeline":
 849 |         return this.generateTimelineVisualization(entries, query, activeConfig);
 850 |       case "scatter":
 851 |         return this.generateScatterPlot(entries, query, activeConfig);
 852 |       case "treemap":
 853 |         return this.generateTreemapVisualization(entries, query, activeConfig);
 854 |       case "sankey":
 855 |         return this.generateSankeyDiagram(entries, query, activeConfig);
 856 |       default:
 857 |         throw new Error(`Unsupported visualization type: ${type}`);
 858 |     }
 859 |   }
 860 | 
 861 |   /**
 862 |    * Export visualization to specified format
 863 |    */
 864 |   async exportVisualization(
 865 |     chartData: ChartData,
 866 |     format: "svg" | "png" | "json" | "html" = "json",
 867 |     options?: {
 868 |       filename?: string;
 869 |       quality?: number;
 870 |       width?: number;
 871 |       height?: number;
 872 |     },
 873 |   ): Promise<string | Buffer> {
 874 |     this.emit("export_started", { type: chartData.type, format });
 875 | 
 876 |     try {
 877 |       switch (format) {
 878 |         case "json":
 879 |           return JSON.stringify(chartData, null, 2);
 880 | 
 881 |         case "html":
 882 |           return this.generateHTMLVisualization(chartData, options);
 883 | 
 884 |         case "svg":
 885 |           return this.generateSVGVisualization(chartData, options);
 886 | 
 887 |         case "png":
 888 |           // This would require a rendering library like Puppeteer
 889 |           throw new Error(
 890 |             "PNG export requires additional rendering capabilities",
 891 |           );
 892 | 
 893 |         default:
 894 |           throw new Error(`Unsupported export format: ${format}`);
 895 |       }
 896 |     } catch (error) {
 897 |       this.emit("export_error", {
 898 |         error: error instanceof Error ? error.message : String(error),
 899 |       });
 900 |       throw error;
 901 |     }
 902 |   }
 903 | 
 904 |   /**
 905 |    * Helper methods
 906 |    */
 907 |   private async getEntriesInTimeRange(timeRange: {
 908 |     start: Date;
 909 |     end: Date;
 910 |   }): Promise<MemoryEntry[]> {
 911 |     const allEntries = await this.storage.getAll();
 912 |     return allEntries.filter((entry) => {
 913 |       const entryDate = new Date(entry.timestamp);
 914 |       return entryDate >= timeRange.start && entryDate <= timeRange.end;
 915 |     });
 916 |   }
 917 | 
 918 |   private getDefaultTimeRange(): { start: Date; end: Date } {
 919 |     const end = new Date();
 920 |     const start = new Date(end.getTime() - 30 * 24 * 60 * 60 * 1000); // 30 days ago
 921 |     return { start, end };
 922 |   }
 923 | 
 924 |   private getWeekKey(date: Date): string {
 925 |     const year = date.getFullYear();
 926 |     const week = this.getWeekNumber(date);
 927 |     return `${year}-W${week.toString().padStart(2, "0")}`;
 928 |   }
 929 | 
  /**
   * Compute the ISO-8601 week number (1-53) for a date.
   *
   * Uses the standard trick: shift the date to the Thursday of its week
   * (ISO weeks belong to the year containing their Thursday), then count
   * whole 7-day weeks from January 1st of that year.
   */
  private getWeekNumber(date: Date): number {
    const d = new Date(
      Date.UTC(date.getFullYear(), date.getMonth(), date.getDate()),
    );
    // getUTCDay() is 0 for Sunday; map to ISO weekday 1 (Mon) .. 7 (Sun).
    const dayNum = d.getUTCDay() || 7;
    // Move to the Thursday of the current ISO week.
    d.setUTCDate(d.getUTCDate() + 4 - dayNum);
    const yearStart = new Date(Date.UTC(d.getUTCFullYear(), 0, 1));
    // Days since Jan 1 (inclusive), divided into 7-day weeks, rounded up.
    return Math.ceil(((d.getTime() - yearStart.getTime()) / 86400000 + 1) / 7);
  }
 939 | 
 940 |   private getColorForNodeType(type: string, colorScheme?: string[]): string {
 941 |     const colors = colorScheme || this.defaultConfig.colorScheme;
 942 |     const index = type.charCodeAt(0) % colors.length;
 943 |     return colors[index];
 944 |   }
 945 | 
 946 |   private getColorForEdgeType(type: string, colorScheme?: string[]): string {
 947 |     const colors = colorScheme || this.defaultConfig.colorScheme;
 948 |     const typeColors: Record<string, string> = {
 949 |       similarity: colors[0],
 950 |       dependency: colors[1],
 951 |       temporal: colors[2],
 952 |       causal: colors[3],
 953 |     };
 954 |     return typeColors[type] || colors[4];
 955 |   }
 956 | 
 957 |   private darkenColor(color: string): string {
 958 |     // Simple color darkening - in production, use a proper color library
 959 |     if (color.startsWith("#")) {
 960 |       const hex = color.slice(1);
 961 |       const num = parseInt(hex, 16);
 962 |       const r = Math.max(0, (num >> 16) - 40);
 963 |       const g = Math.max(0, ((num >> 8) & 0x00ff) - 40);
 964 |       const b = Math.max(0, (num & 0x0000ff) - 40);
 965 |       return `#${((r << 16) | (g << 8) | b).toString(16).padStart(6, "0")}`;
 966 |     }
 967 |     return color;
 968 |   }
 969 | 
 970 |   private calculateProjectCorrelation(
 971 |     entries: MemoryEntry[],
 972 |     project1: string,
 973 |     project2: string,
 974 |   ): number {
 975 |     const entries1 = entries.filter(
 976 |       (e) =>
 977 |         e.data.projectPath?.includes(project1) || e.data.projectId === project1,
 978 |     );
 979 |     const entries2 = entries.filter(
 980 |       (e) =>
 981 |         e.data.projectPath?.includes(project2) || e.data.projectId === project2,
 982 |     );
 983 | 
 984 |     if (entries1.length === 0 || entries2.length === 0) return 0;
 985 | 
 986 |     // Simple correlation based on shared characteristics
 987 |     let sharedFeatures = 0;
 988 |     let totalFeatures = 0;
 989 | 
 990 |     // Compare languages
 991 |     const lang1 = new Set(entries1.map((e) => e.data.language).filter(Boolean));
 992 |     const lang2 = new Set(entries2.map((e) => e.data.language).filter(Boolean));
 993 |     const sharedLangs = new Set([...lang1].filter((l) => lang2.has(l)));
 994 |     sharedFeatures += sharedLangs.size;
 995 |     totalFeatures += new Set([...lang1, ...lang2]).size;
 996 | 
 997 |     // Compare frameworks
 998 |     const fw1 = new Set(entries1.map((e) => e.data.framework).filter(Boolean));
 999 |     const fw2 = new Set(entries2.map((e) => e.data.framework).filter(Boolean));
1000 |     const sharedFws = new Set([...fw1].filter((f) => fw2.has(f)));
1001 |     sharedFeatures += sharedFws.size;
1002 |     totalFeatures += new Set([...fw1, ...fw2]).size;
1003 | 
1004 |     return totalFeatures > 0 ? sharedFeatures / totalFeatures : 0;
1005 |   }
1006 | 
1007 |   private applyFilters(
1008 |     entries: MemoryEntry[],
1009 |     filters: Record<string, any>,
1010 |   ): MemoryEntry[] {
1011 |     return entries.filter((entry) => {
1012 |       for (const [key, value] of Object.entries(filters)) {
1013 |         switch (key) {
1014 |           case "type":
1015 |             if (Array.isArray(value) && !value.includes(entry.type))
1016 |               return false;
1017 |             if (typeof value === "string" && entry.type !== value) return false;
1018 |             break;
1019 |           case "outcome":
1020 |             if (entry.data.outcome !== value) return false;
1021 |             break;
1022 |           case "language":
1023 |             if (entry.data.language !== value) return false;
1024 |             break;
1025 |           case "framework":
1026 |             if (entry.data.framework !== value) return false;
1027 |             break;
1028 |           case "project":
1029 |             if (
1030 |               !entry.data.projectPath?.includes(value) &&
1031 |               entry.data.projectId !== value
1032 |             ) {
1033 |               return false;
1034 |             }
1035 |             break;
1036 |           case "tags":
1037 |             if (
1038 |               Array.isArray(value) &&
1039 |               !value.some((tag) => entry.tags?.includes(tag))
1040 |             ) {
1041 |               return false;
1042 |             }
1043 |             break;
1044 |         }
1045 |       }
1046 |       return true;
1047 |     });
1048 |   }
1049 | 
1050 |   private async generateKeyInsights(
1051 |     entries: MemoryEntry[],
1052 |     timeRange: { start: Date; end: Date },
1053 |   ): Promise<string[]> {
1054 |     const insights: string[] = [];
1055 | 
1056 |     // Activity insight
1057 |     const dailyAverage =
1058 |       entries.length /
1059 |       Math.max(
1060 |         1,
1061 |         Math.ceil(
1062 |           (timeRange.end.getTime() - timeRange.start.getTime()) /
1063 |             (24 * 60 * 60 * 1000),
1064 |         ),
1065 |       );
1066 |     insights.push(`Average ${dailyAverage.toFixed(1)} entries per day`);
1067 | 
1068 |     // Success rate insight
1069 |     const successful = entries.filter(
1070 |       (e) => e.data.outcome === "success" || e.data.success === true,
1071 |     ).length;
1072 |     const successRate =
1073 |       entries.length > 0 ? (successful / entries.length) * 100 : 0;
1074 |     insights.push(`${successRate.toFixed(1)}% success rate`);
1075 | 
1076 |     // Most common type
1077 |     const typeCounts = new Map<string, number>();
1078 |     entries.forEach((e) =>
1079 |       typeCounts.set(e.type, (typeCounts.get(e.type) || 0) + 1),
1080 |     );
1081 |     const mostCommonType = Array.from(typeCounts.entries()).sort(
1082 |       ([, a], [, b]) => b - a,
1083 |     )[0];
1084 |     if (mostCommonType) {
1085 |       insights.push(
1086 |         `Most common activity: ${mostCommonType[0]} (${mostCommonType[1]} entries)`,
1087 |       );
1088 |     }
1089 | 
1090 |     // Growth trend
1091 |     const midpoint = new Date(
1092 |       (timeRange.start.getTime() + timeRange.end.getTime()) / 2,
1093 |     );
1094 |     const firstHalf = entries.filter(
1095 |       (e) => new Date(e.timestamp) < midpoint,
1096 |     ).length;
1097 |     const secondHalf = entries.filter(
1098 |       (e) => new Date(e.timestamp) >= midpoint,
1099 |     ).length;
1100 |     if (firstHalf > 0) {
1101 |       const growthRate = ((secondHalf - firstHalf) / firstHalf) * 100;
1102 |       insights.push(
1103 |         `Activity ${growthRate >= 0 ? "increased" : "decreased"} by ${Math.abs(
1104 |           growthRate,
1105 |         ).toFixed(1)}%`,
1106 |       );
1107 |     }
1108 | 
1109 |     return insights.slice(0, 5); // Return top 5 insights
1110 |   }
1111 | 
1112 |   private async calculateSystemHealthScore(
1113 |     entries: MemoryEntry[],
1114 |   ): Promise<number> {
1115 |     let score = 0;
1116 | 
1117 |     // Activity level (0-25 points)
1118 |     const recentEntries = entries.filter(
1119 |       (e) =>
1120 |         new Date(e.timestamp) > new Date(Date.now() - 7 * 24 * 60 * 60 * 1000),
1121 |     );
1122 |     score += Math.min(25, recentEntries.length * 2);
1123 | 
1124 |     // Success rate (0-25 points)
1125 |     const successful = entries.filter(
1126 |       (e) => e.data.outcome === "success" || e.data.success === true,
1127 |     ).length;
1128 |     const successRate = entries.length > 0 ? successful / entries.length : 0;
1129 |     score += successRate * 25;
1130 | 
1131 |     // Diversity (0-25 points)
1132 |     const uniqueTypes = new Set(entries.map((e) => e.type)).size;
1133 |     score += Math.min(25, uniqueTypes * 3);
1134 | 
1135 |     // Consistency (0-25 points)
1136 |     if (entries.length >= 7) {
1137 |       const dailyActivities = new Map<string, number>();
1138 |       entries.forEach((e) => {
1139 |         const day = e.timestamp.slice(0, 10);
1140 |         dailyActivities.set(day, (dailyActivities.get(day) || 0) + 1);
1141 |       });
1142 | 
1143 |       const values = Array.from(dailyActivities.values());
1144 |       const mean = values.reduce((sum, val) => sum + val, 0) / values.length;
1145 |       const variance =
1146 |         values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) /
1147 |         values.length;
1148 |       const consistency =
1149 |         mean > 0 ? Math.max(0, 1 - Math.sqrt(variance) / mean) : 0;
1150 |       score += consistency * 25;
1151 |     }
1152 | 
1153 |     return Math.round(Math.min(100, score));
1154 |   }
1155 | 
1156 |   private generateTimelineVisualization(
1157 |     entries: MemoryEntry[],
1158 |     query: any,
1159 |     config: VisualizationConfig,
1160 |   ): ChartData {
1161 |     const events = entries.map((entry) => ({
1162 |       id: entry.id,
1163 |       timestamp: new Date(entry.timestamp),
1164 |       title: entry.type,
1165 |       description: entry.data.description || `${entry.type} entry`,
1166 |       type: entry.type,
1167 |       importance: entry.data.outcome === "success" ? 1 : 0.5,
1168 |       color: this.getColorForNodeType(entry.type, config.colorScheme),
1169 |       metadata: entry.data,
1170 |     }));
1171 | 
1172 |     const timelineData: TimelineVisualization = {
1173 |       events,
1174 |       timeRange: {
1175 |         start: new Date(Math.min(...events.map((e) => e.timestamp.getTime()))),
1176 |         end: new Date(Math.max(...events.map((e) => e.timestamp.getTime()))),
1177 |       },
1178 |       granularity: "day",
1179 |       groupBy: query.groupBy,
1180 |     };
1181 | 
1182 |     return {
1183 |       type: "timeline",
1184 |       title: "Memory Activity Timeline",
1185 |       description: "Chronological timeline of memory system activities",
1186 |       data: timelineData,
1187 |       config,
1188 |       metadata: {
1189 |         generated: new Date(),
1190 |         dataPoints: events.length,
1191 |         filters: query.filters,
1192 |       },
1193 |     };
1194 |   }
1195 | 
  /**
   * Build a scatter plot of entries: x = entry timestamp (epoch ms),
   * y = a per-entry magnitude, with each point colored by entry type.
   *
   * NOTE(review): when an entry has neither `duration` nor `complexity`,
   * y falls back to Math.random(), making the output nondeterministic —
   * confirm whether a stable fallback (e.g. 0) is preferable.
   */
  private generateScatterPlot(
    entries: MemoryEntry[],
    query: any,
    config: VisualizationConfig,
  ): ChartData {
    // Create scatter plot data based on timestamp vs some metric
    const data = entries.map((entry) => ({
      x: new Date(entry.timestamp).getTime(),
      y: entry.data.duration || entry.data.complexity || Math.random(), // Use available metric
      color: this.getColorForNodeType(entry.type, config.colorScheme),
      metadata: entry,
    }));

    return {
      type: "scatter",
      title: "Memory Activity Scatter Plot",
      description: "Scatter plot visualization of memory activities",
      data: {
        datasets: [
          {
            label: "Activities",
            data: data,
            // One background color per point, matching its entry type.
            backgroundColor: data.map((d) => d.color),
          },
        ],
      },
      config,
      metadata: {
        generated: new Date(),
        dataPoints: data.length,
        filters: query.filters,
      },
    };
  }
1230 | 
1231 |   private generateTreemapVisualization(
1232 |     entries: MemoryEntry[],
1233 |     query: any,
1234 |     config: VisualizationConfig,
1235 |   ): ChartData {
1236 |     // Group entries by type and project for treemap
1237 |     const hierarchy = new Map<string, Map<string, number>>();
1238 | 
1239 |     for (const entry of entries) {
1240 |       const type = entry.type;
1241 |       const project =
1242 |         entry.data.projectPath || entry.data.projectId || "Unknown";
1243 | 
1244 |       if (!hierarchy.has(type)) {
1245 |         hierarchy.set(type, new Map());
1246 |       }
1247 |       hierarchy
1248 |         .get(type)!
1249 |         .set(project, (hierarchy.get(type)!.get(project) || 0) + 1);
1250 |     }
1251 | 
1252 |     // Convert to treemap format
1253 |     const treemapData = Array.from(hierarchy.entries()).map(
1254 |       ([type, projects]) => ({
1255 |         name: type,
1256 |         value: Array.from(projects.values()).reduce((sum, val) => sum + val, 0),
1257 |         children: Array.from(projects.entries()).map(([project, count]) => ({
1258 |           name: project,
1259 |           value: count,
1260 |         })),
1261 |       }),
1262 |     );
1263 | 
1264 |     return {
1265 |       type: "treemap",
1266 |       title: "Memory Type Hierarchy",
1267 |       description: "Hierarchical treemap of memory entries by type and project",
1268 |       data: treemapData,
1269 |       config,
1270 |       metadata: {
1271 |         generated: new Date(),
1272 |         dataPoints: entries.length,
1273 |         filters: query.filters,
1274 |       },
1275 |     };
1276 |   }
1277 | 
1278 |   private generateSankeyDiagram(
1279 |     entries: MemoryEntry[],
1280 |     query: any,
1281 |     config: VisualizationConfig,
1282 |   ): ChartData {
1283 |     // Create flow data from entry types to outcomes
1284 |     const flows = new Map<string, Map<string, number>>();
1285 | 
1286 |     for (const entry of entries) {
1287 |       const source = entry.type;
1288 |       const target =
1289 |         entry.data.outcome || (entry.data.success ? "success" : "unknown");
1290 | 
1291 |       if (!flows.has(source)) {
1292 |         flows.set(source, new Map());
1293 |       }
1294 |       flows.get(source)!.set(target, (flows.get(source)!.get(target) || 0) + 1);
1295 |     }
1296 | 
1297 |     // Convert to Sankey format
1298 |     const nodes: string[] = [];
1299 |     const links: Array<{ source: number; target: number; value: number }> = [];
1300 | 
1301 |     // Collect all unique nodes
1302 |     const sources = Array.from(flows.keys());
1303 |     const targets = new Set<string>();
1304 |     flows.forEach((targetMap) => {
1305 |       targetMap.forEach((_, target) => targets.add(target));
1306 |     });
1307 | 
1308 |     nodes.push(
1309 |       ...sources,
1310 |       ...Array.from(targets).filter((t) => !sources.includes(t)),
1311 |     );
1312 | 
1313 |     // Create links
1314 |     flows.forEach((targetMap, source) => {
1315 |       targetMap.forEach((value, target) => {
1316 |         const sourceIndex = nodes.indexOf(source);
1317 |         const targetIndex = nodes.indexOf(target);
1318 |         if (sourceIndex !== -1 && targetIndex !== -1) {
1319 |           links.push({ source: sourceIndex, target: targetIndex, value });
1320 |         }
1321 |       });
1322 |     });
1323 | 
1324 |     return {
1325 |       type: "sankey",
1326 |       title: "Memory Flow Diagram",
1327 |       description: "Sankey diagram showing flow from memory types to outcomes",
1328 |       data: { nodes, links },
1329 |       config,
1330 |       metadata: {
1331 |         generated: new Date(),
1332 |         dataPoints: links.length,
1333 |         filters: query.filters,
1334 |       },
1335 |     };
1336 |   }
1337 | 
1338 |   private generateHTMLVisualization(
1339 |     chartData: ChartData,
1340 |     _options?: any,
1341 |   ): string {
1342 |     // Generate basic HTML with embedded Chart.js or D3.js
1343 |     return `
1344 | <!DOCTYPE html>
1345 | <html>
1346 | <head>
1347 |     <title>${chartData.title}</title>
1348 |     <script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
1349 |     <style>
1350 |         body { font-family: Arial, sans-serif; margin: 20px; }
1351 |         .chart-container { width: 100%; height: 400px; }
1352 |         .description { margin-bottom: 20px; color: #666; }
1353 |     </style>
1354 | </head>
1355 | <body>
1356 |     <h1>${chartData.title}</h1>
1357 |     <p class="description">${chartData.description}</p>
1358 |     <div class="chart-container">
1359 |         <canvas id="chart"></canvas>
1360 |     </div>
1361 |     <script>
1362 |         const ctx = document.getElementById('chart').getContext('2d');
1363 |         new Chart(ctx, ${JSON.stringify(chartData.data)});
1364 |     </script>
1365 | </body>
1366 | </html>`;
1367 |   }
1368 | 
1369 |   private generateSVGVisualization(
1370 |     chartData: ChartData,
1371 |     options?: any,
1372 |   ): string {
1373 |     // Generate basic SVG - in production, use a proper chart library
1374 |     const width = options?.width || 800;
1375 |     const height = options?.height || 600;
1376 | 
1377 |     return `
1378 | <svg width="${width}" height="${height}" xmlns="http://www.w3.org/2000/svg">
1379 |     <rect width="100%" height="100%" fill="white"/>
1380 |     <text x="50%" y="30" text-anchor="middle" font-size="18" font-weight="bold">
1381 |         ${chartData.title}
1382 |     </text>
1383 |     <text x="50%" y="50" text-anchor="middle" font-size="14" fill="#666">
1384 |         ${chartData.description}
1385 |     </text>
1386 |     <!-- Chart data would be rendered here -->
1387 |     <text x="50%" y="${
1388 |       height / 2
1389 |     }" text-anchor="middle" font-size="12" fill="#999">
1390 |         Chart visualization (${chartData.metadata.dataPoints} data points)
1391 |     </text>
1392 | </svg>`;
1393 |   }
1394 | }
1395 | 
```

--------------------------------------------------------------------------------
/src/memory/temporal-analysis.ts:
--------------------------------------------------------------------------------

```typescript
   1 | /**
   2 |  * Temporal Memory Analysis System for DocuMCP
   3 |  * Time-based analysis of memory patterns, trends, and predictions
   4 |  */
   5 | 
   6 | import { EventEmitter } from "events";
   7 | import { MemoryEntry, JSONLStorage } from "./storage.js";
   8 | import { MemoryManager } from "./manager.js";
   9 | import { IncrementalLearningSystem } from "./learning.js";
  10 | import { KnowledgeGraph } from "./knowledge-graph.js";
  11 | 
/** A labeled span of time used as the unit of temporal analysis. */
export interface TimeWindow {
  start: Date;
  end: Date;
  duration: number; // in milliseconds
  label: string; // human-readable name for the window, e.g. "Analysis Period"
}
  18 | 
/** A detected time-based pattern in memory activity. */
export interface TemporalPattern {
  // Pattern category assigned by the detection step.
  type: "periodic" | "trending" | "seasonal" | "burst" | "decay";
  confidence: number; // detection confidence, 0-1 (rendered as % elsewhere)
  period?: number; // For periodic patterns (in milliseconds)
  trend?: "increasing" | "decreasing" | "stable"; // for trending patterns
  seasonality?: "daily" | "weekly" | "monthly" | "yearly"; // for seasonal patterns
  description: string; // human-readable summary of the pattern
  // Underlying time series the pattern was derived from.
  dataPoints: Array<{ timestamp: Date; value: number; metadata?: any }>;
}
  28 | 
/** Aggregate temporal statistics computed over a set of memory entries. */
export interface TemporalMetrics {
  activityLevel: number; // 0-1 scale
  growthRate: number; // percentage change
  peakActivity: { timestamp: Date; count: number }; // busiest observed moment
  averageInterval: number; // average time between entries
  consistency: number; // 0-1 scale of temporal consistency
  cyclicalStrength: number; // 0-1 scale of cyclical patterns
}
  37 | 
/** Forecast of future memory activity, with supporting trends and anomalies. */
export interface PredictionResult {
  // Likelihood and expected volume of activity in an upcoming window.
  nextActivity: {
    probability: number;
    timeRange: TimeWindow;
    expectedCount: number;
    confidence: number;
  };
  // Detected patterns split by forecasting horizon.
  trends: {
    shortTerm: TemporalPattern[];
    longTerm: TemporalPattern[];
  };
  // Deviations from expected behavior within the analyzed history.
  anomalies: Array<{
    timestamp: Date;
    type: "spike" | "drought" | "shift";
    severity: number;
    description: string;
  }>;
  recommendations: string[]; // actionable suggestions derived from the forecast
}
  57 | 
/** Parameters controlling how a time series is built and aggregated. */
export interface TemporalQuery {
  timeRange?: TimeWindow; // defaults to the analyzer's standard range when omitted
  granularity: "hour" | "day" | "week" | "month" | "year"; // bucket size
  aggregation: "count" | "success_rate" | "activity_level" | "diversity"; // per-bucket metric
  // Optional entry filters applied before aggregation.
  filters?: {
    types?: string[];
    projects?: string[];
    outcomes?: string[];
    tags?: string[];
  };
  // Optional series smoothing applied after aggregation.
  smoothing?: {
    enabled: boolean;
    method: "moving_average" | "exponential" | "savitzky_golay";
    window: number; // smoothing window size (in buckets)
  };
}
  74 | 
/** A human-readable finding produced by temporal analysis. */
export interface TemporalInsight {
  type: "pattern" | "anomaly" | "trend" | "prediction";
  title: string;
  description: string;
  confidence: number; // presumably 0-1, matching TemporalPattern.confidence — TODO confirm
  timeframe: TimeWindow; // period the insight applies to
  actionable: boolean;
  recommendations?: string[]; // follow-up suggestions, when available
  visualData?: any; // optional chart-ready payload
}
  85 | 
  86 | export class TemporalMemoryAnalysis extends EventEmitter {
  private storage: JSONLStorage;
  private manager: MemoryManager;
  private learningSystem: IncrementalLearningSystem;
  private knowledgeGraph: KnowledgeGraph;
  // Memoized analysis results, keyed by a query-derived cache key.
  private patternCache: Map<string, TemporalPattern[]>;
  private metricsCache: Map<string, TemporalMetrics>;
  private predictionCache: Map<string, PredictionResult>;

  /**
   * Wire the analyzer to its collaborators, initialize the result caches,
   * and schedule periodic analysis.
   *
   * @param storage - JSONL-backed store of memory entries
   * @param manager - Memory manager
   * @param learningSystem - Incremental learning subsystem
   * @param knowledgeGraph - Graph of memory relationships
   */
  constructor(
    storage: JSONLStorage,
    manager: MemoryManager,
    learningSystem: IncrementalLearningSystem,
    knowledgeGraph: KnowledgeGraph,
  ) {
    super();
    this.storage = storage;
    this.manager = manager;
    this.learningSystem = learningSystem;
    this.knowledgeGraph = knowledgeGraph;
    this.patternCache = new Map();
    this.metricsCache = new Map();
    this.predictionCache = new Map();

    // Kick off recurring background analysis.
    this.setupPeriodicAnalysis();
  }
 112 | 
  /**
   * Analyze temporal patterns in memory data
   *
   * Builds a time series for the query (defaults: daily counts over the
   * default time range, smoothed with a 7-point moving average), runs five
   * detectors — periodic, trend, seasonal, burst, decay — and returns every
   * detected pattern sorted by descending confidence.
   *
   * Results are cached per query key. Emits "patterns_analyzed" on success;
   * on failure emits "analysis_error" and rethrows.
   *
   * @param query - Optional partial query; missing fields use the defaults.
   * @returns Detected patterns, highest confidence first.
   * @throws Re-throws any error from time-series construction or detection.
   */
  async analyzeTemporalPatterns(
    query?: TemporalQuery,
  ): Promise<TemporalPattern[]> {
    const defaultQuery: TemporalQuery = {
      granularity: "day",
      aggregation: "count",
      timeRange: this.getDefaultTimeRange(),
      smoothing: {
        enabled: true,
        method: "moving_average",
        window: 7,
      },
    };

    // Caller-supplied fields override the defaults (shallow merge).
    const activeQuery = { ...defaultQuery, ...query };
    const cacheKey = this.generateCacheKey("patterns", activeQuery);

    // Check cache first
    if (this.patternCache.has(cacheKey)) {
      return this.patternCache.get(cacheKey)!;
    }

    try {
      // Get time series data
      const timeSeries = await this.buildTimeSeries(activeQuery);

      // Detect different types of patterns
      const patterns: TemporalPattern[] = [];

      // Periodic patterns
      patterns.push(
        ...(await this.detectPeriodicPatterns(timeSeries, activeQuery)),
      );

      // Trend patterns
      patterns.push(
        ...(await this.detectTrendPatterns(timeSeries, activeQuery)),
      );

      // Seasonal patterns
      patterns.push(
        ...(await this.detectSeasonalPatterns(timeSeries, activeQuery)),
      );

      // Burst patterns
      patterns.push(
        ...(await this.detectBurstPatterns(timeSeries, activeQuery)),
      );

      // Decay patterns
      patterns.push(
        ...(await this.detectDecayPatterns(timeSeries, activeQuery)),
      );

      // Sort by confidence
      patterns.sort((a, b) => b.confidence - a.confidence);

      // Cache results
      this.patternCache.set(cacheKey, patterns);

      this.emit("patterns_analyzed", {
        query: activeQuery,
        patterns: patterns.length,
        highConfidence: patterns.filter((p) => p.confidence > 0.7).length,
      });

      return patterns;
    } catch (error) {
      // Surface the failure to listeners, then propagate to the caller.
      this.emit("analysis_error", {
        error: error instanceof Error ? error.message : String(error),
      });
      throw error;
    }
  }
 190 | 
 191 |   /**
 192 |    * Get temporal metrics for a time range
 193 |    */
 194 |   async getTemporalMetrics(query?: TemporalQuery): Promise<TemporalMetrics> {
 195 |     const defaultQuery: TemporalQuery = {
 196 |       granularity: "day",
 197 |       aggregation: "count",
 198 |       timeRange: this.getDefaultTimeRange(),
 199 |     };
 200 | 
 201 |     const activeQuery = { ...defaultQuery, ...query };
 202 |     const cacheKey = this.generateCacheKey("metrics", activeQuery);
 203 | 
 204 |     if (this.metricsCache.has(cacheKey)) {
 205 |       return this.metricsCache.get(cacheKey)!;
 206 |     }
 207 | 
 208 |     const timeSeries = await this.buildTimeSeries(activeQuery);
 209 | 
 210 |     // Calculate activity level
 211 |     const totalActivity = timeSeries.reduce(
 212 |       (sum, point) => sum + point.value,
 213 |       0,
 214 |     );
 215 |     const maxPossibleActivity =
 216 |       timeSeries.length * Math.max(...timeSeries.map((p) => p.value));
 217 |     const activityLevel =
 218 |       maxPossibleActivity > 0 ? totalActivity / maxPossibleActivity : 0;
 219 | 
 220 |     // Calculate growth rate
 221 |     const firstHalf = timeSeries.slice(0, Math.floor(timeSeries.length / 2));
 222 |     const secondHalf = timeSeries.slice(Math.floor(timeSeries.length / 2));
 223 |     const firstHalfAvg =
 224 |       firstHalf.reduce((sum, p) => sum + p.value, 0) / firstHalf.length;
 225 |     const secondHalfAvg =
 226 |       secondHalf.reduce((sum, p) => sum + p.value, 0) / secondHalf.length;
 227 |     const growthRate =
 228 |       firstHalfAvg > 0
 229 |         ? ((secondHalfAvg - firstHalfAvg) / firstHalfAvg) * 100
 230 |         : 0;
 231 | 
 232 |     // Find peak activity
 233 |     const peakPoint = timeSeries.reduce((max, point) =>
 234 |       point.value > max.value ? point : max,
 235 |     );
 236 |     const peakActivity = {
 237 |       timestamp: peakPoint.timestamp,
 238 |       count: peakPoint.value,
 239 |     };
 240 | 
 241 |     // Calculate average interval
 242 |     const intervals = [];
 243 |     for (let i = 1; i < timeSeries.length; i++) {
 244 |       intervals.push(
 245 |         timeSeries[i].timestamp.getTime() -
 246 |           timeSeries[i - 1].timestamp.getTime(),
 247 |       );
 248 |     }
 249 |     const averageInterval =
 250 |       intervals.length > 0
 251 |         ? intervals.reduce((sum, interval) => sum + interval, 0) /
 252 |           intervals.length
 253 |         : 0;
 254 | 
 255 |     // Calculate consistency (inverse of coefficient of variation)
 256 |     const values = timeSeries.map((p) => p.value);
 257 |     const mean = values.reduce((sum, val) => sum + val, 0) / values.length;
 258 |     const variance =
 259 |       values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) /
 260 |       values.length;
 261 |     const stdDev = Math.sqrt(variance);
 262 |     const consistency = mean > 0 ? Math.max(0, 1 - stdDev / mean) : 0;
 263 | 
 264 |     // Calculate cyclical strength using autocorrelation
 265 |     const cyclicalStrength = this.calculateCyclicalStrength(values);
 266 | 
 267 |     const metrics: TemporalMetrics = {
 268 |       activityLevel,
 269 |       growthRate,
 270 |       peakActivity,
 271 |       averageInterval,
 272 |       consistency,
 273 |       cyclicalStrength,
 274 |     };
 275 | 
 276 |     this.metricsCache.set(cacheKey, metrics);
 277 |     return metrics;
 278 |   }
 279 | 
  /**
   * Make predictions based on temporal patterns
   *
   * Combines detected patterns, summary metrics, and the raw time series
   * into a forecast: a next-activity window, short/long-term trend
   * groupings, point anomalies, and textual recommendations. Results are
   * cached per query key.
   *
   * @param query - Optional partial query; defaults to daily counts over
   *                the default time range.
   * @returns The assembled prediction result.
   */
  async predictFutureActivity(
    query?: TemporalQuery,
  ): Promise<PredictionResult> {
    const defaultQuery: TemporalQuery = {
      granularity: "day",
      aggregation: "count",
      timeRange: this.getDefaultTimeRange(),
    };

    const activeQuery = { ...defaultQuery, ...query };
    const cacheKey = this.generateCacheKey("predictions", activeQuery);

    if (this.predictionCache.has(cacheKey)) {
      return this.predictionCache.get(cacheKey)!;
    }

    // Get historical patterns and metrics (these calls use their own caches).
    const patterns = await this.analyzeTemporalPatterns(activeQuery);
    const metrics = await this.getTemporalMetrics(activeQuery);
    const timeSeries = await this.buildTimeSeries(activeQuery);

    // Predict next activity window
    const nextActivity = await this.predictNextActivity(
      timeSeries,
      patterns,
      metrics,
    );

    // Categorize trends by horizon (short vs. long term).
    const shortTermPatterns = patterns.filter((p) =>
      this.isShortTerm(p, activeQuery),
    );
    const longTermPatterns = patterns.filter((p) =>
      this.isLongTerm(p, activeQuery),
    );

    // Detect anomalies
    const anomalies = await this.detectAnomalies(timeSeries, patterns);

    // Generate recommendations
    const recommendations = this.generateRecommendations(
      patterns,
      metrics,
      anomalies,
    );

    const result: PredictionResult = {
      nextActivity,
      trends: {
        shortTerm: shortTermPatterns,
        longTerm: longTermPatterns,
      },
      anomalies,
      recommendations,
    };

    this.predictionCache.set(cacheKey, result);
    return result;
  }
 342 | 
  /**
   * Get temporal insights and actionable recommendations
   *
   * Converts analysis output into human-readable insights:
   * - one insight per pattern with confidence > 0.6,
   * - a trend insight when growth rate exceeds 20%,
   * - one insight per anomaly with severity > 0.7,
   * - a prediction insight when next-activity probability exceeds 0.7.
   *
   * The returned list is sorted actionable-first, then by descending
   * confidence.
   *
   * @param query - Optional partial query forwarded to the analysis calls.
   */
  async getTemporalInsights(query?: TemporalQuery): Promise<TemporalInsight[]> {
    const patterns = await this.analyzeTemporalPatterns(query);
    const metrics = await this.getTemporalMetrics(query);
    const predictions = await this.predictFutureActivity(query);

    const insights: TemporalInsight[] = [];

    // Pattern-based insights (only reasonably confident patterns).
    for (const pattern of patterns.filter((p) => p.confidence > 0.6)) {
      insights.push({
        type: "pattern",
        title: `${
          pattern.type.charAt(0).toUpperCase() + pattern.type.slice(1)
        } Pattern Detected`,
        description: pattern.description,
        confidence: pattern.confidence,
        timeframe: this.getPatternTimeframe(pattern),
        actionable: this.isActionablePattern(pattern),
        recommendations: this.getPatternRecommendations(pattern),
      });
    }

    // Trend insights: flag strong growth (> 20% over the analysis period).
    if (metrics.growthRate > 20) {
      insights.push({
        type: "trend",
        title: "Increasing Activity Trend",
        description: `Memory activity has increased by ${metrics.growthRate.toFixed(
          1,
        )}% over the analysis period`,
        confidence: 0.8,
        timeframe: query?.timeRange || this.getDefaultTimeRange(),
        actionable: true,
        recommendations: [
          "Consider optimizing memory storage for increased load",
          "Monitor system performance as activity grows",
          "Evaluate current pruning policies",
        ],
      });
    }

    // Anomaly insights (high-severity only).
    for (const anomaly of predictions.anomalies.filter(
      (a) => a.severity > 0.7,
    )) {
      insights.push({
        type: "anomaly",
        title: `${
          anomaly.type.charAt(0).toUpperCase() + anomaly.type.slice(1)
        } Anomaly`,
        description: anomaly.description,
        confidence: anomaly.severity,
        timeframe: {
          start: anomaly.timestamp,
          end: anomaly.timestamp,
          duration: 0,
          label: "Point Anomaly",
        },
        actionable: true,
        recommendations: this.getAnomalyRecommendations(anomaly),
      });
    }

    // Prediction insights (only high-probability windows).
    if (predictions.nextActivity.probability > 0.7) {
      insights.push({
        type: "prediction",
        title: "High Probability Activity Window",
        description: `${(predictions.nextActivity.probability * 100).toFixed(
          1,
        )}% chance of ${predictions.nextActivity.expectedCount} activities`,
        confidence: predictions.nextActivity.confidence,
        timeframe: predictions.nextActivity.timeRange,
        actionable: true,
        recommendations: [
          "Prepare system for predicted activity surge",
          "Consider pre-emptive optimization",
          "Monitor resource utilization during predicted window",
        ],
      });
    }

    // Sort by confidence and actionability (actionable items first).
    insights.sort((a, b) => {
      if (a.actionable !== b.actionable) {
        return a.actionable ? -1 : 1;
      }
      return b.confidence - a.confidence;
    });

    return insights;
  }
 438 | 
  /**
   * Build time series data from memory entries
   *
   * Filters entries per the query, partitions the time range into buckets
   * of the requested granularity, and computes one aggregated value per
   * bucket (count, success rate, activity level, or type diversity).
   * Each point carries metadata with the bucket's entry count and the
   * distinct entry types. Optionally applies smoothing to the values.
   *
   * @param query - Fully-resolved query (granularity/aggregation required).
   * @returns One { timestamp, value, metadata } point per bucket, where
   *          timestamp is the bucket's start.
   */
  private async buildTimeSeries(
    query: TemporalQuery,
  ): Promise<Array<{ timestamp: Date; value: number; metadata?: any }>> {
    const entries = await this.getFilteredEntries(query);
    const timeRange = query.timeRange || this.getDefaultTimeRange();

    // Create time buckets based on granularity
    const buckets = this.createTimeBuckets(timeRange, query.granularity);
    const timeSeries: Array<{
      timestamp: Date;
      value: number;
      metadata?: any;
    }> = [];

    for (const bucket of buckets) {
      // Bucket membership is start-inclusive, end-exclusive.
      const bucketEntries = entries.filter((entry) => {
        const entryTime = new Date(entry.timestamp);
        return entryTime >= bucket.start && entryTime < bucket.end;
      });

      let value = 0;
      const metadata: any = {};

      switch (query.aggregation) {
        case "count":
          value = bucketEntries.length;
          break;
        case "success_rate": {
          // Fraction of entries with a success outcome (0 for empty buckets).
          const successful = bucketEntries.filter(
            (e) => e.data.outcome === "success" || e.data.success === true,
          ).length;
          value =
            bucketEntries.length > 0 ? successful / bucketEntries.length : 0;
          break;
        }
        case "activity_level":
          // Custom metric based on entry types and interactions
          value = this.calculateActivityLevel(bucketEntries);
          break;
        case "diversity": {
          // Number of distinct entry types present in the bucket.
          const uniqueTypes = new Set(bucketEntries.map((e) => e.type));
          value = uniqueTypes.size;
          break;
        }
      }

      // Add metadata
      metadata.entryCount = bucketEntries.length;
      metadata.types = [...new Set(bucketEntries.map((e) => e.type))];

      timeSeries.push({
        timestamp: bucket.start,
        value,
        metadata,
      });
    }

    // Apply smoothing if requested
    if (query.smoothing?.enabled) {
      return this.applySmoothingToTimeSeries(timeSeries, query.smoothing);
    }

    return timeSeries;
  }
 506 | 
 507 |   /**
 508 |    * Get filtered entries based on query
 509 |    */
 510 |   private async getFilteredEntries(
 511 |     query: TemporalQuery,
 512 |   ): Promise<MemoryEntry[]> {
 513 |     let entries = await this.storage.getAll();
 514 | 
 515 |     // Apply time range filter
 516 |     if (query.timeRange) {
 517 |       entries = entries.filter((entry) => {
 518 |         const entryTime = new Date(entry.timestamp);
 519 |         return (
 520 |           entryTime >= query.timeRange!.start &&
 521 |           entryTime <= query.timeRange!.end
 522 |         );
 523 |       });
 524 |     }
 525 | 
 526 |     // Apply filters
 527 |     if (query.filters) {
 528 |       if (query.filters.types) {
 529 |         entries = entries.filter((entry) =>
 530 |           query.filters!.types!.includes(entry.type),
 531 |         );
 532 |       }
 533 | 
 534 |       if (query.filters.projects) {
 535 |         entries = entries.filter((entry) =>
 536 |           query.filters!.projects!.some(
 537 |             (project) =>
 538 |               entry.data.projectPath?.includes(project) ||
 539 |               entry.data.projectId === project,
 540 |           ),
 541 |         );
 542 |       }
 543 | 
 544 |       if (query.filters.outcomes) {
 545 |         entries = entries.filter(
 546 |           (entry) =>
 547 |             query.filters!.outcomes!.includes(entry.data.outcome) ||
 548 |             (entry.data.success === true &&
 549 |               query.filters!.outcomes!.includes("success")) ||
 550 |             (entry.data.success === false &&
 551 |               query.filters!.outcomes!.includes("failure")),
 552 |         );
 553 |       }
 554 | 
 555 |       if (query.filters.tags) {
 556 |         entries = entries.filter(
 557 |           (entry) =>
 558 |             entry.tags?.some((tag) => query.filters!.tags!.includes(tag)),
 559 |         );
 560 |       }
 561 |     }
 562 | 
 563 |     return entries;
 564 |   }
 565 | 
 566 |   /**
 567 |    * Create time buckets for analysis
 568 |    */
 569 |   private createTimeBuckets(
 570 |     timeRange: TimeWindow,
 571 |     granularity: string,
 572 |   ): TimeWindow[] {
 573 |     const buckets: TimeWindow[] = [];
 574 |     let current = new Date(timeRange.start);
 575 |     const end = new Date(timeRange.end);
 576 | 
 577 |     while (current < end) {
 578 |       const bucketStart = new Date(current);
 579 |       let bucketEnd: Date;
 580 | 
 581 |       switch (granularity) {
 582 |         case "hour":
 583 |           bucketEnd = new Date(current.getTime() + 60 * 60 * 1000);
 584 |           break;
 585 |         case "day":
 586 |           bucketEnd = new Date(current.getTime() + 24 * 60 * 60 * 1000);
 587 |           break;
 588 |         case "week":
 589 |           bucketEnd = new Date(current.getTime() + 7 * 24 * 60 * 60 * 1000);
 590 |           break;
 591 |         case "month":
 592 |           bucketEnd = new Date(
 593 |             current.getFullYear(),
 594 |             current.getMonth() + 1,
 595 |             1,
 596 |           );
 597 |           break;
 598 |         case "year":
 599 |           bucketEnd = new Date(current.getFullYear() + 1, 0, 1);
 600 |           break;
 601 |         default:
 602 |           bucketEnd = new Date(current.getTime() + 24 * 60 * 60 * 1000);
 603 |       }
 604 | 
 605 |       if (bucketEnd > end) {
 606 |         bucketEnd = new Date(end);
 607 |       }
 608 | 
 609 |       buckets.push({
 610 |         start: bucketStart,
 611 |         end: bucketEnd,
 612 |         duration: bucketEnd.getTime() - bucketStart.getTime(),
 613 |         label: this.formatTimeLabel(bucketStart, granularity),
 614 |       });
 615 | 
 616 |       current = bucketEnd;
 617 |     }
 618 | 
 619 |     return buckets;
 620 |   }
 621 | 
  /**
   * Detect periodic patterns in time series
   *
   * Probes candidate cycle lengths of 1, 7, 30, and 365 days (converted to
   * bucket units via adjustPeriodForGranularity — implementation not shown
   * here) and reports a pattern when the lag autocorrelation exceeds 0.6.
   * At least three full cycles of data are required per candidate.
   *
   * NOTE(review): the description string labels the cycle as
   * `${period}-${granularity}` although `period` is always in days —
   * verify the intended wording with the original author.
   */
  private async detectPeriodicPatterns(
    timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>,
    query: TemporalQuery,
  ): Promise<TemporalPattern[]> {
    const patterns: TemporalPattern[] = [];
    const values = timeSeries.map((p) => p.value);

    // Check for different periods (daily, weekly, monthly cycles)
    const periods = [1, 7, 30, 365]; // days

    for (const period of periods) {
      const adjustedPeriod = this.adjustPeriodForGranularity(
        period,
        query.granularity,
      );
      if (adjustedPeriod >= values.length / 3) continue; // Need at least 3 cycles

      const correlation = this.calculateAutocorrelation(values, adjustedPeriod);

      if (correlation > 0.6) {
        patterns.push({
          type: "periodic",
          confidence: correlation,
          period: period * 24 * 60 * 60 * 1000, // Convert to milliseconds
          description: `${period}-${query.granularity} cycle detected with ${(
            correlation * 100
          ).toFixed(1)}% correlation`,
          dataPoints: timeSeries,
        });
      }
    }

    return patterns;
  }
 659 | 
 660 |   /**
 661 |    * Detect trend patterns
 662 |    */
 663 |   private async detectTrendPatterns(
 664 |     timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>,
 665 |     _query: TemporalQuery,
 666 |   ): Promise<TemporalPattern[]> {
 667 |     const patterns: TemporalPattern[] = [];
 668 |     const values = timeSeries.map((p) => p.value);
 669 | 
 670 |     if (values.length < 5) return patterns;
 671 | 
 672 |     // Calculate linear regression
 673 |     const { slope, rSquared } = this.calculateLinearRegression(values);
 674 | 
 675 |     if (rSquared > 0.5) {
 676 |       // Good fit
 677 |       const trend =
 678 |         slope > 0.01 ? "increasing" : slope < -0.01 ? "decreasing" : "stable";
 679 | 
 680 |       if (trend !== "stable") {
 681 |         patterns.push({
 682 |           type: "trending",
 683 |           confidence: rSquared,
 684 |           trend,
 685 |           description: `${trend} trend detected with R² = ${rSquared.toFixed(
 686 |             3,
 687 |           )}`,
 688 |           dataPoints: timeSeries,
 689 |         });
 690 |       }
 691 |     }
 692 | 
 693 |     return patterns;
 694 |   }
 695 | 
  /**
   * Detect seasonal patterns
   *
   * Looks for time-of-day cycles (only at hourly granularity) and
   * day-of-week cycles (at hourly or daily granularity), delegating the
   * actual analysis to analyzeHourlyPattern / analyzeWeeklyPattern
   * (implementations not shown in this chunk). A pattern is reported when
   * the helper's confidence exceeds 0.6.
   */
  private async detectSeasonalPatterns(
    timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>,
    query: TemporalQuery,
  ): Promise<TemporalPattern[]> {
    const patterns: TemporalPattern[] = [];

    // Check for daily patterns (hour of day)
    if (query.granularity === "hour") {
      const hourlyPattern = this.analyzeHourlyPattern(timeSeries);
      if (hourlyPattern.confidence > 0.6) {
        patterns.push({
          type: "seasonal",
          confidence: hourlyPattern.confidence,
          seasonality: "daily",
          description: `Daily pattern: peak activity at ${hourlyPattern.peakHour}:00`,
          dataPoints: timeSeries,
        });
      }
    }

    // Check for weekly patterns (day of week)
    if (["hour", "day"].includes(query.granularity)) {
      const weeklyPattern = this.analyzeWeeklyPattern(timeSeries);
      if (weeklyPattern.confidence > 0.6) {
        patterns.push({
          type: "seasonal",
          confidence: weeklyPattern.confidence,
          seasonality: "weekly",
          description: `Weekly pattern: peak activity on ${weeklyPattern.peakDay}`,
          dataPoints: timeSeries,
        });
      }
    }

    return patterns;
  }
 735 | 
 736 |   /**
 737 |    * Detect burst patterns (sudden spikes)
 738 |    */
 739 |   private async detectBurstPatterns(
 740 |     timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>,
 741 |     _query: TemporalQuery,
 742 |   ): Promise<TemporalPattern[]> {
 743 |     const patterns: TemporalPattern[] = [];
 744 |     const values = timeSeries.map((p) => p.value);
 745 | 
 746 |     const mean = values.reduce((sum, val) => sum + val, 0) / values.length;
 747 |     const stdDev = Math.sqrt(
 748 |       values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) /
 749 |         values.length,
 750 |     );
 751 | 
 752 |     const threshold = mean + 2 * stdDev; // 2 standard deviations above mean
 753 | 
 754 |     const bursts = [];
 755 |     for (let i = 0; i < values.length; i++) {
 756 |       if (values[i] > threshold) {
 757 |         bursts.push(i);
 758 |       }
 759 |     }
 760 | 
 761 |     if (bursts.length > 0 && bursts.length < values.length * 0.1) {
 762 |       // Bursts are rare
 763 |       const confidence = Math.min(0.9, bursts.length / (values.length * 0.05));
 764 | 
 765 |       patterns.push({
 766 |         type: "burst",
 767 |         confidence,
 768 |         description: `${bursts.length} burst events detected (${(
 769 |           threshold / mean
 770 |         ).toFixed(1)}x normal activity)`,
 771 |         dataPoints: bursts.map((i) => timeSeries[i]),
 772 |       });
 773 |     }
 774 | 
 775 |     return patterns;
 776 |   }
 777 | 
 778 |   /**
 779 |    * Detect decay patterns (gradual decline)
 780 |    */
 781 |   private async detectDecayPatterns(
 782 |     timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>,
 783 |     _query: TemporalQuery,
 784 |   ): Promise<TemporalPattern[]> {
 785 |     const patterns: TemporalPattern[] = [];
 786 |     const values = timeSeries.map((p) => p.value);
 787 | 
 788 |     if (values.length < 10) return patterns;
 789 | 
 790 |     // Look for exponential decay pattern
 791 |     const logValues = values.map((v) => Math.log(Math.max(v, 0.1))); // Avoid log(0)
 792 |     const { slope, rSquared } = this.calculateLinearRegression(logValues);
 793 | 
 794 |     if (slope < -0.05 && rSquared > 0.7) {
 795 |       // Significant decay with good fit
 796 |       patterns.push({
 797 |         type: "decay",
 798 |         confidence: rSquared,
 799 |         description: `Exponential decay detected (half-life ≈ ${(
 800 |           -0.693 / slope
 801 |         ).toFixed(1)} periods)`,
 802 |         dataPoints: timeSeries,
 803 |       });
 804 |     }
 805 | 
 806 |     return patterns;
 807 |   }
 808 | 
 809 |   /**
 810 |    * Calculate activity level for a set of entries
 811 |    */
 812 |   private calculateActivityLevel(entries: MemoryEntry[]): number {
 813 |     if (entries.length === 0) return 0;
 814 | 
 815 |     let score = 0;
 816 | 
 817 |     // Base score from count
 818 |     score += Math.min(1, entries.length / 10); // Cap at 10 entries = 1.0
 819 | 
 820 |     // Bonus for diversity
 821 |     const uniqueTypes = new Set(entries.map((e) => e.type));
 822 |     score += uniqueTypes.size * 0.1;
 823 | 
 824 |     // Bonus for successful outcomes
 825 |     const successful = entries.filter(
 826 |       (e) => e.data.outcome === "success" || e.data.success === true,
 827 |     ).length;
 828 |     score += (successful / entries.length) * 0.3;
 829 | 
 830 |     return Math.min(1, score);
 831 |   }
 832 | 
  /**
   * Apply smoothing to time series data
   *
   * Rewrites each point's value using the configured method while keeping
   * timestamps and metadata intact. Only "moving_average" and
   * "exponential" (fixed alpha = 0.3) are implemented; any other method —
   * including the "savitzky_golay" option advertised by TemporalQuery —
   * falls through to the default case and returns the values unchanged.
   */
  private applySmoothingToTimeSeries(
    timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>,
    smoothing: { method: string; window: number },
  ): Array<{ timestamp: Date; value: number; metadata?: any }> {
    const values = timeSeries.map((p) => p.value);
    let smoothedValues: number[];

    switch (smoothing.method) {
      case "moving_average":
        smoothedValues = this.applyMovingAverage(values, smoothing.window);
        break;
      case "exponential":
        smoothedValues = this.applyExponentialSmoothing(values, 0.3);
        break;
      default:
        // Unrecognized method: pass the series through untouched.
        smoothedValues = values;
    }

    return timeSeries.map((point, i) => ({
      ...point,
      value: smoothedValues[i],
    }));
  }
 859 | 
 860 |   /**
 861 |    * Apply moving average smoothing
 862 |    */
 863 |   private applyMovingAverage(values: number[], window: number): number[] {
 864 |     const smoothed: number[] = [];
 865 | 
 866 |     for (let i = 0; i < values.length; i++) {
 867 |       const start = Math.max(0, i - Math.floor(window / 2));
 868 |       const end = Math.min(values.length, i + Math.ceil(window / 2));
 869 |       const windowValues = values.slice(start, end);
 870 |       const average =
 871 |         windowValues.reduce((sum, val) => sum + val, 0) / windowValues.length;
 872 |       smoothed.push(average);
 873 |     }
 874 | 
 875 |     return smoothed;
 876 |   }
 877 | 
 878 |   /**
 879 |    * Apply exponential smoothing
 880 |    */
 881 |   private applyExponentialSmoothing(values: number[], alpha: number): number[] {
 882 |     const smoothed: number[] = [values[0]];
 883 | 
 884 |     for (let i = 1; i < values.length; i++) {
 885 |       smoothed.push(alpha * values[i] + (1 - alpha) * smoothed[i - 1]);
 886 |     }
 887 | 
 888 |     return smoothed;
 889 |   }
 890 | 
 891 |   /**
 892 |    * Calculate autocorrelation for periodic pattern detection
 893 |    */
 894 |   private calculateAutocorrelation(values: number[], lag: number): number {
 895 |     if (lag >= values.length) return 0;
 896 | 
 897 |     const n = values.length - lag;
 898 |     const mean = values.reduce((sum, val) => sum + val, 0) / values.length;
 899 | 
 900 |     let numerator = 0;
 901 |     let denominator = 0;
 902 | 
 903 |     for (let i = 0; i < n; i++) {
 904 |       numerator += (values[i] - mean) * (values[i + lag] - mean);
 905 |     }
 906 | 
 907 |     for (let i = 0; i < values.length; i++) {
 908 |       denominator += Math.pow(values[i] - mean, 2);
 909 |     }
 910 | 
 911 |     return denominator > 0 ? numerator / denominator : 0;
 912 |   }
 913 | 
 914 |   /**
 915 |    * Calculate cyclical strength using autocorrelation
 916 |    */
 917 |   private calculateCyclicalStrength(values: number[]): number {
 918 |     const maxLag = Math.min(values.length / 3, 30);
 919 |     let maxCorrelation = 0;
 920 | 
 921 |     for (let lag = 1; lag < maxLag; lag++) {
 922 |       const correlation = Math.abs(this.calculateAutocorrelation(values, lag));
 923 |       maxCorrelation = Math.max(maxCorrelation, correlation);
 924 |     }
 925 | 
 926 |     return maxCorrelation;
 927 |   }
 928 | 
 929 |   /**
 930 |    * Calculate linear regression
 931 |    */
 932 |   private calculateLinearRegression(values: number[]): {
 933 |     slope: number;
 934 |     intercept: number;
 935 |     rSquared: number;
 936 |   } {
 937 |     const n = values.length;
 938 |     const x = Array.from({ length: n }, (_, i) => i);
 939 | 
 940 |     const sumX = x.reduce((sum, val) => sum + val, 0);
 941 |     const sumY = values.reduce((sum, val) => sum + val, 0);
 942 |     const sumXY = x.reduce((sum, val, i) => sum + val * values[i], 0);
 943 |     const sumXX = x.reduce((sum, val) => sum + val * val, 0);
 944 | 
 945 |     const slope = (n * sumXY - sumX * sumY) / (n * sumXX - sumX * sumX);
 946 |     const intercept = (sumY - slope * sumX) / n;
 947 | 
 948 |     // Calculate R²
 949 |     const meanY = sumY / n;
 950 |     const ssRes = values.reduce((sum, val, i) => {
 951 |       const predicted = slope * i + intercept;
 952 |       return sum + Math.pow(val - predicted, 2);
 953 |     }, 0);
 954 |     const ssTot = values.reduce(
 955 |       (sum, val) => sum + Math.pow(val - meanY, 2),
 956 |       0,
 957 |     );
 958 |     const rSquared = ssTot > 0 ? 1 - ssRes / ssTot : 0;
 959 | 
 960 |     return { slope, intercept, rSquared };
 961 |   }
 962 | 
 963 |   /**
 964 |    * Predict next activity window
 965 |    */
 966 |   private async predictNextActivity(
 967 |     timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>,
 968 |     patterns: TemporalPattern[],
 969 |     _metrics: TemporalMetrics,
 970 |   ): Promise<PredictionResult["nextActivity"]> {
 971 |     const lastPoint = timeSeries[timeSeries.length - 1];
 972 |     const averageValue =
 973 |       timeSeries.reduce((sum, p) => sum + p.value, 0) / timeSeries.length;
 974 | 
 975 |     // Base prediction on recent trend
 976 |     let expectedCount = averageValue;
 977 |     let probability = 0.5;
 978 | 
 979 |     // Adjust based on trends
 980 |     const trendPattern = patterns.find((p) => p.type === "trending");
 981 |     if (trendPattern && trendPattern.trend === "increasing") {
 982 |       expectedCount *= 1.2;
 983 |       probability += 0.2;
 984 |     } else if (trendPattern && trendPattern.trend === "decreasing") {
 985 |       expectedCount *= 0.8;
 986 |       probability -= 0.1;
 987 |     }
 988 | 
 989 |     // Adjust based on periodic patterns
 990 |     const periodicPattern = patterns.find(
 991 |       (p) => p.type === "periodic" && p.confidence > 0.7,
 992 |     );
 993 |     if (periodicPattern) {
 994 |       probability += 0.3;
 995 |     }
 996 | 
 997 |     // Determine time range for next activity (next period based on granularity)
 998 |     const nextStart = new Date(
 999 |       lastPoint.timestamp.getTime() + 24 * 60 * 60 * 1000,
1000 |     ); // Next day
1001 |     const nextEnd = new Date(nextStart.getTime() + 24 * 60 * 60 * 1000);
1002 | 
1003 |     return {
1004 |       probability: Math.min(0.95, Math.max(0.05, probability)),
1005 |       timeRange: {
1006 |         start: nextStart,
1007 |         end: nextEnd,
1008 |         duration: 24 * 60 * 60 * 1000,
1009 |         label: "Next 24 hours",
1010 |       },
1011 |       expectedCount: Math.round(expectedCount),
1012 |       confidence: Math.min(
1013 |         0.9,
1014 |         patterns.reduce((sum, p) => sum + p.confidence, 0) / patterns.length,
1015 |       ),
1016 |     };
1017 |   }
1018 | 
1019 |   /**
1020 |    * Detect anomalies in time series
1021 |    */
1022 |   private async detectAnomalies(
1023 |     timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>,
1024 |     _patterns: TemporalPattern[],
1025 |   ): Promise<PredictionResult["anomalies"]> {
1026 |     const anomalies: PredictionResult["anomalies"] = [];
1027 |     const values = timeSeries.map((p) => p.value);
1028 | 
1029 |     const mean = values.reduce((sum, val) => sum + val, 0) / values.length;
1030 |     const stdDev = Math.sqrt(
1031 |       values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) /
1032 |         values.length,
1033 |     );
1034 | 
1035 |     for (let i = 0; i < timeSeries.length; i++) {
1036 |       const point = timeSeries[i];
1037 |       const value = point.value;
1038 | 
1039 |       // Spike detection
1040 |       if (value > mean + 3 * stdDev) {
1041 |         anomalies.push({
1042 |           timestamp: point.timestamp,
1043 |           type: "spike",
1044 |           severity: Math.min(1, (value - mean) / (3 * stdDev)),
1045 |           description: `Activity spike: ${value} (${(
1046 |             (value / mean - 1) *
1047 |             100
1048 |           ).toFixed(0)}% above normal)`,
1049 |         });
1050 |       }
1051 | 
1052 |       // Drought detection
1053 |       if (value < mean - 2 * stdDev && mean > 1) {
1054 |         anomalies.push({
1055 |           timestamp: point.timestamp,
1056 |           type: "drought",
1057 |           severity: Math.min(1, (mean - value) / (2 * stdDev)),
1058 |           description: `Activity drought: ${value} (${(
1059 |             (1 - value / mean) *
1060 |             100
1061 |           ).toFixed(0)}% below normal)`,
1062 |         });
1063 |       }
1064 |     }
1065 | 
1066 |     // Detect regime shifts (significant changes in mean)
1067 |     const shifts = this.detectRegimeShifts(timeSeries);
1068 |     anomalies.push(...shifts);
1069 | 
1070 |     return anomalies.sort((a, b) => b.severity - a.severity);
1071 |   }
1072 | 
1073 |   /**
1074 |    * Detect regime shifts in time series
1075 |    */
1076 |   private detectRegimeShifts(
1077 |     timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>,
1078 |   ): Array<{
1079 |     timestamp: Date;
1080 |     type: "shift";
1081 |     severity: number;
1082 |     description: string;
1083 |   }> {
1084 |     const shifts: Array<{
1085 |       timestamp: Date;
1086 |       type: "shift";
1087 |       severity: number;
1088 |       description: string;
1089 |     }> = [];
1090 |     const values = timeSeries.map((p) => p.value);
1091 | 
1092 |     if (values.length < 20) return shifts; // Need sufficient data
1093 | 
1094 |     const windowSize = Math.floor(values.length / 4);
1095 | 
1096 |     for (let i = windowSize; i < values.length - windowSize; i++) {
1097 |       const before = values.slice(i - windowSize, i);
1098 |       const after = values.slice(i, i + windowSize);
1099 | 
1100 |       const meanBefore =
1101 |         before.reduce((sum, val) => sum + val, 0) / before.length;
1102 |       const meanAfter = after.reduce((sum, val) => sum + val, 0) / after.length;
1103 | 
1104 |       const changeMagnitude = Math.abs(meanAfter - meanBefore);
1105 |       const relativeChange = meanBefore > 0 ? changeMagnitude / meanBefore : 0;
1106 | 
1107 |       if (relativeChange > 0.5) {
1108 |         // 50% change
1109 |         shifts.push({
1110 |           timestamp: timeSeries[i].timestamp,
1111 |           type: "shift",
1112 |           severity: Math.min(1, relativeChange),
1113 |           description: `Regime shift: ${meanBefore.toFixed(
1114 |             1,
1115 |           )} → ${meanAfter.toFixed(1)} (${(relativeChange * 100).toFixed(
1116 |             0,
1117 |           )}% change)`,
1118 |         });
1119 |       }
1120 |     }
1121 | 
1122 |     return shifts;
1123 |   }
1124 | 
1125 |   /**
1126 |    * Generate recommendations based on analysis
1127 |    */
1128 |   private generateRecommendations(
1129 |     patterns: TemporalPattern[],
1130 |     metrics: TemporalMetrics,
1131 |     anomalies: PredictionResult["anomalies"],
1132 |   ): string[] {
1133 |     const recommendations: string[] = [];
1134 | 
1135 |     // Pattern-based recommendations
1136 |     const periodicPattern = patterns.find(
1137 |       (p) => p.type === "periodic" && p.confidence > 0.7,
1138 |     );
1139 |     if (periodicPattern) {
1140 |       recommendations.push(
1141 |         "Schedule maintenance and optimizations during low-activity periods based on detected cycles",
1142 |       );
1143 |     }
1144 | 
1145 |     const trendPattern = patterns.find((p) => p.type === "trending");
1146 |     if (trendPattern?.trend === "increasing") {
1147 |       recommendations.push(
1148 |         "Plan for increased storage and processing capacity based on growing activity trend",
1149 |       );
1150 |     } else if (trendPattern?.trend === "decreasing") {
1151 |       recommendations.push(
1152 |         "Investigate causes of declining activity and consider engagement strategies",
1153 |       );
1154 |     }
1155 | 
1156 |     // Metrics-based recommendations
1157 |     if (metrics.consistency < 0.5) {
1158 |       recommendations.push(
1159 |         "High variability detected - consider implementing activity smoothing mechanisms",
1160 |       );
1161 |     }
1162 | 
1163 |     if (metrics.growthRate > 50) {
1164 |       recommendations.push(
1165 |         "Rapid growth detected - implement proactive scaling measures",
1166 |       );
1167 |     }
1168 | 
1169 |     // Anomaly-based recommendations
1170 |     const spikes = anomalies.filter(
1171 |       (a) => a.type === "spike" && a.severity > 0.7,
1172 |     );
1173 |     if (spikes.length > 0) {
1174 |       recommendations.push(
1175 |         "Implement burst handling to manage activity spikes effectively",
1176 |       );
1177 |     }
1178 | 
1179 |     const droughts = anomalies.filter(
1180 |       (a) => a.type === "drought" && a.severity > 0.7,
1181 |     );
1182 |     if (droughts.length > 0) {
1183 |       recommendations.push(
1184 |         "Investigate causes of activity droughts and implement retention strategies",
1185 |       );
1186 |     }
1187 | 
1188 |     return recommendations;
1189 |   }
1190 | 
1191 |   /**
1192 |    * Utility methods
1193 |    */
1194 |   private getDefaultTimeRange(): TimeWindow {
1195 |     const end = new Date();
1196 |     const start = new Date(end.getTime() - 30 * 24 * 60 * 60 * 1000); // 30 days ago
1197 | 
1198 |     return {
1199 |       start,
1200 |       end,
1201 |       duration: end.getTime() - start.getTime(),
1202 |       label: "Last 30 days",
1203 |     };
1204 |   }
1205 | 
  // Build a cache key from the analysis type plus the serialized query.
  // NOTE(review): JSON.stringify is key-order sensitive, so logically equal
  // queries with differently ordered fields yield distinct cache keys.
  private generateCacheKey(type: string, query: TemporalQuery): string {
    return `${type}_${JSON.stringify(query)}`;
  }
1209 | 
1210 |   private adjustPeriodForGranularity(
1211 |     period: number,
1212 |     granularity: string,
1213 |   ): number {
1214 |     switch (granularity) {
1215 |       case "hour":
1216 |         return period * 24;
1217 |       case "day":
1218 |         return period;
1219 |       case "week":
1220 |         return Math.ceil(period / 7);
1221 |       case "month":
1222 |         return Math.ceil(period / 30);
1223 |       case "year":
1224 |         return Math.ceil(period / 365);
1225 |       default:
1226 |         return period;
1227 |     }
1228 |   }
1229 | 
1230 |   private formatTimeLabel(date: Date, granularity: string): string {
1231 |     switch (granularity) {
1232 |       case "hour":
1233 |         return date.toISOString().slice(0, 13) + ":00";
1234 |       case "day":
1235 |         return date.toISOString().slice(0, 10);
1236 |       case "week":
1237 |         return `Week of ${date.toISOString().slice(0, 10)}`;
1238 |       case "month":
1239 |         return `${date.getFullYear()}-${(date.getMonth() + 1)
1240 |           .toString()
1241 |           .padStart(2, "0")}`;
1242 |       case "year":
1243 |         return date.getFullYear().toString();
1244 |       default:
1245 |         return date.toISOString().slice(0, 10);
1246 |     }
1247 |   }
1248 | 
1249 |   private isShortTerm(
1250 |     pattern: TemporalPattern,
1251 |     _query: TemporalQuery,
1252 |   ): boolean {
1253 |     if (pattern.period) {
1254 |       const days = pattern.period / (24 * 60 * 60 * 1000);
1255 |       return days <= 7;
1256 |     }
1257 |     return true;
1258 |   }
1259 | 
1260 |   private isLongTerm(pattern: TemporalPattern, _query: TemporalQuery): boolean {
1261 |     if (pattern.period) {
1262 |       const days = pattern.period / (24 * 60 * 60 * 1000);
1263 |       return days > 30;
1264 |     }
1265 |     return false;
1266 |   }
1267 | 
1268 |   private getPatternTimeframe(pattern: TemporalPattern): TimeWindow {
1269 |     if (pattern.dataPoints.length > 0) {
1270 |       const start = pattern.dataPoints[0].timestamp;
1271 |       const end = pattern.dataPoints[pattern.dataPoints.length - 1].timestamp;
1272 |       return {
1273 |         start,
1274 |         end,
1275 |         duration: end.getTime() - start.getTime(),
1276 |         label: `${start.toISOString().slice(0, 10)} to ${end
1277 |           .toISOString()
1278 |           .slice(0, 10)}`,
1279 |       };
1280 |     }
1281 |     return this.getDefaultTimeRange();
1282 |   }
1283 | 
1284 |   private isActionablePattern(pattern: TemporalPattern): boolean {
1285 |     return (
1286 |       pattern.confidence > 0.7 &&
1287 |       ["periodic", "trending", "seasonal"].includes(pattern.type)
1288 |     );
1289 |   }
1290 | 
1291 |   private getPatternRecommendations(pattern: TemporalPattern): string[] {
1292 |     const recommendations: string[] = [];
1293 | 
1294 |     switch (pattern.type) {
1295 |       case "periodic":
1296 |         recommendations.push(
1297 |           "Schedule regular maintenance during low-activity periods",
1298 |         );
1299 |         recommendations.push(
1300 |           "Optimize resource allocation based on predictable cycles",
1301 |         );
1302 |         break;
1303 |       case "trending":
1304 |         if (pattern.trend === "increasing") {
1305 |           recommendations.push("Plan for capacity expansion");
1306 |           recommendations.push("Implement proactive monitoring");
1307 |         } else if (pattern.trend === "decreasing") {
1308 |           recommendations.push("Investigate root causes of decline");
1309 |           recommendations.push("Consider engagement interventions");
1310 |         }
1311 |         break;
1312 |       case "seasonal":
1313 |         recommendations.push(
1314 |           "Adjust system configuration for seasonal patterns",
1315 |         );
1316 |         recommendations.push(
1317 |           "Plan marketing and engagement around peak periods",
1318 |         );
1319 |         break;
1320 |     }
1321 | 
1322 |     return recommendations;
1323 |   }
1324 | 
1325 |   private getAnomalyRecommendations(anomaly: {
1326 |     type: string;
1327 |     severity: number;
1328 |   }): string[] {
1329 |     const recommendations: string[] = [];
1330 | 
1331 |     switch (anomaly.type) {
1332 |       case "spike":
1333 |         recommendations.push("Implement burst protection mechanisms");
1334 |         recommendations.push("Investigate spike triggers for prevention");
1335 |         recommendations.push("Consider auto-scaling capabilities");
1336 |         break;
1337 |       case "drought":
1338 |         recommendations.push("Implement activity monitoring alerts");
1339 |         recommendations.push("Investigate user engagement issues");
1340 |         recommendations.push("Consider proactive outreach strategies");
1341 |         break;
1342 |       case "shift":
1343 |         recommendations.push("Investigate underlying system changes");
1344 |         recommendations.push("Update baseline metrics and thresholds");
1345 |         recommendations.push("Review configuration changes during this period");
1346 |         break;
1347 |     }
1348 | 
1349 |     return recommendations;
1350 |   }
1351 | 
1352 |   private analyzeHourlyPattern(
1353 |     timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>,
1354 |   ): { confidence: number; peakHour: number } {
1355 |     const hourlyActivity = new Array(24).fill(0);
1356 |     const hourlyCounts = new Array(24).fill(0);
1357 | 
1358 |     for (const point of timeSeries) {
1359 |       const hour = point.timestamp.getHours();
1360 |       hourlyActivity[hour] += point.value;
1361 |       hourlyCounts[hour]++;
1362 |     }
1363 | 
1364 |     // Calculate average activity per hour
1365 |     const hourlyAverages = hourlyActivity.map((total, i) =>
1366 |       hourlyCounts[i] > 0 ? total / hourlyCounts[i] : 0,
1367 |     );
1368 | 
1369 |     // Find peak hour
1370 |     const peakHour = hourlyAverages.indexOf(Math.max(...hourlyAverages));
1371 | 
1372 |     // Calculate confidence based on variance
1373 |     const mean = hourlyAverages.reduce((sum, val) => sum + val, 0) / 24;
1374 |     const variance =
1375 |       hourlyAverages.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) /
1376 |       24;
1377 |     const stdDev = Math.sqrt(variance);
1378 |     const confidence = mean > 0 ? Math.min(0.9, stdDev / mean) : 0;
1379 | 
1380 |     return { confidence, peakHour };
1381 |   }
1382 | 
1383 |   private analyzeWeeklyPattern(
1384 |     timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>,
1385 |   ): { confidence: number; peakDay: string } {
1386 |     const weeklyActivity = new Array(7).fill(0);
1387 |     const weeklyCounts = new Array(7).fill(0);
1388 |     const dayNames = [
1389 |       "Sunday",
1390 |       "Monday",
1391 |       "Tuesday",
1392 |       "Wednesday",
1393 |       "Thursday",
1394 |       "Friday",
1395 |       "Saturday",
1396 |     ];
1397 | 
1398 |     for (const point of timeSeries) {
1399 |       const day = point.timestamp.getDay();
1400 |       weeklyActivity[day] += point.value;
1401 |       weeklyCounts[day]++;
1402 |     }
1403 | 
1404 |     // Calculate average activity per day
1405 |     const weeklyAverages = weeklyActivity.map((total, i) =>
1406 |       weeklyCounts[i] > 0 ? total / weeklyCounts[i] : 0,
1407 |     );
1408 | 
1409 |     // Find peak day
1410 |     const peakDayIndex = weeklyAverages.indexOf(Math.max(...weeklyAverages));
1411 |     const peakDay = dayNames[peakDayIndex];
1412 | 
1413 |     // Calculate confidence
1414 |     const mean = weeklyAverages.reduce((sum, val) => sum + val, 0) / 7;
1415 |     const variance =
1416 |       weeklyAverages.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / 7;
1417 |     const stdDev = Math.sqrt(variance);
1418 |     const confidence = mean > 0 ? Math.min(0.9, stdDev / mean) : 0;
1419 | 
1420 |     return { confidence, peakDay };
1421 |   }
1422 | 
  /**
   * Setup periodic analysis
   *
   * Schedules a background insights refresh every 6 hours, emitting
   * "periodic_analysis_completed" with the insight count on success and
   * "periodic_analysis_error" with the error message on failure.
   *
   * NOTE(review): the interval handle is not stored, so it can never be
   * cleared and will keep a Node process alive — consider retaining the
   * handle (and/or calling .unref()) if graceful shutdown is needed.
   */
  private setupPeriodicAnalysis(): void {
    // Run analysis every 6 hours
    setInterval(
      async () => {
        try {
          const insights = await this.getTemporalInsights();
          this.emit("periodic_analysis_completed", {
            insights: insights.length,
          });
        } catch (error) {
          // Errors are reported via event rather than thrown, since this
          // runs outside any caller's await chain.
          this.emit("periodic_analysis_error", {
            error: error instanceof Error ? error.message : String(error),
          });
        }
      },
      6 * 60 * 60 * 1000,
    );
  }
1444 | }
1445 | 
```

--------------------------------------------------------------------------------
/tests/tools/validate-content.test.ts:
--------------------------------------------------------------------------------

```typescript
   1 | import { describe, it, expect, beforeEach, afterEach } from "@jest/globals";
   2 | import * as fs from "fs/promises";
   3 | import * as path from "path";
   4 | import {
   5 |   handleValidateDiataxisContent,
   6 |   validateGeneralContent,
   7 | } from "../../src/tools/validate-content.js";
   8 | import { ValidationResult } from "../../src/tools/validate-content.js";
   9 | 
  10 | describe("Content Validation Tool", () => {
  11 |   const testTempDir = path.join(__dirname, "../../.tmp/test-validation");
  12 | 
  // Fresh scratch directory for every test; removed afterwards so tests
  // never observe each other's fixtures.
  beforeEach(async () => {
    // Create test directory
    await fs.mkdir(testTempDir, { recursive: true });
  });

  afterEach(async () => {
    // Clean up test directory
    try {
      await fs.rm(testTempDir, { recursive: true });
    } catch {
      // Ignore cleanup errors
    }
  });
  26 | 
  describe("Application Code Validation", () => {
    // A directory with package.json + src/ should be treated as
    // application code, and undocumented exports should be flagged.
    it("should detect application code path correctly", async () => {
      // Create mock application structure
      const appDir = path.join(testTempDir, "mock-app");
      await fs.mkdir(appDir, { recursive: true });
      await fs.mkdir(path.join(appDir, "src"), { recursive: true });
      await fs.writeFile(
        path.join(appDir, "package.json"),
        '{"name": "test-app"}',
      );

      // Create TypeScript file without documentation
      const tsFile = path.join(appDir, "src", "index.ts");
      await fs.writeFile(
        tsFile,
        `
export function undocumentedFunction(param: string): string {
  return param.toUpperCase();
}

export const anotherFunction = (value: number) => {
  if (value < 0) {
    throw new Error('Invalid value');
  }
  return value * 2;
};
      `.trim(),
      );

      const result = await handleValidateDiataxisContent({
        contentPath: appDir,
        validationType: "compliance",
        includeCodeValidation: true,
      });

      expect(result).toBeDefined();
      expect(result.issues).toBeDefined();

      // Should find issues with undocumented exported functions
      const undocumentedIssues = result.issues.filter((issue) =>
        issue.description.includes("lacks documentation"),
      );
      expect(undocumentedIssues.length).toBeGreaterThan(0);

      // Should find issues with undocumented error throwing
      const errorDocIssues = result.issues.filter((issue) =>
        issue.description.includes(
          "Error throwing code found without error documentation",
        ),
      );
      expect(errorDocIssues.length).toBeGreaterThan(0);
    });

    // Missing conventional directories should surface as issues located
    // at "application structure".
    it("should validate application architecture structure", async () => {
      // Create mock application with missing directories
      const appDir = path.join(testTempDir, "incomplete-app");
      await fs.mkdir(appDir, { recursive: true });
      await fs.writeFile(
        path.join(appDir, "package.json"),
        '{"name": "incomplete-app"}',
      );

      // Missing tools and types directories
      await fs.mkdir(path.join(appDir, "src"), { recursive: true });
      await fs.writeFile(
        path.join(appDir, "src", "index.ts"),
        'export const app = "test";',
      );

      const result = await handleValidateDiataxisContent({
        contentPath: appDir,
        validationType: "compliance",
        includeCodeValidation: false,
      });

      const structureIssues = result.issues.filter(
        (issue) => issue.location.file === "application structure",
      );
      expect(structureIssues.length).toBeGreaterThan(0);

      // Should suggest missing tools directory
      const toolsIssue = structureIssues.find((issue) =>
        issue.description.includes("tools directory"),
      );
      expect(toolsIssue).toBeDefined();
    });

    // A README lacking the expected sections should be flagged under
    // the README.md location.
    it("should validate README structure", async () => {
      const appDir = path.join(testTempDir, "readme-test");
      await fs.mkdir(appDir, { recursive: true });
      await fs.mkdir(path.join(appDir, "src"), { recursive: true });
      await fs.writeFile(
        path.join(appDir, "package.json"),
        '{"name": "readme-test"}',
      );
      await fs.writeFile(
        path.join(appDir, "src", "index.ts"),
        'export const app = "test";',
      );

      // Create README with missing sections
      await fs.writeFile(
        path.join(appDir, "README.md"),
        `
This is a project without proper structure.
Some description here.
      `.trim(),
      );

      const result = await handleValidateDiataxisContent({
        contentPath: appDir,
        validationType: "compliance",
        includeCodeValidation: false,
      });

      // Application validation should find issues

      const readmeIssues = result.issues.filter(
        (issue) => issue.location.file === "README.md",
      );
      expect(readmeIssues.length).toBeGreaterThan(0);

      // Should find issues with README structure
      const structureIssue = readmeIssues.find((issue) =>
        issue.description.includes("lacks essential sections"),
      );
      expect(structureIssue).toBeDefined();
    });

    // Fully JSDoc-documented exports (including @throws) should produce
    // no documentation-related issues.
    it("should detect properly documented functions", async () => {
      const appDir = path.join(testTempDir, "documented-app");
      await fs.mkdir(appDir, { recursive: true });
      await fs.mkdir(path.join(appDir, "src"), { recursive: true });
      await fs.writeFile(
        path.join(appDir, "package.json"),
        '{"name": "documented-app"}',
      );

      // Create well-documented TypeScript file
      const tsFile = path.join(appDir, "src", "documented.ts");
      await fs.writeFile(
        tsFile,
        `
/**
 * Converts a string to uppercase
 * @param param - The input string
 * @returns The uppercase string
 */
export function documentedFunction(param: string): string {
  return param.toUpperCase();
}

/**
 * Doubles a positive number
 * @param value - The input number (must be positive)
 * @returns The doubled value
 * @throws {Error} When value is negative
 */
export const wellDocumentedFunction = (value: number) => {
  if (value < 0) {
    throw new Error('Invalid value');
  }
  return value * 2;
};
      `.trim(),
      );

      const result = await handleValidateDiataxisContent({
        contentPath: appDir,
        validationType: "compliance",
        includeCodeValidation: true,
      });

      // Should have no undocumented issues since functions are properly documented
      const undocumentedIssues = result.issues.filter((issue) =>
        issue.description.includes("lacks documentation"),
      );
      expect(undocumentedIssues.length).toBe(0);

      // Should not complain about error documentation
      const errorDocIssues = result.issues.filter((issue) =>
        issue.description.includes(
          "Error throwing code found without error documentation",
        ),
      );
      expect(errorDocIssues.length).toBe(0);
    });
  });
 215 | 
  describe("Documentation Validation", () => {
    // A docs/ tree with Diataxis sections should be checked for framework
    // compliance (e.g. tutorials need a prerequisites section).
    it("should detect documentation directory correctly", async () => {
      // Create mock documentation structure
      const docsDir = path.join(testTempDir, "docs");
      await fs.mkdir(docsDir, { recursive: true });
      await fs.mkdir(path.join(docsDir, "tutorials"), { recursive: true });

      await fs.writeFile(
        path.join(docsDir, "tutorials", "tutorial1.md"),
        `
# Tutorial 1

This is a tutorial without prerequisites section.

\`\`\`javascript
console.log("hello")
\`\`\`
      `.trim(),
      );

      const result = await handleValidateDiataxisContent({
        contentPath: docsDir,
        validationType: "compliance",
        includeCodeValidation: true,
      });

      expect(result).toBeDefined();

      // Should find Diataxis compliance issues
      const complianceIssues = result.issues.filter(
        (issue) => issue.category === "compliance",
      );
      expect(complianceIssues.length).toBeGreaterThan(0);

      // Should find missing prerequisites in tutorial
      const prereqIssue = complianceIssues.find((issue) =>
        issue.description.includes("prerequisites"),
      );
      expect(prereqIssue).toBeDefined();
    });

    // Exactly one of the two internal links targets a missing file;
    // accuracy validation should report just that one.
    it("should validate link integrity", async () => {
      const docsDir = path.join(testTempDir, "docs-links");
      await fs.mkdir(docsDir, { recursive: true });

      // Create file with broken internal link
      await fs.writeFile(
        path.join(docsDir, "index.md"),
        `
# Documentation

[Broken Link](./nonexistent.md)
[Another Link](./other.md)
      `.trim(),
      );

      // Create the referenced file
      await fs.writeFile(path.join(docsDir, "other.md"), "# Other Page");

      const result = await handleValidateDiataxisContent({
        contentPath: docsDir,
        validationType: "accuracy",
        includeCodeValidation: false,
      });

      const linkIssues = result.issues.filter((issue) =>
        issue.description.includes("Broken internal link"),
      );
      expect(linkIssues.length).toBe(1);

      const brokenLink = linkIssues[0];
      expect(brokenLink.description).toContain("nonexistent.md");
    });

    // Fenced code blocks are syntax-checked; the malformed JSON block
    // should yield an "Invalid JSON" issue while the valid one passes.
    it("should validate code blocks in documentation", async () => {
      const docsDir = path.join(testTempDir, "docs-code");
      await fs.mkdir(docsDir, { recursive: true });

      await fs.writeFile(
        path.join(docsDir, "guide.md"),
        `
# Code Examples

\`\`\`javascript
// Missing semicolon
console.log("test")
\`\`\`

\`\`\`json
{ "valid": "json" }
\`\`\`

\`\`\`json
{ "invalid": json }
\`\`\`
      `.trim(),
      );

      const result = await handleValidateDiataxisContent({
        contentPath: docsDir,
        validationType: "all",
        includeCodeValidation: true,
      });

      expect(result.codeValidation).toBeDefined();
      expect(result.codeValidation!.exampleResults.length).toBeGreaterThan(0);

      // Should find JSON syntax error
      const jsonErrors = result.codeValidation!.exampleResults.filter((ex) =>
        ex.issues.some((issue) => issue.description.includes("Invalid JSON")),
      );
      expect(jsonErrors.length).toBeGreaterThan(0);
    });
  });
 330 | 
  describe("General Content Validation", () => {
    // Content with one broken link and one code error should fail
    // validation and report both, plus recommendations.
    it("should validate general content with link checking", async () => {
      const contentDir = path.join(testTempDir, "general-content");
      await fs.mkdir(contentDir, { recursive: true });

      await fs.writeFile(
        path.join(contentDir, "page.md"),
        `
# Test Page

[Good Link](./existing.md)
[Bad Link](./missing.md)

\`\`\`js
console.log("missing semicolon")
\`\`\`
      `.trim(),
      );

      await fs.writeFile(
        path.join(contentDir, "existing.md"),
        "# Existing Page",
      );

      const result = await validateGeneralContent({
        contentPath: contentDir,
        validationType: "all",
        includeCodeValidation: true,
      });

      expect(result.success).toBe(false);
      expect(result.brokenLinks.length).toBe(1);
      expect(result.brokenLinks[0]).toContain("missing.md");
      expect(result.codeBlocksValidated).toBeGreaterThan(0);
      expect(result.codeErrors.length).toBeGreaterThan(0);
      expect(result.recommendations.length).toBeGreaterThan(0);
    });

    // Content with only valid links and valid code should pass and get
    // the "no critical issues" recommendation.
    it("should pass validation for clean content", async () => {
      const contentDir = path.join(testTempDir, "clean-content");
      await fs.mkdir(contentDir, { recursive: true });

      await fs.writeFile(
        path.join(contentDir, "clean.md"),
        `
# Clean Page

[Good Link](./other.md)

\`\`\`json
{ "valid": "json" }
\`\`\`
      `.trim(),
      );

      await fs.writeFile(path.join(contentDir, "other.md"), "# Other Page");

      const result = await validateGeneralContent({
        contentPath: contentDir,
        validationType: "all",
        includeCodeValidation: true,
      });

      expect(result.success).toBe(true);
      expect(result.brokenLinks.length).toBe(0);
      expect(result.recommendations).toContain(
        "Content validation passed - no critical issues found",
      );
    });
  });
 401 | 
 402 |   describe("Confidence Metrics", () => {
 403 |     it("should calculate confidence metrics correctly", async () => {
 404 |       const appDir = path.join(testTempDir, "confidence-test");
 405 |       await fs.mkdir(appDir, { recursive: true });
 406 |       await fs.mkdir(path.join(appDir, "src"), { recursive: true });
 407 |       await fs.writeFile(
 408 |         path.join(appDir, "package.json"),
 409 |         '{"name": "confidence-test"}',
 410 |       );
 411 |       await fs.writeFile(
 412 |         path.join(appDir, "src", "index.ts"),
 413 |         'export const test = "value";',
 414 |       );
 415 | 
 416 |       const result = await handleValidateDiataxisContent({
 417 |         contentPath: appDir,
 418 |         validationType: "all",
 419 |         includeCodeValidation: true,
 420 |       });
 421 | 
 422 |       expect(result.confidence).toBeDefined();
 423 |       expect(result.confidence.overall).toBeGreaterThan(0);
 424 |       expect(result.confidence.overall).toBeLessThanOrEqual(100);
 425 | 
 426 |       expect(result.confidence.breakdown).toBeDefined();
 427 |       expect(result.confidence.breakdown.technologyDetection).toBeDefined();
 428 |       expect(result.confidence.breakdown.codeExampleRelevance).toBeDefined();
 429 |       expect(
 430 |         result.confidence.breakdown.architecturalAssumptions,
 431 |       ).toBeDefined();
 432 |     });
 433 | 
 434 |     it("should provide recommendations based on confidence", async () => {
 435 |       const appDir = path.join(testTempDir, "recommendations-test");
 436 |       await fs.mkdir(appDir, { recursive: true });
 437 |       await fs.writeFile(
 438 |         path.join(appDir, "package.json"),
 439 |         '{"name": "recommendations-test"}',
 440 |       );
 441 | 
 442 |       // Create content that will generate issues
 443 |       await fs.writeFile(path.join(appDir, "README.md"), "No proper structure");
 444 | 
 445 |       const result = await handleValidateDiataxisContent({
 446 |         contentPath: appDir,
 447 |         validationType: "all",
 448 |         includeCodeValidation: false,
 449 |       });
 450 | 
 451 |       expect(result.recommendations).toBeDefined();
 452 |       expect(result.recommendations.length).toBeGreaterThan(0);
 453 |       expect(result.nextSteps).toBeDefined();
 454 |       expect(result.nextSteps.length).toBeGreaterThan(0);
 455 | 
 456 |       if (result.confidence.overall < 70) {
 457 |         expect(
 458 |           result.recommendations.some((rec) =>
 459 |             rec.includes("comprehensive review"),
 460 |           ),
 461 |         ).toBe(true);
 462 |       }
 463 |     });
 464 |   });
 465 | 
 466 |   describe("Error Handling and Edge Cases", () => {
 467 |     it("should handle non-existent content path gracefully", async () => {
 468 |       const nonExistentPath = path.join(testTempDir, "does-not-exist");
 469 | 
 470 |       const result = await handleValidateDiataxisContent({
 471 |         contentPath: nonExistentPath,
 472 |         validationType: "all",
 473 |         includeCodeValidation: false,
 474 |       });
 475 | 
 476 |       expect(result).toBeDefined();
 477 |       // The function handles non-existent paths gracefully but may still succeed
 478 |       expect(result.confidence).toBeDefined();
 479 |     });
 480 | 
 481 |     it("should handle empty directory", async () => {
 482 |       const emptyDir = path.join(testTempDir, "empty-dir");
 483 |       await fs.mkdir(emptyDir, { recursive: true });
 484 | 
 485 |       const result = await handleValidateDiataxisContent({
 486 |         contentPath: emptyDir,
 487 |         validationType: "all",
 488 |         includeCodeValidation: true,
 489 |       });
 490 | 
 491 |       expect(result).toBeDefined();
 492 |       expect(result.confidence.breakdown.architecturalAssumptions).toBeLessThan(
 493 |         80,
 494 |       );
 495 |     });
 496 | 
 497 |     it("should handle project context loading with analysis ID", async () => {
 498 |       const appDir = path.join(testTempDir, "context-test");
 499 |       await fs.mkdir(appDir, { recursive: true });
 500 |       await fs.writeFile(
 501 |         path.join(appDir, "package.json"),
 502 |         '{"name": "context-test"}',
 503 |       );
 504 | 
 505 |       // Create .documcp directory with analysis
 506 |       const docucmpDir = path.join(appDir, ".documcp", "analyses");
 507 |       await fs.mkdir(docucmpDir, { recursive: true });
 508 |       await fs.writeFile(
 509 |         path.join(docucmpDir, "test-analysis.json"),
 510 |         JSON.stringify({
 511 |           metadata: {
 512 |             projectName: "test-project",
 513 |             primaryLanguage: "TypeScript",
 514 |           },
 515 |           technologies: { framework: "React" },
 516 |           dependencies: { packages: ["react", "typescript"] },
 517 |         }),
 518 |       );
 519 | 
 520 |       const result = await handleValidateDiataxisContent({
 521 |         contentPath: appDir,
 522 |         analysisId: "test-analysis",
 523 |         validationType: "accuracy",
 524 |         includeCodeValidation: false,
 525 |       });
 526 | 
 527 |       expect(result).toBeDefined();
 528 |       expect(result.confidence).toBeDefined();
 529 |     });
 530 | 
 531 |     it("should handle missing analysis ID gracefully", async () => {
 532 |       const appDir = path.join(testTempDir, "missing-analysis");
 533 |       await fs.mkdir(appDir, { recursive: true });
 534 |       await fs.writeFile(
 535 |         path.join(appDir, "package.json"),
 536 |         '{"name": "missing-analysis"}',
 537 |       );
 538 | 
 539 |       const result = await handleValidateDiataxisContent({
 540 |         contentPath: appDir,
 541 |         analysisId: "non-existent-analysis",
 542 |         validationType: "accuracy",
 543 |         includeCodeValidation: false,
 544 |       });
 545 | 
 546 |       expect(result).toBeDefined();
 547 |       expect(result.confidence).toBeDefined();
 548 |     });
 549 | 
 550 |     it("should detect documentation directory correctly", async () => {
 551 |       const docsPath = path.join(testTempDir, "project", "docs");
 552 |       await fs.mkdir(docsPath, { recursive: true });
 553 |       await fs.writeFile(path.join(docsPath, "index.md"), "# Documentation");
 554 | 
 555 |       const result = await handleValidateDiataxisContent({
 556 |         contentPath: docsPath,
 557 |         validationType: "compliance",
 558 |         includeCodeValidation: false,
 559 |       });
 560 | 
 561 |       expect(result).toBeDefined();
 562 |       expect(result.confidence).toBeDefined();
 563 |       // Documentation directory should be processed
 564 |       expect(
 565 |         result.confidence.breakdown.architecturalAssumptions,
 566 |       ).toBeGreaterThan(0);
 567 |     });
 568 | 
 569 |     it("should handle different validation types", async () => {
 570 |       const appDir = path.join(testTempDir, "validation-types");
 571 |       await fs.mkdir(appDir, { recursive: true });
 572 |       await fs.writeFile(
 573 |         path.join(appDir, "test.md"),
 574 |         "# Test\n[broken link](./missing.md)",
 575 |       );
 576 | 
 577 |       // Test accuracy only
 578 |       const accuracyResult = await handleValidateDiataxisContent({
 579 |         contentPath: appDir,
 580 |         validationType: "accuracy",
 581 |         includeCodeValidation: false,
 582 |       });
 583 |       expect(accuracyResult).toBeDefined();
 584 | 
 585 |       // Test completeness only
 586 |       const completenessResult = await handleValidateDiataxisContent({
 587 |         contentPath: appDir,
 588 |         validationType: "completeness",
 589 |         includeCodeValidation: false,
 590 |       });
 591 |       expect(completenessResult).toBeDefined();
 592 | 
 593 |       // Test compliance only
 594 |       const complianceResult = await handleValidateDiataxisContent({
 595 |         contentPath: appDir,
 596 |         validationType: "compliance",
 597 |         includeCodeValidation: false,
 598 |       });
 599 |       expect(complianceResult).toBeDefined();
 600 |     });
 601 | 
 602 |     it("should handle code validation failure scenarios", async () => {
 603 |       const appDir = path.join(testTempDir, "code-validation-fail");
 604 |       await fs.mkdir(appDir, { recursive: true });
 605 | 
 606 |       // Create markdown with broken code examples
 607 |       await fs.writeFile(
 608 |         path.join(appDir, "broken-code.md"),
 609 |         `
 610 | # Broken Code Examples
 611 | 
 612 | \`\`\`javascript
 613 | // Syntax error
 614 | console.log("missing quote);
 615 | \`\`\`
 616 | 
 617 | \`\`\`json
 618 | { "invalid": json }
 619 | \`\`\`
 620 |       `.trim(),
 621 |       );
 622 | 
 623 |       const result = await handleValidateDiataxisContent({
 624 |         contentPath: appDir,
 625 |         validationType: "all",
 626 |         includeCodeValidation: true,
 627 |       });
 628 | 
 629 |       expect(result.codeValidation).toBeDefined();
 630 |       expect(result.codeValidation!.overallSuccess).toBe(false);
 631 |       expect(
 632 |         result.recommendations.some((rec) => rec.includes("Fix code examples")),
 633 |       ).toBe(true);
 634 |     });
 635 | 
 636 |     it("should generate risk factors for critical issues", async () => {
 637 |       const appDir = path.join(testTempDir, "risk-factors");
 638 |       await fs.mkdir(appDir, { recursive: true });
 639 | 
 640 |       // Create content with multiple critical issues
 641 |       await fs.writeFile(
 642 |         path.join(appDir, "critical-issues.md"),
 643 |         `
 644 | # Critical Issues
 645 | 
 646 | [Broken Link 1](./missing1.md)
 647 | [Broken Link 2](./missing2.md)
 648 | [Broken Link 3](./missing3.md)
 649 |       `.trim(),
 650 |       );
 651 | 
 652 |       const result = await handleValidateDiataxisContent({
 653 |         contentPath: appDir,
 654 |         validationType: "all",
 655 |         includeCodeValidation: false,
 656 |       });
 657 | 
 658 |       expect(result.confidence.riskFactors).toBeDefined();
 659 |       expect(result.confidence.riskFactors.length).toBeGreaterThan(0);
 660 | 
 661 |       const highRiskFactors = result.confidence.riskFactors.filter(
 662 |         (rf) => rf.type === "high",
 663 |       );
 664 |       expect(highRiskFactors.length).toBeGreaterThan(0);
 665 |     });
 666 | 
 667 |     it("should handle uncertainty flags and medium risk factors", async () => {
 668 |       const appDir = path.join(testTempDir, "uncertainty-test");
 669 |       await fs.mkdir(appDir, { recursive: true });
 670 | 
 671 |       // Create content that generates uncertainties
 672 |       await fs.writeFile(
 673 |         path.join(appDir, "uncertain.md"),
 674 |         `
 675 | # Uncertain Content
 676 | 
 677 | This content has many ambiguous references and unclear instructions.
 678 | Multiple areas need clarification for proper understanding.
 679 |       `.trim(),
 680 |       );
 681 | 
 682 |       const result = await handleValidateDiataxisContent({
 683 |         contentPath: appDir,
 684 |         validationType: "all",
 685 |         includeCodeValidation: false,
 686 |       });
 687 | 
 688 |       // Manually add uncertainties to test the risk factor generation
 689 |       result.uncertainties = [
 690 |         {
 691 |           area: "test1",
 692 |           severity: "high",
 693 |           description: "test",
 694 |           potentialImpact: "test",
 695 |           clarificationNeeded: "test",
 696 |           fallbackStrategy: "test",
 697 |         },
 698 |         {
 699 |           area: "test2",
 700 |           severity: "high",
 701 |           description: "test",
 702 |           potentialImpact: "test",
 703 |           clarificationNeeded: "test",
 704 |           fallbackStrategy: "test",
 705 |         },
 706 |         {
 707 |           area: "test3",
 708 |           severity: "high",
 709 |           description: "test",
 710 |           potentialImpact: "test",
 711 |           clarificationNeeded: "test",
 712 |           fallbackStrategy: "test",
 713 |         },
 714 |         {
 715 |           area: "test4",
 716 |           severity: "high",
 717 |           description: "test",
 718 |           potentialImpact: "test",
 719 |           clarificationNeeded: "test",
 720 |           fallbackStrategy: "test",
 721 |         },
 722 |         {
 723 |           area: "test5",
 724 |           severity: "high",
 725 |           description: "test",
 726 |           potentialImpact: "test",
 727 |           clarificationNeeded: "test",
 728 |           fallbackStrategy: "test",
 729 |         },
 730 |         {
 731 |           area: "test6",
 732 |           severity: "high",
 733 |           description: "test",
 734 |           potentialImpact: "test",
 735 |           clarificationNeeded: "test",
 736 |           fallbackStrategy: "test",
 737 |         },
 738 |       ];
 739 | 
 740 |       expect(result.uncertainties.length).toBeGreaterThan(5);
 741 | 
 742 |       const highUncertainties = result.uncertainties.filter(
 743 |         (u) => u.severity === "high" || u.severity === "critical",
 744 |       );
 745 |       expect(highUncertainties.length).toBeGreaterThan(0);
 746 |     });
 747 | 
 748 |     it("should handle Diataxis structure analysis", async () => {
 749 |       const docsDir = path.join(testTempDir, "diataxis-structure");
 750 |       await fs.mkdir(docsDir, { recursive: true });
 751 | 
 752 |       // Create Diataxis structure
 753 |       await fs.mkdir(path.join(docsDir, "tutorials"), { recursive: true });
 754 |       await fs.mkdir(path.join(docsDir, "how-to"), { recursive: true });
 755 |       await fs.mkdir(path.join(docsDir, "reference"), { recursive: true });
 756 |       await fs.mkdir(path.join(docsDir, "explanation"), { recursive: true });
 757 | 
 758 |       await fs.writeFile(
 759 |         path.join(docsDir, "tutorials", "tutorial.md"),
 760 |         "# Tutorial",
 761 |       );
 762 |       await fs.writeFile(
 763 |         path.join(docsDir, "how-to", "guide.md"),
 764 |         "# How-to Guide",
 765 |       );
 766 |       await fs.writeFile(
 767 |         path.join(docsDir, "reference", "api.md"),
 768 |         "# API Reference",
 769 |       );
 770 |       await fs.writeFile(
 771 |         path.join(docsDir, "explanation", "concept.md"),
 772 |         "# Explanation",
 773 |       );
 774 | 
 775 |       const result = await handleValidateDiataxisContent({
 776 |         contentPath: docsDir,
 777 |         validationType: "compliance",
 778 |         includeCodeValidation: false,
 779 |       });
 780 | 
 781 |       expect(result).toBeDefined();
 782 |       expect(
 783 |         result.confidence.breakdown.architecturalAssumptions,
 784 |       ).toBeGreaterThan(60);
 785 |     });
 786 | 
 787 |     it("should handle successful validation with no issues", async () => {
 788 |       const cleanDir = path.join(testTempDir, "clean-validation");
 789 |       await fs.mkdir(cleanDir, { recursive: true });
 790 | 
 791 |       // Create clean content with no issues
 792 |       await fs.writeFile(
 793 |         path.join(cleanDir, "clean.md"),
 794 |         `
 795 | # Clean Documentation
 796 | 
 797 | This is well-structured documentation with no issues.
 798 | 
 799 | \`\`\`json
 800 | { "valid": "json" }
 801 | \`\`\`
 802 |       `.trim(),
 803 |       );
 804 | 
 805 |       const result = await handleValidateDiataxisContent({
 806 |         contentPath: cleanDir,
 807 |         validationType: "all",
 808 |         includeCodeValidation: true,
 809 |       });
 810 | 
 811 |       // Should have minimal issues and good confidence
 812 |       expect(result.confidence.overall).toBeGreaterThan(0);
 813 |       expect(result.recommendations).toBeDefined();
 814 |       expect(result.recommendations.length).toBeGreaterThan(0);
 815 |     });
 816 | 
 817 |     it("should handle timeout scenarios", async () => {
 818 |       // Test timeout handling by creating a scenario that might take time
 819 |       const largeDir = path.join(testTempDir, "timeout-test");
 820 |       await fs.mkdir(largeDir, { recursive: true });
 821 | 
 822 |       // Create multiple markdown files to simulate processing time
 823 |       for (let i = 0; i < 5; i++) {
 824 |         await fs.writeFile(
 825 |           path.join(largeDir, `file${i}.md`),
 826 |           `
 827 | # File ${i}
 828 | 
 829 | Content for file ${i} with some text.
 830 | 
 831 | \`\`\`javascript
 832 | console.log("File ${i}");
 833 | \`\`\`
 834 |         `.trim(),
 835 |         );
 836 |       }
 837 | 
 838 |       const result = await handleValidateDiataxisContent({
 839 |         contentPath: largeDir,
 840 |         validationType: "all",
 841 |         includeCodeValidation: true,
 842 |       });
 843 | 
 844 |       expect(result).toBeDefined();
 845 |       expect(result.confidence).toBeDefined();
 846 |     });
 847 | 
 848 |     it("should handle confidence levels and validation modes", async () => {
 849 |       const testDir = path.join(testTempDir, "confidence-levels");
 850 |       await fs.mkdir(testDir, { recursive: true });
 851 |       await fs.writeFile(path.join(testDir, "test.md"), "# Test Content");
 852 | 
 853 |       // Test different confidence levels
 854 |       const strictResult = await handleValidateDiataxisContent({
 855 |         contentPath: testDir,
 856 |         validationType: "all",
 857 |         includeCodeValidation: false,
 858 |         confidence: "strict",
 859 |       });
 860 |       expect(strictResult).toBeDefined();
 861 | 
 862 |       const moderateResult = await handleValidateDiataxisContent({
 863 |         contentPath: testDir,
 864 |         validationType: "all",
 865 |         includeCodeValidation: false,
 866 |         confidence: "moderate",
 867 |       });
 868 |       expect(moderateResult).toBeDefined();
 869 | 
 870 |       const permissiveResult = await handleValidateDiataxisContent({
 871 |         contentPath: testDir,
 872 |         validationType: "all",
 873 |         includeCodeValidation: false,
 874 |         confidence: "permissive",
 875 |       });
 876 |       expect(permissiveResult).toBeDefined();
 877 |     });
 878 | 
 879 |     it("should handle TypeScript files without package.json", async () => {
 880 |       const tsDir = path.join(testTempDir, "typescript-only");
 881 |       await fs.mkdir(tsDir, { recursive: true });
 882 |       await fs.mkdir(path.join(tsDir, "src"), { recursive: true });
 883 | 
 884 |       // Create TypeScript files without package.json
 885 |       await fs.writeFile(
 886 |         path.join(tsDir, "src", "app.ts"),
 887 |         `
 888 | export class TestClass {
 889 |   public method(): void {
 890 |     console.log('test');
 891 |   }
 892 | }
 893 |       `.trim(),
 894 |       );
 895 | 
 896 |       const result = await handleValidateDiataxisContent({
 897 |         contentPath: tsDir,
 898 |         validationType: "compliance",
 899 |         includeCodeValidation: false,
 900 |       });
 901 | 
 902 |       expect(result).toBeDefined();
 903 |       expect(result.confidence).toBeDefined();
 904 |     });
 905 | 
 906 |     it("should handle mixed content scenarios", async () => {
 907 |       const mixedDir = path.join(testTempDir, "mixed-content");
 908 |       await fs.mkdir(mixedDir, { recursive: true });
 909 |       await fs.mkdir(path.join(mixedDir, "src"), { recursive: true });
 910 | 
 911 |       // Create both application and documentation content
 912 |       await fs.writeFile(
 913 |         path.join(mixedDir, "package.json"),
 914 |         '{"name": "mixed-app"}',
 915 |       );
 916 |       await fs.writeFile(
 917 |         path.join(mixedDir, "src", "index.ts"),
 918 |         'export const app = "test";',
 919 |       );
 920 |       await fs.writeFile(
 921 |         path.join(mixedDir, "README.md"),
 922 |         `
 923 | # Mixed Content App
 924 | 
 925 | ## Installation
 926 | 
 927 | Run \`npm install\`
 928 | 
 929 | ## Usage
 930 | 
 931 | See the documentation.
 932 |       `.trim(),
 933 |       );
 934 | 
 935 |       const result = await handleValidateDiataxisContent({
 936 |         contentPath: mixedDir,
 937 |         validationType: "all",
 938 |         includeCodeValidation: true,
 939 |       });
 940 | 
 941 |       expect(result).toBeDefined();
 942 |       expect(
 943 |         result.confidence.breakdown.architecturalAssumptions,
 944 |       ).toBeGreaterThanOrEqual(60);
 945 |     });
 946 | 
 947 |     it("should handle business context alignment scoring", async () => {
 948 |       const businessDir = path.join(testTempDir, "business-context");
 949 |       await fs.mkdir(businessDir, { recursive: true });
 950 | 
 951 |       // Create content with business context
 952 |       await fs.writeFile(
 953 |         path.join(businessDir, "business.md"),
 954 |         `
 955 | # Business Requirements
 956 | 
 957 | This application serves enterprise customers with specific needs.
 958 | The solution addresses market requirements and business objectives.
 959 |       `.trim(),
 960 |       );
 961 | 
 962 |       const result = await handleValidateDiataxisContent({
 963 |         contentPath: businessDir,
 964 |         validationType: "all",
 965 |         includeCodeValidation: false,
 966 |       });
 967 | 
 968 |       expect(result).toBeDefined();
 969 |       expect(
 970 |         result.confidence.breakdown.businessContextAlignment,
 971 |       ).toBeGreaterThanOrEqual(0);
 972 |     });
 973 | 
 974 |     it("should handle deprecated patterns in technical accuracy checks", async () => {
 975 |       const deprecatedDir = path.join(testTempDir, "deprecated-patterns");
 976 |       await fs.mkdir(deprecatedDir, { recursive: true });
 977 | 
 978 |       await fs.writeFile(
 979 |         path.join(deprecatedDir, "deprecated.md"),
 980 |         `
 981 | # Deprecated Patterns
 982 | 
 983 | \`\`\`bash
 984 | npm install -g some-package
 985 | \`\`\`
 986 | 
 987 | \`\`\`javascript
 988 | var oldVariable = "test";
 989 | function() {
 990 |   console.log("old style");
 991 | }
 992 | \`\`\`
 993 | 
 994 | Visit http://example.com for more info.
 995 |       `.trim(),
 996 |       );
 997 | 
 998 |       const result = await handleValidateDiataxisContent({
 999 |         contentPath: deprecatedDir,
1000 |         validationType: "accuracy",
1001 |         includeCodeValidation: false,
1002 |       });
1003 | 
1004 |       const deprecatedIssues = result.issues.filter((issue) =>
1005 |         issue.description.includes("Potentially outdated pattern"),
1006 |       );
1007 |       expect(deprecatedIssues.length).toBeGreaterThan(0);
1008 |     });
1009 | 
1010 |     it("should handle async code without error handling", async () => {
1011 |       const asyncDir = path.join(testTempDir, "async-code");
1012 |       await fs.mkdir(asyncDir, { recursive: true });
1013 | 
1014 |       await fs.writeFile(
1015 |         path.join(asyncDir, "async.md"),
1016 |         `
1017 | # Async Code Examples
1018 | 
1019 | \`\`\`javascript
1020 | async function fetchData() {
1021 |   const response = await fetch('/api/data');
1022 |   return response.json();
1023 | }
1024 | \`\`\`
1025 | 
1026 | \`\`\`typescript
1027 | const getData = async (): Promise<any> => {
1028 |   const result = await someAsyncOperation();
1029 |   return result;
1030 | };
1031 | \`\`\`
1032 |       `.trim(),
1033 |       );
1034 | 
1035 |       const result = await handleValidateDiataxisContent({
1036 |         contentPath: asyncDir,
1037 |         validationType: "accuracy",
1038 |         includeCodeValidation: false,
1039 |       });
1040 | 
1041 |       const asyncIssues = result.issues.filter((issue) =>
1042 |         issue.description.includes("Async code without error handling"),
1043 |       );
1044 |       expect(asyncIssues.length).toBeGreaterThan(0);
1045 |     });
1046 | 
1047 |     it("should handle version compatibility checks with project context", async () => {
1048 |       const versionDir = path.join(testTempDir, "version-compat");
1049 |       await fs.mkdir(versionDir, { recursive: true });
1050 | 
1051 |       // Create .documcp directory with analysis
1052 |       const docucmpDir = path.join(versionDir, ".documcp", "analyses");
1053 |       await fs.mkdir(docucmpDir, { recursive: true });
1054 |       await fs.writeFile(
1055 |         path.join(docucmpDir, "version-analysis.json"),
1056 |         JSON.stringify({
1057 |           metadata: {
1058 |             projectName: "version-test",
1059 |             primaryLanguage: "TypeScript",
1060 |           },
1061 |           technologies: { framework: "React" },
1062 |           dependencies: { packages: ["[email protected]", "[email protected]"] },
1063 |         }),
1064 |       );
1065 | 
1066 |       await fs.writeFile(
1067 |         path.join(versionDir, "versions.md"),
1068 |         `
1069 | # Version Information
1070 | 
1071 | This project uses React @18.2.0 and TypeScript @4.9.0.
1072 | Also compatible with Node.js @16.14.0.
1073 |       `.trim(),
1074 |       );
1075 | 
1076 |       const result = await handleValidateDiataxisContent({
1077 |         contentPath: versionDir,
1078 |         analysisId: "version-analysis",
1079 |         validationType: "accuracy",
1080 |         includeCodeValidation: false,
1081 |       });
1082 | 
1083 |       const versionUncertainties = result.uncertainties.filter(
1084 |         (u) => u.area === "version-compatibility",
1085 |       );
1086 |       expect(versionUncertainties.length).toBeGreaterThan(0);
1087 |     });
1088 | 
1089 |     it("should handle dangerous bash commands", async () => {
1090 |       const bashDir = path.join(testTempDir, "dangerous-bash");
1091 |       await fs.mkdir(bashDir, { recursive: true });
1092 | 
1093 |       await fs.writeFile(
1094 |         path.join(bashDir, "dangerous.md"),
1095 |         `
1096 | # Dangerous Commands
1097 | 
1098 | \`\`\`bash
1099 | rm -rf /
1100 | sudo rm -rf /tmp/important
1101 | chmod 777 /etc/passwd
1102 | command > /dev/null 2>&1
1103 | \`\`\`
1104 |       `.trim(),
1105 |       );
1106 | 
1107 |       const result = await handleValidateDiataxisContent({
1108 |         contentPath: bashDir,
1109 |         validationType: "accuracy",
1110 |         includeCodeValidation: false,
1111 |       });
1112 | 
1113 |       const dangerousIssues = result.issues.filter((issue) =>
1114 |         issue.description.includes("Potentially dangerous command"),
1115 |       );
1116 |       expect(dangerousIssues.length).toBeGreaterThan(0);
1117 |     });
1118 | 
1119 |     it("should handle mixed path separators in commands", async () => {
1120 |       const pathDir = path.join(testTempDir, "mixed-paths");
1121 |       await fs.mkdir(pathDir, { recursive: true });
1122 | 
1123 |       await fs.writeFile(
1124 |         path.join(pathDir, "paths.md"),
1125 |         `
1126 | # Mixed Path Examples
1127 | 
1128 | \`\`\`bash
1129 | cp /unix/path\\windows\\mixed /destination/path
1130 | \`\`\`
1131 |       `.trim(),
1132 |       );
1133 | 
1134 |       const result = await handleValidateDiataxisContent({
1135 |         contentPath: pathDir,
1136 |         validationType: "accuracy",
1137 |         includeCodeValidation: false,
1138 |       });
1139 | 
1140 |       const pathIssues = result.issues.filter((issue) =>
1141 |         issue.description.includes("Mixed path separators"),
1142 |       );
1143 |       expect(pathIssues.length).toBeGreaterThan(0);
1144 |     });
1145 | 
1146 |     it("should handle external links in accuracy validation", async () => {
1147 |       const linksDir = path.join(testTempDir, "external-links");
1148 |       await fs.mkdir(linksDir, { recursive: true });
1149 | 
1150 |       await fs.writeFile(
1151 |         path.join(linksDir, "external.md"),
1152 |         `
1153 | # External Links
1154 | 
1155 | [GitHub](https://github.com)
1156 | [Documentation](https://docs.example.com)
1157 |       `.trim(),
1158 |       );
1159 | 
1160 |       const result = await handleValidateDiataxisContent({
1161 |         contentPath: linksDir,
1162 |         validationType: "accuracy",
1163 |         includeCodeValidation: false,
1164 |       });
1165 | 
1166 |       const linkUncertainties = result.uncertainties.filter(
1167 |         (u) => u.area === "external-links",
1168 |       );
1169 |       expect(linkUncertainties.length).toBeGreaterThan(0);
1170 |     });
1171 | 
1172 |     it("should handle Diataxis compliance rules for different sections", async () => {
1173 |       const complianceDir = path.join(testTempDir, "diataxis-compliance");
1174 |       await fs.mkdir(complianceDir, { recursive: true });
1175 | 
1176 |       // Create directories for each Diataxis section
1177 |       await fs.mkdir(path.join(complianceDir, "tutorials"), {
1178 |         recursive: true,
1179 |       });
1180 |       await fs.mkdir(path.join(complianceDir, "how-to"), { recursive: true });
1181 |       await fs.mkdir(path.join(complianceDir, "reference"), {
1182 |         recursive: true,
1183 |       });
1184 |       await fs.mkdir(path.join(complianceDir, "explanation"), {
1185 |         recursive: true,
1186 |       });
1187 | 
1188 |       // Tutorial without prerequisites
1189 |       await fs.writeFile(
1190 |         path.join(complianceDir, "tutorials", "bad-tutorial.md"),
1191 |         `
1192 | # Bad Tutorial
1193 | 
1194 | This tutorial doesn't have prerequisites or clear steps.
1195 |       `.trim(),
1196 |       );
1197 | 
1198 |       // How-to without task focus
1199 |       await fs.writeFile(
1200 |         path.join(complianceDir, "how-to", "bad-howto.md"),
1201 |         `
1202 | # Bad Guide
1203 | 
1204 | Short guide.
1205 |       `.trim(),
1206 |       );
1207 | 
1208 |       // Reference without structure
1209 |       await fs.writeFile(
1210 |         path.join(complianceDir, "reference", "bad-reference.md"),
1211 |         `
1212 | Bad reference without headings or tables.
1213 |       `.trim(),
1214 |       );
1215 | 
1216 |       // Explanation without "why"
1217 |       await fs.writeFile(
1218 |         path.join(complianceDir, "explanation", "bad-explanation.md"),
1219 |         `
1220 | # Bad Explanation
1221 | 
1222 | Short explanation.
1223 |       `.trim(),
1224 |       );
1225 | 
1226 |       const result = await handleValidateDiataxisContent({
1227 |         contentPath: complianceDir,
1228 |         validationType: "compliance",
1229 |         includeCodeValidation: false,
1230 |       });
1231 | 
1232 |       const complianceIssues = result.issues.filter(
1233 |         (issue) => issue.category === "compliance",
1234 |       );
1235 |       expect(complianceIssues.length).toBeGreaterThan(4); // Should find issues in each section
1236 |     });
1237 | 
1238 |     it("should handle TypeScript code validation with compilation errors", async () => {
1239 |       const tsDir = path.join(testTempDir, "typescript-validation");
1240 |       await fs.mkdir(tsDir, { recursive: true });
1241 | 
1242 |       await fs.writeFile(
1243 |         path.join(tsDir, "typescript.md"),
1244 |         `
1245 | # TypeScript Examples
1246 | 
1247 | \`\`\`typescript
1248 | // This has type errors
1249 | let x: string = 123;
1250 | function badFunction(param: number): string {
1251 |   return param; // Type error
1252 | }
1253 | \`\`\`
1254 |       `.trim(),
1255 |       );
1256 | 
1257 |       const result = await handleValidateDiataxisContent({
1258 |         contentPath: tsDir,
1259 |         validationType: "all",
1260 |         includeCodeValidation: true,
1261 |       });
1262 | 
1263 |       expect(result.codeValidation).toBeDefined();
1264 |       expect(result.codeValidation!.overallSuccess).toBe(false);
1265 |     });
1266 | 
1267 |     it("should handle bash code validation with complex chaining", async () => {
1268 |       const bashComplexDir = path.join(testTempDir, "bash-complex");
1269 |       await fs.mkdir(bashComplexDir, { recursive: true });
1270 | 
1271 |       await fs.writeFile(
1272 |         path.join(bashComplexDir, "complex-bash.md"),
1273 |         `
1274 | # Complex Bash
1275 | 
1276 | \`\`\`bash
1277 | # Complex command chaining
1278 | command1 && command2 || command3
1279 | rm $VARIABLE
1280 | \`\`\`
1281 |       `.trim(),
1282 |       );
1283 | 
1284 |       const result = await handleValidateDiataxisContent({
1285 |         contentPath: bashComplexDir,
1286 |         validationType: "all",
1287 |         includeCodeValidation: true,
1288 |       });
1289 | 
1290 |       expect(result.codeValidation).toBeDefined();
1291 |       const bashIssues = result.codeValidation!.exampleResults.flatMap(
1292 |         (ex) => ex.issues,
1293 |       );
1294 |       expect(bashIssues.length).toBeGreaterThan(0);
1295 |     });
1296 | 
1297 |     it("should handle file limit reached scenario", async () => {
1298 |       const largeDir = path.join(testTempDir, "large-directory");
1299 |       await fs.mkdir(largeDir, { recursive: true });
1300 | 
1301 |       // Create many markdown files to test file limit
1302 |       for (let i = 0; i < 10; i++) {
1303 |         await fs.writeFile(
1304 |           path.join(largeDir, `file${i}.md`),
1305 |           `# File ${i}\nContent for file ${i}.`,
1306 |         );
1307 |       }
1308 | 
1309 |       const result = await handleValidateDiataxisContent({
1310 |         contentPath: largeDir,
1311 |         validationType: "all",
1312 |         includeCodeValidation: false,
1313 |       });
1314 | 
1315 |       expect(result).toBeDefined();
1316 |       expect(
1317 |         result.confidence.breakdown.architecturalAssumptions,
1318 |       ).toBeGreaterThan(60);
1319 |     });
1320 | 
1321 |     it("should handle symlink detection in file scanning", async () => {
1322 |       const symlinkDir = path.join(testTempDir, "symlink-test");
1323 |       await fs.mkdir(symlinkDir, { recursive: true });
1324 | 
1325 |       // Create a regular file
1326 |       await fs.writeFile(path.join(symlinkDir, "regular.md"), "# Regular File");
1327 | 
1328 |       // Create a subdirectory
1329 |       await fs.mkdir(path.join(symlinkDir, "subdir"), { recursive: true });
1330 |       await fs.writeFile(
1331 |         path.join(symlinkDir, "subdir", "nested.md"),
1332 |         "# Nested File",
1333 |       );
1334 | 
1335 |       const result = await handleValidateDiataxisContent({
1336 |         contentPath: symlinkDir,
1337 |         validationType: "all",
1338 |         includeCodeValidation: false,
1339 |       });
1340 | 
1341 |       expect(result).toBeDefined();
1342 |       expect(
1343 |         result.confidence.breakdown.architecturalAssumptions,
1344 |       ).toBeGreaterThanOrEqual(60);
1345 |     });
1346 | 
1347 |     it("should handle timeout scenario", async () => {
1348 |       const timeoutDir = path.join(testTempDir, "timeout-scenario");
1349 |       await fs.mkdir(timeoutDir, { recursive: true });
1350 |       await fs.writeFile(path.join(timeoutDir, "test.md"), "# Test");
1351 | 
1352 |       // Mock a timeout by creating a very short timeout
1353 |       const originalTimeout = 120000;
1354 | 
1355 |       const result = await handleValidateDiataxisContent({
1356 |         contentPath: timeoutDir,
1357 |         validationType: "all",
1358 |         includeCodeValidation: false,
1359 |       });
1360 | 
1361 |       expect(result).toBeDefined();
1362 |     });
1363 | 
1364 |     it("should handle general content validation with external links", async () => {
1365 |       const generalDir = path.join(testTempDir, "general-external");
1366 |       await fs.mkdir(generalDir, { recursive: true });
1367 | 
1368 |       await fs.writeFile(
1369 |         path.join(generalDir, "external.md"),
1370 |         `
1371 | # External Links Test
1372 | 
1373 | [GitHub](https://github.com)
1374 | [Local](./local.md)
1375 |       `.trim(),
1376 |       );
1377 | 
1378 |       await fs.writeFile(path.join(generalDir, "local.md"), "# Local File");
1379 | 
1380 |       const result = await validateGeneralContent({
1381 |         contentPath: generalDir,
1382 |         validationType: "all",
1383 |         includeCodeValidation: true,
1384 |         followExternalLinks: false,
1385 |       });
1386 | 
1387 |       expect(result.linksChecked).toBeGreaterThan(0);
1388 |       expect(result.success).toBe(true);
1389 |     });
1390 | 
1391 |     it("should handle general content validation with code validation", async () => {
1392 |       const codeDir = path.join(testTempDir, "general-code");
1393 |       await fs.mkdir(codeDir, { recursive: true });
1394 | 
1395 |       await fs.writeFile(
1396 |         path.join(codeDir, "code.md"),
1397 |         `
1398 | # Code Test
1399 | 
1400 | \`\`\`javascript
1401 | console.log("test")
1402 | \`\`\`
1403 | 
1404 | \`\`\`js
1405 | console.log("another test");
1406 | \`\`\`
1407 |       `.trim(),
1408 |       );
1409 | 
1410 |       const result = await validateGeneralContent({
1411 |         contentPath: codeDir,
1412 |         validationType: "code",
1413 |         includeCodeValidation: true,
1414 |       });
1415 | 
1416 |       expect(result.codeBlocksValidated).toBeGreaterThan(0);
1417 |       expect(result.codeErrors.length).toBeGreaterThan(0); // Missing semicolon
1418 |     });
1419 | 
1420 |     it("should handle validation with no code blocks", async () => {
1421 |       const noCodeDir = path.join(testTempDir, "no-code");
1422 |       await fs.mkdir(noCodeDir, { recursive: true });
1423 | 
1424 |       await fs.writeFile(
1425 |         path.join(noCodeDir, "text.md"),
1426 |         `
1427 | # Text Only
1428 | 
1429 | This is just text with no code blocks.
1430 |       `.trim(),
1431 |       );
1432 | 
1433 |       const result = await validateGeneralContent({
1434 |         contentPath: noCodeDir,
1435 |         validationType: "all",
1436 |         includeCodeValidation: true,
1437 |       });
1438 | 
1439 |       expect(result.codeBlocksValidated).toBe(0);
1440 |       expect(result.success).toBe(true);
1441 |     });
1442 |   });
1443 | });
1444 | 
```
Page 22/29FirstPrevNextLast