This is page 7 of 47. Use http://codebase.md/doobidoo/mcp-memory-service?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .claude
│   ├── agents
│   │   ├── amp-bridge.md
│   │   ├── amp-pr-automator.md
│   │   ├── code-quality-guard.md
│   │   ├── gemini-pr-automator.md
│   │   └── github-release-manager.md
│   ├── settings.local.json.backup
│   └── settings.local.json.local
├── .commit-message
├── .dockerignore
├── .env.example
├── .env.sqlite.backup
├── .envnn#
├── .gitattributes
├── .github
│   ├── FUNDING.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   ├── feature_request.yml
│   │   └── performance_issue.yml
│   ├── pull_request_template.md
│   └── workflows
│       ├── bridge-tests.yml
│       ├── CACHE_FIX.md
│       ├── claude-code-review.yml
│       ├── claude.yml
│       ├── cleanup-images.yml.disabled
│       ├── dev-setup-validation.yml
│       ├── docker-publish.yml
│       ├── LATEST_FIXES.md
│       ├── main-optimized.yml.disabled
│       ├── main.yml
│       ├── publish-and-test.yml
│       ├── README_OPTIMIZATION.md
│       ├── release-tag.yml.disabled
│       ├── release.yml
│       ├── roadmap-review-reminder.yml
│       ├── SECRET_CONDITIONAL_FIX.md
│       └── WORKFLOW_FIXES.md
├── .gitignore
├── .mcp.json.backup
├── .mcp.json.template
├── .pyscn
│   ├── .gitignore
│   └── reports
│       └── analyze_20251123_214224.html
├── AGENTS.md
├── archive
│   ├── deployment
│   │   ├── deploy_fastmcp_fixed.sh
│   │   ├── deploy_http_with_mcp.sh
│   │   └── deploy_mcp_v4.sh
│   ├── deployment-configs
│   │   ├── empty_config.yml
│   │   └── smithery.yaml
│   ├── development
│   │   └── test_fastmcp.py
│   ├── docs-removed-2025-08-23
│   │   ├── authentication.md
│   │   ├── claude_integration.md
│   │   ├── claude-code-compatibility.md
│   │   ├── claude-code-integration.md
│   │   ├── claude-code-quickstart.md
│   │   ├── claude-desktop-setup.md
│   │   ├── complete-setup-guide.md
│   │   ├── database-synchronization.md
│   │   ├── development
│   │   │   ├── autonomous-memory-consolidation.md
│   │   │   ├── CLEANUP_PLAN.md
│   │   │   ├── CLEANUP_README.md
│   │   │   ├── CLEANUP_SUMMARY.md
│   │   │   ├── dream-inspired-memory-consolidation.md
│   │   │   ├── hybrid-slm-memory-consolidation.md
│   │   │   ├── mcp-milestone.md
│   │   │   ├── multi-client-architecture.md
│   │   │   ├── test-results.md
│   │   │   └── TIMESTAMP_FIX_SUMMARY.md
│   │   ├── distributed-sync.md
│   │   ├── invocation_guide.md
│   │   ├── macos-intel.md
│   │   ├── master-guide.md
│   │   ├── mcp-client-configuration.md
│   │   ├── multi-client-server.md
│   │   ├── service-installation.md
│   │   ├── sessions
│   │   │   └── MCP_ENHANCEMENT_SESSION_MEMORY_v4.1.0.md
│   │   ├── UBUNTU_SETUP.md
│   │   ├── ubuntu.md
│   │   ├── windows-setup.md
│   │   └── windows.md
│   ├── docs-root-cleanup-2025-08-23
│   │   ├── AWESOME_LIST_SUBMISSION.md
│   │   ├── CLOUDFLARE_IMPLEMENTATION.md
│   │   ├── DOCUMENTATION_ANALYSIS.md
│   │   ├── DOCUMENTATION_CLEANUP_PLAN.md
│   │   ├── DOCUMENTATION_CONSOLIDATION_COMPLETE.md
│   │   ├── LITESTREAM_SETUP_GUIDE.md
│   │   ├── lm_studio_system_prompt.md
│   │   ├── PYTORCH_DOWNLOAD_FIX.md
│   │   └── README-ORIGINAL-BACKUP.md
│   ├── investigations
│   │   └── MACOS_HOOKS_INVESTIGATION.md
│   ├── litestream-configs-v6.3.0
│   │   ├── install_service.sh
│   │   ├── litestream_master_config_fixed.yml
│   │   ├── litestream_master_config.yml
│   │   ├── litestream_replica_config_fixed.yml
│   │   ├── litestream_replica_config.yml
│   │   ├── litestream_replica_simple.yml
│   │   ├── litestream-http.service
│   │   ├── litestream.service
│   │   └── requirements-cloudflare.txt
│   ├── release-notes
│   │   └── release-notes-v7.1.4.md
│   └── setup-development
│       ├── README.md
│       ├── setup_consolidation_mdns.sh
│       ├── STARTUP_SETUP_GUIDE.md
│       └── test_service.sh
├── CHANGELOG-HISTORIC.md
├── CHANGELOG.md
├── claude_commands
│   ├── memory-context.md
│   ├── memory-health.md
│   ├── memory-ingest-dir.md
│   ├── memory-ingest.md
│   ├── memory-recall.md
│   ├── memory-search.md
│   ├── memory-store.md
│   ├── README.md
│   └── session-start.md
├── claude-hooks
│   ├── config.json
│   ├── config.template.json
│   ├── CONFIGURATION.md
│   ├── core
│   │   ├── memory-retrieval.js
│   │   ├── mid-conversation.js
│   │   ├── session-end.js
│   │   ├── session-start.js
│   │   └── topic-change.js
│   ├── debug-pattern-test.js
│   ├── install_claude_hooks_windows.ps1
│   ├── install_hooks.py
│   ├── memory-mode-controller.js
│   ├── MIGRATION.md
│   ├── README-NATURAL-TRIGGERS.md
│   ├── README-phase2.md
│   ├── README.md
│   ├── simple-test.js
│   ├── statusline.sh
│   ├── test-adaptive-weights.js
│   ├── test-dual-protocol-hook.js
│   ├── test-mcp-hook.js
│   ├── test-natural-triggers.js
│   ├── test-recency-scoring.js
│   ├── tests
│   │   ├── integration-test.js
│   │   ├── phase2-integration-test.js
│   │   ├── test-code-execution.js
│   │   ├── test-cross-session.json
│   │   ├── test-session-tracking.json
│   │   └── test-threading.json
│   ├── utilities
│   │   ├── adaptive-pattern-detector.js
│   │   ├── context-formatter.js
│   │   ├── context-shift-detector.js
│   │   ├── conversation-analyzer.js
│   │   ├── dynamic-context-updater.js
│   │   ├── git-analyzer.js
│   │   ├── mcp-client.js
│   │   ├── memory-client.js
│   │   ├── memory-scorer.js
│   │   ├── performance-manager.js
│   │   ├── project-detector.js
│   │   ├── session-tracker.js
│   │   ├── tiered-conversation-monitor.js
│   │   └── version-checker.js
│   └── WINDOWS-SESSIONSTART-BUG.md
├── CLAUDE.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Development-Sprint-November-2025.md
├── docs
│   ├── amp-cli-bridge.md
│   ├── api
│   │   ├── code-execution-interface.md
│   │   ├── memory-metadata-api.md
│   │   ├── PHASE1_IMPLEMENTATION_SUMMARY.md
│   │   ├── PHASE2_IMPLEMENTATION_SUMMARY.md
│   │   ├── PHASE2_REPORT.md
│   │   └── tag-standardization.md
│   ├── architecture
│   │   ├── search-enhancement-spec.md
│   │   └── search-examples.md
│   ├── architecture.md
│   ├── archive
│   │   └── obsolete-workflows
│   │       ├── load_memory_context.md
│   │       └── README.md
│   ├── assets
│   │   └── images
│   │       ├── dashboard-v3.3.0-preview.png
│   │       ├── memory-awareness-hooks-example.png
│   │       ├── project-infographic.svg
│   │       └── README.md
│   ├── CLAUDE_CODE_QUICK_REFERENCE.md
│   ├── cloudflare-setup.md
│   ├── deployment
│   │   ├── docker.md
│   │   ├── dual-service.md
│   │   ├── production-guide.md
│   │   └── systemd-service.md
│   ├── development
│   │   ├── ai-agent-instructions.md
│   │   ├── code-quality
│   │   │   ├── phase-2a-completion.md
│   │   │   ├── phase-2a-handle-get-prompt.md
│   │   │   ├── phase-2a-index.md
│   │   │   ├── phase-2a-install-package.md
│   │   │   └── phase-2b-session-summary.md
│   │   ├── code-quality-workflow.md
│   │   ├── dashboard-workflow.md
│   │   ├── issue-management.md
│   │   ├── pr-review-guide.md
│   │   ├── refactoring-notes.md
│   │   ├── release-checklist.md
│   │   └── todo-tracker.md
│   ├── docker-optimized-build.md
│   ├── document-ingestion.md
│   ├── DOCUMENTATION_AUDIT.md
│   ├── enhancement-roadmap-issue-14.md
│   ├── examples
│   │   ├── analysis-scripts.js
│   │   ├── maintenance-session-example.md
│   │   ├── memory-distribution-chart.jsx
│   │   └── tag-schema.json
│   ├── first-time-setup.md
│   ├── glama-deployment.md
│   ├── guides
│   │   ├── advanced-command-examples.md
│   │   ├── chromadb-migration.md
│   │   ├── commands-vs-mcp-server.md
│   │   ├── mcp-enhancements.md
│   │   ├── mdns-service-discovery.md
│   │   ├── memory-consolidation-guide.md
│   │   ├── migration.md
│   │   ├── scripts.md
│   │   └── STORAGE_BACKENDS.md
│   ├── HOOK_IMPROVEMENTS.md
│   ├── hooks
│   │   └── phase2-code-execution-migration.md
│   ├── http-server-management.md
│   ├── ide-compatability.md
│   ├── IMAGE_RETENTION_POLICY.md
│   ├── images
│   │   └── dashboard-placeholder.md
│   ├── implementation
│   │   ├── health_checks.md
│   │   └── performance.md
│   ├── IMPLEMENTATION_PLAN_HTTP_SSE.md
│   ├── integration
│   │   ├── homebrew.md
│   │   └── multi-client.md
│   ├── integrations
│   │   ├── gemini.md
│   │   ├── groq-bridge.md
│   │   ├── groq-integration-summary.md
│   │   └── groq-model-comparison.md
│   ├── integrations.md
│   ├── legacy
│   │   └── dual-protocol-hooks.md
│   ├── LM_STUDIO_COMPATIBILITY.md
│   ├── maintenance
│   │   └── memory-maintenance.md
│   ├── mastery
│   │   ├── api-reference.md
│   │   ├── architecture-overview.md
│   │   ├── configuration-guide.md
│   │   ├── local-setup-and-run.md
│   │   ├── testing-guide.md
│   │   └── troubleshooting.md
│   ├── migration
│   │   └── code-execution-api-quick-start.md
│   ├── natural-memory-triggers
│   │   ├── cli-reference.md
│   │   ├── installation-guide.md
│   │   └── performance-optimization.md
│   ├── oauth-setup.md
│   ├── pr-graphql-integration.md
│   ├── quick-setup-cloudflare-dual-environment.md
│   ├── README.md
│   ├── remote-configuration-wiki-section.md
│   ├── research
│   │   ├── code-execution-interface-implementation.md
│   │   └── code-execution-interface-summary.md
│   ├── ROADMAP.md
│   ├── sqlite-vec-backend.md
│   ├── statistics
│   │   ├── charts
│   │   │   ├── activity_patterns.png
│   │   │   ├── contributors.png
│   │   │   ├── growth_trajectory.png
│   │   │   ├── monthly_activity.png
│   │   │   └── october_sprint.png
│   │   ├── data
│   │   │   ├── activity_by_day.csv
│   │   │   ├── activity_by_hour.csv
│   │   │   ├── contributors.csv
│   │   │   └── monthly_activity.csv
│   │   ├── generate_charts.py
│   │   └── REPOSITORY_STATISTICS.md
│   ├── technical
│   │   ├── development.md
│   │   ├── memory-migration.md
│   │   ├── migration-log.md
│   │   ├── sqlite-vec-embedding-fixes.md
│   │   └── tag-storage.md
│   ├── testing
│   │   └── regression-tests.md
│   ├── testing-cloudflare-backend.md
│   ├── troubleshooting
│   │   ├── cloudflare-api-token-setup.md
│   │   ├── cloudflare-authentication.md
│   │   ├── general.md
│   │   ├── hooks-quick-reference.md
│   │   ├── pr162-schema-caching-issue.md
│   │   ├── session-end-hooks.md
│   │   └── sync-issues.md
│   └── tutorials
│       ├── advanced-techniques.md
│       ├── data-analysis.md
│       └── demo-session-walkthrough.md
├── examples
│   ├── claude_desktop_config_template.json
│   ├── claude_desktop_config_windows.json
│   ├── claude-desktop-http-config.json
│   ├── config
│   │   └── claude_desktop_config.json
│   ├── http-mcp-bridge.js
│   ├── memory_export_template.json
│   ├── README.md
│   ├── setup
│   │   └── setup_multi_client_complete.py
│   └── start_https_example.sh
├── install_service.py
├── install.py
├── LICENSE
├── NOTICE
├── pyproject.toml
├── pytest.ini
├── README.md
├── run_server.py
├── scripts
│   ├── .claude
│   │   └── settings.local.json
│   ├── archive
│   │   └── check_missing_timestamps.py
│   ├── backup
│   │   ├── backup_memories.py
│   │   ├── backup_sqlite_vec.sh
│   │   ├── export_distributable_memories.sh
│   │   └── restore_memories.py
│   ├── benchmarks
│   │   ├── benchmark_code_execution_api.py
│   │   ├── benchmark_hybrid_sync.py
│   │   └── benchmark_server_caching.py
│   ├── database
│   │   ├── analyze_sqlite_vec_db.py
│   │   ├── check_sqlite_vec_status.py
│   │   ├── db_health_check.py
│   │   └── simple_timestamp_check.py
│   ├── development
│   │   ├── debug_server_initialization.py
│   │   ├── find_orphaned_files.py
│   │   ├── fix_mdns.sh
│   │   ├── fix_sitecustomize.py
│   │   ├── remote_ingest.sh
│   │   ├── setup-git-merge-drivers.sh
│   │   ├── uv-lock-merge.sh
│   │   └── verify_hybrid_sync.py
│   ├── hooks
│   │   └── pre-commit
│   ├── installation
│   │   ├── install_linux_service.py
│   │   ├── install_macos_service.py
│   │   ├── install_uv.py
│   │   ├── install_windows_service.py
│   │   ├── install.py
│   │   ├── setup_backup_cron.sh
│   │   ├── setup_claude_mcp.sh
│   │   └── setup_cloudflare_resources.py
│   ├── linux
│   │   ├── service_status.sh
│   │   ├── start_service.sh
│   │   ├── stop_service.sh
│   │   ├── uninstall_service.sh
│   │   └── view_logs.sh
│   ├── maintenance
│   │   ├── assign_memory_types.py
│   │   ├── check_memory_types.py
│   │   ├── cleanup_corrupted_encoding.py
│   │   ├── cleanup_memories.py
│   │   ├── cleanup_organize.py
│   │   ├── consolidate_memory_types.py
│   │   ├── consolidation_mappings.json
│   │   ├── delete_orphaned_vectors_fixed.py
│   │   ├── fast_cleanup_duplicates_with_tracking.sh
│   │   ├── find_all_duplicates.py
│   │   ├── find_cloudflare_duplicates.py
│   │   ├── find_duplicates.py
│   │   ├── memory-types.md
│   │   ├── README.md
│   │   ├── recover_timestamps_from_cloudflare.py
│   │   ├── regenerate_embeddings.py
│   │   ├── repair_malformed_tags.py
│   │   ├── repair_memories.py
│   │   ├── repair_sqlite_vec_embeddings.py
│   │   ├── repair_zero_embeddings.py
│   │   ├── restore_from_json_export.py
│   │   └── scan_todos.sh
│   ├── migration
│   │   ├── cleanup_mcp_timestamps.py
│   │   ├── legacy
│   │   │   └── migrate_chroma_to_sqlite.py
│   │   ├── mcp-migration.py
│   │   ├── migrate_sqlite_vec_embeddings.py
│   │   ├── migrate_storage.py
│   │   ├── migrate_tags.py
│   │   ├── migrate_timestamps.py
│   │   ├── migrate_to_cloudflare.py
│   │   ├── migrate_to_sqlite_vec.py
│   │   ├── migrate_v5_enhanced.py
│   │   ├── TIMESTAMP_CLEANUP_README.md
│   │   └── verify_mcp_timestamps.py
│   ├── pr
│   │   ├── amp_collect_results.sh
│   │   ├── amp_detect_breaking_changes.sh
│   │   ├── amp_generate_tests.sh
│   │   ├── amp_pr_review.sh
│   │   ├── amp_quality_gate.sh
│   │   ├── amp_suggest_fixes.sh
│   │   ├── auto_review.sh
│   │   ├── detect_breaking_changes.sh
│   │   ├── generate_tests.sh
│   │   ├── lib
│   │   │   └── graphql_helpers.sh
│   │   ├── quality_gate.sh
│   │   ├── resolve_threads.sh
│   │   ├── run_pyscn_analysis.sh
│   │   ├── run_quality_checks.sh
│   │   ├── thread_status.sh
│   │   └── watch_reviews.sh
│   ├── quality
│   │   ├── fix_dead_code_install.sh
│   │   ├── phase1_dead_code_analysis.md
│   │   ├── phase2_complexity_analysis.md
│   │   ├── README_PHASE1.md
│   │   ├── README_PHASE2.md
│   │   ├── track_pyscn_metrics.sh
│   │   └── weekly_quality_review.sh
│   ├── README.md
│   ├── run
│   │   ├── run_mcp_memory.sh
│   │   ├── run-with-uv.sh
│   │   └── start_sqlite_vec.sh
│   ├── run_memory_server.py
│   ├── server
│   │   ├── check_http_server.py
│   │   ├── check_server_health.py
│   │   ├── memory_offline.py
│   │   ├── preload_models.py
│   │   ├── run_http_server.py
│   │   ├── run_memory_server.py
│   │   ├── start_http_server.bat
│   │   └── start_http_server.sh
│   ├── service
│   │   ├── deploy_dual_services.sh
│   │   ├── install_http_service.sh
│   │   ├── mcp-memory-http.service
│   │   ├── mcp-memory.service
│   │   ├── memory_service_manager.sh
│   │   ├── service_control.sh
│   │   ├── service_utils.py
│   │   └── update_service.sh
│   ├── sync
│   │   ├── check_drift.py
│   │   ├── claude_sync_commands.py
│   │   ├── export_memories.py
│   │   ├── import_memories.py
│   │   ├── litestream
│   │   │   ├── apply_local_changes.sh
│   │   │   ├── enhanced_memory_store.sh
│   │   │   ├── init_staging_db.sh
│   │   │   ├── io.litestream.replication.plist
│   │   │   ├── manual_sync.sh
│   │   │   ├── memory_sync.sh
│   │   │   ├── pull_remote_changes.sh
│   │   │   ├── push_to_remote.sh
│   │   │   ├── README.md
│   │   │   ├── resolve_conflicts.sh
│   │   │   ├── setup_local_litestream.sh
│   │   │   ├── setup_remote_litestream.sh
│   │   │   ├── staging_db_init.sql
│   │   │   ├── stash_local_changes.sh
│   │   │   ├── sync_from_remote_noconfig.sh
│   │   │   └── sync_from_remote.sh
│   │   ├── README.md
│   │   ├── safe_cloudflare_update.sh
│   │   ├── sync_memory_backends.py
│   │   └── sync_now.py
│   ├── testing
│   │   ├── run_complete_test.py
│   │   ├── run_memory_test.sh
│   │   ├── simple_test.py
│   │   ├── test_cleanup_logic.py
│   │   ├── test_cloudflare_backend.py
│   │   ├── test_docker_functionality.py
│   │   ├── test_installation.py
│   │   ├── test_mdns.py
│   │   ├── test_memory_api.py
│   │   ├── test_memory_simple.py
│   │   ├── test_migration.py
│   │   ├── test_search_api.py
│   │   ├── test_sqlite_vec_embeddings.py
│   │   ├── test_sse_events.py
│   │   ├── test-connection.py
│   │   └── test-hook.js
│   ├── utils
│   │   ├── claude_commands_utils.py
│   │   ├── generate_personalized_claude_md.sh
│   │   ├── groq
│   │   ├── groq_agent_bridge.py
│   │   ├── list-collections.py
│   │   ├── memory_wrapper_uv.py
│   │   ├── query_memories.py
│   │   ├── smithery_wrapper.py
│   │   ├── test_groq_bridge.sh
│   │   └── uv_wrapper.py
│   └── validation
│       ├── check_dev_setup.py
│       ├── check_documentation_links.py
│       ├── diagnose_backend_config.py
│       ├── validate_configuration_complete.py
│       ├── validate_memories.py
│       ├── validate_migration.py
│       ├── validate_timestamp_integrity.py
│       ├── verify_environment.py
│       ├── verify_pytorch_windows.py
│       └── verify_torch.py
├── SECURITY.md
├── selective_timestamp_recovery.py
├── SPONSORS.md
├── src
│   └── mcp_memory_service
│       ├── __init__.py
│       ├── api
│       │   ├── __init__.py
│       │   ├── client.py
│       │   ├── operations.py
│       │   ├── sync_wrapper.py
│       │   └── types.py
│       ├── backup
│       │   ├── __init__.py
│       │   └── scheduler.py
│       ├── cli
│       │   ├── __init__.py
│       │   ├── ingestion.py
│       │   ├── main.py
│       │   └── utils.py
│       ├── config.py
│       ├── consolidation
│       │   ├── __init__.py
│       │   ├── associations.py
│       │   ├── base.py
│       │   ├── clustering.py
│       │   ├── compression.py
│       │   ├── consolidator.py
│       │   ├── decay.py
│       │   ├── forgetting.py
│       │   ├── health.py
│       │   └── scheduler.py
│       ├── dependency_check.py
│       ├── discovery
│       │   ├── __init__.py
│       │   ├── client.py
│       │   └── mdns_service.py
│       ├── embeddings
│       │   ├── __init__.py
│       │   └── onnx_embeddings.py
│       ├── ingestion
│       │   ├── __init__.py
│       │   ├── base.py
│       │   ├── chunker.py
│       │   ├── csv_loader.py
│       │   ├── json_loader.py
│       │   ├── pdf_loader.py
│       │   ├── registry.py
│       │   ├── semtools_loader.py
│       │   └── text_loader.py
│       ├── lm_studio_compat.py
│       ├── mcp_server.py
│       ├── models
│       │   ├── __init__.py
│       │   └── memory.py
│       ├── server.py
│       ├── services
│       │   ├── __init__.py
│       │   └── memory_service.py
│       ├── storage
│       │   ├── __init__.py
│       │   ├── base.py
│       │   ├── cloudflare.py
│       │   ├── factory.py
│       │   ├── http_client.py
│       │   ├── hybrid.py
│       │   └── sqlite_vec.py
│       ├── sync
│       │   ├── __init__.py
│       │   ├── exporter.py
│       │   ├── importer.py
│       │   └── litestream_config.py
│       ├── utils
│       │   ├── __init__.py
│       │   ├── cache_manager.py
│       │   ├── content_splitter.py
│       │   ├── db_utils.py
│       │   ├── debug.py
│       │   ├── document_processing.py
│       │   ├── gpu_detection.py
│       │   ├── hashing.py
│       │   ├── http_server_manager.py
│       │   ├── port_detection.py
│       │   ├── system_detection.py
│       │   └── time_parser.py
│       └── web
│           ├── __init__.py
│           ├── api
│           │   ├── __init__.py
│           │   ├── analytics.py
│           │   ├── backup.py
│           │   ├── consolidation.py
│           │   ├── documents.py
│           │   ├── events.py
│           │   ├── health.py
│           │   ├── manage.py
│           │   ├── mcp.py
│           │   ├── memories.py
│           │   ├── search.py
│           │   └── sync.py
│           ├── app.py
│           ├── dependencies.py
│           ├── oauth
│           │   ├── __init__.py
│           │   ├── authorization.py
│           │   ├── discovery.py
│           │   ├── middleware.py
│           │   ├── models.py
│           │   ├── registration.py
│           │   └── storage.py
│           ├── sse.py
│           └── static
│               ├── app.js
│               ├── index.html
│               ├── README.md
│               ├── sse_test.html
│               └── style.css
├── start_http_debug.bat
├── start_http_server.sh
├── test_document.txt
├── test_version_checker.js
├── tests
│   ├── __init__.py
│   ├── api
│   │   ├── __init__.py
│   │   ├── test_compact_types.py
│   │   └── test_operations.py
│   ├── bridge
│   │   ├── mock_responses.js
│   │   ├── package-lock.json
│   │   ├── package.json
│   │   └── test_http_mcp_bridge.js
│   ├── conftest.py
│   ├── consolidation
│   │   ├── __init__.py
│   │   ├── conftest.py
│   │   ├── test_associations.py
│   │   ├── test_clustering.py
│   │   ├── test_compression.py
│   │   ├── test_consolidator.py
│   │   ├── test_decay.py
│   │   └── test_forgetting.py
│   ├── contracts
│   │   └── api-specification.yml
│   ├── integration
│   │   ├── package-lock.json
│   │   ├── package.json
│   │   ├── test_api_key_fallback.py
│   │   ├── test_api_memories_chronological.py
│   │   ├── test_api_tag_time_search.py
│   │   ├── test_api_with_memory_service.py
│   │   ├── test_bridge_integration.js
│   │   ├── test_cli_interfaces.py
│   │   ├── test_cloudflare_connection.py
│   │   ├── test_concurrent_clients.py
│   │   ├── test_data_serialization_consistency.py
│   │   ├── test_http_server_startup.py
│   │   ├── test_mcp_memory.py
│   │   ├── test_mdns_integration.py
│   │   ├── test_oauth_basic_auth.py
│   │   ├── test_oauth_flow.py
│   │   ├── test_server_handlers.py
│   │   └── test_store_memory.py
│   ├── performance
│   │   ├── test_background_sync.py
│   │   └── test_hybrid_live.py
│   ├── README.md
│   ├── smithery
│   │   └── test_smithery.py
│   ├── sqlite
│   │   └── simple_sqlite_vec_test.py
│   ├── test_client.py
│   ├── test_content_splitting.py
│   ├── test_database.py
│   ├── test_hybrid_cloudflare_limits.py
│   ├── test_hybrid_storage.py
│   ├── test_memory_ops.py
│   ├── test_semantic_search.py
│   ├── test_sqlite_vec_storage.py
│   ├── test_time_parser.py
│   ├── test_timestamp_preservation.py
│   ├── timestamp
│   │   ├── test_hook_vs_manual_storage.py
│   │   ├── test_issue99_final_validation.py
│   │   ├── test_search_retrieval_inconsistency.py
│   │   ├── test_timestamp_issue.py
│   │   └── test_timestamp_simple.py
│   └── unit
│       ├── conftest.py
│       ├── test_cloudflare_storage.py
│       ├── test_csv_loader.py
│       ├── test_fastapi_dependencies.py
│       ├── test_import.py
│       ├── test_json_loader.py
│       ├── test_mdns_simple.py
│       ├── test_mdns.py
│       ├── test_memory_service.py
│       ├── test_memory.py
│       ├── test_semtools_loader.py
│       ├── test_storage_interface_compatibility.py
│       └── test_tag_time_filtering.py
├── tools
│   ├── docker
│   │   ├── DEPRECATED.md
│   │   ├── docker-compose.http.yml
│   │   ├── docker-compose.pythonpath.yml
│   │   ├── docker-compose.standalone.yml
│   │   ├── docker-compose.uv.yml
│   │   ├── docker-compose.yml
│   │   ├── docker-entrypoint-persistent.sh
│   │   ├── docker-entrypoint-unified.sh
│   │   ├── docker-entrypoint.sh
│   │   ├── Dockerfile
│   │   ├── Dockerfile.glama
│   │   ├── Dockerfile.slim
│   │   ├── README.md
│   │   └── test-docker-modes.sh
│   └── README.md
└── uv.lock
```

# Files

--------------------------------------------------------------------------------
/archive/docs-removed-2025-08-23/master-guide.md:
--------------------------------------------------------------------------------

```markdown
  1 | # MCP Memory Service - Installation Guide
  2 | 
  3 | **Version**: 0.2.2+  
  4 | **Last Updated**: 2025-07-26  
  5 | **Supports**: ChromaDB + SQLite-vec backends, HTTP/SSE API
  6 | 
  7 | ## Prerequisites
  8 | 
  9 | - Python 3.10 or newer
 10 | - pip (latest version recommended)
 11 | - A virtual environment (venv or conda)
 12 | - Git (to clone the repository)
 13 | 
 14 | ## Quick Installation by Hardware Type
 15 | 
 16 | ### 🖥️ Legacy Hardware (2013-2017 Intel Macs)
 17 | **Best for**: 2015 MacBook Pro, older Intel Macs without GPU
 18 | 
 19 | ```bash
 20 | git clone https://github.com/doobidoo/mcp-memory-service.git
 21 | cd mcp-memory-service
 22 | python install.py --legacy-hardware
 23 | ```
 24 | 
 25 | **What this does:**
 26 | - ✅ Detects older Intel Mac hardware
 27 | - ✅ Recommends SQLite-vec backend (lightweight)
 28 | - ✅ Uses Homebrew PyTorch if available (better compatibility)
 29 | - ✅ Configures ONNX runtime for CPU-only inference
 30 | - ✅ Optimizes for limited memory systems
 31 | 
 32 | ### 🚀 Modern Hardware (2018+ Macs, GPU-enabled systems)
 33 | **Best for**: M1/M2/M3 Macs, modern Intel systems, Windows with GPU
 34 | 
 35 | ```bash
 36 | git clone https://github.com/doobidoo/mcp-memory-service.git
 37 | cd mcp-memory-service
 38 | python install.py
 39 | ```
 40 | 
 41 | **What this does:**
 42 | - ✅ Auto-detects available hardware acceleration
 43 | - ✅ Recommends ChromaDB for full features
 44 | - ✅ Configures GPU acceleration (CUDA/MPS/DirectML)
 45 | - ✅ Installs latest PyTorch and sentence-transformers
 46 | 
 47 | ### 🌐 Server/Headless Installation
 48 | **Best for**: Linux servers, Docker deployments, CI/CD
 49 | 
 50 | ```bash
 51 | git clone https://github.com/doobidoo/mcp-memory-service.git
 52 | cd mcp-memory-service
 53 | python install.py --server-mode --storage-backend sqlite_vec
 54 | ```
 55 | 
 56 | ## Standard Installation Steps
 57 | 
 58 | ### 1. Clone and Setup Environment
 59 | 
 60 | ```bash
 61 | git clone https://github.com/doobidoo/mcp-memory-service.git
 62 | cd mcp-memory-service
 63 | 
 64 | # Create and activate virtual environment
 65 | python -m venv venv
 66 | 
 67 | # On Windows
 68 | venv\Scripts\activate
 69 | 
 70 | # On macOS/Linux
 71 | source venv/bin/activate
 72 | ```
 73 | 
 74 | ### 2. Run Installation
 75 | 
 76 | ```bash
 77 | python install.py
 78 | ```
 79 | 
 80 | 🌟 **Multi-Client Setup**: The installer will automatically detect MCP applications (Claude Desktop, VS Code, Continue, etc.) and offer to configure shared memory access. Choose 'y' for universal multi-client setup.
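
The exact configuration the installer writes is not reproduced in this guide. As a rough illustration only (the launch command, script path, and config location below are placeholders, not necessarily what `install.py` emits), a shared entry follows the standard Claude Desktop `mcpServers` shape:

```python
import json
from pathlib import Path

# Hypothetical example: the real values are generated by install.py during setup.
# The config location shown is the macOS default; Windows/Linux paths differ.
config_path = Path.home() / "Library/Application Support/Claude/claude_desktop_config.json"

memory_entry = {
    "memory": {
        "command": "python",                                    # placeholder launcher
        "args": ["/path/to/mcp-memory-service/run_server.py"],  # placeholder path
    }
}

config = json.loads(config_path.read_text()) if config_path.exists() else {}
config.setdefault("mcpServers", {}).update(memory_entry)
config_path.write_text(json.dumps(config, indent=2))
```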
 81 | 
 82 | ### 3. Verify Installation
 83 | 
 84 | ```bash
 85 | python scripts/verify_environment.py
 86 | ```
 87 | 
 88 | ## Docker Installation
 89 | 
 90 | For cross-platform deployment:
 91 | 
 92 | ```bash
 93 | # Clone repository
 94 | git clone https://github.com/doobidoo/mcp-memory-service.git
 95 | cd mcp-memory-service
 96 | 
 97 | # Build and run with Docker Compose
 98 | docker-compose up -d
 99 | ```
100 | 
101 | ## Command Line Options
102 | 
103 | ### Basic Usage
104 | ```bash
105 | python install.py [OPTIONS]
106 | ```
107 | 
108 | ### Core Options
109 | 
110 | | Option | Description | Example |
111 | |--------|-------------|---------|
112 | | `--dev` | Install in development mode | `python install.py --dev` |
113 | | `--chroma-path PATH` | Custom ChromaDB storage path | `python install.py --chroma-path /custom/path` |
114 | | `--backups-path PATH` | Custom backup storage path | `python install.py --backups-path /custom/backups` |
115 | 
116 | ### Storage Backend Options
117 | 
118 | | Backend | Description | Best For |
119 | |---------|-------------|----------|
120 | | `chromadb` | Full-featured vector database | High-memory systems, full features |
121 | | `sqlite_vec` | Lightweight alternative | Resource-constrained systems |
122 | | `auto_detect` | Auto-selection with fallback | Uncertain hardware capabilities |
123 | 
124 | ```bash
125 | # Force SQLite-vec backend
126 | python install.py --storage-backend sqlite_vec
127 | 
128 | # Force ChromaDB backend  
129 | python install.py --storage-backend chromadb
130 | 
131 | # Auto-detection with fallback
132 | python install.py --storage-backend auto_detect
133 | ```
134 | 
135 | ### Hardware-Specific Options
136 | 
137 | | Option | Description | Use Case |
138 | |--------|-------------|----------|
139 | | `--legacy-hardware` | Optimize for older systems | 2013-2017 Intel Macs |
140 | | `--server-mode` | Headless server installation | Linux servers, Docker |
141 | | `--force-cpu` | Disable GPU acceleration | Troubleshooting GPU issues |
142 | 
143 | ### Multi-Client Options
144 | 
145 | | Option | Description | Example |
146 | |--------|-------------|---------|
147 | | `--multi-client` | Enable shared memory access | `python install.py --multi-client` |
148 | | `--claude-only` | Configure for Claude Desktop only | `python install.py --claude-only` |
149 | 
150 | ### Claude Code Integration (v2.2.0)
151 | 
152 | | Option | Description | Example |
153 | |--------|-------------|---------|
154 | | `--install-claude-commands` | Install conversational memory commands | `python install.py --install-claude-commands` |
155 | | `--skip-claude-commands-prompt` | Skip interactive commands installation prompt | `python install.py --skip-claude-commands-prompt` |
156 | 
157 | ## Platform-Specific Installation
158 | 
159 | - **Windows**: See [windows-setup.md](../platforms/windows.md)
160 | - **Ubuntu/Linux**: See [ubuntu-setup.md](../platforms/ubuntu.md)
161 | - **macOS Intel (Legacy)**: See [macos-intel.md](../platforms/macos-intel.md)
162 | 
163 | ## Troubleshooting
164 | 
165 | Common installation issues and solutions can be found in [troubleshooting.md](../troubleshooting/general.md).
166 | 
167 | ## Next Steps
168 | 
169 | After installation:
170 | 1. Configure your MCP client (Claude Desktop, VS Code, etc.)
171 | 2. Test the connection with `python scripts/test-connection.py`
172 | 3. Read the [User Guide](../guides/claude_integration.md) for usage instructions
```

--------------------------------------------------------------------------------
/scripts/development/remote_ingest.sh:
--------------------------------------------------------------------------------

```bash
  1 | #!/bin/bash
  2 | # Remote Document Ingestion Script for MCP Memory Service
  3 | # This script uploads and ingests documents on the remote server
  4 | 
  5 | set -e
  6 | 
  7 | # Configuration
  8 | REMOTE_HOST="${REMOTE_HOST:-10.0.1.30}"
  9 | REMOTE_USER="${REMOTE_USER:-hkr}"
 10 | # Auto-detect the mcp-memory-service repository location
 11 | REMOTE_PATH=$(ssh ${REMOTE_USER}@${REMOTE_HOST} "find /home/${REMOTE_USER} -iname 'mcp-memory-service' -type d -exec test -f {}/pyproject.toml \; -print 2>/dev/null | head -n1")
 12 | REMOTE_PATH="${REMOTE_PATH:-/home/${REMOTE_USER}/repositories/mcp-memory-service}"
 13 | 
 14 | # Colors for output
 15 | RED='\033[0;31m'
 16 | GREEN='\033[0;32m'
 17 | YELLOW='\033[1;33m'
 18 | BLUE='\033[0;34m'
 19 | NC='\033[0m' # No Color
 20 | 
 21 | # Function to print colored output
 22 | print_info() {
 23 |     echo -e "${BLUE}ℹ️  $1${NC}"
 24 | }
 25 | 
 26 | print_success() {
 27 |     echo -e "${GREEN}✅ $1${NC}"
 28 | }
 29 | 
 30 | print_error() {
 31 |     echo -e "${RED}❌ $1${NC}"
 32 | }
 33 | 
 34 | print_warning() {
 35 |     echo -e "${YELLOW}⚠️  $1${NC}"
 36 | }
 37 | 
 38 | # Function to show usage
 39 | usage() {
 40 |     cat << EOF
 41 | Usage: $0 [OPTIONS] <file_or_directory>
 42 | 
 43 | Remote document ingestion for MCP Memory Service
 44 | 
 45 | Options:
 46 |     -t, --tags TAGS         Comma-separated tags to apply (default: "documentation")
 47 |     -c, --chunk-size SIZE   Chunk size in characters (default: 800)
 48 |     -r, --recursive         Process directories recursively
 49 |     -e, --extensions EXTS   File extensions to process (default: all supported)
 50 |     -h, --host HOST         Remote host (default: 10.0.1.30)
 51 |     -u, --user USER         Remote user (default: hkr)
 52 |     --help                  Show this help message
 53 | 
 54 | Examples:
 55 |     # Ingest a single file
 56 |     $0 README.md
 57 | 
 58 |     # Ingest with custom tags
 59 |     $0 -t "documentation,important" CLAUDE.md
 60 | 
 61 |     # Ingest entire directory
 62 |     $0 -r docs/
 63 | 
 64 |     # Ingest specific file types
 65 |     $0 -r -e "md,txt" docs/
 66 | 
 67 | EOF
 68 |     exit 0
 69 | }
 70 | 
 71 | # Parse command line arguments
 72 | TAGS="documentation"
 73 | CHUNK_SIZE="800"
 74 | RECURSIVE=""
 75 | EXTENSIONS=""
 76 | FILES=()
 77 | 
 78 | while [[ $# -gt 0 ]]; do
 79 |     case $1 in
 80 |         -t|--tags)
 81 |             TAGS="$2"
 82 |             shift 2
 83 |             ;;
 84 |         -c|--chunk-size)
 85 |             CHUNK_SIZE="$2"
 86 |             shift 2
 87 |             ;;
 88 |         -r|--recursive)
 89 |             RECURSIVE="--recursive"
 90 |             shift
 91 |             ;;
 92 |         -e|--extensions)
 93 |             EXTENSIONS="--extensions $2"
 94 |             shift 2
 95 |             ;;
 96 |         -h|--host)
 97 |             REMOTE_HOST="$2"
 98 |             shift 2
 99 |             ;;
100 |         -u|--user)
101 |             REMOTE_USER="$2"
102 |             shift 2
103 |             ;;
104 |         --help)
105 |             usage
106 |             ;;
107 |         *)
108 |             FILES+=("$1")
109 |             shift
110 |             ;;
111 |     esac
112 | done
113 | 
114 | # Check if files were provided
115 | if [ ${#FILES[@]} -eq 0 ]; then
116 |     print_error "No files or directories specified"
117 |     usage
118 | fi
119 | 
120 | # Process each file/directory
121 | for item in "${FILES[@]}"; do
122 |     if [ ! -e "$item" ]; then
123 |         print_error "File or directory not found: $item"
124 |         continue
125 |     fi
126 |     
127 |     # Get absolute path
128 |     ITEM_PATH=$(realpath "$item")
129 |     ITEM_NAME=$(basename "$item")
130 |     
131 |     print_info "Processing: $ITEM_NAME"
132 |     
133 |     if [ -f "$item" ]; then
134 |         # Single file ingestion
135 |         print_info "Uploading file to remote server..."
136 |         
137 |         # Create temp directory on remote
138 |         REMOTE_TEMP=$(ssh ${REMOTE_USER}@${REMOTE_HOST} "mktemp -d")
139 |         
140 |         # Upload file
141 |         scp -q "$ITEM_PATH" ${REMOTE_USER}@${REMOTE_HOST}:${REMOTE_TEMP}/
142 |         
143 |         # Run ingestion on remote
144 |         print_info "Running remote ingestion..."
145 |         ssh ${REMOTE_USER}@${REMOTE_HOST} "cd \"${REMOTE_PATH}\" && \
146 |             .venv/bin/python -m mcp_memory_service.cli.main ingest-document \
147 |             ${REMOTE_TEMP}/${ITEM_NAME} \
148 |             --tags '${TAGS}' \
149 |             --chunk-size ${CHUNK_SIZE} \
150 |             --verbose 2>&1 | grep -E '✅|📄|💾|⚡|⏱️'"
151 |         
152 |         # Cleanup
153 |         ssh ${REMOTE_USER}@${REMOTE_HOST} "rm -rf ${REMOTE_TEMP}"
154 |         
155 |         print_success "Completed ingestion of $ITEM_NAME"
156 |         
157 |     elif [ -d "$item" ]; then
158 |         # Directory ingestion
159 |         print_info "Uploading directory to remote server..."
160 |         
161 |         # Create temp directory on remote
162 |         REMOTE_TEMP=$(ssh ${REMOTE_USER}@${REMOTE_HOST} "mktemp -d")
163 |         
164 |         # Upload directory
165 |         scp -rq "$ITEM_PATH" ${REMOTE_USER}@${REMOTE_HOST}:${REMOTE_TEMP}/
166 |         
167 |         # Run ingestion on remote
168 |         print_info "Running remote directory ingestion..."
169 |         ssh ${REMOTE_USER}@${REMOTE_HOST} "cd \"${REMOTE_PATH}\" && \
170 |             .venv/bin/python -m mcp_memory_service.cli.main ingest-directory \
171 |             ${REMOTE_TEMP}/${ITEM_NAME} \
172 |             --tags '${TAGS}' \
173 |             --chunk-size ${CHUNK_SIZE} \
174 |             ${RECURSIVE} \
175 |             ${EXTENSIONS} \
176 |             --verbose 2>&1 | grep -E '✅|📁|📄|💾|⚡|⏱️|❌'"
177 |         
178 |         # Cleanup
179 |         ssh ${REMOTE_USER}@${REMOTE_HOST} "rm -rf ${REMOTE_TEMP}"
180 |         
181 |         print_success "Completed ingestion of directory $ITEM_NAME"
182 |     fi
183 | done
184 | 
185 | print_success "Remote ingestion complete!"
186 | print_info "View memories at: https://${REMOTE_HOST}:8443/"
```

--------------------------------------------------------------------------------
/scripts/sync/sync_now.py:
--------------------------------------------------------------------------------

```python
  1 | #!/usr/bin/env python3
  2 | """
  3 | Manual sync utility for Hybrid Storage Backend.
  4 | Triggers an immediate sync from SQLite-vec to Cloudflare.
  5 | 
  6 | Usage:
  7 |     python sync_now.py [--db-path PATH] [--verbose]
  8 | 
  9 | Environment Variables:
 10 |     MCP_MEMORY_SQLITE_PATH: Override default database path
 11 | """
 12 | import asyncio
 13 | import argparse
 14 | import logging
 15 | import os
 16 | import sys
 17 | from pathlib import Path
 18 | from typing import TypedDict
 19 | 
 20 | try:
 21 |     from dotenv import load_dotenv
 22 |     from mcp_memory_service.storage.factory import create_storage_instance
 23 |     from mcp_memory_service.storage.hybrid import HybridMemoryStorage
 24 |     from mcp_memory_service.config import SQLITE_VEC_PATH
 25 | except ImportError as e:
 26 |     print(f"❌ Import error: {e}", file=sys.stderr)
 27 |     print("Make sure the package is installed: pip install -e .", file=sys.stderr)
 28 |     sys.exit(1)
 29 | 
 30 | # Configure logging
 31 | logging.basicConfig(
 32 |     level=logging.INFO,
 33 |     format='%(message)s'
 34 | )
 35 | logger = logging.getLogger(__name__)
 36 | 
 37 | 
 38 | class SyncResult(TypedDict, total=False):
 39 |     """Type-safe structure for sync operation results."""
 40 |     status: str
 41 |     synced_to_secondary: int
 42 |     duration: float
 43 |     failed: int
 44 |     error: str
 45 | 
 46 | 
 47 | class SyncStatus(TypedDict, total=False):
 48 |     """Type-safe structure for sync status information."""
 49 |     queue_size: int
 50 |     cloudflare_available: bool
 51 |     failed_operations: int
 52 | 
 53 | 
 54 | async def main(db_path: str | None = None, verbose: bool = False) -> int:
 55 |     """
 56 |     Run immediate sync.
 57 | 
 58 |     Args:
 59 |         db_path: Optional path to SQLite database. If not provided,
 60 |                 uses MCP_MEMORY_SQLITE_PATH env var or default config.
 61 |         verbose: Enable verbose error reporting with full tracebacks.
 62 | 
 63 |     Returns:
 64 |         0 on success, 1 on failure
 65 |     """
 66 |     # Load environment variables
 67 |     load_dotenv()
 68 | 
 69 |     logger.info("🔄 Starting manual sync...")
 70 | 
 71 |     # Determine database path
 72 |     sqlite_path = Path(db_path or os.getenv('MCP_MEMORY_SQLITE_PATH') or SQLITE_VEC_PATH)
 73 | 
 74 |     if not sqlite_path.exists():
 75 |         logger.error(f"❌ Database not found: {sqlite_path}")
 76 |         return 1
 77 | 
 78 |     logger.info(f"📁 Using database: {sqlite_path}")
 79 | 
 80 |     # Create storage instance
 81 |     try:
 82 |         storage = await create_storage_instance(str(sqlite_path))
 83 |     except (ValueError, RuntimeError, FileNotFoundError, OSError) as e:
 84 |         logger.error(f"❌ Failed to create storage instance: {e}")
 85 |         if verbose:
 86 |             logger.exception("Full traceback for failed storage instance creation:")
 87 |         return 1
 88 | 
 89 |     # Type-safe check for hybrid storage
 90 |     if not isinstance(storage, HybridMemoryStorage):
 91 |         logger.error("❌ Not a hybrid backend - sync not available")
 92 |         logger.error(f"   Found: {storage.__class__.__name__}")
 93 |         return 1
 94 | 
 95 |     # Get sync status before
 96 |     try:
 97 |         status_before = await storage.get_sync_status()
 98 |         logger.info(f"📊 Before sync:")
 99 |         logger.info(f"   Queue size: {status_before['queue_size']}")
100 |         logger.info(f"   Cloudflare available: {status_before['cloudflare_available']}")
101 |     except Exception as e:
102 |         logger.warning(f"⚠️  Could not get sync status: {e}")
103 | 
104 |     # Trigger immediate sync
105 |     logger.info("\n⏳ Triggering sync...")
106 |     try:
107 |         result = await storage.force_sync()
108 | 
109 |         # Check sync status
110 |         if result.get('status') != 'completed':
111 |             logger.error(f"❌ Sync failed with status: {result.get('status')}")
112 |             if result.get('error'):
113 |                 logger.error(f"   Error: {result.get('error')}")
114 |             return 1
115 | 
116 |         logger.info("✅ Sync completed successfully!")
117 |         logger.info(f"   Synced: {result.get('synced_to_secondary', 0)} operations")
118 |         logger.info(f"   Duration: {result.get('duration', 0):.2f}s")
119 | 
120 |         # Report any failed operations
121 |         if result.get('failed', 0) > 0:
122 |             logger.warning(f"   ⚠️  Failed operations: {result.get('failed', 0)}")
123 |     except Exception as e:
124 |         logger.error(f"❌ Sync failed: {e}")
125 |         if verbose:
126 |             logger.exception("Full traceback for sync failure:")
127 |         return 1
128 | 
129 |     # Get sync status after
130 |     try:
131 |         status_after = await storage.get_sync_status()
132 |         logger.info(f"\n📊 After sync:")
133 |         logger.info(f"   Queue size: {status_after['queue_size']}")
134 |         logger.info(f"   Failed operations: {status_after['failed_operations']}")
135 |     except Exception as e:
136 |         logger.warning(f"⚠️  Could not get final sync status: {e}")
137 | 
138 |     return 0
139 | 
140 | 
141 | def parse_args():
142 |     """Parse command line arguments."""
143 |     parser = argparse.ArgumentParser(
144 |         description="Manual sync utility for Hybrid Storage Backend"
145 |     )
146 |     parser.add_argument(
147 |         '--db-path',
148 |         type=str,
149 |         help='Path to SQLite database (default: from config or env)'
150 |     )
151 |     parser.add_argument(
152 |         '--verbose', '-v',
153 |         action='store_true',
154 |         help='Enable verbose error reporting with full tracebacks'
155 |     )
156 |     return parser.parse_args()
157 | 
158 | 
159 | if __name__ == "__main__":
160 |     args = parse_args()
161 |     exit_code = asyncio.run(main(db_path=args.db_path, verbose=args.verbose))
162 |     sys.exit(exit_code)
163 | 
```

--------------------------------------------------------------------------------
/scripts/sync/litestream/memory_sync.sh:
--------------------------------------------------------------------------------

```bash
  1 | #!/bin/bash
  2 | # Main memory synchronization orchestrator
  3 | # Implements Git-like workflow: stash → pull → apply → push
  4 | 
  5 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
  6 | STAGING_DB="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec_staging.db"
  7 | 
  8 | # Colors for output
  9 | RED='\033[0;31m'
 10 | GREEN='\033[0;32m'
 11 | YELLOW='\033[1;33m'
 12 | BLUE='\033[0;34m'
 13 | NC='\033[0m' # No Color
 14 | 
 15 | print_header() {
 16 |     echo -e "${BLUE}=== $1 ===${NC}"
 17 | }
 18 | 
 19 | print_success() {
 20 |     echo -e "${GREEN}✓ $1${NC}"
 21 | }
 22 | 
 23 | print_warning() {
 24 |     echo -e "${YELLOW}⚠ $1${NC}"
 25 | }
 26 | 
 27 | print_error() {
 28 |     echo -e "${RED}✗ $1${NC}"
 29 | }
 30 | 
 31 | show_status() {
 32 |     print_header "Memory Sync Status"
 33 |     
 34 |     if [ ! -f "$STAGING_DB" ]; then
 35 |         echo "Staging database not initialized"
 36 |         return
 37 |     fi
 38 |     
 39 |     STAGED_COUNT=$(sqlite3 "$STAGING_DB" "SELECT COUNT(*) FROM staged_memories WHERE conflict_status = 'none';" 2>/dev/null || echo "0")
 40 |     CONFLICT_COUNT=$(sqlite3 "$STAGING_DB" "SELECT COUNT(*) FROM staged_memories WHERE conflict_status = 'detected';" 2>/dev/null || echo "0")
 41 |     FAILED_COUNT=$(sqlite3 "$STAGING_DB" "SELECT COUNT(*) FROM staged_memories WHERE conflict_status = 'push_failed';" 2>/dev/null || echo "0")
 42 |     
 43 |     LAST_REMOTE_SYNC=$(sqlite3 "$STAGING_DB" "SELECT value FROM sync_status WHERE key = 'last_remote_sync';" 2>/dev/null || echo "Never")
 44 |     LAST_PUSH=$(sqlite3 "$STAGING_DB" "SELECT value FROM sync_status WHERE key = 'last_push_attempt';" 2>/dev/null || echo "Never")
 45 |     
 46 |     echo "Staged changes ready: $STAGED_COUNT"
 47 |     echo "Conflicts detected: $CONFLICT_COUNT"
 48 |     echo "Failed pushes: $FAILED_COUNT"
 49 |     echo "Last remote sync: $LAST_REMOTE_SYNC"
 50 |     echo "Last push attempt: $LAST_PUSH"
 51 | }
 52 | 
 53 | full_sync() {
 54 |     print_header "Starting Full Memory Sync"
 55 |     
 56 |     # Step 1: Stash local changes
 57 |     print_header "Step 1: Stashing Local Changes"
 58 |     if ! "$SCRIPT_DIR/stash_local_changes.sh"; then
 59 |         print_error "Failed to stash local changes"
 60 |         return 1
 61 |     fi
 62 |     print_success "Local changes stashed"
 63 |     
 64 |     # Step 2: Pull remote changes  
 65 |     print_header "Step 2: Pulling Remote Changes"
 66 |     if ! "$SCRIPT_DIR/pull_remote_changes.sh"; then
 67 |         print_error "Failed to pull remote changes"
 68 |         return 1
 69 |     fi
 70 |     print_success "Remote changes pulled"
 71 |     
 72 |     # Step 3: Apply staged changes
 73 |     print_header "Step 3: Applying Staged Changes"
 74 |     if ! "$SCRIPT_DIR/apply_local_changes.sh"; then
 75 |         print_warning "Some issues applying staged changes (check output above)"
 76 |     else
 77 |         print_success "Staged changes applied"
 78 |     fi
 79 |     
 80 |     # Step 4: Push remaining changes to remote
 81 |     print_header "Step 4: Pushing to Remote API"
 82 |     if ! "$SCRIPT_DIR/push_to_remote.sh"; then
 83 |         print_warning "Some issues pushing to remote (check output above)"
 84 |     else
 85 |         print_success "Changes pushed to remote"
 86 |     fi
 87 |     
 88 |     print_header "Full Sync Completed"
 89 |     show_status
 90 | }
 91 | 
 92 | quick_push() {
 93 |     print_header "Quick Push to Remote"
 94 |     
 95 |     if ! "$SCRIPT_DIR/push_to_remote.sh"; then
 96 |         print_error "Push failed"
 97 |         return 1
 98 |     fi
 99 |     
100 |     print_success "Quick push completed"
101 |     show_status
102 | }
103 | 
104 | quick_pull() {
105 |     print_header "Quick Pull from Remote"
106 |     
107 |     if ! "$SCRIPT_DIR/pull_remote_changes.sh"; then
108 |         print_error "Pull failed"
109 |         return 1
110 |     fi
111 |     
112 |     print_success "Quick pull completed" 
113 |     show_status
114 | }
115 | 
116 | show_help() {
117 |     echo "Memory Sync Tool - Git-like workflow for MCP Memory Service"
118 |     echo ""
119 |     echo "Usage: $0 [command]"
120 |     echo ""
121 |     echo "Commands:"
122 |     echo "  sync, full     - Full synchronization (stash → pull → apply → push)"
123 |     echo "  status, st     - Show current sync status"
124 |     echo "  push           - Push staged changes to remote API"
125 |     echo "  pull           - Pull latest changes from remote"
126 |     echo "  stash          - Stash local changes to staging area"
127 |     echo "  apply          - Apply staged changes to local database"
128 |     echo "  init           - Initialize staging database"
129 |     echo "  help, -h       - Show this help message"
130 |     echo ""
131 |     echo "Examples:"
132 |     echo "  $0 sync        # Full synchronization workflow"
133 |     echo "  $0 status      # Check sync status"
134 |     echo "  $0 push        # Push staged changes only"
135 |     echo ""
136 |     echo "Environment Variables:"
137 |     echo "  MCP_API_KEY    - API key for remote server authentication"
138 | }
139 | 
140 | # Main command handling
141 | case "${1:-status}" in
142 |     "sync"|"full")
143 |         full_sync
144 |         ;;
145 |     "status"|"st")
146 |         show_status
147 |         ;;
148 |     "push")
149 |         quick_push
150 |         ;;
151 |     "pull")
152 |         quick_pull
153 |         ;;
154 |     "stash")
155 |         print_header "Stashing Changes"
156 |         "$SCRIPT_DIR/stash_local_changes.sh"
157 |         ;;
158 |     "apply")
159 |         print_header "Applying Changes"
160 |         "$SCRIPT_DIR/apply_local_changes.sh"
161 |         ;;
162 |     "init")
163 |         print_header "Initializing Staging Database"
164 |         "$SCRIPT_DIR/init_staging_db.sh"
165 |         ;;
166 |     "help"|"-h"|"--help")
167 |         show_help
168 |         ;;
169 |     *)
170 |         echo "Unknown command: $1"
171 |         echo "Use '$0 help' for usage information"
172 |         exit 1
173 |         ;;
174 | esac
```

--------------------------------------------------------------------------------
/scripts/database/check_sqlite_vec_status.py:
--------------------------------------------------------------------------------

```python
  1 | #!/usr/bin/env python3
  2 | """
  3 | Check the status of SQLite-vec database and identify issues.
  4 | """
  5 | 
  6 | import sqlite3
  7 | import sys
  8 | import os
  9 | 
 10 | def check_sqlite_vec_status(db_path):
 11 |     """Check the status of the SQLite-vec database."""
 12 |     print(f"Checking SQLite-vec database: {db_path}")
 13 |     print("="*60)
 14 |     
 15 |     if not os.path.exists(db_path):
 16 |         print(f"❌ Database not found: {db_path}")
 17 |         return False
 18 |         
 19 |     conn = sqlite3.connect(db_path)
 20 |     cursor = conn.cursor()
 21 |     
 22 |     issues_found = []
 23 |     
 24 |     try:
 25 |         # Check basic tables
 26 |         cursor.execute("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name")
 27 |         tables = [row[0] for row in cursor.fetchall()]
 28 |         
 29 |         print(f"Tables: {', '.join(tables)}")
 30 |         
 31 |         if 'memories' not in tables:
 32 |             issues_found.append("Missing 'memories' table")
 33 |         else:
 34 |             cursor.execute("SELECT COUNT(*) FROM memories")
 35 |             memory_count = cursor.fetchone()[0]
 36 |             print(f"📝 Memories: {memory_count}")
 37 |             
 38 |         if 'memory_embeddings' not in tables:
 39 |             issues_found.append("Missing 'memory_embeddings' table")
 40 |         else:
 41 |             # Try to access the embeddings table
 42 |             try:
 43 |                 cursor.execute("SELECT COUNT(*) FROM memory_embeddings")
 44 |                 embedding_count = cursor.fetchone()[0]
 45 |                 print(f"🧠 Embeddings: {embedding_count}")
 46 |                 
 47 |                 # Check if counts match
 48 |                 if 'memories' in tables:
 49 |                     if memory_count != embedding_count:
 50 |                         issues_found.append(f"Count mismatch: {memory_count} memories vs {embedding_count} embeddings")
 51 |                         
 52 |             except Exception as e:
 53 |                 if "no such module: vec0" in str(e):
 54 |                     issues_found.append("sqlite-vec extension not loaded - cannot access embeddings")
 55 |                 else:
 56 |                     issues_found.append(f"Cannot access embeddings table: {e}")
 57 |                     
 58 |         # Check if extension loading is possible
 59 |         try:
 60 |             conn.enable_load_extension(True)
 61 |             extension_support = True
 62 |         except Exception:
 63 |             extension_support = False
 64 |             issues_found.append("Extension loading not supported")
 65 |             
 66 |         print(f"Extension loading: {'✅ Supported' if extension_support else '❌ Not supported'}")
 67 |         
 68 |         # Try to load sqlite-vec
 69 |         if extension_support:
 70 |             try:
 71 |                 # This will fail if sqlite-vec is not installed
 72 |                 import sqlite_vec
 73 |                 sqlite_vec.load(conn)
 74 |                 print("✅ sqlite-vec extension loaded successfully")
 75 |                 
 76 |                 # Now try to access embeddings
 77 |                 try:
 78 |                     cursor.execute("SELECT COUNT(*) FROM memory_embeddings")
 79 |                     embedding_count = cursor.fetchone()[0]
 80 |                     print(f"✅ Can now access embeddings: {embedding_count}")
 81 |                     
 82 |                     # Test a simple search
 83 |                     if embedding_count > 0:
 84 |                         cursor.execute("SELECT * FROM memory_embeddings LIMIT 1")
 85 |                         row = cursor.fetchone()
 86 |                         print("✅ Embedding data accessible")
 87 |                     
 88 |                 except Exception as e:
 89 |                     issues_found.append(f"Still cannot access embeddings after loading extension: {e}")
 90 |                     
 91 |             except ImportError:
 92 |                 issues_found.append("sqlite-vec Python module not installed")
 93 |             except Exception as e:
 94 |                 issues_found.append(f"Failed to load sqlite-vec extension: {e}")
 95 |                 
 96 |     except Exception as e:
 97 |         issues_found.append(f"Database error: {e}")
 98 |         
 99 |     finally:
100 |         conn.close()
101 |         
102 |     print("\n" + "="*60)
103 |     if issues_found:
104 |         print("⚠️  Issues Found:")
105 |         for i, issue in enumerate(issues_found, 1):
106 |             print(f"  {i}. {issue}")
107 |             
108 |         print("\nRecommendations:")
109 |         if "sqlite-vec Python module not installed" in str(issues_found):
110 |             print("  • Install sqlite-vec: uv pip install sqlite-vec")
111 |         if "sentence-transformers" in str(issues_found) or "embedding" in str(issues_found).lower():
112 |             print("  • Install sentence-transformers: uv pip install sentence-transformers torch")
113 |         if "Count mismatch" in str(issues_found):
114 |             print("  • Run repair script to regenerate missing embeddings")
115 |         if "cannot access embeddings" in str(issues_found).lower():
116 |             print("  • Database may need migration to fix schema issues")
117 |             
118 |     else:
119 |         print("✅ No issues found - database appears healthy!")
120 |         
121 |     return len(issues_found) == 0
122 | 
123 | def main():
124 |     if len(sys.argv) != 2:
125 |         print("Usage: python check_sqlite_vec_status.py <database_path>")
126 |         sys.exit(1)
127 |         
128 |     db_path = sys.argv[1]
129 |     healthy = check_sqlite_vec_status(db_path)
130 |     sys.exit(0 if healthy else 1)
131 | 
132 | if __name__ == "__main__":
133 |     main()
```

--------------------------------------------------------------------------------
/docs/IMPLEMENTATION_PLAN_HTTP_SSE.md:
--------------------------------------------------------------------------------

```markdown
  1 | # HTTP/SSE + SQLite-vec Implementation Plan
  2 | 
  3 | **Date**: 2025-07-25  
  4 | **Status**: Extracted from previous planning session  
  5 | **Context**: Issue #57 - Add HTTP/SSE interface to MCP Memory Service
  6 | 
  7 | ## Executive Summary
  8 | 
  9 | Implement HTTP REST API and Server-Sent Events (SSE) interface for the MCP Memory Service using the existing sqlite-vec backend instead of ChromaDB. This creates a lightweight, edge-ready solution that maintains all existing MCP functionality while adding modern web capabilities.
 10 | 
 11 | ## Key Architectural Decision
 12 | 
 13 | **Combine HTTP/SSE with sqlite-vec backend** instead of ChromaDB for:
 14 | - Simplified deployment (single file database)
 15 | - Better performance (10x faster operations)
 16 | - Edge-ready deployment (Cloudflare Workers, Vercel)
 17 | - No external dependencies
 18 | - Instant SSE updates via SQLite triggers
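
The trigger mechanism behind the last point is not specified further in this plan. A minimal sketch of the idea (assuming the existing `memories` table and a `content_hash` column; the change-log table and names are illustrative, not the actual schema in `sqlite_vec.py`):

```python
import sqlite3

# Sketch only: record each change in a small change-log table that the SSE
# layer can poll and translate into events for connected clients.
conn = sqlite3.connect("memories.db")
conn.executescript("""
CREATE TABLE IF NOT EXISTS memory_events (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    event_type TEXT NOT NULL,               -- e.g. 'memory_stored', 'memory_deleted'
    memory_id TEXT,
    created_at REAL DEFAULT (strftime('%s', 'now'))
);

CREATE TRIGGER IF NOT EXISTS trg_memory_stored
AFTER INSERT ON memories
BEGIN
    INSERT INTO memory_events (event_type, memory_id)
    VALUES ('memory_stored', NEW.content_hash);
END;

CREATE TRIGGER IF NOT EXISTS trg_memory_deleted
AFTER DELETE ON memories
BEGIN
    INSERT INTO memory_events (event_type, memory_id)
    VALUES ('memory_deleted', OLD.content_hash);
END;
""")
conn.commit()
```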
 19 | 
 20 | ## Implementation Phases
 21 | 
 22 | ### Phase 1: Foundation (Week 1)
 23 | - ✅ Create feature branch from sqlite-vec-backend
 24 | - ✅ Create PROJECT_STATUS.md tracking file
 25 | - [ ] Validate sqlite-vec functionality
 26 | - [ ] Add FastAPI dependencies
 27 | - [ ] Set up development environment
 28 | 
 29 | ### Phase 2: HTTP Implementation (Week 2)
 30 | - [ ] Create web server structure
 31 | - [ ] Implement health check endpoint
 32 | - [ ] Add memory CRUD endpoints
 33 | - [ ] Add search endpoints
 34 | - [ ] OpenAPI documentation
 35 | 
 36 | ### Phase 3: SSE Implementation (Week 3)
 37 | - [ ] Design SSE event architecture
 38 | - [ ] Implement SQLite triggers
 39 | - [ ] Create /events endpoint
 40 | - [ ] Connection management
 41 | - [ ] Real-time update testing
 42 | 
 43 | ### Phase 4: Dashboard (Week 4)
 44 | - [ ] Minimal UI design (vanilla JS)
 45 | - [ ] Memory visualization
 46 | - [ ] SSE connection handling
 47 | - [ ] Search interface
 48 | - [ ] Responsive design
 49 | 
 50 | ## Technical Architecture
 51 | 
 52 | ### Directory Structure
 53 | ```
 54 | src/mcp_memory_service/
 55 | ├── web/
 56 | │   ├── __init__.py
 57 | │   ├── app.py          # FastAPI application
 58 | │   ├── sse.py          # SSE event handling
 59 | │   ├── api/
 60 | │   │   ├── __init__.py
 61 | │   │   ├── memories.py # Memory CRUD
 62 | │   │   ├── search.py   # Search operations
 63 | │   │   └── health.py   # Health monitoring
 64 | │   └── static/
 65 | │       ├── index.html  # Dashboard
 66 | │       ├── app.js      # Frontend JS
 67 | │       └── style.css   # Styling
 68 | ├── storage/
 69 | │   ├── sqlite_vec.py   # Existing
 70 | │   └── sqlite_sse.py   # New: SSE triggers
 71 | ```
 72 | 
 73 | ### Server Modes
 74 | 1. **MCP Mode**: Original stdio protocol (unchanged)
 75 | 2. **HTTP Mode**: FastAPI server with SSE
 76 | 3. **Hybrid Mode**: Both protocols simultaneously
 77 | 
 78 | ### SSE Events
 79 | - `memory_stored`: New memory added
 80 | - `memory_deleted`: Memory removed
 81 | - `search_completed`: Search results ready
 82 | - `backup_status`: Backup progress
 83 | - `health_update`: System status changes
 84 | 
 85 | ### API Endpoints
 86 | - `GET /api/health` - Health check
 87 | - `GET /api/memories` - List memories (paginated)
 88 | - `POST /api/memories` - Store new memory
 89 | - `GET /api/memories/{id}` - Get specific memory
 90 | - `DELETE /api/memories/{id}` - Delete memory
 91 | - `POST /api/search` - Semantic search
 92 | - `POST /api/search/by-tag` - Tag search
 93 | - `POST /api/search/by-time` - Time-based recall
 94 | - `GET /events` - SSE endpoint
 95 | 
 96 | ## Dependencies to Add
 97 | ```
 98 | fastapi>=0.115.0
 99 | uvicorn>=0.30.0
100 | python-multipart>=0.0.9
101 | sse-starlette>=2.1.0
102 | aiofiles>=23.2.1
103 | ```
104 | 
105 | ## Configuration
106 | ```python
107 | # New environment variables
108 | HTTP_ENABLED = 'MCP_HTTP_ENABLED'
109 | HTTP_PORT = 'MCP_HTTP_PORT'                   # default: 8000
110 | HTTP_HOST = 'MCP_HTTP_HOST'                   # default: 0.0.0.0
111 | CORS_ORIGINS = 'MCP_CORS_ORIGINS'
112 | SSE_HEARTBEAT_INTERVAL = 'MCP_SSE_HEARTBEAT'  # default: 30 seconds
113 | API_KEY = 'MCP_API_KEY'                       # optional authentication
114 | ```
115 | 
116 | ## Performance Targets
117 | - Memory storage: <50ms (vs ChromaDB ~500ms)
118 | - Search response: <100ms for 1M memories
119 | - SSE latency: <10ms from write to event
120 | - Startup time: <1s (vs ChromaDB 5-10s)
121 | 
122 | ## Testing Strategy
123 | - Unit tests for all HTTP endpoints
124 | - Integration tests for SSE connections
125 | - Performance benchmarks vs ChromaDB
126 | - Browser compatibility testing
127 | - Edge deployment validation
128 | 
129 | ## Security Considerations
130 | - Optional API key authentication
131 | - CORS configuration
132 | - Rate limiting
133 | - Input validation
134 | - SSL/TLS documentation
135 | 
136 | ## Migration Path
137 | - Existing MCP users: No changes required
138 | - ChromaDB users: Migration script provided
139 | - New users: SQLite-vec as default for HTTP mode
140 | 
141 | ## Benefits
142 | - **Simplicity**: Single file database, no external services
143 | - **Performance**: Orders of magnitude faster
144 | - **Portability**: Runs anywhere Python runs
145 | - **Reliability**: SQLite's proven track record
146 | - **Modern**: HTTP/SSE/REST for web integration
147 | - **Efficient**: Minimal resource usage
148 | - **Edge-ready**: Deploy to CDN edge locations
149 | 
150 | ## Future Possibilities
151 | - Distributed SQLite with Litestream replication
152 | - Cloudflare Workers deployment with D1
153 | - Offline-first PWA with WASM SQLite
154 | - Federation between multiple instances
155 | 
156 | ## Success Metrics
157 | - HTTP endpoints respond within performance targets
158 | - SSE connections maintain real-time updates <10ms
159 | - Dashboard provides intuitive memory management
160 | - Documentation enables easy deployment
161 | - Migration from ChromaDB is seamless
162 | - Edge deployment works on major platforms
163 | 
164 | ---
165 | 
166 | This plan represents a significant architectural improvement while maintaining full backward compatibility with existing MCP usage patterns.
```
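
To make the plan above concrete, here is a minimal sketch of what the Phase 2 health endpoint and Phase 3 `/events` stream could look like with the dependencies listed in the plan (FastAPI, uvicorn, sse-starlette). Module and function names are illustrative placeholders, not the service's actual implementation.

```python
# Illustrative sketch only: health check plus an SSE stream, assuming the
# dependencies listed in the plan (fastapi, uvicorn, sse-starlette).
import asyncio
from fastapi import FastAPI
from sse_starlette.sse import EventSourceResponse

app = FastAPI(title="MCP Memory Service HTTP API (sketch)")

@app.get("/api/health")
async def health():
    return {"status": "ok", "backend": "sqlite-vec"}

@app.get("/events")
async def events():
    async def event_stream():
        while True:
            # A real implementation would emit memory_stored / memory_deleted
            # events driven by SQLite triggers; this sketch only heartbeats.
            yield {"event": "health_update", "data": "alive"}
            await asyncio.sleep(30)  # matches the planned 30s heartbeat default
    return EventSourceResponse(event_stream())

# Run with: uvicorn sketch_app:app --port 8000  (assuming this file is sketch_app.py)
```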

--------------------------------------------------------------------------------
/docs/mastery/architecture-overview.md:
--------------------------------------------------------------------------------

```markdown
 1 | # MCP Memory Service — Architecture Overview
 2 | 
 3 | This document summarizes the Memory Service architecture, components, data flow, and how MCP integration is implemented.
 4 | 
 5 | ## High-Level Design
 6 | 
 7 | - Clients: Claude Desktop/Code, VS Code, Cursor, Continue, and other MCP-compatible clients.
 8 | - Protocol Layer:
 9 |   - MCP stdio server: `src/mcp_memory_service/server.py` (uses `mcp.server.Server`).
10 |   - FastAPI MCP server: `src/mcp_memory_service/mcp_server.py` (via `FastMCP`, exposes streamable HTTP for remote access).
11 | - Core Domain:
12 |   - Models: `src/mcp_memory_service/models/memory.py` defining `Memory` and `MemoryQueryResult`.
13 |   - Utilities: hashing, time parsing, system detection, HTTP server coordination.
14 | - Storage Abstraction:
15 |   - Interface: `src/mcp_memory_service/storage/base.py` (`MemoryStorage` ABC).
16 |   - Backends:
17 |     - SQLite-vec: `src/mcp_memory_service/storage/sqlite_vec.py` (recommended default).
18 |     - ChromaDB: `src/mcp_memory_service/storage/chroma.py` (deprecated; migration path provided).
19 |     - Cloudflare: `src/mcp_memory_service/storage/cloudflare.py` (Vectorize + D1 + optional R2).
20 |     - HTTP client: `src/mcp_memory_service/storage/http_client.py` (multi-client coordination).
21 | - CLI:
22 |   - Entry points: `memory`, `memory-server`, `mcp-memory-server` (pyproject scripts).
23 |   - Implementation: `src/mcp_memory_service/cli/main.py` (server, status, ingestion commands).
24 | - Config and Env:
25 |   - Central config: `src/mcp_memory_service/config.py` (paths, backend selection, HTTP/HTTPS, mDNS, consolidation, hostname tagging, Cloudflare settings).
26 | - Consolidation (optional): associations, clustering, compression, forgetting; initialized lazily when enabled.
27 | 
28 | ## Data Flow
29 | 
30 | 1. Client invokes MCP tool/prompt (stdio or FastMCP HTTP transport).
31 | 2. Server resolves the configured backend via `config.py` and lazy/eager initializes storage.
32 | 3. For SQLite-vec:
33 |    - Embeddings are generated via `sentence-transformers` (or the ONNX path when enabled) and stored alongside content and metadata in SQLite; vector search runs through the `vec0` virtual table.
34 |    - WAL mode + busy timeouts for concurrent access; optional HTTP coordination for multi-client scenarios.
35 | 4. For ChromaDB: uses DuckDB+Parquet persistence and HNSW settings (deprecated path; migration messaging built-in).
36 | 5. For Cloudflare: Vectorize (vectors), D1 (metadata), R2 (large content); HTTPx for API calls.
37 | 6. Results map back to `Memory`/`MemoryQueryResult` and are returned to the MCP client.
38 | 
39 | ## MCP Integration Patterns
40 | 
41 | - Stdio MCP (`server.py`):
42 |   - Uses `mcp.server.Server` and registers tools/prompts for memory operations, diagnostics, and analysis.
43 |   - Client-aware logging (`DualStreamHandler`) to keep JSON wire clean for Claude Desktop; richer stdout for LM Studio.
44 |   - Coordination: detects if an HTTP sidecar is needed for multi-client access; starts/uses `HTTPClientStorage` when appropriate.
45 | 
46 | - FastMCP (`mcp_server.py`):
47 |   - Wraps storage via `lifespan` context; exposes core tools like `store_memory`, `retrieve_memory`, `search_by_tag`, `delete_memory`, `check_database_health` using `@mcp.tool()`.
48 |   - Designed for remote/HTTP access and Claude Code compatibility via `streamable-http` transport.
49 | 
50 | ## Storage Layer Abstraction
51 | 
52 | - `MemoryStorage` interface defines: `initialize`, `store`, `retrieve/search`, `search_by_tag(s)`, `delete`, `delete_by_tag`, `cleanup_duplicates`, `update_memory_metadata`, `get_stats`, plus optional helpers for tags/time ranges.
53 | - Backends adhere to the interface and can be swapped via `MCP_MEMORY_STORAGE_BACKEND`.
54 | 
55 | ## Configuration Management
56 | 
57 | - Paths: base dir and per-backend storage paths (auto-created, validated for writability).
58 | - Backend selection: `MCP_MEMORY_STORAGE_BACKEND` ∈ `{sqlite_vec, chroma, cloudflare}` (normalized).
59 | - HTTP/HTTPS server, CORS, API key, SSE heartbeat.
60 | - mDNS discovery toggles and timeouts.
61 | - Consolidation: enabled flag, archive path, decay/association/clustering/compression/forgetting knobs; schedules for APScheduler.
62 | - Hostname tagging: `MCP_MEMORY_INCLUDE_HOSTNAME` annotates source host.
63 | - Cloudflare: tokens, account, Vectorize index, D1 DB, optional R2, retry behavior.
64 | 
65 | ## Dependencies and Roles
66 | 
67 | - `mcp`: MCP protocol server/runtime.
68 | - `sqlite-vec`: vector index for SQLite; provides `vec0` virtual table.
69 | - `sentence-transformers`, `torch`: embedding generation; can be disabled.
70 | - `chromadb`: legacy backend (DuckDB+Parquet).
71 | - `fastapi`, `uvicorn`, `sse-starlette`, `aiofiles`, `aiohttp/httpx`: HTTP transports and Cloudflare/API.
72 | - `psutil`, `zeroconf`: client detection and mDNS discovery.
73 | 
74 | ## Logging and Diagnostics
75 | 
76 | - Client-aware logging handler prevents stdout noise for Claude (keeps JSON clean) and surfaces info on LM Studio.
77 | - `LOG_LEVEL` env var sets the root logger level (defaults to WARNING). Performance-critical third-party loggers are elevated to WARNING unless `DEBUG_MODE` is set.
78 | 
79 | ## Performance and Concurrency
80 | 
81 | - SQLite-vec pragmas: WAL, busy_timeout, synchronous=NORMAL, cache_size, temp_store.
82 | - Custom pragmas via `MCP_MEMORY_SQLITE_PRAGMAS`.
83 | - Embedding model is cached and loaded once; ONNX path available when enabled.
84 | - Average query time tracking, async operations, and optional consolidation scheduler.
85 | 
86 | 
```
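
For orientation, the sketch below shows the rough shape of the `MemoryStorage` interface described above. The authoritative definition (full method set, argument names, return types) lives in `src/mcp_memory_service/storage/base.py`, so treat these signatures as simplified placeholders.

```python
# Simplified sketch of the storage abstraction; real signatures are in storage/base.py.
from abc import ABC, abstractmethod
from typing import Any, Dict, List

class MemoryStorage(ABC):
    @abstractmethod
    async def initialize(self) -> None: ...

    @abstractmethod
    async def store(self, memory: Any) -> str: ...

    @abstractmethod
    async def retrieve(self, query: str, n_results: int = 5) -> List[Any]: ...

    @abstractmethod
    async def search_by_tag(self, tags: List[str]) -> List[Any]: ...

    @abstractmethod
    async def delete(self, content_hash: str) -> bool: ...

    @abstractmethod
    async def get_stats(self) -> Dict[str, Any]: ...

# A backend such as SqliteVecMemoryStorage or CloudflareStorage implements this
# interface and is selected at runtime via MCP_MEMORY_STORAGE_BACKEND.
```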

--------------------------------------------------------------------------------
/archive/release-notes/release-notes-v7.1.4.md:
--------------------------------------------------------------------------------

```markdown
  1 | # MCP Memory Service v7.1.4 - Unified Cross-Platform Hook Installer
  2 | 
  3 | ## 🚀 **Major Feature Release**
  4 | 
  5 | This release introduces a **unified cross-platform Python installer** that consolidates 4+ separate installer scripts into a single, robust solution with enhanced features for Claude Code Memory Awareness Hooks.
  6 | 
  7 | ## ✨ **What's New**
  8 | 
  9 | ### 🔧 **Unified Installation Experience**
 10 | - **Single installer**: `install_hooks.py` replaces all platform-specific scripts
 11 | - **Cross-platform compatibility**: Works seamlessly on Windows, macOS, and Linux
 12 | - **Intelligent configuration merging**: Preserves existing Claude Code hook configurations
 13 | - **Dynamic path resolution**: Eliminates hardcoded paths and works in any location
 14 | 
 15 | ### 🎯 **Enhanced Safety & Reliability**
 16 | - **Atomic installations**: Automatic rollback on failure
 17 | - **Comprehensive backups**: Timestamped restore points before changes
 18 | - **Smart JSON merging**: Prevents settings.json overwrite and configuration loss
 19 | - **Empty directory cleanup**: Proper uninstall process with orphaned folder removal
 20 | 
 21 | ### ⚡ **Natural Memory Triggers v7.1.3**
 22 | - **Advanced trigger detection**: 85%+ accuracy for intelligent memory injection
 23 | - **Multi-tier performance**: Optimized response times (50ms/150ms/500ms)
 24 | - **Mid-conversation hooks**: Real-time memory awareness during conversations
 25 | - **CLI management tools**: Live configuration and performance tuning
 26 | - **Git-aware context**: Repository integration for enhanced context
 27 | 
 28 | ## 📋 **Installation Commands**
 29 | 
 30 | ### New Unified Installation
 31 | ```bash
 32 | # Navigate to hooks directory
 33 | cd claude-hooks
 34 | 
 35 | # Install Natural Memory Triggers (recommended)
 36 | python install_hooks.py --natural-triggers
 37 | 
 38 | # Install basic memory awareness hooks
 39 | python install_hooks.py --basic
 40 | 
 41 | # Install everything
 42 | python install_hooks.py --all
 43 | 
 44 | # Test installation (dry-run)
 45 | python install_hooks.py --dry-run --natural-triggers
 46 | ```
 47 | 
 48 | ### Integrated with Main Installer
 49 | ```bash
 50 | # Install service + hooks together
 51 | python scripts/installation/install.py --install-natural-triggers
 52 | ```
 53 | 
 54 | ## 🔄 **Migration Guide**
 55 | 
 56 | ### For Existing Users
 57 | 1. **Backup existing installation** (automatic during upgrade)
 58 | 2. **Run unified installer**: `python install_hooks.py --natural-triggers`
 59 | 3. **Verify functionality**: Hooks preserve existing configurations
 60 | 
 61 | ### From Legacy Scripts
 62 | - ❌ `install.sh` → ✅ `python install_hooks.py --basic`
 63 | - ❌ `install-natural-triggers.sh` → ✅ `python install_hooks.py --natural-triggers`
 64 | - ❌ `install_claude_hooks_windows.bat` → ✅ `python install_hooks.py --all`
 65 | 
 66 | **Complete migration guide**: See `claude-hooks/MIGRATION.md`
 67 | 
 68 | ## 🛠 **Technical Improvements**
 69 | 
 70 | ### Cross-Platform Enhancements
 71 | - **Proper path quoting**: Handles spaces in Windows installation paths
 72 | - **Platform-specific hooks directory detection**: Works across different OS configurations
 73 | - **Consistent CLI interface**: Same commands work on all platforms
 74 | 
 75 | ### Code Quality
 76 | - **Type hints throughout**: Better maintainability and IDE support
 77 | - **Comprehensive error handling**: Graceful degradation and detailed feedback
 78 | - **Modular architecture**: Clear separation of concerns and extensibility
 79 | - **Professional UX**: Enhanced output formatting and user guidance
 80 | 
 81 | ## ⚠️ **Breaking Changes**
 82 | 
 83 | - **Legacy shell scripts removed**: `install.sh`, `install-natural-triggers.sh`, `install_claude_hooks_windows.bat`
 84 | - **Installation commands updated**: Must use unified Python installer
 85 | - **Configuration structure**: Enhanced v7.1.3 dual protocol support
 86 | 
 87 | ## 🧪 **Testing & Validation**
 88 | 
 89 | ### Comprehensive Test Results
 90 | - **Natural Memory Triggers**: 18/18 tests passing (100% success rate)
 91 | - **Cross-platform compatibility**: Validated on Linux, in simulated macOS environments, and against Windows path handling
 92 | - **Installation integrity**: All components verified with syntax validation
 93 | - **Configuration merging**: Tested with various existing setups
 94 | 
 95 | ### Performance Metrics
 96 | - **Installation time**: ~30 seconds for complete Natural Memory Triggers setup
 97 | - **Average test execution**: 3.3ms per test
 98 | - **Memory footprint**: Minimal impact with intelligent caching
 99 | 
100 | ## 🎯 **Benefits Summary**
101 | 
102 | This unified installer provides:
103 | - ✅ **Better reliability** across all platforms
104 | - ✅ **Safer installations** with intelligent configuration merging
105 | - ✅ **Consistent experience** regardless of operating system
106 | - ✅ **Advanced features** like Natural Memory Triggers v7.1.3
107 | - ✅ **Professional tooling** with comprehensive testing and validation
108 | - ✅ **Future-proof architecture** with extensible Python design
109 | 
110 | ## 📞 **Support & Documentation**
111 | 
112 | - **Installation Guide**: `claude-hooks/MIGRATION.md`
113 | - **Troubleshooting**: Run with `--dry-run` flag to diagnose issues
114 | - **CLI Help**: `python install_hooks.py --help`
115 | - **Issues**: [GitHub Issues](https://github.com/doobidoo/mcp-memory-service/issues)
116 | 
117 | ## 🙏 **Acknowledgments**
118 | 
119 | Special thanks to **Gemini Code Assist** for comprehensive code review feedback that drove the safety and reliability improvements in this release.
120 | 
121 | ---
122 | 
123 | This release represents a significant milestone in the evolution of Claude Code Memory Awareness Hooks, providing a unified, cross-platform installation experience with enhanced safety and advanced features.
```

--------------------------------------------------------------------------------
/archive/docs-root-cleanup-2025-08-23/DOCUMENTATION_ANALYSIS.md:
--------------------------------------------------------------------------------

```markdown
  1 | # MCP Memory Service - Documentation Analysis & Consolidation Plan
  2 | 
  3 | **Analysis Date**: 2025-08-23  
  4 | **Total Files**: 87 markdown files (75 in `/docs/`, 12 in root)  
  5 | **Total Size**: ~1MB of documentation  
  6 | 
  7 | ## 🚨 **Critical Redundancy Areas**
  8 | 
  9 | ### 1. Installation Guides (MASSIVE OVERLAP)
 10 | **6+ files covering nearly identical installation steps:**
 11 | 
 12 | - **docs/guides/service-installation.md** (10KB) - Cross-platform service installation
 13 | - **docs/installation/complete-setup-guide.md** (7.7KB) - Complete setup with consolidation features  
 14 | - **docs/installation/master-guide.md** (5KB) - Hardware-specific installation paths
 15 | - **docs/installation/distributed-sync.md** (11KB) - Installation + sync setup
 16 | - **docs/guides/claude-desktop-setup.md** (3.4KB) - Claude Desktop specific setup
 17 | - **README.md** (56KB) - Contains full installation instructions + everything else
 18 | 
 19 | **Redundancy**: Same basic steps (clone → install → configure) repeated 6 times with slight variations
 20 | 
 21 | ### 2. Platform-Specific Setup (DUPLICATE CONTENT)
 22 | **4+ files with overlapping platform instructions:**
 23 | 
 24 | - **docs/platforms/windows.md** (11KB) - Windows setup
 25 | - **docs/guides/windows-setup.md** (3.9KB) - Windows setup (shorter version)
 26 | - **docs/platforms/ubuntu.md** (12.8KB) - Linux setup  
 27 | - **docs/guides/UBUNTU_SETUP.md** (5.9KB) - Linux setup (different approach)
 28 | - **docs/platforms/macos-intel.md** (9.8KB) - macOS Intel setup
 29 | 
 30 | **Redundancy**: Platform-specific steps repeated across different file structures
 31 | 
 32 | ### 3. Claude Integration (SCATTERED APPROACH)
 33 | **4+ files covering Claude integration:**
 34 | 
 35 | - **docs/guides/claude-code-integration.md** (10.6KB) - Claude Code integration
 36 | - **docs/guides/claude-code-quickstart.md** (3.9KB) - Quick start version
 37 | - **docs/guides/claude-desktop-setup.md** (3.4KB) - Desktop setup
 38 | - **docs/guides/claude_integration.md** (2.5KB) - Basic integration
 39 | - **docs/guides/claude-code-compatibility.md** (3.8KB) - Compatibility guide
 40 | 
 41 | **Redundancy**: Same configuration steps and JSON examples repeated
 42 | 
 43 | ### 4. Development/Session Files (SHOULD BE ARCHIVED)
 44 | **10+ development artifacts mixed with user docs:**
 45 | 
 46 | - **docs/sessions/MCP_ENHANCEMENT_SESSION_MEMORY_v4.1.0.md** (12KB) - Development session
 47 | - **docs/development/** (multiple CLEANUP_*, TIMESTAMP_*, etc. files)
 48 | - **SESSION_MEMORY_2025-08-11.md** (4.6KB) - Personal session notes
 49 | - **CLAUDE_PERSONALIZED.md** (10.6KB) - Personal notes
 50 | 
 51 | **Issue**: Development artifacts shouldn't be in user-facing documentation
 52 | 
 53 | ## 📊 **File Categories Analysis**
 54 | 
 55 | ### **KEEP in Repository (4 files max)**
 56 | - **README.md** - Streamlined overview + wiki links
 57 | - **CLAUDE.md** - Claude Code development guidance  
 58 | - **CHANGELOG.md** - Version history
 59 | - **CONTRIBUTING.md** - Development guidelines (if exists)
 60 | 
 61 | ### **MOVE TO WIKI - Installation** (consolidate 6→1)
 62 | - All installation guides → Single comprehensive installation wiki page
 63 | - Platform-specific details → Sub-sections in installation page
 64 | 
 65 | ### **MOVE TO WIKI - Integration** (consolidate 5→1) 
 66 | - All Claude integration guides → Single integration wiki page
 67 | - Other IDE integrations → Sub-sections
 68 | 
 69 | ### **MOVE TO WIKI - Technical** (organize existing)
 70 | - API documentation → Technical reference section
 71 | - Architecture docs → System design section
 72 | - Troubleshooting → Dedicated troubleshooting section
 73 | 
 74 | ### **ARCHIVE/DELETE** (20+ files)
 75 | - All development session files
 76 | - Cleanup summaries and development artifacts  
 77 | - Duplicate/outdated guides
 78 | - Personal session memories
 79 | 
 80 | ## 🎯 **Consolidation Targets**
 81 | 
 82 | ### **Target 1: Single Installation Guide**
 83 | **From**: 6 redundant installation files  
 84 | **To**: 1 comprehensive wiki page with sections:
 85 | - Quick start (universal installer)
 86 | - Platform-specific notes (Windows/macOS/Linux)
 87 | - Hardware optimization (legacy vs modern)
 88 | - Service installation options
 89 | - Troubleshooting common issues
 90 | 
 91 | ### **Target 2: Single Integration Guide** 
 92 | **From**: 5 Claude integration files
 93 | **To**: 1 comprehensive integration page with:
 94 | - Claude Desktop setup
 95 | - Claude Code integration
 96 | - VS Code extension setup
 97 | - Other IDE configurations
 98 | - Configuration examples
 99 | 
100 | ### **Target 3: Technical Reference**
101 | **From**: Scattered API/technical docs
102 | **To**: Organized technical section:
103 | - API documentation
104 | - Architecture overview
105 | - Storage backends comparison
106 | - Performance optimization
107 | - Development guidelines
108 | 
109 | ## 📈 **Expected Results**
110 | 
111 | **Before**: 87 markdown files, difficult to navigate, redundant content  
112 | **After**: ~4 essential repo files + organized wiki with ~15 comprehensive pages
113 | 
114 | **Benefits**:
115 | - **90% reduction** in documentation files in repository
116 | - **Eliminated redundancy** - single source of truth for each topic
117 | - **Improved discoverability** - logical wiki structure vs scattered files
118 | - **Easier maintenance** - update once vs updating 6 installation guides
119 | - **Cleaner repository** - focus on code, not documentation chaos
120 | - **Better user experience** - clear paths vs overwhelming choice paralysis
121 | 
122 | ## ✅ **Next Steps**
123 | 
124 | 1. **Create wiki structure** with consolidated pages
125 | 2. **Migrate and merge content** from redundant files  
126 | 3. **Update README.md** to point to wiki
127 | 4. **Remove redundant documentation** from repository
128 | 5. **Archive development artifacts** to separate location
```

--------------------------------------------------------------------------------
/scripts/testing/test_migration.py:
--------------------------------------------------------------------------------

```python
  1 | #!/usr/bin/env python3
  2 | # Copyright 2024 Heinrich Krupp
  3 | #
  4 | # Licensed under the Apache License, Version 2.0 (the "License");
  5 | # you may not use this file except in compliance with the License.
  6 | # You may obtain a copy of the License at
  7 | #
  8 | #     http://www.apache.org/licenses/LICENSE-2.0
  9 | #
 10 | # Unless required by applicable law or agreed to in writing, software
 11 | # distributed under the License is distributed on an "AS IS" BASIS,
 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 13 | # See the License for the specific language governing permissions and
 14 | # limitations under the License.
 15 | 
 16 | """
 17 | Test script to verify ChromaDB to SQLite-vec migration.
 18 | 
 19 | This script compares data between ChromaDB and SQLite-vec to ensure
 20 | the migration was successful.
 21 | """
 22 | 
 23 | import asyncio
 24 | import sys
 25 | from pathlib import Path
 26 | from typing import List, Dict, Any
 27 | 
 28 | # Add project root to path (this file lives in scripts/testing/, so go up three levels)
 29 | project_root = Path(__file__).parent.parent.parent
 30 | sys.path.insert(0, str(project_root / "src"))
 31 | 
 32 | from mcp_memory_service.storage.chroma import ChromaMemoryStorage
 33 | from mcp_memory_service.storage.sqlite_vec import SqliteVecMemoryStorage
 34 | from mcp_memory_service.config import CHROMA_PATH, EMBEDDING_MODEL_NAME
 35 | 
 36 | async def compare_storage_backends(chroma_path: str, sqlite_path: str):
 37 |     """Compare data between ChromaDB and SQLite-vec backends."""
 38 |     
 39 |     print("🔍 Testing Migration Results")
 40 |     print("=" * 50)
 41 |     
 42 |     # Initialize both storages
 43 |     chroma_storage = ChromaMemoryStorage(
 44 |         persist_directory=chroma_path,
 45 |         embedding_model_name=EMBEDDING_MODEL_NAME
 46 |     )
 47 |     await chroma_storage.initialize()
 48 |     
 49 |     sqlite_storage = SqliteVecMemoryStorage(
 50 |         db_path=sqlite_path,
 51 |         embedding_model=EMBEDDING_MODEL_NAME
 52 |     )
 53 |     await sqlite_storage.initialize()
 54 |     
 55 |     try:
 56 |         # Get all memories from both storages
 57 |         print("📥 Fetching memories from ChromaDB...")
 58 |         chroma_memories = await chroma_storage.retrieve("", n_results=10000)
 59 |         
 60 |         print("📥 Fetching memories from SQLite-vec...")
 61 |         sqlite_memories = await sqlite_storage.retrieve("", n_results=10000)
 62 |         
 63 |         # Compare counts
 64 |         chroma_count = len(chroma_memories)
 65 |         sqlite_count = len(sqlite_memories)
 66 |         
 67 |         print(f"📊 ChromaDB memories: {chroma_count}")
 68 |         print(f"📊 SQLite-vec memories: {sqlite_count}")
 69 |         
 70 |         if sqlite_count >= chroma_count:
 71 |             print("✅ Memory count check: PASSED")
 72 |         else:
 73 |             print("❌ Memory count check: FAILED")
 74 |             print(f"   Missing {chroma_count - sqlite_count} memories")
 75 |         
 76 |         # Compare content hashes
 77 |         chroma_hashes = {m.memory.content_hash for m in chroma_memories}
 78 |         sqlite_hashes = {m.memory.content_hash for m in sqlite_memories}
 79 |         
 80 |         missing_in_sqlite = chroma_hashes - sqlite_hashes
 81 |         extra_in_sqlite = sqlite_hashes - chroma_hashes
 82 |         
 83 |         if not missing_in_sqlite:
 84 |             print("✅ Content hash check: PASSED")
 85 |         else:
 86 |             print("❌ Content hash check: FAILED")
 87 |             print(f"   {len(missing_in_sqlite)} memories missing in SQLite-vec")
 88 |             if len(missing_in_sqlite) <= 5:
 89 |                 for hash_val in list(missing_in_sqlite)[:5]:
 90 |                     print(f"   - {hash_val[:12]}...")
 91 |         
 92 |         if extra_in_sqlite:
 93 |             print(f"ℹ️  SQLite-vec has {len(extra_in_sqlite)} additional memories")
 94 |         
 95 |         # Test search functionality
 96 |         print("\\n🔍 Testing search functionality...")
 97 |         
 98 |         if chroma_memories:
 99 |             # Use first memory's content as search query
100 |             test_query = chroma_memories[0].memory.content[:50]
101 |             
102 |             chroma_results = await chroma_storage.retrieve(test_query, n_results=5)
103 |             sqlite_results = await sqlite_storage.retrieve(test_query, n_results=5)
104 |             
105 |             print(f"📊 Search results - ChromaDB: {len(chroma_results)}, SQLite-vec: {len(sqlite_results)}")
106 |             
107 |             if len(sqlite_results) > 0:
108 |                 print("✅ Search functionality: WORKING")
109 |             else:
110 |                 print("❌ Search functionality: FAILED")
111 |         
112 |         print("\\n🎉 Migration test completed!")
113 |         
114 |     finally:
115 |         await chroma_storage.close()
116 |         await sqlite_storage.close()
117 | 
118 | async def main():
119 |     """Main test function."""
120 |     import os
121 |     
122 |     # Default paths
123 |     chroma_path = CHROMA_PATH
124 |     sqlite_path = os.path.join(os.path.dirname(chroma_path), 'memory_migrated.db')
125 |     
126 |     # Allow custom paths via command line
127 |     if len(sys.argv) > 1:
128 |         sqlite_path = sys.argv[1]
129 |     
130 |     print(f"📂 ChromaDB path: {chroma_path}")
131 |     print(f"📂 SQLite-vec path: {sqlite_path}")
132 |     print()
133 |     
134 |     # Check if files exist
135 |     if not os.path.exists(chroma_path):
136 |         print(f"❌ ChromaDB not found at: {chroma_path}")
137 |         return 1
138 |     
139 |     if not os.path.exists(sqlite_path):
140 |         print(f"❌ SQLite-vec database not found at: {sqlite_path}")
141 |         print("💡 Run the migration script first: python scripts/migrate_chroma_to_sqlite.py")
142 |         return 1
143 |     
144 |     await compare_storage_backends(chroma_path, sqlite_path)
145 |     return 0
146 | 
147 | if __name__ == "__main__":
148 |     sys.exit(asyncio.run(main()))
```
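
A typical invocation sequence, based on the migration hint printed by the script itself; the sqlite-vec path argument is optional and the path shown is illustrative.

```bash
# 1. Migrate ChromaDB data to sqlite-vec (the script referenced in the hint above)
python scripts/migrate_chroma_to_sqlite.py

# 2. Verify the migration; pass a custom sqlite-vec path if it differs from the default
python scripts/testing/test_migration.py ~/.local/share/mcp-memory/memory_migrated.db
```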

--------------------------------------------------------------------------------
/scripts/maintenance/find_cloudflare_duplicates.py:
--------------------------------------------------------------------------------

```python
  1 | #!/usr/bin/env python3
  2 | """Find all near-duplicate memories in Cloudflare D1 database."""
  3 | 
  4 | import asyncio
  5 | import os
  6 | import sys
  7 | from pathlib import Path
  8 | from collections import defaultdict
  9 | import hashlib
 10 | import re
 11 | 
 12 | 
 13 | async def main():
 14 |     # Set OAuth to false to avoid validation issues
 15 |     os.environ['MCP_OAUTH_ENABLED'] = 'false'
 16 | 
 17 |     # Import after setting environment
 18 |     from mcp_memory_service.storage.cloudflare import CloudflareStorage
 19 |     from mcp_memory_service.config import (
 20 |         CLOUDFLARE_API_TOKEN, CLOUDFLARE_ACCOUNT_ID,
 21 |         CLOUDFLARE_VECTORIZE_INDEX, CLOUDFLARE_D1_DATABASE_ID,
 22 |         EMBEDDING_MODEL_NAME
 23 |     )
 24 | 
 25 |     def normalize_content(content):
 26 |         """Normalize content by removing timestamps and session-specific data."""
 27 |         # Remove common timestamp patterns
 28 |         normalized = content
 29 |         normalized = re.sub(r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z', 'TIMESTAMP', normalized)
 30 |         normalized = re.sub(r'\*\*Date\*\*: \d{2,4}[./]\d{2}[./]\d{2,4}', '**Date**: DATE', normalized)
 31 |         normalized = re.sub(r'Timestamp: \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}', 'Timestamp: TIMESTAMP', normalized)
 32 |         return normalized.strip()
 33 | 
 34 |     def content_hash(content):
 35 |         """Create a hash of normalized content."""
 36 |         normalized = normalize_content(content)
 37 |         return hashlib.md5(normalized.encode()).hexdigest()
 38 | 
 39 |     print("🔗 Connecting to Cloudflare...")
 40 | 
 41 |     # Initialize Cloudflare storage
 42 |     cloudflare = CloudflareStorage(
 43 |         api_token=CLOUDFLARE_API_TOKEN,
 44 |         account_id=CLOUDFLARE_ACCOUNT_ID,
 45 |         vectorize_index=CLOUDFLARE_VECTORIZE_INDEX,
 46 |         d1_database_id=CLOUDFLARE_D1_DATABASE_ID,
 47 |         embedding_model=EMBEDDING_MODEL_NAME
 48 |     )
 49 | 
 50 |     await cloudflare.initialize()
 51 |     print("✅ Connected to Cloudflare\n")
 52 | 
 53 |     print("📊 Fetching all memories from Cloudflare D1...")
 54 | 
 55 |     # Use the public API method for better encapsulation and performance
 56 |     try:
 57 |         all_memories = await cloudflare.get_all_memories_bulk(include_tags=False)
 58 |     except Exception as e:
 59 |         print(f"❌ Failed to fetch memories from Cloudflare D1: {e}")
 60 |         return 1
 61 | 
 62 |     if not all_memories:
 63 |         print("✅ No memories found to check for duplicates.")
 64 |         return 0
 65 | 
 66 |     # Convert Memory objects to the expected format for the rest of the script
 67 |     memories = []
 68 |     for memory in all_memories:
 69 |         memories.append({
 70 |             'content_hash': memory.content_hash,
 71 |             'content': memory.content,
 72 |             'tags': ','.join(memory.tags),  # Convert list to comma-separated string
 73 |             'created_at': memory.created_at
 74 |         })
 75 |     print(f"Total memories in Cloudflare: {len(memories)}\n")
 76 | 
 77 |     # Group by normalized content
 78 |     content_groups = defaultdict(list)
 79 |     for mem in memories:
 80 |         norm_hash = content_hash(mem['content'])
 81 |         content_groups[norm_hash].append({
 82 |             'hash': mem['content_hash'],
 83 |             'content': mem['content'][:200],  # First 200 chars
 84 |             'tags': mem['tags'][:80] if mem['tags'] else '',
 85 |             'created_at': mem['created_at']
 86 |         })
 87 | 
 88 |     # Find duplicates (groups with >1 memory); sort each group newest-first so group[0] is the one to keep
 89 |     duplicates = {k: sorted(v, key=lambda m: m['created_at'] or 0, reverse=True) for k, v in content_groups.items() if len(v) > 1}
 90 | 
 91 |     if not duplicates:
 92 |         print("✅ No duplicates found in Cloudflare!")
 93 |         return 0
 94 | 
 95 |     print(f"\n❌ Found {len(duplicates)} groups of duplicates:\n")
 96 | 
 97 |     total_duplicate_count = 0
 98 |     for i, (norm_hash, group) in enumerate(duplicates.items(), 1):
 99 |         count = len(group)
100 |         total_duplicate_count += count - 1  # Keep one, delete rest
101 | 
102 |         print(f"{i}. Group with {count} duplicates:")
103 |         print(f"   Content preview: {group[0]['content'][:100]}...")
104 |         print(f"   Tags: {group[0]['tags'][:80]}...")
105 |         print(f"   Hashes to keep: {group[0]['hash'][:16]}... (newest)")
106 |         print(f"   Hashes to delete: {count-1} older duplicates")
107 | 
108 |         if i >= 10:  # Show only first 10 groups
109 |             remaining = len(duplicates) - 10
110 |             print(f"\n... and {remaining} more duplicate groups")
111 |             break
112 | 
113 |     print(f"\n📊 Summary:")
114 |     print(f"   Total duplicate groups: {len(duplicates)}")
115 |     print(f"   Total memories to delete: {total_duplicate_count}")
116 |     print(f"   Total memories after cleanup: {len(memories) - total_duplicate_count}")
117 | 
118 |     # Ask if user wants to save hashes for deletion
119 |     save_hashes = input("\n💾 Save duplicate hashes for deletion? (y/n): ").strip().lower()
120 | 
121 |     if save_hashes == 'y':
122 |         hash_file = Path.home() / "cloudflare_duplicates.txt"
123 | 
124 |         # Collect hashes to delete (keep newest, delete older)
125 |         hashes_to_delete = []
126 |         for group in duplicates.values():
127 |             for memory in group[1:]:  # Keep first (newest), delete rest
128 |                 hashes_to_delete.append(memory['hash'])
129 | 
130 |         with open(hash_file, 'w') as f:
131 |             for content_hash in hashes_to_delete:
132 |                 f.write(f"{content_hash}\n")
133 | 
134 |         print(f"\n✅ Saved {len(hashes_to_delete)} hashes to {hash_file}")
135 |         print(f"📋 Next step: Delete from Cloudflare")
136 |         print(f"   Update delete_cloudflare_duplicates.py to read from cloudflare_duplicates.txt")
137 | 
138 |     return 0
139 | 
140 | 
141 | if __name__ == "__main__":
142 |     sys.exit(asyncio.run(main()))
143 | 
```
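
To illustrate how the duplicate grouping above works, the snippet below re-inlines the same normalization regexes and shows that two payloads differing only in their timestamps collapse to the same group key. The sample strings are hypothetical.

```python
import re

def normalize_content(content: str) -> str:
    # Mirrors the normalization inside find_cloudflare_duplicates.py
    normalized = content
    normalized = re.sub(r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z', 'TIMESTAMP', normalized)
    normalized = re.sub(r'\*\*Date\*\*: \d{2,4}[./]\d{2}[./]\d{2,4}', '**Date**: DATE', normalized)
    normalized = re.sub(r'Timestamp: \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}', 'Timestamp: TIMESTAMP', normalized)
    return normalized.strip()

# Two hypothetical memory payloads that differ only in their timestamps...
a = "Session summary - Timestamp: 2025-01-02 03:04:05 - sync complete"
b = "Session summary - Timestamp: 2025-03-09 11:22:33 - sync complete"
assert normalize_content(a) == normalize_content(b)  # ...normalize to the same group key
```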

--------------------------------------------------------------------------------
/scripts/utils/groq_agent_bridge.py:
--------------------------------------------------------------------------------

```python
  1 | #!/usr/bin/env python3
  2 | """
  3 | Non-interactive Groq API client for AI agent integration.
  4 | Allows one AI system to call Groq's language models programmatically.
  5 | """
  6 | 
  7 | import json
  8 | import os
  9 | import sys
 10 | from groq import Groq
 11 | from groq import APIError, AuthenticationError, RateLimitError, APIConnectionError
 12 | 
 13 | 
 14 | class GroqAgentBridge:
 15 |     """Bridge for other AI agents to call Groq's language models."""
 16 |     
 17 |     def __init__(self, api_key=None):
 18 |         """Initialize with API key from environment or parameter."""
 19 |         self.api_key = api_key or os.environ.get('GROQ_API_KEY')
 20 |         if not self.api_key:
 21 |             raise ValueError("GROQ_API_KEY environment variable required")
 22 |         
 23 |         self.client = Groq(api_key=self.api_key)
 24 |     
 25 |     def call_model(self, prompt, model="llama-3.3-70b-versatile",
 26 |                    max_tokens=1024, temperature=0.7, system_message=None):
 27 |         """
 28 |         Non-interactive call to Groq's language model.
 29 |         
 30 |         Args:
 31 |             prompt: User prompt to send to the model
 32 |             model: Model to use (default: llama-3.3-70b-versatile)
 33 |             max_tokens: Maximum tokens in response
 34 |             temperature: Sampling temperature
 35 |             system_message: Optional system context message
 36 |         
 37 |         Returns:
 38 |             Dict with response data or error
 39 |         """
 40 |         try:
 41 |             messages = []
 42 |             if system_message:
 43 |                 messages.append({"role": "system", "content": system_message})
 44 |             messages.append({"role": "user", "content": prompt})
 45 |             
 46 |             response = self.client.chat.completions.create(
 47 |                 model=model,
 48 |                 messages=messages,
 49 |                 max_tokens=max_tokens,
 50 |                 temperature=temperature
 51 |             )
 52 |             
 53 |             return {
 54 |                 "status": "success",
 55 |                 "response": response.choices[0].message.content,
 56 |                 "model": model,
 57 |                 "tokens_used": response.usage.total_tokens
 58 |             }
 59 | 
 60 |         except AuthenticationError as e:
 61 |             return {
 62 |                 "status": "error",
 63 |                 "error": f"Authentication failed: {str(e)}. Check GROQ_API_KEY environment variable.",
 64 |                 "error_type": "authentication",
 65 |                 "model": model
 66 |             }
 67 |         except RateLimitError as e:
 68 |             return {
 69 |                 "status": "error",
 70 |                 "error": f"Rate limit exceeded: {str(e)}. Please try again later.",
 71 |                 "error_type": "rate_limit",
 72 |                 "model": model
 73 |             }
 74 |         except APIConnectionError as e:
 75 |             return {
 76 |                 "status": "error",
 77 |                 "error": f"Network connection failed: {str(e)}. Check your internet connection.",
 78 |                 "error_type": "connection",
 79 |                 "model": model
 80 |             }
 81 |         except APIError as e:
 82 |             return {
 83 |                 "status": "error",
 84 |                 "error": f"Groq API error: {str(e)}",
 85 |                 "error_type": "api_error",
 86 |                 "model": model
 87 |             }
 88 |         except Exception as e:
 89 |             # Catch-all for unexpected errors
 90 |             return {
 91 |                 "status": "error",
 92 |                 "error": f"Unexpected error: {str(e)}",
 93 |                 "error_type": "unknown",
 94 |                 "model": model
 95 |             }
 96 |     
 97 |     def call_model_raw(self, prompt, **kwargs):
 98 |         """Raw text response for direct consumption by other agents."""
 99 |         result = self.call_model(prompt, **kwargs)
100 |         if result["status"] == "success":
101 |             return result["response"]
102 |         else:
103 |             raise Exception(f"Groq API error: {result['error']}")
104 | 
105 | 
106 | def main():
107 |     """Command-line interface for non-interactive usage."""
108 |     import argparse
109 |     
110 |     parser = argparse.ArgumentParser(description='Groq API Bridge for AI Agents')
111 |     parser.add_argument('prompt', help='Input prompt for the model')
112 |     parser.add_argument('--model', default='llama-3.3-70b-versatile',
113 |                        help='Model to use (default: llama-3.3-70b-versatile)')
114 |     parser.add_argument('--max-tokens', type=int, default=1024,
115 |                        help='Maximum tokens in response')
116 |     parser.add_argument('--temperature', type=float, default=0.7,
117 |                        help='Sampling temperature')
118 |     parser.add_argument('--system', help='System message for context')
119 |     parser.add_argument('--json', action='store_true',
120 |                        help='Output JSON response')
121 |     
122 |     args = parser.parse_args()
123 |     
124 |     try:
125 |         bridge = GroqAgentBridge()
126 |         result = bridge.call_model(
127 |             prompt=args.prompt,
128 |             model=args.model,
129 |             max_tokens=args.max_tokens,
130 |             temperature=args.temperature,
131 |             system_message=args.system
132 |         )
133 |         
134 |         if args.json:
135 |             print(json.dumps(result, indent=2))
136 |         else:
137 |             if result["status"] == "success":
138 |                 print(result["response"])
139 |             else:
140 |                 print(f"Error: {result['error']}", file=sys.stderr)
141 |                 sys.exit(1)
142 |                 
143 |     except Exception as e:
144 |         print(f"Error: {e}", file=sys.stderr)
145 |         sys.exit(1)
146 | 
147 | 
148 | if __name__ == "__main__":
149 |     main()
```
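
Programmatic use of the bridge class above, for callers that prefer the dict API over the CLI. The prompt and system message are illustrative, `GROQ_API_KEY` must be set in the environment, and the import path assumes the script is importable from where it lives.

```python
# Call the bridge defined above from another Python agent.
from groq_agent_bridge import GroqAgentBridge  # adjust import path to the script's location

bridge = GroqAgentBridge()
result = bridge.call_model(
    "Summarize the purpose of the MCP Memory Service in two sentences.",  # illustrative prompt
    system_message="You are a concise technical writer.",
    max_tokens=256,
)
if result["status"] == "success":
    print(result["response"])
else:
    print(f"{result['error_type']}: {result['error']}")
```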

--------------------------------------------------------------------------------
/scripts/migration/verify_mcp_timestamps.py:
--------------------------------------------------------------------------------

```python
  1 | #!/usr/bin/env python3
  2 | """
  3 | Verification script to check timestamp consistency in MCP Memory ChromaDB database.
  4 | Run this before and after migration to see the state of timestamps.
  5 | """
  6 | 
  7 | import sqlite3
  8 | from datetime import datetime
  9 | import os
 10 | 
 11 | DB_PATH = "/Users/hkr/Library/Application Support/mcp-memory/chroma_db/chroma.sqlite3"
 12 | 
 13 | def check_timestamps():
 14 |     """Check current timestamp situation in the database."""
 15 |     
 16 |     if not os.path.exists(DB_PATH):
 17 |         print(f"❌ Database not found at: {DB_PATH}")
 18 |         return
 19 |     
 20 |     conn = sqlite3.connect(DB_PATH)
 21 |     cursor = conn.cursor()
 22 |     
 23 |     print("=" * 70)
 24 |     print("MCP Memory Timestamp Verification Report")
 25 |     print("=" * 70)
 26 |     print(f"Database: {DB_PATH}")
 27 |     print(f"Report generated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
 28 |     
 29 |     # 1. Count total memories
 30 |     cursor.execute("SELECT COUNT(*) FROM embeddings")
 31 |     total_memories = cursor.fetchone()[0]
 32 |     print(f"Total memories in database: {total_memories}")
 33 |     
 34 |     # 2. Check timestamp field distribution
 35 |     print("\n📊 Timestamp Field Analysis:")
 36 |     print("-" * 70)
 37 |     
 38 |     cursor.execute("""
 39 |         SELECT 
 40 |             key,
 41 |             COUNT(DISTINCT id) as memories,
 42 |             COUNT(CASE WHEN string_value IS NOT NULL THEN 1 END) as str_vals,
 43 |             COUNT(CASE WHEN int_value IS NOT NULL THEN 1 END) as int_vals,
 44 |             COUNT(CASE WHEN float_value IS NOT NULL THEN 1 END) as float_vals
 45 |         FROM embedding_metadata
 46 |         WHERE key IN ('timestamp', 'created_at', 'created_at_iso', 'timestamp_float', 
 47 |                       'timestamp_str', 'updated_at', 'updated_at_iso', 'date')
 48 |         GROUP BY key
 49 |         ORDER BY memories DESC
 50 |     """)
 51 |     
 52 |     results = cursor.fetchall()
 53 |     
 54 |     print(f"{'Field':<20} {'Memories':<12} {'String':<10} {'Int':<10} {'Float':<10}")
 55 |     print("-" * 70)
 56 |     
 57 |     for row in results:
 58 |         print(f"{row[0]:<20} {row[1]:<12} {row[2]:<10} {row[3]:<10} {row[4]:<10}")
 59 |     
 60 |     # 3. Check for memories without timestamps
 61 |     print("\n📍 Missing Timestamp Analysis:")
 62 |     
 63 |     cursor.execute("""
 64 |         SELECT COUNT(DISTINCT e.id)
 65 |         FROM embeddings e
 66 |         WHERE e.id NOT IN (
 67 |             SELECT id FROM embedding_metadata 
 68 |             WHERE key = 'timestamp' AND int_value IS NOT NULL
 69 |         )
 70 |     """)
 71 |     
 72 |     missing_timestamps = cursor.fetchone()[0]
 73 |     print(f"Memories without 'timestamp' field: {missing_timestamps}")
 74 |     
 75 |     # 4. Show sample of different timestamp formats
 76 |     print("\n📅 Sample Timestamp Values:")
 77 |     print("-" * 70)
 78 |     
 79 |     # Get a sample memory with multiple timestamp formats
 80 |     cursor.execute("""
 81 |         SELECT 
 82 |             em.id,
 83 |             MAX(CASE WHEN em.key = 'timestamp' THEN em.int_value END) as ts_int,
 84 |             MAX(CASE WHEN em.key = 'created_at' THEN em.float_value END) as created_float,
 85 |             MAX(CASE WHEN em.key = 'timestamp_str' THEN em.string_value END) as ts_str,
 86 |             SUBSTR(MAX(CASE WHEN em.key = 'chroma:document' THEN em.string_value END), 1, 50) as content
 87 |         FROM embedding_metadata em
 88 |         WHERE em.id IN (
 89 |             SELECT DISTINCT id FROM embedding_metadata 
 90 |             WHERE key IN ('timestamp', 'created_at', 'timestamp_str')
 91 |         )
 92 |         GROUP BY em.id
 93 |         HAVING COUNT(DISTINCT em.key) > 1
 94 |         LIMIT 3
 95 |     """)
 96 |     
 97 |     samples = cursor.fetchall()
 98 |     
 99 |     for i, (mem_id, ts_int, created_float, ts_str, content) in enumerate(samples, 1):
100 |         print(f"\nMemory ID {mem_id}:")
101 |         print(f"  Content: {content}...")
102 |         if ts_int:
103 |             print(f"  timestamp (int): {ts_int} = {datetime.fromtimestamp(ts_int).strftime('%Y-%m-%d %H:%M:%S')}")
104 |         if created_float:
105 |             print(f"  created_at (float): {created_float} = {datetime.fromtimestamp(created_float).strftime('%Y-%m-%d %H:%M:%S.%f')[:23]}")
106 |         if ts_str:
107 |             print(f"  timestamp_str: {ts_str}")
108 |     
109 |     # 5. Date range analysis
110 |     print("\n📆 Timestamp Date Ranges:")
111 |     print("-" * 70)
112 |     
113 |     # For each timestamp field, show the date range
114 |     for field, dtype in [('timestamp', 'int_value'), ('created_at', 'float_value')]:
115 |         cursor.execute(f"""
116 |             SELECT 
117 |                 MIN({dtype}) as min_val,
118 |                 MAX({dtype}) as max_val,
119 |                 COUNT(DISTINCT id) as count
120 |             FROM embedding_metadata
121 |             WHERE key = ? AND {dtype} IS NOT NULL
122 |         """, (field,))
123 |         
124 |         result = cursor.fetchone()
125 |         if result and result[2] > 0:
126 |             min_date = datetime.fromtimestamp(result[0]).strftime('%Y-%m-%d')
127 |             max_date = datetime.fromtimestamp(result[1]).strftime('%Y-%m-%d')
128 |             print(f"{field:<15} ({result[2]} memories): {min_date} to {max_date}")
129 |     
130 |     # 6. Summary recommendation
131 |     print("\n💡 Recommendations:")
132 |     print("-" * 70)
133 |     
134 |     if missing_timestamps > 0:
135 |         print(f"⚠️  {missing_timestamps} memories need timestamp migration")
136 |     
137 |     if len(results) > 1:
138 |         print(f"⚠️  Found {len(results)} different timestamp fields - consolidation recommended")
139 |         print("   Run cleanup_mcp_timestamps.py to fix this issue")
140 |     else:
141 |         print("✅ Timestamp fields look clean!")
142 |     
143 |     conn.close()
144 | 
145 | if __name__ == "__main__":
146 |     check_timestamps()
147 | 
```

--------------------------------------------------------------------------------
/scripts/pr/amp_quality_gate.sh:
--------------------------------------------------------------------------------

```bash
  1 | #!/bin/bash
  2 | # scripts/pr/amp_quality_gate.sh - Parallel quality checks using Amp CLI
  3 | #
  4 | # Usage: bash scripts/pr/amp_quality_gate.sh <PR_NUMBER>
  5 | # Example: bash scripts/pr/amp_quality_gate.sh 215
  6 | # For local branch (pre-PR): bash scripts/pr/amp_quality_gate.sh 0
  7 | 
  8 | set -e
  9 | 
 10 | PR_NUMBER=$1
 11 | 
 12 | if [ -z "$PR_NUMBER" ]; then
 13 |     echo "Usage: $0 <PR_NUMBER>"
 14 |     echo "Use 0 for local branch (pre-PR checks)"
 15 |     exit 1
 16 | fi
 17 | 
 18 | # Ensure Amp prompt directories exist
 19 | mkdir -p .claude/amp/prompts/pending
 20 | mkdir -p .claude/amp/responses/ready
 21 | 
 22 | echo "=== Amp CLI Quality Gate for PR #$PR_NUMBER ==="
 23 | echo ""
 24 | 
 25 | # Get changed Python files
 26 | if [ "$PR_NUMBER" = "0" ]; then
 27 |     echo "Analyzing local branch changes..."
 28 |     changed_files=$(git diff --name-only origin/main | grep '\.py$' || echo "")
 29 | else
 30 |     if ! command -v gh &> /dev/null; then
 31 |         echo "Error: GitHub CLI (gh) is not installed"
 32 |         exit 1
 33 |     fi
 34 |     echo "Fetching changed files from PR #$PR_NUMBER..."
 35 |     changed_files=$(gh pr diff $PR_NUMBER --name-only | grep '\.py$' || echo "")
 36 | fi
 37 | 
 38 | if [ -z "$changed_files" ]; then
 39 |     echo "No Python files changed."
 40 |     exit 0
 41 | fi
 42 | 
 43 | echo "Changed Python files:"
 44 | echo "$changed_files"
 45 | echo ""
 46 | 
 47 | # Generate UUIDs for each check
 48 | complexity_uuid=$(uuidgen 2>/dev/null || cat /proc/sys/kernel/random/uuid)
 49 | security_uuid=$(uuidgen 2>/dev/null || cat /proc/sys/kernel/random/uuid)
 50 | typehints_uuid=$(uuidgen 2>/dev/null || cat /proc/sys/kernel/random/uuid)
 51 | 
 52 | # Store UUIDs for result collection
 53 | echo "$complexity_uuid,$security_uuid,$typehints_uuid" > /tmp/amp_quality_gate_uuids_${PR_NUMBER}.txt
 54 | 
 55 | echo "Creating Amp prompts for parallel processing..."
 56 | echo ""
 57 | 
 58 | # Create complexity check prompt
 59 | cat > .claude/amp/prompts/pending/complexity-${complexity_uuid}.json << EOF
 60 | {
 61 |   "id": "${complexity_uuid}",
 62 |   "timestamp": "$(date -u +"%Y-%m-%dT%H:%M:%S.000Z")",
 63 |   "prompt": "Analyze code complexity for each function in these files. Rating scale: 1-10 (1=simple, 10=very complex). ONLY report functions with score >7 in this exact format: 'File:Function: Score X - Reason'. If all functions score ≤7, respond: 'COMPLEXITY_OK'. Files:\n\n$(echo "$changed_files" | while read file; do echo "=== $file ==="; cat "$file" 2>/dev/null || echo "File not found"; echo ""; done)",
 64 |   "context": {
 65 |     "project": "mcp-memory-service",
 66 |     "task": "complexity-analysis",
 67 |     "pr_number": "${PR_NUMBER}"
 68 |   },
 69 |   "options": {
 70 |     "timeout": 120000,
 71 |     "format": "text"
 72 |   }
 73 | }
 74 | EOF
 75 | 
 76 | # Create security scan prompt
 77 | cat > .claude/amp/prompts/pending/security-${security_uuid}.json << EOF
 78 | {
 79 |   "id": "${security_uuid}",
 80 |   "timestamp": "$(date -u +"%Y-%m-%dT%H:%M:%S.000Z")",
 81 |   "prompt": "Security audit for vulnerabilities: SQL injection (raw SQL, string formatting in queries), XSS (unescaped HTML output), command injection (os.system, subprocess with shell=True), path traversal (user input in file paths), hardcoded secrets (API keys, passwords). IMPORTANT: Output format - If ANY vulnerability found: 'VULNERABILITY_DETECTED: [type] - [details]'. If NO vulnerabilities: 'SECURITY_CLEAN'. Files:\n\n$(echo "$changed_files" | while read file; do echo "=== $file ==="; cat "$file" 2>/dev/null || echo "File not found"; echo ""; done)",
 82 |   "context": {
 83 |     "project": "mcp-memory-service",
 84 |     "task": "security-scan",
 85 |     "pr_number": "${PR_NUMBER}"
 86 |   },
 87 |   "options": {
 88 |     "timeout": 120000,
 89 |     "format": "text"
 90 |   }
 91 | }
 92 | EOF
 93 | 
 94 | # Create type hints check prompt
 95 | cat > .claude/amp/prompts/pending/typehints-${typehints_uuid}.json << EOF
 96 | {
 97 |   "id": "${typehints_uuid}",
 98 |   "timestamp": "$(date -u +"%Y-%m-%dT%H:%M:%S.000Z")",
 99 |   "prompt": "Check type hint coverage for these Python files. Report: 1) Total functions/methods, 2) Functions with complete type hints, 3) Functions missing type hints (list names), 4) Coverage percentage. Output format: 'COVERAGE: X%' then 'MISSING: function1, function2, ...' (or 'NONE' if all covered). Files:\n\n$(echo "$changed_files" | while read file; do echo "=== $file ==="; cat "$file" 2>/dev/null || echo "File not found"; echo ""; done)",
100 |   "context": {
101 |     "project": "mcp-memory-service",
102 |     "task": "type-hints",
103 |     "pr_number": "${PR_NUMBER}"
104 |   },
105 |   "options": {
106 |     "timeout": 120000,
107 |     "format": "text"
108 |   }
109 | }
110 | EOF
111 | 
112 | echo "✅ Created 3 Amp prompts for parallel processing"
113 | echo ""
114 | echo "=== Run these Amp commands in parallel (in separate terminals or background) ==="
115 | echo ""
116 | echo "amp @.claude/amp/prompts/pending/complexity-${complexity_uuid}.json &"
117 | echo "amp @.claude/amp/prompts/pending/security-${security_uuid}.json &"
118 | echo "amp @.claude/amp/prompts/pending/typehints-${typehints_uuid}.json &"
119 | echo ""
120 | echo "=== Then collect results with ==="
121 | echo "bash scripts/pr/amp_collect_results.sh --timeout 300 --uuids '${complexity_uuid},${security_uuid},${typehints_uuid}'"
122 | echo ""
123 | echo "=== Or use this one-liner to run all in background ==="
124 | echo "(amp @.claude/amp/prompts/pending/complexity-${complexity_uuid}.json > /tmp/amp-complexity.log 2>&1 &); (amp @.claude/amp/prompts/pending/security-${security_uuid}.json > /tmp/amp-security.log 2>&1 &); (amp @.claude/amp/prompts/pending/typehints-${typehints_uuid}.json > /tmp/amp-typehints.log 2>&1 &); sleep 10 && bash scripts/pr/amp_collect_results.sh --timeout 300 --uuids '${complexity_uuid},${security_uuid},${typehints_uuid}'"
125 | 
```

--------------------------------------------------------------------------------
/install_service.py:
--------------------------------------------------------------------------------

```python
  1 | #!/usr/bin/env python3
  2 | """
  3 | Cross-platform service installer for MCP Memory Service.
  4 | Automatically detects the operating system and installs the appropriate service.
  5 | """
  6 | import os
  7 | import sys
  8 | import platform
  9 | import argparse
 10 | import subprocess
 11 | from pathlib import Path
 12 | 
 13 | 
 14 | def print_header(text):
 15 |     """Print a formatted header."""
 16 |     print("\n" + "=" * 60)
 17 |     print(f" {text}")
 18 |     print("=" * 60)
 19 | 
 20 | 
 21 | def print_error(text):
 22 |     """Print formatted error text."""
 23 |     print(f"\n❌ ERROR: {text}")
 24 | 
 25 | 
 26 | def print_info(text):
 27 |     """Print formatted info text."""
 28 |     print(f"ℹ️  {text}")
 29 | 
 30 | 
 31 | def detect_platform():
 32 |     """Detect the current platform."""
 33 |     system = platform.system().lower()
 34 |     
 35 |     platforms = {
 36 |         'windows': 'Windows',
 37 |         'darwin': 'macOS',
 38 |         'linux': 'Linux'
 39 |     }
 40 |     
 41 |     return system, platforms.get(system, 'Unknown')
 42 | 
 43 | 
 44 | def check_python_version():
 45 |     """Check if Python version meets requirements."""
 46 |     if sys.version_info < (3, 10):
 47 |         print_error(f"Python 3.10 or newer is required. Found: {sys.version}")
 48 |         sys.exit(1)
 49 | 
 50 | 
 51 | def run_platform_installer(platform_name, args):
 52 |     """Run the appropriate platform-specific installer."""
 53 |     # Get the directory where this script is located
 54 |     script_dir = Path(__file__).parent
 55 |     scripts_dir = script_dir / 'scripts'
 56 |     
 57 |     installers = {
 58 |         'windows': scripts_dir / 'install_windows_service.py',
 59 |         'darwin': scripts_dir / 'install_macos_service.py',
 60 |         'linux': scripts_dir / 'install_linux_service.py'
 61 |     }
 62 |     
 63 |     installer = installers.get(platform_name)
 64 |     
 65 |     if not installer:
 66 |         print_error(f"No installer available for platform: {platform_name}")
 67 |         sys.exit(1)
 68 |     
 69 |     if not installer.exists():
 70 |         print_error(f"Platform installer not found: {installer}")
 71 |         print_info("This installer may not be implemented yet.")
 72 |         
 73 |         # For Linux, fall back to the bash script if Python version doesn't exist
 74 |         if platform_name == 'linux':
 75 |             bash_installer = script_dir / 'install_service.sh'
 76 |             if bash_installer.exists():
 77 |                 print_info("Falling back to bash installer for Linux...")
 78 |                 
 79 |                 # Make sure the script is executable
 80 |                 bash_installer.chmod(0o755)
 81 |                 
 82 |                 # Run the bash script
 83 |                 try:
 84 |                     subprocess.run([str(bash_installer)], check=True)
 85 |                     return
 86 |                 except subprocess.CalledProcessError as e:
 87 |                     print_error(f"Installation failed: {e}")
 88 |                     sys.exit(1)
 89 |         
 90 |         sys.exit(1)
 91 |     
 92 |     # Build command with arguments
 93 |     cmd = [sys.executable, str(installer)]
 94 |     
 95 |     # Pass through command-line arguments
 96 |     if args.command:
 97 |         cmd.extend(['--command', args.command])
 98 |     if args.uninstall:
 99 |         cmd.append('--uninstall')
100 |     if args.start:
101 |         cmd.append('--start')
102 |     if args.stop:
103 |         cmd.append('--stop')
104 |     if args.status:
105 |         cmd.append('--status')
106 |     if args.user:
107 |         cmd.append('--user')
108 |     if args.system:
109 |         cmd.append('--system')
110 |     
111 |     # Run the platform-specific installer
112 |     try:
113 |         subprocess.run(cmd, check=True)
114 |     except subprocess.CalledProcessError as e:
115 |         print_error(f"Installation failed: {e}")
116 |         sys.exit(1)
117 |     except FileNotFoundError:
118 |         print_error(f"Could not run installer: {installer}")
119 |         sys.exit(1)
120 | 
121 | 
122 | def main():
123 |     """Main entry point."""
124 |     parser = argparse.ArgumentParser(
125 |         description="Cross-platform service installer for MCP Memory Service"
126 |     )
127 |     
128 |     # Service operations
129 |     parser.add_argument(
130 |         '--command', 
131 |         choices=['install', 'uninstall', 'start', 'stop', 'restart', 'status'],
132 |         help='Service command to execute'
133 |     )
134 |     parser.add_argument('--uninstall', action='store_true', help='Uninstall the service')
135 |     parser.add_argument('--start', action='store_true', help='Start the service after installation')
136 |     parser.add_argument('--stop', action='store_true', help='Stop the service')
137 |     parser.add_argument('--status', action='store_true', help='Check service status')
138 |     
139 |     # Installation options
140 |     parser.add_argument('--user', action='store_true', help='Install as user service (default)')
141 |     parser.add_argument('--system', action='store_true', help='Install as system service (requires admin)')
142 |     
143 |     args = parser.parse_args()
144 |     
145 |     # Print header
146 |     print_header("MCP Memory Service - Cross-Platform Installer")
147 |     
148 |     # Check Python version
149 |     check_python_version()
150 |     
151 |     # Detect platform
152 |     platform_name, platform_display = detect_platform()
153 |     print_info(f"Detected platform: {platform_display}")
154 |     print_info(f"Python version: {sys.version.split()[0]}")
155 |     
156 |     # Check for virtual environment
157 |     if hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
158 |         print_info(f"Virtual environment: {sys.prefix}")
159 |     else:
 160 |         print_info("⚠️  Not running in a virtual environment (using one is recommended)")
161 |     
162 |     # Run platform-specific installer
163 |     print_info(f"Running {platform_display} service installer...")
164 |     run_platform_installer(platform_name, args)
165 | 
166 | 
167 | if __name__ == '__main__':
168 |     main()
```
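
The flag pass-through in `run_platform_installer()` is the core of this wrapper; a minimal sketch of the same mapping, assuming a hypothetical argument namespace and a macOS target (paths here are illustrative):

```python
# Sketch only: mirrors how install_service.py forwards its own CLI flags to the
# platform-specific installer.
import sys
from argparse import Namespace

args = Namespace(command="install", uninstall=False, start=True,
                 stop=False, status=False, user=True, system=False)

cmd = [sys.executable, "scripts/install_macos_service.py"]
if args.command:
    cmd.extend(["--command", args.command])
for flag in ("uninstall", "start", "stop", "status", "user", "system"):
    if getattr(args, flag):
        cmd.append(f"--{flag}")

print(cmd)
# [..., 'scripts/install_macos_service.py', '--command', 'install', '--start', '--user']
```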

--------------------------------------------------------------------------------
/claude-hooks/tests/test-session-tracking.json:
--------------------------------------------------------------------------------

```json
  1 | {
  2 |   "sessions": [
  3 |     {
  4 |       "id": "test-session-1759496420980",
  5 |       "startTime": "2025-10-03T13:00:20.980Z",
  6 |       "endTime": null,
  7 |       "projectContext": {
  8 |         "name": "mcp-memory-service",
  9 |         "type": "Multi-language Project",
 10 |         "languages": [
 11 |           "javascript",
 12 |           "python"
 13 |         ],
 14 |         "frameworks": [
 15 |           "node.js",
 16 |           "fastapi"
 17 |         ],
 18 |         "tools": [
 19 |           "git",
 20 |           "npm",
 21 |           "pip"
 22 |         ],
 23 |         "confidence": 0.95
 24 |       },
 25 |       "workingDirectory": "/test/directory",
 26 |       "initialTopics": [],
 27 |       "finalTopics": [],
 28 |       "memoriesLoaded": [],
 29 |       "memoriesCreated": [],
 30 |       "conversationSummary": null,
 31 |       "outcome": null,
 32 |       "threadId": "thread-8e3c1ef4f0d194a7",
 33 |       "parentSessionId": null,
 34 |       "childSessionIds": [],
 35 |       "status": "active"
 36 |     }
 37 |   ],
 38 |   "conversationThreads": [
 39 |     {
 40 |       "id": "thread-580819bbd8f0cd81",
 41 |       "createdAt": "2025-08-20T11:38:33.001Z",
 42 |       "projectContext": {
 43 |         "name": "mcp-memory-service",
 44 |         "type": "Multi-language Project",
 45 |         "languages": [
 46 |           "javascript",
 47 |           "python"
 48 |         ],
 49 |         "frameworks": [
 50 |           "node.js",
 51 |           "fastapi"
 52 |         ],
 53 |         "tools": [
 54 |           "git",
 55 |           "npm",
 56 |           "pip"
 57 |         ],
 58 |         "confidence": 0.95
 59 |       },
 60 |       "sessionIds": [
 61 |         "test-session-1755689913000"
 62 |       ],
 63 |       "topics": [],
 64 |       "outcomes": [],
 65 |       "status": "active"
 66 |     },
 67 |     {
 68 |       "id": "thread-e5d79cf384f81206",
 69 |       "createdAt": "2025-08-20T11:42:10.896Z",
 70 |       "projectContext": {
 71 |         "name": "mcp-memory-service",
 72 |         "type": "Multi-language Project",
 73 |         "languages": [
 74 |           "javascript",
 75 |           "python"
 76 |         ],
 77 |         "frameworks": [
 78 |           "node.js",
 79 |           "fastapi"
 80 |         ],
 81 |         "tools": [
 82 |           "git",
 83 |           "npm",
 84 |           "pip"
 85 |         ],
 86 |         "confidence": 0.95
 87 |       },
 88 |       "sessionIds": [
 89 |         "test-session-1755690130896"
 90 |       ],
 91 |       "topics": [],
 92 |       "outcomes": [],
 93 |       "status": "active"
 94 |     },
 95 |     {
 96 |       "id": "thread-baec3bef4586c544",
 97 |       "createdAt": "2025-08-20T11:43:24.007Z",
 98 |       "projectContext": {
 99 |         "name": "mcp-memory-service",
100 |         "type": "Multi-language Project",
101 |         "languages": [
102 |           "javascript",
103 |           "python"
104 |         ],
105 |         "frameworks": [
106 |           "node.js",
107 |           "fastapi"
108 |         ],
109 |         "tools": [
110 |           "git",
111 |           "npm",
112 |           "pip"
113 |         ],
114 |         "confidence": 0.95
115 |       },
116 |       "sessionIds": [
117 |         "test-session-1755690204007"
118 |       ],
119 |       "topics": [],
120 |       "outcomes": [],
121 |       "status": "active"
122 |     },
123 |     {
124 |       "id": "thread-8ceebf438da3ede6",
125 |       "createdAt": "2025-08-20T11:43:49.796Z",
126 |       "projectContext": {
127 |         "name": "mcp-memory-service",
128 |         "type": "Multi-language Project",
129 |         "languages": [
130 |           "javascript",
131 |           "python"
132 |         ],
133 |         "frameworks": [
134 |           "node.js",
135 |           "fastapi"
136 |         ],
137 |         "tools": [
138 |           "git",
139 |           "npm",
140 |           "pip"
141 |         ],
142 |         "confidence": 0.95
143 |       },
144 |       "sessionIds": [
145 |         "test-session-1755690229795"
146 |       ],
147 |       "topics": [],
148 |       "outcomes": [],
149 |       "status": "active"
150 |     },
151 |     {
152 |       "id": "thread-3596674e63855259",
153 |       "createdAt": "2025-08-20T11:44:35.337Z",
154 |       "projectContext": {
155 |         "name": "mcp-memory-service",
156 |         "type": "Multi-language Project",
157 |         "languages": [
158 |           "javascript",
159 |           "python"
160 |         ],
161 |         "frameworks": [
162 |           "node.js",
163 |           "fastapi"
164 |         ],
165 |         "tools": [
166 |           "git",
167 |           "npm",
168 |           "pip"
169 |         ],
170 |         "confidence": 0.95
171 |       },
172 |       "sessionIds": [
173 |         "test-session-1755690275336"
174 |       ],
175 |       "topics": [],
176 |       "outcomes": [],
177 |       "status": "active"
178 |     },
179 |     {
180 |       "id": "thread-a1b7d615834f2c47",
181 |       "createdAt": "2025-08-20T11:45:38.589Z",
182 |       "projectContext": {
183 |         "name": "mcp-memory-service",
184 |         "type": "Multi-language Project",
185 |         "languages": [
186 |           "javascript",
187 |           "python"
188 |         ],
189 |         "frameworks": [
190 |           "node.js",
191 |           "fastapi"
192 |         ],
193 |         "tools": [
194 |           "git",
195 |           "npm",
196 |           "pip"
197 |         ],
198 |         "confidence": 0.95
199 |       },
200 |       "sessionIds": [
201 |         "test-session-1755690338588"
202 |       ],
203 |       "topics": [],
204 |       "outcomes": [],
205 |       "status": "active"
206 |     },
207 |     {
208 |       "id": "thread-8e3c1ef4f0d194a7",
209 |       "createdAt": "2025-10-03T13:00:20.980Z",
210 |       "projectContext": {
211 |         "name": "mcp-memory-service",
212 |         "type": "Multi-language Project",
213 |         "languages": [
214 |           "javascript",
215 |           "python"
216 |         ],
217 |         "frameworks": [
218 |           "node.js",
219 |           "fastapi"
220 |         ],
221 |         "tools": [
222 |           "git",
223 |           "npm",
224 |           "pip"
225 |         ],
226 |         "confidence": 0.95
227 |       },
228 |       "sessionIds": [
229 |         "test-session-1759496420980"
230 |       ],
231 |       "topics": [],
232 |       "outcomes": [],
233 |       "status": "active"
234 |     }
235 |   ],
236 |   "projectSessions": {
237 |     "mcp-memory-service": [
238 |       "test-session-1759496420980"
239 |     ]
240 |   },
241 |   "lastSaved": "2025-10-03T13:00:20.980Z"
242 | }
```
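
A minimal sketch (assumed helper, not part of the test suite) that loads this fixture and checks the session-to-thread cross-references it encodes:

```python
import json
from pathlib import Path

data = json.loads(Path("claude-hooks/tests/test-session-tracking.json").read_text())

threads = {t["id"]: t for t in data["conversationThreads"]}
for session in data["sessions"]:
    thread = threads[session["threadId"]]               # threadId must resolve
    assert session["id"] in thread["sessionIds"]        # and list the session back

print(f"checked {len(data['sessions'])} session(s) against {len(threads)} thread(s)")
```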

--------------------------------------------------------------------------------
/.github/workflows/roadmap-review-reminder.yml:
--------------------------------------------------------------------------------

```yaml
  1 | name: Quarterly Roadmap Review Reminder
  2 | 
  3 | on:
  4 |   schedule:
  5 |     # Runs at 09:00 UTC on the 1st of Feb, May, Aug, Nov
  6 |     - cron: '0 9 1 2,5,8,11 *'
  7 |   workflow_dispatch:  # Allow manual trigger for testing or ad-hoc reviews
  8 | 
  9 | jobs:
 10 |   create-review-issue:
 11 |     runs-on: ubuntu-latest
 12 |     permissions:
 13 |       issues: write
 14 |       contents: read
 15 | 
 16 |     steps:
 17 |     - name: Checkout repository
 18 |       uses: actions/checkout@v4
 19 | 
 20 |     - name: Determine quarter
 21 |       id: quarter
 22 |       run: |
 23 |         MONTH=$(date +%m)
 24 |         YEAR=$(date +%Y)
 25 | 
 26 |         case $MONTH in
 27 |           02) QUARTER="Q1" ;;
 28 |           05) QUARTER="Q2" ;;
 29 |           08) QUARTER="Q3" ;;
 30 |           11) QUARTER="Q4" ;;
 31 |           *) QUARTER="Q?" ;;
 32 |         esac
 33 | 
 34 |         echo "quarter=$QUARTER" >> $GITHUB_OUTPUT
 35 |         echo "year=$YEAR" >> $GITHUB_OUTPUT
 36 |         echo "Detected: $QUARTER $YEAR"
 37 | 
 38 |     - name: Create roadmap review issue
 39 |       uses: actions/github-script@v7
 40 |       with:
 41 |         script: |
 42 |           const quarter = '${{ steps.quarter.outputs.quarter }}';
 43 |           const year = '${{ steps.quarter.outputs.year }}';
 44 | 
 45 |           const issueTitle = `Quarterly Roadmap Review - ${quarter} ${year}`;
 46 |           const issueBody = `## 📊 Quarterly Roadmap Review
 47 | 
 48 |           It's time for the quarterly roadmap review! Please review and update the development roadmap on the wiki.
 49 | 
 50 |           **📖 Wiki Roadmap**: [13-Development-Roadmap](https://github.com/doobidoo/mcp-memory-service/wiki/13-Development-Roadmap)
 51 | 
 52 |           ## ✅ Review Checklist
 53 | 
 54 |           - [ ] **Completed Milestones**: Move finished features from "Current Focus" to "Completed Milestones"
 55 |           - [ ] **Current Focus**: Update v8.39-v9.0 goals based on actual progress
 56 |           - [ ] **Timeline Accuracy**: Verify Q1 2026 timeline is still realistic
 57 |           - [ ] **Future Enhancements**: Adjust Q2 2026+ plans based on community feedback
 58 |           - [ ] **Version References**: Update current version number in "Project Status" section
 59 |           - [ ] **GitHub Projects Alignment**: Check if [GitHub Projects](https://github.com/doobidoo/mcp-memory-service/projects) need updates
 60 |           - [ ] **Community Opportunities**: Review and update contribution opportunities section
 61 |           - [ ] **Next Review Date**: Set next quarterly review date (3 months from now)
 62 | 
 63 |           ## 🔍 What to Check
 64 | 
 65 |           **Recent Releases** (since last review):
 66 |           - Check [CHANGELOG.md](../CHANGELOG.md) for completed features
 67 |           - Review [Recent Commits](https://github.com/doobidoo/mcp-memory-service/commits/main) for major changes
 68 |           - Check [Closed Issues](https://github.com/doobidoo/mcp-memory-service/issues?q=is%3Aissue+is%3Aclosed) for resolved items
 69 | 
 70 |           **Community Feedback**:
 71 |           - Review [Open Issues](https://github.com/doobidoo/mcp-memory-service/issues) for feature requests
 72 |           - Check [Discussions](https://github.com/doobidoo/mcp-memory-service/discussions) for community input
 73 |           - Consider [Pull Requests](https://github.com/doobidoo/mcp-memory-service/pulls) for emerging patterns
 74 | 
 75 |           ## 📝 Update Guidelines
 76 | 
 77 |           **Why Wiki?**
 78 |           - ✅ No PR required - edit directly for faster updates
 79 |           - ✅ Better navigation - integrated with other wiki guides
 80 |           - ✅ Community collaboration - lower barrier for community input
 81 | 
 82 |           **Documentation Matrix**:
 83 |           - **Wiki Roadmap**: Strategic vision, quarterly goals, long-term aspirations
 84 |           - **GitHub Projects**: Sprint planning, task boards, issue tracking (if enabled)
 85 |           - **CHANGELOG.md**: Release history, completed features
 86 |           - **Open Issues**: Bug reports, feature requests, immediate priorities
 87 | 
 88 |           ## 🎯 Success Metrics
 89 | 
 90 |           After review, the roadmap should:
 91 |           - Accurately reflect current version and recent achievements
 92 |           - Provide clear guidance for next quarter's priorities
 93 |           - Inspire community contributions with well-defined opportunities
 94 |           - Align strategic vision with tactical execution (GitHub Projects/Issues)
 95 | 
 96 |           ---
 97 | 
 98 |           **Automated Reminder**: This issue is created quarterly by [roadmap-review-reminder workflow](../.github/workflows/roadmap-review-reminder.yml)
 99 |           **Maintainer**: @doobidoo
100 |           **Due Date**: Within 2 weeks of creation`;
101 | 
102 |           // Check if issue already exists for this quarter
103 |           const { data: existingIssues } = await github.rest.issues.listForRepo({
104 |             owner: context.repo.owner,
105 |             repo: context.repo.repo,
106 |             state: 'open',
107 |             labels: 'roadmap',
108 |             per_page: 100
109 |           });
110 | 
111 |           const isDuplicate = existingIssues.some(issue =>
112 |             issue.title.includes(issueTitle)
113 |           );
114 | 
115 |           if (isDuplicate) {
116 |             console.log(`✅ Issue already exists for ${quarter} ${year} - skipping creation`);
117 |             return;
118 |           }
119 | 
120 |           // Create the issue
121 |           const { data: issue } = await github.rest.issues.create({
122 |             owner: context.repo.owner,
123 |             repo: context.repo.repo,
124 |             title: issueTitle,
125 |             body: issueBody,
126 |             labels: ['documentation', 'maintenance', 'roadmap'],
127 |             assignees: ['doobidoo']
128 |           });
129 | 
130 |           console.log(`✅ Created issue #${issue.number}: ${issueTitle}`);
131 | 
```
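
The month-to-quarter mapping in the "Determine quarter" step can be checked locally; a minimal sketch (standalone helper, not part of the workflow):

```python
from datetime import date

QUARTER_BY_MONTH = {2: "Q1", 5: "Q2", 8: "Q3", 11: "Q4"}  # matches the cron months

def review_quarter(today: date) -> str:
    return f"{QUARTER_BY_MONTH.get(today.month, 'Q?')} {today.year}"

print(review_quarter(date(2026, 2, 1)))   # Q1 2026
print(review_quarter(date(2026, 11, 1)))  # Q4 2026
```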

--------------------------------------------------------------------------------
/scripts/sync/check_drift.py:
--------------------------------------------------------------------------------

```python
  1 | #!/usr/bin/env python3
  2 | """
  3 | Check for metadata drift between hybrid backends (dry-run support).
  4 | 
  5 | This script checks for memories with divergent metadata (tags, types, custom fields)
  6 | between local SQLite-vec and Cloudflare backends, without making any changes.
  7 | 
  8 | Usage:
  9 |     python scripts/sync/check_drift.py              # Dry-run mode (preview only)
 10 |     python scripts/sync/check_drift.py --apply      # Apply changes
 11 |     python scripts/sync/check_drift.py --limit 50   # Check 50 memories max
 12 | 
 13 | Requires:
 14 |     - Hybrid storage backend configured
 15 |     - MCP_HYBRID_SYNC_UPDATES=true (or enabled by default)
 16 | 
 17 | Output:
 18 |     - Number of memories checked
 19 |     - Number with metadata drift detected
 20 |     - Number that would be synced (or were synced with --apply)
 21 |     - Number of failures
 22 | 
 23 | Version: 8.25.0+
 24 | """
 25 | 
 26 | import asyncio
 27 | import logging
 28 | import sys
 29 | import argparse
 30 | from pathlib import Path
 31 | 
 32 | # Add parent directory to path for imports
 33 | sys.path.insert(0, str(Path(__file__).parent.parent.parent))
 34 | 
 35 | from src.mcp_memory_service.storage.hybrid import HybridMemoryStorage
 36 | from src.mcp_memory_service import config as app_config
 37 | 
 38 | # Set up logging
 39 | logging.basicConfig(
 40 |     level=logging.INFO,
 41 |     format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
 42 | )
 43 | logger = logging.getLogger(__name__)
 44 | 
 45 | 
 46 | async def main():
 47 |     """Run drift detection check."""
 48 |     parser = argparse.ArgumentParser(
 49 |         description="Check for metadata drift between hybrid backends"
 50 |     )
 51 |     parser.add_argument(
 52 |         '--apply',
 53 |         action='store_true',
 54 |         help='Apply changes (default is dry-run mode)'
 55 |     )
 56 |     parser.add_argument(
 57 |         '--limit',
 58 |         type=int,
 59 |         default=None,
 60 |         help='Maximum number of memories to check (default: from config)'
 61 |     )
 62 |     parser.add_argument(
 63 |         '--verbose',
 64 |         '-v',
 65 |         action='store_true',
 66 |         help='Enable verbose debug logging'
 67 |     )
 68 | 
 69 |     args = parser.parse_args()
 70 | 
 71 |     if args.verbose:
 72 |         logging.getLogger().setLevel(logging.DEBUG)
 73 | 
 74 |     # Check that hybrid backend is configured
 75 |     if app_config.STORAGE_BACKEND != 'hybrid':
 76 |         logger.error(f"Drift detection requires hybrid backend, but configured backend is: {app_config.STORAGE_BACKEND}")
 77 |         logger.error("Set MCP_MEMORY_STORAGE_BACKEND=hybrid in your environment or .env file")
 78 |         return 1
 79 | 
 80 |     # Override batch size if limit specified
 81 |     if args.limit:
 82 |         app_config.HYBRID_DRIFT_BATCH_SIZE = args.limit
 83 | 
 84 |     logger.info("=== Hybrid Backend Drift Detection ===")
 85 |     logger.info(f"Mode: {'APPLY CHANGES' if args.apply else 'DRY RUN (preview only)'}")
 86 |     logger.info(f"Batch size: {args.limit or app_config.HYBRID_DRIFT_BATCH_SIZE}")
 87 |     logger.info(f"Drift detection enabled: {app_config.HYBRID_SYNC_UPDATES}")
 88 | 
 89 |     if not app_config.HYBRID_SYNC_UPDATES:
 90 |         logger.warning("Drift detection is disabled (MCP_HYBRID_SYNC_UPDATES=false)")
 91 |         logger.warning("Set MCP_HYBRID_SYNC_UPDATES=true to enable this feature")
 92 |         return 1
 93 | 
 94 |     try:
 95 |         # Initialize hybrid storage with db path and Cloudflare config
 96 |         db_path = app_config.SQLITE_VEC_PATH
 97 | 
 98 |         # Build Cloudflare config from environment
 99 |         cloudflare_keys = [
100 |             'CLOUDFLARE_API_TOKEN',
101 |             'CLOUDFLARE_ACCOUNT_ID',
102 |             'CLOUDFLARE_D1_DATABASE_ID',
103 |             'CLOUDFLARE_VECTORIZE_INDEX',
104 |             'CLOUDFLARE_R2_BUCKET',
105 |             'CLOUDFLARE_EMBEDDING_MODEL',
106 |             'CLOUDFLARE_LARGE_CONTENT_THRESHOLD',
107 |             'CLOUDFLARE_MAX_RETRIES',
108 |             'CLOUDFLARE_BASE_DELAY',
109 |         ]
110 |         cloudflare_config = {
111 |             key.lower().replace('cloudflare_', ''): getattr(app_config, key, None)
112 |             for key in cloudflare_keys
113 |         }
114 | 
115 |         storage = HybridMemoryStorage(
116 |             sqlite_db_path=db_path,
117 |             cloudflare_config=cloudflare_config
118 |         )
119 |         await storage.initialize()
120 | 
121 |         # Check that sync service is available
122 |         if not storage.sync_service:
123 |             logger.error("Sync service not available - hybrid backend may not be configured correctly")
124 |             return 1
125 | 
126 |         logger.info(f"Sync service initialized (drift check interval: {storage.sync_service.drift_check_interval}s)")
127 | 
128 |         # Run drift detection
129 |         logger.info("\nStarting drift detection scan...\n")
130 |         stats = await storage.sync_service._detect_and_sync_drift(dry_run=not args.apply)
131 | 
132 |         # Print results
133 |         print("\n" + "="*60)
134 |         print(f"DRIFT DETECTION RESULTS {'(DRY RUN)' if not args.apply else '(CHANGES APPLIED)'}")
135 |         print("="*60)
136 |         print(f"  Memories checked:    {stats['checked']}")
137 |         print(f"  Drift detected:      {stats['drift_detected']}")
138 |         print(f"  {'Would sync' if not args.apply else 'Synced'}:          {stats['synced']}")
139 |         print(f"  Failed:              {stats['failed']}")
140 |         print("="*60)
141 | 
142 |         if stats['drift_detected'] > 0 and not args.apply:
143 |             print("\nℹ️  Run with --apply to synchronize these memories")
144 |         elif stats['drift_detected'] > 0 and args.apply:
145 |             print("\n✅ Metadata synchronized successfully")
146 |         else:
147 |             print("\n✅ No drift detected - backends are in sync")
148 | 
149 |         return 0
150 | 
151 |     except Exception as e:
152 |         logger.error(f"Error during drift detection: {e}", exc_info=True)
153 |         return 1
154 | 
155 | 
156 | if __name__ == '__main__':
157 |     exit_code = asyncio.run(main())
158 |     sys.exit(exit_code)
159 | 
```
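
The key transformation feeding `HybridMemoryStorage` is easy to miss; a minimal sketch of what the dictionary comprehension above produces, using a stand-in dict for the config module and placeholder values rather than real credentials:

```python
cloudflare_keys = [
    'CLOUDFLARE_API_TOKEN',
    'CLOUDFLARE_ACCOUNT_ID',
    'CLOUDFLARE_D1_DATABASE_ID',
    'CLOUDFLARE_VECTORIZE_INDEX',
]
app_config = {key: f"<{key.lower()}>" for key in cloudflare_keys}  # stand-in for the config module

cloudflare_config = {
    key.lower().replace('cloudflare_', ''): app_config.get(key)
    for key in cloudflare_keys
}
print(cloudflare_config)
# {'api_token': '<cloudflare_api_token>', 'account_id': '<cloudflare_account_id>', ...}
```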

--------------------------------------------------------------------------------
/scripts/sync/litestream/push_to_remote.sh:
--------------------------------------------------------------------------------

```bash
  1 | #!/bin/bash
  2 | # Push staged changes to remote MCP Memory Service API
  3 | 
  4 | STAGING_DB="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec_staging.db"
  5 | REMOTE_API="https://narrowbox.local:8443/api/memories"
  6 | # API_KEY can be set as environment variable or read from config
  7 | API_KEY="${MCP_API_KEY:-}"
  8 | HOSTNAME=$(hostname)
  9 | 
 10 | echo "$(date): Pushing staged changes to remote API..."
 11 | 
 12 | if [ ! -f "$STAGING_DB" ]; then
 13 |     echo "$(date): No staging database found - nothing to push"
 14 |     exit 0
 15 | fi
 16 | 
 17 | # Check if we have API key (if required)
 18 | if [ -z "$API_KEY" ]; then
 19 |     echo "$(date): WARNING: No API key configured. Set MCP_API_KEY environment variable if required"
 20 | fi
 21 | 
 22 | # Get count of changes ready to push
 23 | PUSH_COUNT=$(sqlite3 "$STAGING_DB" "
 24 | SELECT COUNT(*) FROM staged_memories 
 25 | WHERE conflict_status = 'none' 
 26 |   AND operation IN ('INSERT', 'UPDATE');
 27 | " 2>/dev/null || echo "0")
 28 | 
 29 | if [ "$PUSH_COUNT" -eq 0 ]; then
 30 |     echo "$(date): No changes ready to push"
 31 |     exit 0
 32 | fi
 33 | 
 34 | echo "$(date): Found $PUSH_COUNT changes ready to push to remote API"
 35 | 
 36 | # Test connectivity to remote API
 37 | echo "$(date): Testing connectivity to remote API..."
 38 | HTTP_STATUS=$(curl -k -s -o /dev/null -w "%{http_code}" "$REMOTE_API" --connect-timeout 10)
 39 | 
 40 | if [ "$HTTP_STATUS" -eq 000 ]; then
 41 |     echo "$(date): ERROR: Cannot connect to remote API at $REMOTE_API"
 42 |     echo "$(date): Changes will remain staged for next attempt"
 43 |     exit 1
 44 | elif [ "$HTTP_STATUS" -eq 404 ]; then
 45 |     echo "$(date): WARNING: API endpoint not found (404). Checking if server is running..."
 46 | elif [ "$HTTP_STATUS" -ge 200 ] && [ "$HTTP_STATUS" -lt 300 ]; then
 47 |     echo "$(date): API connectivity confirmed (HTTP $HTTP_STATUS)"
 48 | else
 49 |     echo "$(date): WARNING: Unexpected HTTP status: $HTTP_STATUS"
 50 | fi
 51 | 
 52 | # Process each change ready for push
 53 | PUSHED_COUNT=0
 54 | FAILED_COUNT=0
 55 | 
 56 | sqlite3 "$STAGING_DB" "
 57 | SELECT id, content, content_hash, tags, metadata, memory_type, 
 58 |        operation, staged_at, original_created_at, source_machine
 59 | FROM staged_memories 
 60 | WHERE conflict_status = 'none' 
 61 |   AND operation IN ('INSERT', 'UPDATE')
 62 | ORDER BY staged_at ASC;
 63 | " | while IFS='|' read -r id content content_hash tags metadata memory_type operation staged_at created_at source_machine; do
 64 | 
 65 |     echo "$(date): Pushing: ${content:0:50}..."
 66 |     
 67 |     # Prepare JSON payload
 68 |     # Note: This assumes the API accepts the memory service format
 69 |     JSON_PAYLOAD=$(cat << EOF
 70 | {
 71 |     "content": $(echo "$content" | jq -R .),
 72 |     "tags": $tags,
 73 |     "metadata": $metadata,
 74 |     "memory_type": "$memory_type",
 75 |     "client_hostname": "$HOSTNAME"
 76 | }
 77 | EOF
 78 | )
 79 |     
 80 |     # Prepare curl command with optional API key
 81 |     CURL_CMD="curl -k -s -X POST"
 82 |     CURL_CMD="$CURL_CMD -H 'Content-Type: application/json'"
 83 |     CURL_CMD="$CURL_CMD -H 'X-Client-Hostname: $HOSTNAME'"
 84 |     
 85 |     if [ -n "$API_KEY" ]; then
 86 |         CURL_CMD="$CURL_CMD -H 'Authorization: Bearer $API_KEY'"
 87 |     fi
 88 |     
 89 |     CURL_CMD="$CURL_CMD -d '$JSON_PAYLOAD'"
 90 |     CURL_CMD="$CURL_CMD '$REMOTE_API'"
 91 |     
 92 |     # Execute push to remote API
 93 |     RESPONSE=$(eval "$CURL_CMD" 2>&1)
 94 |     CURL_EXIT_CODE=$?
 95 |     
 96 |     if [ $CURL_EXIT_CODE -eq 0 ]; then
 97 |         # Check if response indicates success
 98 |         if echo "$RESPONSE" | grep -q '"success":\s*true\|"status":\s*"success"\|content_hash'; then
 99 |             echo "$(date): Successfully pushed: ${content:0:30}..."
100 |             
101 |             # Remove from staging on successful push
102 |             sqlite3 "$STAGING_DB" "DELETE FROM staged_memories WHERE id = '$id';"
103 |             PUSHED_COUNT=$((PUSHED_COUNT + 1))
104 |             
105 |         elif echo "$RESPONSE" | grep -q '"error"\|"message"\|HTTP.*[45][0-9][0-9]'; then
106 |             echo "$(date): API error for: ${content:0:30}..."
107 |             echo "$(date): Response: $RESPONSE"
108 |             FAILED_COUNT=$((FAILED_COUNT + 1))
109 |             
110 |             # Mark as failed but don't delete (for retry)
111 |             sqlite3 "$STAGING_DB" "
112 |             UPDATE staged_memories 
113 |             SET conflict_status = 'push_failed' 
114 |             WHERE id = '$id';
115 |             "
116 |         else
117 |             echo "$(date): Unexpected response: $RESPONSE"
118 |             FAILED_COUNT=$((FAILED_COUNT + 1))
119 |         fi
120 |     else
121 |         echo "$(date): Network error pushing: ${content:0:30}..."
122 |         echo "$(date): Error: $RESPONSE"
123 |         FAILED_COUNT=$((FAILED_COUNT + 1))
124 |         
125 |         # Don't mark as failed if it's a network issue - keep for retry
126 |     fi
127 |     
128 |     # Small delay to avoid overwhelming the API
129 |     sleep 0.5
130 | done
131 | 
132 | # Get final counts
133 | REMAINING_COUNT=$(sqlite3 "$STAGING_DB" "SELECT COUNT(*) FROM staged_memories WHERE conflict_status = 'none';" 2>/dev/null || echo "0")
134 | FAILED_FINAL=$(sqlite3 "$STAGING_DB" "SELECT COUNT(*) FROM staged_memories WHERE conflict_status = 'push_failed';" 2>/dev/null || echo "0")
135 | 
136 | echo "$(date): Push operation completed"
137 | echo "$(date): Successfully pushed: $PUSHED_COUNT changes"
138 | echo "$(date): Failed to push: $FAILED_FINAL changes"
139 | echo "$(date): Remaining staged: $REMAINING_COUNT changes"
140 | 
141 | # Update sync status
142 | sqlite3 "$STAGING_DB" "
143 | INSERT OR REPLACE INTO sync_status (key, value) VALUES 
144 | ('last_push_attempt', datetime('now'));
145 | "
146 | 
147 | if [ "$FAILED_FINAL" -gt 0 ]; then
148 |     echo "$(date): WARNING: $FAILED_FINAL changes failed to push"
149 |     echo "$(date): These changes will be retried on next push attempt"
150 | fi
151 | 
152 | if [ "$REMAINING_COUNT" -gt 0 ]; then
153 |     echo "$(date): NOTE: $REMAINING_COUNT changes still staged"
154 | fi
155 | 
156 | echo "$(date): Push to remote API completed"
```
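
For reference, the per-row POST the script assembles with curl can be expressed directly; a minimal sketch assuming the same endpoint and payload shape (not shipped code; `verify=False` mirrors curl's `-k` for the self-signed certificate):

```python
import os
import socket
import requests

payload = {
    "content": "example staged memory",      # placeholder for a staged row
    "tags": ["example"],
    "metadata": {},
    "memory_type": "note",
    "client_hostname": socket.gethostname(),
}
headers = {"X-Client-Hostname": socket.gethostname()}
if os.environ.get("MCP_API_KEY"):
    headers["Authorization"] = f"Bearer {os.environ['MCP_API_KEY']}"

resp = requests.post("https://narrowbox.local:8443/api/memories",
                     json=payload, headers=headers, verify=False, timeout=10)
print(resp.status_code, resp.text[:200])
```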

--------------------------------------------------------------------------------
/src/mcp_memory_service/consolidation/base.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright 2024 Heinrich Krupp
  2 | #
  3 | # Licensed under the Apache License, Version 2.0 (the "License");
  4 | # you may not use this file except in compliance with the License.
  5 | # You may obtain a copy of the License at
  6 | #
  7 | #     http://www.apache.org/licenses/LICENSE-2.0
  8 | #
  9 | # Unless required by applicable law or agreed to in writing, software
 10 | # distributed under the License is distributed on an "AS IS" BASIS,
 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 12 | # See the License for the specific language governing permissions and
 13 | # limitations under the License.
 14 | 
 15 | """Base classes and interfaces for memory consolidation components."""
 16 | 
 17 | from abc import ABC, abstractmethod
 18 | from typing import List, Dict, Any, Optional, Tuple
 19 | from dataclasses import dataclass, field
 20 | from datetime import datetime
 21 | import logging
 22 | 
 23 | from ..models.memory import Memory
 24 | 
 25 | logger = logging.getLogger(__name__)
 26 | 
 27 | @dataclass
 28 | class ConsolidationConfig:
 29 |     """Configuration for consolidation operations."""
 30 |     
 31 |     # Decay settings
 32 |     decay_enabled: bool = True
 33 |     retention_periods: Dict[str, int] = field(default_factory=lambda: {
 34 |         'critical': 365,
 35 |         'reference': 180, 
 36 |         'standard': 30,
 37 |         'temporary': 7
 38 |     })
 39 |     
 40 |     # Association settings
 41 |     associations_enabled: bool = True
 42 |     min_similarity: float = 0.3
 43 |     max_similarity: float = 0.7
 44 |     max_pairs_per_run: int = 100
 45 |     
 46 |     # Clustering settings
 47 |     clustering_enabled: bool = True
 48 |     min_cluster_size: int = 5
 49 |     clustering_algorithm: str = 'dbscan'  # 'dbscan', 'hierarchical'
 50 |     
 51 |     # Compression settings
 52 |     compression_enabled: bool = True
 53 |     max_summary_length: int = 500
 54 |     preserve_originals: bool = True
 55 |     
 56 |     # Forgetting settings
 57 |     forgetting_enabled: bool = True
 58 |     relevance_threshold: float = 0.1
 59 |     access_threshold_days: int = 90
 60 |     archive_location: Optional[str] = None
 61 | 
 62 |     # Incremental consolidation settings
 63 |     batch_size: int = 500  # Memories to process per consolidation run
 64 |     incremental_mode: bool = True  # Enable oldest-first batch processing
 65 | 
 66 | @dataclass
 67 | class ConsolidationReport:
 68 |     """Report of consolidation operations performed."""
 69 |     time_horizon: str
 70 |     start_time: datetime
 71 |     end_time: datetime
 72 |     memories_processed: int
 73 |     associations_discovered: int = 0
 74 |     clusters_created: int = 0
 75 |     memories_compressed: int = 0
 76 |     memories_archived: int = 0
 77 |     errors: List[str] = field(default_factory=list)
 78 |     performance_metrics: Dict[str, Any] = field(default_factory=dict)
 79 | 
 80 | @dataclass
 81 | class MemoryAssociation:
 82 |     """Represents a discovered association between memories."""
 83 |     source_memory_hashes: List[str]
 84 |     similarity_score: float
 85 |     connection_type: str
 86 |     discovery_method: str
 87 |     discovery_date: datetime
 88 |     metadata: Dict[str, Any] = field(default_factory=dict)
 89 | 
 90 | @dataclass
 91 | class MemoryCluster:
 92 |     """Represents a cluster of semantically related memories."""
 93 |     cluster_id: str
 94 |     memory_hashes: List[str]
 95 |     centroid_embedding: List[float]
 96 |     coherence_score: float
 97 |     created_at: datetime
 98 |     theme_keywords: List[str] = field(default_factory=list)
 99 |     metadata: Dict[str, Any] = field(default_factory=dict)
100 | 
101 | class ConsolidationBase(ABC):
102 |     """Abstract base class for consolidation components."""
103 |     
104 |     def __init__(self, config: ConsolidationConfig):
105 |         self.config = config
106 |         self.logger = logging.getLogger(f"{__name__}.{self.__class__.__name__}")
107 |     
108 |     @abstractmethod
109 |     async def process(self, memories: List[Memory], **kwargs) -> Any:
110 |         """Process the given memories and return results."""
111 |         pass
112 |     
113 |     def _validate_memories(self, memories: List[Memory]) -> bool:
114 |         """Validate that memories list is valid for processing."""
115 |         if not memories:
116 |             self.logger.warning("Empty memories list provided")
117 |             return False
118 |         
119 |         for memory in memories:
120 |             if not hasattr(memory, 'content_hash') or not memory.content_hash:
121 |                 self.logger.error(f"Memory missing content_hash: {memory}")
122 |                 return False
123 |         
124 |         return True
125 |     
126 |     def _get_memory_age_days(self, memory: Memory, reference_time: Optional[datetime] = None) -> int:
127 |         """Get the age of a memory in days."""
128 |         ref_time = reference_time or datetime.now()
129 |         
130 |         if memory.created_at:
131 |             created_dt = datetime.utcfromtimestamp(memory.created_at)
132 |             return (ref_time - created_dt).days
133 |         elif memory.timestamp:
134 |             return (ref_time - memory.timestamp).days
135 |         else:
136 |             self.logger.warning(f"Memory {memory.content_hash} has no timestamp")
137 |             return 0
138 |     
139 |     def _extract_memory_type(self, memory: Memory) -> str:
140 |         """Extract the memory type, with fallback to 'standard'."""
141 |         return memory.memory_type or 'standard'
142 |     
143 |     def _is_protected_memory(self, memory: Memory) -> bool:
144 |         """Check if a memory is protected from consolidation operations."""
145 |         protected_tags = {'critical', 'important', 'reference', 'permanent'}
146 |         return bool(set(memory.tags).intersection(protected_tags))
147 | 
148 | class ConsolidationError(Exception):
149 |     """Base exception for consolidation operations."""
150 |     pass
151 | 
152 | class ConsolidationConfigError(ConsolidationError):
153 |     """Exception raised for configuration errors."""
154 |     pass
155 | 
156 | class ConsolidationProcessingError(ConsolidationError):
157 |     """Exception raised during processing operations."""
158 |     pass
```
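
A minimal sketch of a concrete component built on this interface (illustrative only; the import paths assume the installed `mcp_memory_service` package layout): it counts unprotected memories old enough to be archival candidates.

```python
from datetime import datetime
from typing import List

from mcp_memory_service.consolidation.base import ConsolidationBase, ConsolidationConfig
from mcp_memory_service.models.memory import Memory

class StaleMemoryCounter(ConsolidationBase):
    """Example component: count unprotected memories past the access threshold."""

    async def process(self, memories: List[Memory], **kwargs) -> int:
        if not self._validate_memories(memories):
            return 0
        now = datetime.now()
        return sum(
            1 for memory in memories
            if not self._is_protected_memory(memory)
            and self._get_memory_age_days(memory, now) > self.config.access_threshold_days
        )

# Usage (inside an async context):
#     counter = StaleMemoryCounter(ConsolidationConfig())
#     stale = await counter.process(memories)
```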

--------------------------------------------------------------------------------
/docs/troubleshooting/cloudflare-authentication.md:
--------------------------------------------------------------------------------

```markdown
  1 | # Cloudflare Authentication Troubleshooting
  2 | 
  3 | This guide helps resolve common Cloudflare authentication issues with the MCP Memory Service.
  4 | 
  5 | ## Overview
  6 | 
  7 | Cloudflare API tokens come in different types with varying scopes and verification methods. Understanding these differences is crucial for proper authentication.
  8 | 
  9 | ## Token Types and Verification
 10 | 
 11 | ### Account-Scoped Tokens (Recommended)
 12 | 
 13 | **What they are:** Tokens with specific permissions limited to a particular Cloudflare account.
 14 | 
 15 | **Required Permissions:**
 16 | - `D1 Database:Edit` - For D1 database operations
 17 | - `Vectorize:Edit` - For vector index operations
 18 | 
 19 | **Verification Endpoint:**
 20 | ```bash
 21 | curl "https://api.cloudflare.com/client/v4/accounts/{ACCOUNT_ID}/tokens/verify" \
 22 |      -H "Authorization: Bearer {YOUR_TOKEN}"
 23 | ```
 24 | 
 25 | **Success Response:**
 26 | ```json
 27 | {
 28 |   "result": {
 29 |     "id": "token_id_here",
 30 |     "status": "active",
 31 |     "expires_on": "2026-04-30T23:59:59Z"
 32 |   },
 33 |   "success": true,
 34 |   "errors": [],
 35 |   "messages": [
 36 |     {
 37 |       "code": 10000,
 38 |       "message": "This API Token is valid and active"
 39 |     }
 40 |   ]
 41 | }
 42 | ```
 43 | 
 44 | ### Global Tokens (Legacy)
 45 | 
 46 | **What they are:** Tokens with broader permissions across all accounts.
 47 | 
 48 | **Verification Endpoint:**
 49 | ```bash
 50 | curl "https://api.cloudflare.com/client/v4/user/tokens/verify" \
 51 |      -H "Authorization: Bearer {YOUR_TOKEN}"
 52 | ```
 53 | 
 54 | ## Common Error Messages
 55 | 
 56 | ### "Invalid API Token" (Error 1000)
 57 | 
 58 | **Cause:** Using the wrong verification endpoint for your token type.
 59 | 
 60 | **Solution:**
 61 | 1. If using account-scoped token, use the account-specific endpoint
 62 | 2. If using global token, use the user endpoint
 63 | 3. Check token expiration date
 64 | 4. Verify token permissions
 65 | 
 66 | **Example:**
 67 | ```bash
 68 | # ❌ Wrong: Testing account-scoped token with user endpoint
 69 | curl "https://api.cloudflare.com/client/v4/user/tokens/verify" \
 70 |      -H "Authorization: Bearer account_scoped_token"
 71 | # Returns: {"success":false,"errors":[{"code":1000,"message":"Invalid API Token"}]}
 72 | 
 73 | # ✅ Correct: Testing account-scoped token with account endpoint
 74 | curl "https://api.cloudflare.com/client/v4/accounts/your_account_id/tokens/verify" \
 75 |      -H "Authorization: Bearer account_scoped_token"
 76 | # Returns: {"success":true,...}
 77 | ```
 78 | 
 79 | ### "401 Unauthorized" During Operations
 80 | 
 81 | **Cause:** Token lacks required permissions for specific operations.
 82 | 
 83 | **Solution:**
 84 | 1. Verify token has `D1 Database:Edit` permission
 85 | 2. Verify token has `Vectorize:Edit` permission
 86 | 3. Check if token has expired
 87 | 4. Ensure account ID matches token scope
 88 | 
 89 | ### "Client error '401 Unauthorized'" in MCP Service
 90 | 
 91 | **Cause:** Environment variables may not be properly loaded or token is invalid.
 92 | 
 93 | **Debugging Steps:**
 94 | 1. Check environment variable loading:
 95 |    ```bash
 96 |    python scripts/validation/diagnose_backend_config.py
 97 |    ```
 98 | 
 99 | 2. Test token manually:
100 |    ```bash
101 |    curl "https://api.cloudflare.com/client/v4/accounts/$CLOUDFLARE_ACCOUNT_ID/tokens/verify" \
102 |         -H "Authorization: Bearer $CLOUDFLARE_API_TOKEN"
103 |    ```
104 | 
105 | 3. Test D1 database access:
106 |    ```bash
107 |    curl -X POST "https://api.cloudflare.com/client/v4/accounts/$CLOUDFLARE_ACCOUNT_ID/d1/database/$CLOUDFLARE_D1_DATABASE_ID/query" \
108 |         -H "Authorization: Bearer $CLOUDFLARE_API_TOKEN" \
109 |         -H "Content-Type: application/json" \
110 |         -d '{"sql": "SELECT name FROM sqlite_master WHERE type='"'"'table'"'"';"}'
111 |    ```
112 | 
113 | ## Token Creation Guide
114 | 
115 | ### Creating Account-Scoped Tokens
116 | 
117 | 1. Go to [Cloudflare Dashboard](https://dash.cloudflare.com/profile/api-tokens)
118 | 2. Click "Create Token"
119 | 3. Use "Custom token" template
120 | 4. Set permissions:
121 |    - `Account` → `Cloudflare D1:Edit`
122 |    - `Account` → `Vectorize:Edit`
123 | 5. Set account resources to your specific account
124 | 6. Add client IP restrictions (optional but recommended)
125 | 7. Set expiration date
126 | 8. Create and copy the token immediately
127 | 
128 | ### Token Security Best Practices
129 | 
130 | - ✅ Use account-scoped tokens with minimal required permissions
131 | - ✅ Set expiration dates (e.g., 1 year maximum)
132 | - ✅ Add IP restrictions when possible
133 | - ✅ Store tokens securely (environment variables, not in code)
134 | - ✅ Rotate tokens regularly
135 | - ❌ Never commit tokens to version control
136 | - ❌ Don't use global tokens unless absolutely necessary
137 | 
138 | ## Environment Configuration
139 | 
140 | ### Required Variables
141 | 
142 | ```bash
143 | # Account-scoped token (recommended)
144 | CLOUDFLARE_API_TOKEN=your_account_scoped_token_here
145 | CLOUDFLARE_ACCOUNT_ID=your_account_id_here
146 | CLOUDFLARE_D1_DATABASE_ID=your_d1_database_id_here
147 | CLOUDFLARE_VECTORIZE_INDEX=mcp-memory-index
148 | ```
149 | 
150 | ### Validation Command
151 | 
152 | ```bash
153 | # Test all configuration
154 | python scripts/validation/diagnose_backend_config.py
155 | 
156 | # Quick token test
157 | curl "https://api.cloudflare.com/client/v4/accounts/$CLOUDFLARE_ACCOUNT_ID/tokens/verify" \
158 |      -H "Authorization: Bearer $CLOUDFLARE_API_TOKEN"
159 | ```
160 | 
161 | ## Troubleshooting Checklist
162 | 
163 | - [ ] Token is account-scoped and has correct permissions
164 | - [ ] Using correct verification endpoint (`/accounts/{id}/tokens/verify`)
165 | - [ ] Environment variables are loaded correctly
166 | - [ ] Account ID matches token scope
167 | - [ ] Token has not expired
168 | - [ ] D1 database ID is correct
169 | - [ ] Vectorize index exists
170 | - [ ] MCP service has been restarted after configuration changes
171 | 
172 | ## Getting Help
173 | 
174 | If you're still experiencing issues:
175 | 
176 | 1. Run the diagnostic script: `python scripts/validation/diagnose_backend_config.py`
177 | 2. Check the [GitHub Issues](https://github.com/doobidoo/mcp-memory-service/issues)
178 | 3. Review the main [README.md](../../README.md) for setup instructions
179 | 4. Check the [CLAUDE.md](../../CLAUDE.md) for Claude Code specific guidance
```
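
The same account-scoped verification can be scripted; a minimal sketch using `requests` and the environment variables from the configuration section above:

```python
import os
import requests

account_id = os.environ["CLOUDFLARE_ACCOUNT_ID"]
token = os.environ["CLOUDFLARE_API_TOKEN"]

resp = requests.get(
    f"https://api.cloudflare.com/client/v4/accounts/{account_id}/tokens/verify",
    headers={"Authorization": f"Bearer {token}"},
    timeout=10,
)
body = resp.json()
if body.get("success"):
    print("Token valid, status:", body["result"]["status"])
else:
    print("Verification failed:", body.get("errors"))
```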

--------------------------------------------------------------------------------
/scripts/quality/README_PHASE1.md:
--------------------------------------------------------------------------------

```markdown
  1 | # Phase 1: Dead Code Removal - Quick Reference
  2 | 
  3 | **Issue:** #240 Code Quality Improvement
  4 | **Phase:** 1 of 3 (Dead Code Removal)
  5 | **Status:** Analysis Complete, Ready for Fix
  6 | 
  7 | ---
  8 | 
  9 | ## Quick Summary
 10 | 
 11 | **Problem:** 27 dead code issues (2 critical) identified by pyscn
 12 | **Root Cause:** Single premature `return False` at line 1358 in `scripts/installation/install.py`
 13 | **Impact:** 77 lines of Claude Desktop configuration code never executed during installation
 14 | **Fix:** Move unreachable code block outside exception handler
 15 | **Estimated Improvement:** +5 to +9 points overall health score (63 → 68-72)
 16 | 
 17 | ---
 18 | 
 19 | ## Files Generated
 20 | 
 21 | 1. **`phase1_dead_code_analysis.md`** - Complete analysis report with detailed breakdown
 22 | 2. **`fix_dead_code_install.sh`** - Interactive script to guide you through the fix
 23 | 3. **`README_PHASE1.md`** - This quick reference guide
 24 | 
 25 | ---
 26 | 
 27 | ## How to Use
 28 | 
 29 | ### Option 1: Interactive Script (Recommended)
 30 | ```bash
 31 | # Run from project root directory
 32 | bash scripts/quality/fix_dead_code_install.sh
 33 | ```
 34 | 
 35 | The script will:
 36 | - Create a backup branch
 37 | - Guide you through manual code editing
 38 | - Verify syntax after fix
 39 | - Run tests (if available)
 40 | - Show diff and commit message
 41 | 
 42 | ### Option 2: Manual Fix
 43 | 
 44 | 1. **Open file:** `scripts/installation/install.py`
 45 | 2. **Go to line 1358** (inside except block)
 46 | 3. **Change:**
 47 |    ```python
 48 |    except Exception as e:
 49 |        print_error(f"Failed to test backups directory: {e}")
 50 |        return False
 51 |    ```
 52 |    **To:**
 53 |    ```python
 54 |    except Exception as e:
 55 |        print_error(f"Failed to test backups directory: {e}")
 56 |        print_warning("Continuing with Claude Desktop configuration despite write test failure")
 57 |    ```
 58 | 4. **Cut lines 1360-1436** (Claude Desktop config block)
 59 | 5. **Paste after the except block** (dedent by 4 spaces)
 60 | 6. **Save and verify:**
 61 |    ```bash
 62 |    python -m py_compile scripts/installation/install.py
 63 |    ```
 64 | 
 65 | ---
 66 | 
 67 | ## Verification Steps
 68 | 
 69 | After applying the fix:
 70 | 
 71 | 1. **Syntax check:**
 72 |    ```bash
 73 |    python -m py_compile scripts/installation/install.py
 74 |    ```
 75 | 
 76 | 2. **Run tests:**
 77 |    ```bash
 78 |    pytest tests/unit/test_installation.py -v
 79 |    ```
 80 | 
 81 | 3. **Test installation:**
 82 |    ```bash
 83 |    python scripts/installation/install.py --storage-backend sqlite_vec
 84 |    cat ~/.claude/claude_desktop_config.json | grep mcp-memory-service
 85 |    ```
 86 | 
 87 | 4. **Re-run pyscn:**
 88 |    ```bash
 89 |    pyscn analyze . --output .pyscn/reports/
 90 |    ```
 91 | 
 92 | 5. **Check new health score** in the HTML report
 93 | 
 94 | ---
 95 | 
 96 | ## Expected Results
 97 | 
 98 | ### Before Fix
 99 | - **Health Score:** 63/100 (Grade C)
100 | - **Dead Code Issues:** 27 (2 critical)
101 | - **Dead Code Score:** 70/100
102 | - **Claude Desktop Config:** Never created during installation
103 | 
104 | ### After Fix
105 | - **Health Score:** 68-72/100 (Grade C+)
106 | - **Dead Code Issues:** 0
107 | - **Dead Code Score:** 85-90/100
108 | - **Claude Desktop Config:** Automatically created during installation
109 | 
110 | ---
111 | 
112 | ## Commit Message Template
113 | 
114 | ```
115 | fix: move Claude Desktop configuration out of unreachable code block
116 | 
117 | Fixes issue #240 Phase 1 - Dead Code Removal
118 | 
119 | The configure_paths() function had a 'return False' statement inside
120 | an exception handler that made 77 lines of Claude Desktop configuration
121 | code unreachable. This caused installations to skip Claude Desktop setup.
122 | 
123 | Changes:
124 | - Move Claude Desktop config code (lines 1360-1436) outside except block
125 | - Replace premature 'return False' with warning message
126 | - Ensure config runs regardless of write test result
127 | 
128 | Impact:
129 | - Resolves all 27 dead code issues identified by pyscn
130 | - Claude Desktop now configured automatically during installation
131 | - Dead code score: 70 → 85-90 (+15 to +20 points)
132 | - Overall health score: 63 → 68-72 (+5 to +9 points)
133 | 
134 | Testing:
135 | - Syntax validated with py_compile
136 | - Unit tests pass: pytest tests/unit/test_installation.py
137 | - Manual installation tested with sqlite_vec backend
138 | - pyscn re-analysis confirms 0 dead code issues
139 | 
140 | Co-authored-by: pyscn analysis tool
141 | ```
142 | 
143 | ---
144 | 
145 | ## Next Steps After Phase 1
146 | 
147 | Once Phase 1 is complete and merged:
148 | 
149 | 1. **Run pyscn again** to get updated health score
150 | 2. **Celebrate!** 🎉 You've eliminated all dead code issues
151 | 3. **Move to Phase 2:** Low-hanging complexity reductions
152 |    - Target complexity score improvement (currently 40/100)
153 |    - Focus on functions with complexity 15-25 (easier wins)
154 | 4. **Move to Phase 3:** Duplication removal
155 |    - Target duplication score improvement (currently 30/100)
156 |    - Focus on test duplication (identified in pyscn report)
157 | 
158 | ---
159 | 
160 | ## Troubleshooting
161 | 
162 | ### Syntax errors after fix
163 | - Check indentation (should match `try` statement level)
164 | - Verify no lines were accidentally deleted
165 | - Restore from backup: `cp scripts/installation/install.py.backup scripts/installation/install.py`
166 | 
167 | ### Tests fail after fix
168 | - Review test expectations - they may need updating
169 | - Check if tests mock the file write test
170 | - Tests may be outdated if they expect old behavior
171 | 
172 | ### pyscn still shows dead code
173 | - Verify the `return False` was changed to a warning
174 | - Confirm code block was moved OUTSIDE the except block
175 | - Check that no extra `return` statements were left behind
176 | 
177 | ---
178 | 
179 | ## Reference Documents
180 | 
181 | - **Full Analysis:** `scripts/quality/phase1_dead_code_analysis.md`
182 | - **pyscn Report:** `.pyscn/reports/analyze_20251123_214224.html`
183 | - **Issue Tracker:** GitHub Issue #240
184 | 
185 | ---
186 | 
187 | ## Contact
188 | 
189 | Questions? See the detailed analysis in `phase1_dead_code_analysis.md` or refer to Issue #240 on GitHub.
190 | 
191 | **Time Estimate:** 10-15 minutes for fix + verification
192 | **Difficulty:** Easy (code movement, no logic changes)
193 | **Risk:** Low (code was never executing anyway)
194 | 
```
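
A schematic sketch of the control-flow change described above (helper names are illustrative stand-ins, not the real `install.py` code): the configuration step moves out of the exception handler so it runs on both paths.

```python
def test_backups_directory():            # hypothetical stand-ins for the real helpers
    raise OSError("backups directory not writable")

def configure_claude_desktop():
    print("writing claude_desktop_config.json")

def print_error(msg):   print(f"ERROR: {msg}")
def print_warning(msg): print(f"WARNING: {msg}")

def configure_paths_before():
    try:
        test_backups_directory()
    except Exception as e:
        print_error(f"Failed to test backups directory: {e}")
        return False
        configure_claude_desktop()        # dead code: after return, inside except

def configure_paths_after():
    try:
        test_backups_directory()
    except Exception as e:
        print_error(f"Failed to test backups directory: {e}")
        print_warning("Continuing with Claude Desktop configuration despite write test failure")
    configure_claude_desktop()            # reachable whether or not the test failed

configure_paths_after()                   # ERROR..., WARNING..., then configures
```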

--------------------------------------------------------------------------------
/src/mcp_memory_service/ingestion/base.py:
--------------------------------------------------------------------------------

```python
  1 | # Copyright 2024 Heinrich Krupp
  2 | #
  3 | # Licensed under the Apache License, Version 2.0 (the "License");
  4 | # you may not use this file except in compliance with the License.
  5 | # You may obtain a copy of the License at
  6 | #
  7 | #     http://www.apache.org/licenses/LICENSE-2.0
  8 | #
  9 | # Unless required by applicable law or agreed to in writing, software
 10 | # distributed under the License is distributed on an "AS IS" BASIS,
 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 12 | # See the License for the specific language governing permissions and
 13 | # limitations under the License.
 14 | 
 15 | """
 16 | Base classes and interfaces for document ingestion.
 17 | """
 18 | 
 19 | import logging
 20 | from abc import ABC, abstractmethod
 21 | from dataclasses import dataclass
 22 | from pathlib import Path
 23 | from typing import List, Dict, Any, Optional, AsyncGenerator
 24 | from datetime import datetime
 25 | 
 26 | logger = logging.getLogger(__name__)
 27 | 
 28 | 
 29 | @dataclass
 30 | class DocumentChunk:
 31 |     """
 32 |     Represents a chunk of text extracted from a document.
 33 |     
 34 |     Attributes:
 35 |         content: The extracted text content
 36 |         metadata: Additional metadata about the chunk
 37 |         chunk_index: Position of this chunk in the document
 38 |         source_file: Original file path
 39 |     """
 40 |     content: str
 41 |     metadata: Dict[str, Any]
 42 |     chunk_index: int
 43 |     source_file: Path
 44 |     
 45 |     def __post_init__(self):
 46 |         """Add default metadata after initialization."""
 47 |         if 'source' not in self.metadata:
 48 |             self.metadata['source'] = str(self.source_file)
 49 |         if 'chunk_index' not in self.metadata:
 50 |             self.metadata['chunk_index'] = self.chunk_index
 51 |         if 'extracted_at' not in self.metadata:
 52 |             self.metadata['extracted_at'] = datetime.now().isoformat()
 53 | 
 54 | 
 55 | @dataclass
 56 | class IngestionResult:
 57 |     """
 58 |     Result of document ingestion operation.
 59 |     
 60 |     Attributes:
 61 |         success: Whether ingestion was successful
 62 |         chunks_processed: Number of chunks created
 63 |         chunks_stored: Number of chunks successfully stored
 64 |         errors: List of error messages encountered
 65 |         source_file: Original file that was processed
 66 |         processing_time: Time taken to process in seconds
 67 |     """
 68 |     success: bool
 69 |     chunks_processed: int
 70 |     chunks_stored: int
 71 |     errors: List[str]
 72 |     source_file: Path
 73 |     processing_time: float
 74 |     
 75 |     @property
 76 |     def success_rate(self) -> float:
 77 |         """Calculate success rate as percentage."""
 78 |         if self.chunks_processed == 0:
 79 |             return 0.0
 80 |         return (self.chunks_stored / self.chunks_processed) * 100
 81 | 
 82 | 
 83 | class DocumentLoader(ABC):
 84 |     """
 85 |     Abstract base class for document loaders.
 86 |     
 87 |     Each document format (PDF, text, etc.) should implement this interface
 88 |     to provide consistent document processing capabilities.
 89 |     """
 90 |     
 91 |     def __init__(self, chunk_size: int = 1000, chunk_overlap: int = 200):
 92 |         """
 93 |         Initialize document loader.
 94 |         
 95 |         Args:
 96 |             chunk_size: Target size for text chunks in characters
 97 |             chunk_overlap: Number of characters to overlap between chunks
 98 |         """
 99 |         self.chunk_size = chunk_size
100 |         self.chunk_overlap = chunk_overlap
101 |         self.supported_extensions: List[str] = []
102 |     
103 |     @abstractmethod
104 |     def can_handle(self, file_path: Path) -> bool:
105 |         """
106 |         Check if this loader can handle the given file.
107 |         
108 |         Args:
109 |             file_path: Path to the file to check
110 |             
111 |         Returns:
112 |             True if this loader can process the file
113 |         """
114 |         pass
115 |     
116 |     @abstractmethod
117 |     async def extract_chunks(self, file_path: Path, **kwargs) -> AsyncGenerator[DocumentChunk, None]:
118 |         """
119 |         Extract text chunks from a document.
120 |         
121 |         Args:
122 |             file_path: Path to the document file
123 |             **kwargs: Additional options specific to the loader
124 |             
125 |         Yields:
126 |             DocumentChunk objects containing extracted text and metadata
127 |             
128 |         Raises:
129 |             FileNotFoundError: If the file doesn't exist
130 |             ValueError: If the file format is not supported
131 |             Exception: Other processing errors
132 |         """
133 |         pass
134 |     
135 |     async def validate_file(self, file_path: Path) -> None:
136 |         """
137 |         Validate that a file can be processed.
138 |         
139 |         Args:
140 |             file_path: Path to validate
141 |             
142 |         Raises:
143 |             FileNotFoundError: If file doesn't exist
144 |             ValueError: If file is not supported or invalid
145 |         """
146 |         if not file_path.exists():
147 |             raise FileNotFoundError(f"File not found: {file_path}")
148 |         
149 |         if not file_path.is_file():
150 |             raise ValueError(f"Path is not a file: {file_path}")
151 |         
152 |         if not self.can_handle(file_path):
153 |             raise ValueError(f"File format not supported: {file_path.suffix}")
154 |     
155 |     def get_base_metadata(self, file_path: Path) -> Dict[str, Any]:
156 |         """
157 |         Get base metadata common to all document types.
158 |         
159 |         Args:
160 |             file_path: Path to the file
161 |             
162 |         Returns:
163 |             Dictionary of base metadata
164 |         """
165 |         stat = file_path.stat()
166 |         return {
167 |             'source_file': str(file_path),
168 |             'file_name': file_path.name,
169 |             'file_extension': file_path.suffix.lower(),
170 |             'file_size': stat.st_size,
171 |             'modified_time': datetime.fromtimestamp(stat.st_mtime).isoformat(),
172 |             'loader_type': self.__class__.__name__
173 |         }
```
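
The two abstract methods above (`can_handle` and `extract_chunks`) are the extension points for format-specific loaders. Below is a minimal sketch of a concrete subclass, not part of the repository: it assumes `DocumentLoader` and `DocumentChunk` from the module above are in scope, and it assumes `DocumentChunk` accepts `content`, `metadata`, `chunk_index`, and `source_file` arguments; the `PlainTextLoader` name is illustrative only.

```python
# Illustrative sketch only. Assumes DocumentLoader and DocumentChunk from the
# module above are importable; the DocumentChunk constructor fields used here
# (content, metadata, chunk_index, source_file) are assumptions, not confirmed.
from pathlib import Path
from typing import AsyncGenerator


class PlainTextLoader(DocumentLoader):
    """Hypothetical loader for plain .txt files."""

    def __init__(self, chunk_size: int = 1000, chunk_overlap: int = 200):
        super().__init__(chunk_size, chunk_overlap)
        self.supported_extensions = ['txt']

    def can_handle(self, file_path: Path) -> bool:
        # Only claim files with a .txt extension.
        return file_path.suffix.lower() == '.txt'

    async def extract_chunks(self, file_path: Path, **kwargs) -> AsyncGenerator[DocumentChunk, None]:
        # Reuse the base-class validation and metadata helpers.
        await self.validate_file(file_path)
        base_metadata = self.get_base_metadata(file_path)

        text = file_path.read_text(encoding='utf-8', errors='replace')
        # Slide a window of chunk_size characters, stepping back by chunk_overlap.
        step = max(1, self.chunk_size - self.chunk_overlap)
        for index, start in enumerate(range(0, len(text), step)):
            yield DocumentChunk(
                content=text[start:start + self.chunk_size],
                metadata=base_metadata,
                chunk_index=index,
                source_file=file_path,
            )
```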

--------------------------------------------------------------------------------
/scripts/quality/track_pyscn_metrics.sh:
--------------------------------------------------------------------------------

```bash
  1 | #!/bin/bash
  2 | # scripts/quality/track_pyscn_metrics.sh - Track pyscn metrics over time
  3 | #
  4 | # Usage: bash scripts/quality/track_pyscn_metrics.sh
  5 | #
  6 | # Features:
  7 | # - Run pyscn analysis
  8 | # - Extract metrics to CSV
  9 | # - Store in .pyscn/history/ (gitignored)
 10 | # - Compare to previous run
 11 | # - Alert on regressions (>5 point health score drop)
 12 | 
 13 | set -e
 14 | 
 15 | # Colors for output
 16 | RED='\033[0;31m'
 17 | YELLOW='\033[1;33m'
 18 | GREEN='\033[0;32m'
 19 | BLUE='\033[0;34m'
 20 | NC='\033[0m' # No Color
 21 | 
 22 | echo -e "${BLUE}=== pyscn Metrics Tracking ===${NC}"
 23 | echo ""
 24 | 
 25 | # Check for pyscn
 26 | if ! command -v pyscn &> /dev/null; then
 27 |     echo -e "${RED}❌ pyscn not found${NC}"
 28 |     echo "Install with: pip install pyscn"
 29 |     exit 1
 30 | fi
 31 | 
 32 | # Create history directory
 33 | mkdir -p .pyscn/history
 34 | 
 35 | # Generate timestamp
 36 | TIMESTAMP=$(date +%Y%m%d_%H%M%S)
 37 | DATE_READABLE=$(date +"%Y-%m-%d %H:%M:%S")
 38 | 
 39 | # Run pyscn analysis
 40 | echo "Running pyscn analysis..."
 41 | REPORT_FILE=".pyscn/reports/analyze_${TIMESTAMP}.html"
 42 | 
 43 | if pyscn analyze . --output "$REPORT_FILE" > /tmp/pyscn_metrics.log 2>&1; then
 44 |     echo -e "${GREEN}✓${NC} Analysis complete"
 45 | else
 46 |     echo -e "${RED}❌ Analysis failed${NC}"
 47 |     cat /tmp/pyscn_metrics.log
 48 |     exit 1
 49 | fi
 50 | 
 51 | # Extract metrics from HTML report
 52 | HEALTH_SCORE=$(grep -o 'Health Score: [0-9]*' "$REPORT_FILE" | head -1 | grep -o '[0-9]*' || echo "0")
 53 | COMPLEXITY_SCORE=$(grep -o '<span class="score-value">[0-9]*</span>' "$REPORT_FILE" | head -1 | sed 's/<[^>]*>//g' || echo "0")
 54 | DEAD_CODE_SCORE=$(grep -o '<span class="score-value">[0-9]*</span>' "$REPORT_FILE" | sed -n '2p' | sed 's/<[^>]*>//g' || echo "0")
 55 | DUPLICATION_SCORE=$(grep -o '<span class="score-value">[0-9]*</span>' "$REPORT_FILE" | sed -n '3p' | sed 's/<[^>]*>//g' || echo "0")
 56 | COUPLING_SCORE=$(grep -o '<span class="score-value">[0-9]*</span>' "$REPORT_FILE" | sed -n '4p' | sed 's/<[^>]*>//g' || echo "100")
 57 | DEPENDENCIES_SCORE=$(grep -o '<span class="score-value">[0-9]*</span>' "$REPORT_FILE" | sed -n '5p' | sed 's/<[^>]*>//g' || echo "0")
 58 | ARCHITECTURE_SCORE=$(grep -o '<span class="score-value">[0-9]*</span>' "$REPORT_FILE" | sed -n '6p' | sed 's/<[^>]*>//g' || echo "0")
 59 | 
 60 | AVG_COMPLEXITY=$(grep -o '<div class="metric-value">[0-9.]*</div>' "$REPORT_FILE" | sed -n '3p' | sed 's/<[^>]*>//g' || echo "0")
 61 | MAX_COMPLEXITY=$(grep -o '<div class="metric-value">[0-9]*</div>' "$REPORT_FILE" | sed -n '3p' | sed 's/<[^>]*>//g' || echo "0")
 62 | DUPLICATION_PCT=$(grep -o '<div class="metric-value">[0-9.]*%</div>' "$REPORT_FILE" | head -1 | sed 's/<[^>]*>//g' || echo "0%")
 63 | DEAD_CODE_ISSUES=$(grep -o '<div class="metric-value">[0-9]*</div>' "$REPORT_FILE" | sed -n '4p' | sed 's/<[^>]*>//g' || echo "0")
 64 | 
 65 | echo ""
 66 | echo -e "${BLUE}=== Metrics Extracted ===${NC}"
 67 | echo "Health Score: $HEALTH_SCORE/100"
 68 | echo "Complexity: $COMPLEXITY_SCORE/100 (Avg: $AVG_COMPLEXITY, Max: $MAX_COMPLEXITY)"
 69 | echo "Dead Code: $DEAD_CODE_SCORE/100 ($DEAD_CODE_ISSUES issues)"
 70 | echo "Duplication: $DUPLICATION_SCORE/100 ($DUPLICATION_PCT)"
 71 | echo "Coupling: $COUPLING_SCORE/100"
 72 | echo "Dependencies: $DEPENDENCIES_SCORE/100"
 73 | echo "Architecture: $ARCHITECTURE_SCORE/100"
 74 | echo ""
 75 | 
 76 | # Create CSV file if it doesn't exist
 77 | CSV_FILE=".pyscn/history/metrics.csv"
 78 | if [ ! -f "$CSV_FILE" ]; then
 79 |     echo "timestamp,date,health_score,complexity_score,dead_code_score,duplication_score,coupling_score,dependencies_score,architecture_score,avg_complexity,max_complexity,duplication_pct,dead_code_issues" > "$CSV_FILE"
 80 | fi
 81 | 
 82 | # Append metrics
 83 | echo "$TIMESTAMP,$DATE_READABLE,$HEALTH_SCORE,$COMPLEXITY_SCORE,$DEAD_CODE_SCORE,$DUPLICATION_SCORE,$COUPLING_SCORE,$DEPENDENCIES_SCORE,$ARCHITECTURE_SCORE,$AVG_COMPLEXITY,$MAX_COMPLEXITY,$DUPLICATION_PCT,$DEAD_CODE_ISSUES" >> "$CSV_FILE"
 84 | 
 85 | echo -e "${GREEN}✓${NC} Metrics saved to $CSV_FILE"
 86 | echo ""
 87 | 
 88 | # Compare to previous run
 89 | if [ $(wc -l < "$CSV_FILE") -gt 2 ]; then
 90 |     PREV_HEALTH=$(tail -2 "$CSV_FILE" | head -1 | cut -d',' -f3)
 91 |     PREV_DATE=$(tail -2 "$CSV_FILE" | head -1 | cut -d',' -f2)
 92 | 
 93 |     echo -e "${BLUE}=== Comparison to Previous Run ===${NC}"
 94 |     echo "Previous: $PREV_HEALTH/100 ($(echo "$PREV_DATE" | cut -d' ' -f1))"
 95 |     echo "Current:  $HEALTH_SCORE/100 ($(date +%Y-%m-%d))"
 96 | 
 97 |     DELTA=$((HEALTH_SCORE - PREV_HEALTH))
 98 | 
 99 |     if [ $DELTA -gt 0 ]; then
100 |         echo -e "${GREEN}✅ Improvement: +$DELTA points${NC}"
101 |     elif [ $DELTA -lt 0 ]; then
102 |         ABS_DELTA=${DELTA#-}
103 |         echo -e "${RED}⚠️  Regression: -$ABS_DELTA points${NC}"
104 | 
105 |         # Alert on significant regression (>5 points)
106 |         if [ $ABS_DELTA -gt 5 ]; then
107 |             echo ""
108 |             echo -e "${RED}🚨 ALERT: Significant quality regression detected!${NC}"
109 |             echo "Health score dropped by $ABS_DELTA points since last check."
110 |             echo ""
111 |             echo "Recommended actions:"
112 |             echo "  1. Review recent changes: git log --since='$PREV_DATE'"
113 |             echo "  2. Compare reports: open $REPORT_FILE"
114 |             echo "  3. Create GitHub issue to track regression"
115 |         fi
116 |     else
117 |         echo -e "${BLUE}➡️  No change${NC}"
118 |     fi
119 | else
120 |     echo -e "${BLUE}ℹ️  No previous metrics for comparison (first run)${NC}"
121 | fi
122 | 
123 | echo ""
124 | echo -e "${BLUE}=== Trend Summary ===${NC}"
125 | echo "Total measurements: $(tail -n +2 "$CSV_FILE" | wc -l)"
126 | echo "Average health score: $(awk -F',' 'NR>1 {sum+=$3; count++} END {if(count>0) print int(sum/count); else print 0}' "$CSV_FILE")/100"
127 | echo "Highest: $(awk -F',' 'NR>1 {if($3>max || max=="") max=$3} END {print max}' "$CSV_FILE")/100"
128 | echo "Lowest: $(awk -F',' 'NR>1 {if($3<min || min=="") min=$3} END {print min}' "$CSV_FILE")/100"
129 | echo ""
130 | 
131 | echo -e "${GREEN}✓${NC} Tracking complete"
132 | exit 0
133 | 
```
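
The script appends one row per run to `.pyscn/history/metrics.csv`, using the header row it writes on first run. A minimal sketch for consuming that history (not part of the repository, assuming the default CSV location and the column names shown in the script):

```python
# Sketch: read the CSV produced by track_pyscn_metrics.sh and print the
# health score trend. Column names match the header the script writes.
import csv
from pathlib import Path

CSV_FILE = Path('.pyscn/history/metrics.csv')

with CSV_FILE.open(newline='') as handle:
    rows = list(csv.DictReader(handle))

for row in rows:
    print(f"{row['date']}: health {row['health_score']}/100, "
          f"duplication {row['duplication_pct']}")

# Mirror the script's regression check: compare the last two runs.
if len(rows) >= 2:
    delta = int(rows[-1]['health_score']) - int(rows[-2]['health_score'])
    print(f"Change since previous run: {delta:+d} points")
```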