This is page 2 of 46. Use http://codebase.md/doobidoo/mcp-memory-service?page={x} to view the full context.
# Directory Structure
```
├── .claude
│ ├── agents
│ │ ├── amp-bridge.md
│ │ ├── amp-pr-automator.md
│ │ ├── code-quality-guard.md
│ │ ├── gemini-pr-automator.md
│ │ └── github-release-manager.md
│ ├── commands
│ │ ├── README.md
│ │ ├── refactor-function
│ │ ├── refactor-function-prod
│ │ └── refactor-function.md
│ ├── consolidation-fix-handoff.md
│ ├── consolidation-hang-fix-summary.md
│ ├── directives
│ │ ├── agents.md
│ │ ├── code-quality-workflow.md
│ │ ├── consolidation-details.md
│ │ ├── development-setup.md
│ │ ├── hooks-configuration.md
│ │ ├── memory-first.md
│ │ ├── memory-tagging.md
│ │ ├── pr-workflow.md
│ │ ├── quality-system-details.md
│ │ ├── README.md
│ │ ├── refactoring-checklist.md
│ │ ├── storage-backends.md
│ │ └── version-management.md
│ ├── prompts
│ │ └── hybrid-cleanup-integration.md
│ ├── settings.local.json.backup
│ └── settings.local.json.local
├── .commit-message
├── .coveragerc
├── .dockerignore
├── .env.example
├── .env.sqlite.backup
├── .envnn#
├── .gitattributes
├── .github
│ ├── FUNDING.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── bug_report.yml
│ │ ├── config.yml
│ │ ├── feature_request.yml
│ │ └── performance_issue.yml
│ ├── pull_request_template.md
│ └── workflows
│ ├── bridge-tests.yml
│ ├── CACHE_FIX.md
│ ├── claude-branch-automation.yml
│ ├── claude-code-review.yml
│ ├── claude.yml
│ ├── cleanup-images.yml.disabled
│ ├── dev-setup-validation.yml
│ ├── docker-publish.yml
│ ├── dockerfile-lint.yml
│ ├── LATEST_FIXES.md
│ ├── main-optimized.yml.disabled
│ ├── main.yml
│ ├── publish-and-test.yml
│ ├── publish-dual.yml
│ ├── README_OPTIMIZATION.md
│ ├── release-tag.yml.disabled
│ ├── release.yml
│ ├── roadmap-review-reminder.yml
│ ├── SECRET_CONDITIONAL_FIX.md
│ └── WORKFLOW_FIXES.md
├── .gitignore
├── .mcp.json.backup
├── .mcp.json.template
├── .metrics
│ ├── baseline_cc_install_hooks.txt
│ ├── baseline_mi_install_hooks.txt
│ ├── baseline_nesting_install_hooks.txt
│ ├── BASELINE_REPORT.md
│ ├── COMPLEXITY_COMPARISON.txt
│ ├── QUICK_REFERENCE.txt
│ ├── README.md
│ ├── REFACTORED_BASELINE.md
│ ├── REFACTORING_COMPLETION_REPORT.md
│ └── TRACKING_TABLE.md
├── .pyscn
│ ├── .gitignore
│ └── reports
│ └── analyze_20251123_214224.html
├── AGENTS.md
├── ai-optimized-tool-descriptions.py
├── archive
│ ├── deployment
│ │ ├── deploy_fastmcp_fixed.sh
│ │ ├── deploy_http_with_mcp.sh
│ │ └── deploy_mcp_v4.sh
│ ├── deployment-configs
│ │ ├── empty_config.yml
│ │ └── smithery.yaml
│ ├── development
│ │ └── test_fastmcp.py
│ ├── docs-removed-2025-08-23
│ │ ├── authentication.md
│ │ ├── claude_integration.md
│ │ ├── claude-code-compatibility.md
│ │ ├── claude-code-integration.md
│ │ ├── claude-code-quickstart.md
│ │ ├── claude-desktop-setup.md
│ │ ├── complete-setup-guide.md
│ │ ├── database-synchronization.md
│ │ ├── development
│ │ │ ├── autonomous-memory-consolidation.md
│ │ │ ├── CLEANUP_PLAN.md
│ │ │ ├── CLEANUP_README.md
│ │ │ ├── CLEANUP_SUMMARY.md
│ │ │ ├── dream-inspired-memory-consolidation.md
│ │ │ ├── hybrid-slm-memory-consolidation.md
│ │ │ ├── mcp-milestone.md
│ │ │ ├── multi-client-architecture.md
│ │ │ ├── test-results.md
│ │ │ └── TIMESTAMP_FIX_SUMMARY.md
│ │ ├── distributed-sync.md
│ │ ├── invocation_guide.md
│ │ ├── macos-intel.md
│ │ ├── master-guide.md
│ │ ├── mcp-client-configuration.md
│ │ ├── multi-client-server.md
│ │ ├── service-installation.md
│ │ ├── sessions
│ │ │ └── MCP_ENHANCEMENT_SESSION_MEMORY_v4.1.0.md
│ │ ├── UBUNTU_SETUP.md
│ │ ├── ubuntu.md
│ │ ├── windows-setup.md
│ │ └── windows.md
│ ├── docs-root-cleanup-2025-08-23
│ │ ├── AWESOME_LIST_SUBMISSION.md
│ │ ├── CLOUDFLARE_IMPLEMENTATION.md
│ │ ├── DOCUMENTATION_ANALYSIS.md
│ │ ├── DOCUMENTATION_CLEANUP_PLAN.md
│ │ ├── DOCUMENTATION_CONSOLIDATION_COMPLETE.md
│ │ ├── LITESTREAM_SETUP_GUIDE.md
│ │ ├── lm_studio_system_prompt.md
│ │ ├── PYTORCH_DOWNLOAD_FIX.md
│ │ └── README-ORIGINAL-BACKUP.md
│ ├── investigations
│ │ └── MACOS_HOOKS_INVESTIGATION.md
│ ├── litestream-configs-v6.3.0
│ │ ├── install_service.sh
│ │ ├── litestream_master_config_fixed.yml
│ │ ├── litestream_master_config.yml
│ │ ├── litestream_replica_config_fixed.yml
│ │ ├── litestream_replica_config.yml
│ │ ├── litestream_replica_simple.yml
│ │ ├── litestream-http.service
│ │ ├── litestream.service
│ │ └── requirements-cloudflare.txt
│ ├── release-notes
│ │ └── release-notes-v7.1.4.md
│ └── setup-development
│ ├── README.md
│ ├── setup_consolidation_mdns.sh
│ ├── STARTUP_SETUP_GUIDE.md
│ └── test_service.sh
├── CHANGELOG-HISTORIC.md
├── CHANGELOG.md
├── claude_commands
│ ├── memory-context.md
│ ├── memory-health.md
│ ├── memory-ingest-dir.md
│ ├── memory-ingest.md
│ ├── memory-recall.md
│ ├── memory-search.md
│ ├── memory-store.md
│ ├── README.md
│ └── session-start.md
├── claude-hooks
│ ├── config.json
│ ├── config.template.json
│ ├── CONFIGURATION.md
│ ├── core
│ │ ├── auto-capture-hook.js
│ │ ├── auto-capture-hook.ps1
│ │ ├── memory-retrieval.js
│ │ ├── mid-conversation.js
│ │ ├── permission-request.js
│ │ ├── session-end.js
│ │ ├── session-start.js
│ │ └── topic-change.js
│ ├── debug-pattern-test.js
│ ├── install_claude_hooks_windows.ps1
│ ├── install_hooks.py
│ ├── memory-mode-controller.js
│ ├── MIGRATION.md
│ ├── README-AUTO-CAPTURE.md
│ ├── README-NATURAL-TRIGGERS.md
│ ├── README-PERMISSION-REQUEST.md
│ ├── README-phase2.md
│ ├── README.md
│ ├── simple-test.js
│ ├── statusline.sh
│ ├── test-adaptive-weights.js
│ ├── test-dual-protocol-hook.js
│ ├── test-mcp-hook.js
│ ├── test-natural-triggers.js
│ ├── test-recency-scoring.js
│ ├── tests
│ │ ├── integration-test.js
│ │ ├── phase2-integration-test.js
│ │ ├── test-code-execution.js
│ │ ├── test-cross-session.json
│ │ ├── test-permission-request.js
│ │ ├── test-session-tracking.json
│ │ └── test-threading.json
│ ├── utilities
│ │ ├── adaptive-pattern-detector.js
│ │ ├── auto-capture-patterns.js
│ │ ├── context-formatter.js
│ │ ├── context-shift-detector.js
│ │ ├── conversation-analyzer.js
│ │ ├── dynamic-context-updater.js
│ │ ├── git-analyzer.js
│ │ ├── mcp-client.js
│ │ ├── memory-client.js
│ │ ├── memory-scorer.js
│ │ ├── performance-manager.js
│ │ ├── project-detector.js
│ │ ├── session-cache.json
│ │ ├── session-tracker.js
│ │ ├── tiered-conversation-monitor.js
│ │ ├── user-override-detector.js
│ │ └── version-checker.js
│ └── WINDOWS-SESSIONSTART-BUG.md
├── CLAUDE.md
├── CODE_OF_CONDUCT.md
├── COMMIT_MESSAGE.md
├── CONTRIBUTING.md
├── Development-Sprint-November-2025.md
├── docs
│ ├── amp-cli-bridge.md
│ ├── api
│ │ ├── code-execution-interface.md
│ │ ├── memory-metadata-api.md
│ │ ├── PHASE1_IMPLEMENTATION_SUMMARY.md
│ │ ├── PHASE2_IMPLEMENTATION_SUMMARY.md
│ │ ├── PHASE2_REPORT.md
│ │ └── tag-standardization.md
│ ├── architecture
│ │ ├── graph-database-design.md
│ │ ├── search-enhancement-spec.md
│ │ └── search-examples.md
│ ├── architecture.md
│ ├── archive
│ │ └── obsolete-workflows
│ │ ├── load_memory_context.md
│ │ └── README.md
│ ├── assets
│ │ └── images
│ │ ├── dashboard-v3.3.0-preview.png
│ │ ├── memory-awareness-hooks-example.png
│ │ ├── project-infographic.svg
│ │ └── README.md
│ ├── CLAUDE_CODE_QUICK_REFERENCE.md
│ ├── cloudflare-setup.md
│ ├── demo-recording-script.md
│ ├── deployment
│ │ ├── docker.md
│ │ ├── dual-service.md
│ │ ├── production-guide.md
│ │ └── systemd-service.md
│ ├── development
│ │ ├── ai-agent-instructions.md
│ │ ├── code-quality
│ │ │ ├── phase-2a-completion.md
│ │ │ ├── phase-2a-handle-get-prompt.md
│ │ │ ├── phase-2a-index.md
│ │ │ ├── phase-2a-install-package.md
│ │ │ └── phase-2b-session-summary.md
│ │ ├── code-quality-workflow.md
│ │ ├── dashboard-workflow.md
│ │ ├── issue-management.md
│ │ ├── pr-280-post-mortem.md
│ │ ├── pr-review-guide.md
│ │ ├── refactoring-notes.md
│ │ ├── release-checklist.md
│ │ └── todo-tracker.md
│ ├── docker-optimized-build.md
│ ├── document-ingestion.md
│ ├── DOCUMENTATION_AUDIT.md
│ ├── enhancement-roadmap-issue-14.md
│ ├── examples
│ │ ├── analysis-scripts.js
│ │ ├── maintenance-session-example.md
│ │ ├── memory-distribution-chart.jsx
│ │ ├── quality-system-configs.md
│ │ └── tag-schema.json
│ ├── features
│ │ └── association-quality-boost.md
│ ├── first-time-setup.md
│ ├── glama-deployment.md
│ ├── guides
│ │ ├── advanced-command-examples.md
│ │ ├── chromadb-migration.md
│ │ ├── commands-vs-mcp-server.md
│ │ ├── mcp-enhancements.md
│ │ ├── mdns-service-discovery.md
│ │ ├── memory-consolidation-guide.md
│ │ ├── memory-quality-guide.md
│ │ ├── migration.md
│ │ ├── scripts.md
│ │ └── STORAGE_BACKENDS.md
│ ├── HOOK_IMPROVEMENTS.md
│ ├── hooks
│ │ └── phase2-code-execution-migration.md
│ ├── http-server-management.md
│ ├── ide-compatability.md
│ ├── IMAGE_RETENTION_POLICY.md
│ ├── images
│ │ ├── dashboard-placeholder.md
│ │ └── update-restart-demo.png
│ ├── implementation
│ │ ├── health_checks.md
│ │ └── performance.md
│ ├── IMPLEMENTATION_PLAN_HTTP_SSE.md
│ ├── integration
│ │ ├── homebrew.md
│ │ └── multi-client.md
│ ├── integrations
│ │ ├── gemini.md
│ │ ├── groq-bridge.md
│ │ ├── groq-integration-summary.md
│ │ └── groq-model-comparison.md
│ ├── integrations.md
│ ├── legacy
│ │ └── dual-protocol-hooks.md
│ ├── LIGHTWEIGHT_ONNX_SETUP.md
│ ├── LM_STUDIO_COMPATIBILITY.md
│ ├── maintenance
│ │ └── memory-maintenance.md
│ ├── mastery
│ │ ├── api-reference.md
│ │ ├── architecture-overview.md
│ │ ├── configuration-guide.md
│ │ ├── local-setup-and-run.md
│ │ ├── testing-guide.md
│ │ └── troubleshooting.md
│ ├── migration
│ │ ├── code-execution-api-quick-start.md
│ │ └── graph-migration-guide.md
│ ├── natural-memory-triggers
│ │ ├── cli-reference.md
│ │ ├── installation-guide.md
│ │ └── performance-optimization.md
│ ├── oauth-setup.md
│ ├── pr-graphql-integration.md
│ ├── quality-system-ui-implementation.md
│ ├── quick-setup-cloudflare-dual-environment.md
│ ├── README.md
│ ├── refactoring
│ │ └── phase-3-3-analysis.md
│ ├── releases
│ │ └── v8.72.0-testing.md
│ ├── remote-configuration-wiki-section.md
│ ├── research
│ │ ├── code-execution-interface-implementation.md
│ │ └── code-execution-interface-summary.md
│ ├── ROADMAP.md
│ ├── sqlite-vec-backend.md
│ ├── statistics
│ │ ├── charts
│ │ │ ├── activity_patterns.png
│ │ │ ├── contributors.png
│ │ │ ├── growth_trajectory.png
│ │ │ ├── monthly_activity.png
│ │ │ └── october_sprint.png
│ │ ├── data
│ │ │ ├── activity_by_day.csv
│ │ │ ├── activity_by_hour.csv
│ │ │ ├── contributors.csv
│ │ │ └── monthly_activity.csv
│ │ ├── generate_charts.py
│ │ └── REPOSITORY_STATISTICS.md
│ ├── technical
│ │ ├── development.md
│ │ ├── memory-migration.md
│ │ ├── migration-log.md
│ │ ├── sqlite-vec-embedding-fixes.md
│ │ └── tag-storage.md
│ ├── testing
│ │ └── regression-tests.md
│ ├── testing-cloudflare-backend.md
│ ├── troubleshooting
│ │ ├── cloudflare-api-token-setup.md
│ │ ├── cloudflare-authentication.md
│ │ ├── database-transfer-migration.md
│ │ ├── general.md
│ │ ├── hooks-quick-reference.md
│ │ ├── memory-management.md
│ │ ├── pr162-schema-caching-issue.md
│ │ ├── session-end-hooks.md
│ │ └── sync-issues.md
│ ├── tutorials
│ │ ├── advanced-techniques.md
│ │ ├── data-analysis.md
│ │ └── demo-session-walkthrough.md
│ ├── wiki-documentation-plan.md
│ └── wiki-Graph-Database-Architecture.md
├── examples
│ ├── claude_desktop_config_template.json
│ ├── claude_desktop_config_windows.json
│ ├── claude-desktop-http-config.json
│ ├── config
│ │ └── claude_desktop_config.json
│ ├── http-mcp-bridge.js
│ ├── memory_export_template.json
│ ├── README.md
│ ├── setup
│ │ └── setup_multi_client_complete.py
│ └── start_https_example.sh
├── IMPLEMENTATION_SUMMARY.md
├── install_service.py
├── install.py
├── LICENSE
├── NOTICE
├── PR_DESCRIPTION.md
├── pyproject-lite.toml
├── pyproject.toml
├── pytest.ini
├── README.md
├── release-notes-v8.61.0.md
├── run_server.py
├── scripts
│ ├── .claude
│ │ └── settings.local.json
│ ├── archive
│ │ └── check_missing_timestamps.py
│ ├── backup
│ │ ├── backup_memories.py
│ │ ├── backup_sqlite_vec.sh
│ │ ├── export_distributable_memories.sh
│ │ └── restore_memories.py
│ ├── benchmarks
│ │ ├── benchmark_code_execution_api.py
│ │ ├── benchmark_hybrid_sync.py
│ │ └── benchmark_server_caching.py
│ ├── ci
│ │ ├── check_dockerfile_args.sh
│ │ └── validate_imports.sh
│ ├── database
│ │ ├── analyze_sqlite_vec_db.py
│ │ ├── check_sqlite_vec_status.py
│ │ ├── db_health_check.py
│ │ └── simple_timestamp_check.py
│ ├── development
│ │ ├── debug_server_initialization.py
│ │ ├── find_orphaned_files.py
│ │ ├── fix_mdns.sh
│ │ ├── fix_sitecustomize.py
│ │ ├── remote_ingest.sh
│ │ ├── setup-git-merge-drivers.sh
│ │ ├── uv-lock-merge.sh
│ │ └── verify_hybrid_sync.py
│ ├── hooks
│ │ └── pre-commit
│ ├── installation
│ │ ├── install_linux_service.py
│ │ ├── install_macos_service.py
│ │ ├── install_uv.py
│ │ ├── install_windows_service.py
│ │ ├── install.py
│ │ ├── setup_backup_cron.sh
│ │ ├── setup_claude_mcp.sh
│ │ └── setup_cloudflare_resources.py
│ ├── linux
│ │ ├── service_status.sh
│ │ ├── start_service.sh
│ │ ├── stop_service.sh
│ │ ├── uninstall_service.sh
│ │ └── view_logs.sh
│ ├── maintenance
│ │ ├── add_project_tags.py
│ │ ├── apply_quality_boost_retroactively.py
│ │ ├── assign_memory_types.py
│ │ ├── auto_retag_memory_merge.py
│ │ ├── auto_retag_memory.py
│ │ ├── backfill_graph_table.py
│ │ ├── check_memory_types.py
│ │ ├── cleanup_association_memories_hybrid.py
│ │ ├── cleanup_association_memories.py
│ │ ├── cleanup_corrupted_encoding.py
│ │ ├── cleanup_low_quality.py
│ │ ├── cleanup_memories.py
│ │ ├── cleanup_organize.py
│ │ ├── consolidate_memory_types.py
│ │ ├── consolidation_mappings.json
│ │ ├── delete_orphaned_vectors_fixed.py
│ │ ├── delete_test_memories.py
│ │ ├── fast_cleanup_duplicates_with_tracking.sh
│ │ ├── find_all_duplicates.py
│ │ ├── find_cloudflare_duplicates.py
│ │ ├── find_duplicates.py
│ │ ├── memory-types.md
│ │ ├── README.md
│ │ ├── recover_timestamps_from_cloudflare.py
│ │ ├── regenerate_embeddings.py
│ │ ├── repair_malformed_tags.py
│ │ ├── repair_memories.py
│ │ ├── repair_sqlite_vec_embeddings.py
│ │ ├── repair_zero_embeddings.py
│ │ ├── restore_from_json_export.py
│ │ ├── retag_valuable_memories.py
│ │ ├── scan_todos.sh
│ │ ├── soft_delete_test_memories.py
│ │ └── sync_status.py
│ ├── migration
│ │ ├── cleanup_mcp_timestamps.py
│ │ ├── legacy
│ │ │ └── migrate_chroma_to_sqlite.py
│ │ ├── mcp-migration.py
│ │ ├── migrate_sqlite_vec_embeddings.py
│ │ ├── migrate_storage.py
│ │ ├── migrate_tags.py
│ │ ├── migrate_timestamps.py
│ │ ├── migrate_to_cloudflare.py
│ │ ├── migrate_to_sqlite_vec.py
│ │ ├── migrate_v5_enhanced.py
│ │ ├── TIMESTAMP_CLEANUP_README.md
│ │ └── verify_mcp_timestamps.py
│ ├── pr
│ │ ├── amp_collect_results.sh
│ │ ├── amp_detect_breaking_changes.sh
│ │ ├── amp_generate_tests.sh
│ │ ├── amp_pr_review.sh
│ │ ├── amp_quality_gate.sh
│ │ ├── amp_suggest_fixes.sh
│ │ ├── auto_review.sh
│ │ ├── detect_breaking_changes.sh
│ │ ├── generate_tests.sh
│ │ ├── lib
│ │ │ └── graphql_helpers.sh
│ │ ├── pre_pr_check.sh
│ │ ├── quality_gate.sh
│ │ ├── resolve_threads.sh
│ │ ├── run_pyscn_analysis.sh
│ │ ├── run_quality_checks_on_files.sh
│ │ ├── run_quality_checks.sh
│ │ ├── thread_status.sh
│ │ └── watch_reviews.sh
│ ├── quality
│ │ ├── bulk_evaluate_onnx.py
│ │ ├── check_test_scores.py
│ │ ├── debug_deberta_scoring.py
│ │ ├── export_deberta_onnx.py
│ │ ├── fix_dead_code_install.sh
│ │ ├── migrate_to_deberta.py
│ │ ├── phase1_dead_code_analysis.md
│ │ ├── phase2_complexity_analysis.md
│ │ ├── README_PHASE1.md
│ │ ├── README_PHASE2.md
│ │ ├── rescore_deberta.py
│ │ ├── rescore_fallback.py
│ │ ├── reset_onnx_scores.py
│ │ ├── track_pyscn_metrics.sh
│ │ └── weekly_quality_review.sh
│ ├── README.md
│ ├── run
│ │ ├── memory_wrapper_cleanup.ps1
│ │ ├── memory_wrapper_cleanup.py
│ │ ├── memory_wrapper_cleanup.sh
│ │ ├── README_CLEANUP_WRAPPER.md
│ │ ├── run_mcp_memory.sh
│ │ ├── run-with-uv.sh
│ │ └── start_sqlite_vec.sh
│ ├── run_memory_server.py
│ ├── server
│ │ ├── check_http_server.py
│ │ ├── check_server_health.py
│ │ ├── memory_offline.py
│ │ ├── preload_models.py
│ │ ├── run_http_server.py
│ │ ├── run_memory_server.py
│ │ ├── start_http_server.bat
│ │ └── start_http_server.sh
│ ├── service
│ │ ├── deploy_dual_services.sh
│ │ ├── http_server_manager.sh
│ │ ├── install_http_service.sh
│ │ ├── mcp-memory-http.service
│ │ ├── mcp-memory.service
│ │ ├── memory_service_manager.sh
│ │ ├── service_control.sh
│ │ ├── service_utils.py
│ │ ├── update_service.sh
│ │ └── windows
│ │ ├── add_watchdog_trigger.ps1
│ │ ├── install_scheduled_task.ps1
│ │ ├── manage_service.ps1
│ │ ├── run_http_server_background.ps1
│ │ ├── uninstall_scheduled_task.ps1
│ │ └── update_and_restart.ps1
│ ├── setup-lightweight.sh
│ ├── sync
│ │ ├── check_drift.py
│ │ ├── claude_sync_commands.py
│ │ ├── export_memories.py
│ │ ├── import_memories.py
│ │ ├── litestream
│ │ │ ├── apply_local_changes.sh
│ │ │ ├── enhanced_memory_store.sh
│ │ │ ├── init_staging_db.sh
│ │ │ ├── io.litestream.replication.plist
│ │ │ ├── manual_sync.sh
│ │ │ ├── memory_sync.sh
│ │ │ ├── pull_remote_changes.sh
│ │ │ ├── push_to_remote.sh
│ │ │ ├── README.md
│ │ │ ├── resolve_conflicts.sh
│ │ │ ├── setup_local_litestream.sh
│ │ │ ├── setup_remote_litestream.sh
│ │ │ ├── staging_db_init.sql
│ │ │ ├── stash_local_changes.sh
│ │ │ ├── sync_from_remote_noconfig.sh
│ │ │ └── sync_from_remote.sh
│ │ ├── README.md
│ │ ├── safe_cloudflare_update.sh
│ │ ├── sync_memory_backends.py
│ │ └── sync_now.py
│ ├── testing
│ │ ├── run_complete_test.py
│ │ ├── run_memory_test.sh
│ │ ├── simple_test.py
│ │ ├── test_cleanup_logic.py
│ │ ├── test_cloudflare_backend.py
│ │ ├── test_docker_functionality.py
│ │ ├── test_installation.py
│ │ ├── test_mdns.py
│ │ ├── test_memory_api.py
│ │ ├── test_memory_simple.py
│ │ ├── test_migration.py
│ │ ├── test_search_api.py
│ │ ├── test_sqlite_vec_embeddings.py
│ │ ├── test_sse_events.py
│ │ ├── test-connection.py
│ │ └── test-hook.js
│ ├── update_and_restart.sh
│ ├── utils
│ │ ├── claude_commands_utils.py
│ │ ├── detect_platform.py
│ │ ├── generate_personalized_claude_md.sh
│ │ ├── groq
│ │ ├── groq_agent_bridge.py
│ │ ├── list-collections.py
│ │ ├── memory_wrapper_uv.py
│ │ ├── query_memories.py
│ │ ├── README_detect_platform.md
│ │ ├── smithery_wrapper.py
│ │ ├── test_groq_bridge.sh
│ │ └── uv_wrapper.py
│ └── validation
│ ├── check_dev_setup.py
│ ├── check_documentation_links.py
│ ├── check_handler_coverage.py
│ ├── diagnose_backend_config.py
│ ├── validate_configuration_complete.py
│ ├── validate_graph_tools.py
│ ├── validate_memories.py
│ ├── validate_migration.py
│ ├── validate_timestamp_integrity.py
│ ├── verify_environment.py
│ ├── verify_pytorch_windows.py
│ └── verify_torch.py
├── SECURITY.md
├── selective_timestamp_recovery.py
├── SPONSORS.md
├── src
│ └── mcp_memory_service
│ ├── __init__.py
│ ├── _version.py
│ ├── api
│ │ ├── __init__.py
│ │ ├── client.py
│ │ ├── operations.py
│ │ ├── sync_wrapper.py
│ │ └── types.py
│ ├── backup
│ │ ├── __init__.py
│ │ └── scheduler.py
│ ├── cli
│ │ ├── __init__.py
│ │ ├── ingestion.py
│ │ ├── main.py
│ │ └── utils.py
│ ├── config.py
│ ├── consolidation
│ │ ├── __init__.py
│ │ ├── associations.py
│ │ ├── base.py
│ │ ├── clustering.py
│ │ ├── compression.py
│ │ ├── consolidator.py
│ │ ├── decay.py
│ │ ├── forgetting.py
│ │ ├── health.py
│ │ └── scheduler.py
│ ├── dependency_check.py
│ ├── discovery
│ │ ├── __init__.py
│ │ ├── client.py
│ │ └── mdns_service.py
│ ├── embeddings
│ │ ├── __init__.py
│ │ └── onnx_embeddings.py
│ ├── ingestion
│ │ ├── __init__.py
│ │ ├── base.py
│ │ ├── chunker.py
│ │ ├── csv_loader.py
│ │ ├── json_loader.py
│ │ ├── pdf_loader.py
│ │ ├── registry.py
│ │ ├── semtools_loader.py
│ │ └── text_loader.py
│ ├── lm_studio_compat.py
│ ├── mcp_server.py
│ ├── models
│ │ ├── __init__.py
│ │ └── memory.py
│ ├── quality
│ │ ├── __init__.py
│ │ ├── ai_evaluator.py
│ │ ├── async_scorer.py
│ │ ├── config.py
│ │ ├── implicit_signals.py
│ │ ├── metadata_codec.py
│ │ ├── onnx_ranker.py
│ │ └── scorer.py
│ ├── server
│ │ ├── __init__.py
│ │ ├── __main__.py
│ │ ├── cache_manager.py
│ │ ├── client_detection.py
│ │ ├── environment.py
│ │ ├── handlers
│ │ │ ├── __init__.py
│ │ │ ├── consolidation.py
│ │ │ ├── documents.py
│ │ │ ├── graph.py
│ │ │ ├── memory.py
│ │ │ ├── quality.py
│ │ │ └── utility.py
│ │ └── logging_config.py
│ ├── server_impl.py
│ ├── services
│ │ ├── __init__.py
│ │ └── memory_service.py
│ ├── storage
│ │ ├── __init__.py
│ │ ├── base.py
│ │ ├── cloudflare.py
│ │ ├── factory.py
│ │ ├── graph.py
│ │ ├── http_client.py
│ │ ├── hybrid.py
│ │ ├── migrations
│ │ │ └── 008_add_graph_table.sql
│ │ └── sqlite_vec.py
│ ├── sync
│ │ ├── __init__.py
│ │ ├── exporter.py
│ │ ├── importer.py
│ │ └── litestream_config.py
│ ├── utils
│ │ ├── __init__.py
│ │ ├── cache_manager.py
│ │ ├── content_splitter.py
│ │ ├── db_utils.py
│ │ ├── debug.py
│ │ ├── directory_ingestion.py
│ │ ├── document_processing.py
│ │ ├── gpu_detection.py
│ │ ├── hashing.py
│ │ ├── health_check.py
│ │ ├── http_server_manager.py
│ │ ├── port_detection.py
│ │ ├── quality_analytics.py
│ │ ├── startup_orchestrator.py
│ │ ├── system_detection.py
│ │ └── time_parser.py
│ └── web
│ ├── __init__.py
│ ├── api
│ │ ├── __init__.py
│ │ ├── analytics.py
│ │ ├── backup.py
│ │ ├── consolidation.py
│ │ ├── documents.py
│ │ ├── events.py
│ │ ├── health.py
│ │ ├── manage.py
│ │ ├── mcp.py
│ │ ├── memories.py
│ │ ├── quality.py
│ │ ├── search.py
│ │ └── sync.py
│ ├── app.py
│ ├── dependencies.py
│ ├── oauth
│ │ ├── __init__.py
│ │ ├── authorization.py
│ │ ├── discovery.py
│ │ ├── middleware.py
│ │ ├── models.py
│ │ ├── registration.py
│ │ └── storage.py
│ ├── sse.py
│ └── static
│ ├── app.js
│ ├── i18n
│ │ ├── de.json
│ │ ├── en.json
│ │ ├── es.json
│ │ ├── fr.json
│ │ ├── ja.json
│ │ ├── ko.json
│ │ └── zh.json
│ ├── index.html
│ ├── README.md
│ ├── sse_test.html
│ └── style.css
├── start_http_debug.bat
├── start_http_server.sh
├── test_document.txt
├── test_version_checker.js
├── TESTING_NOTES.md
├── tests
│ ├── __init__.py
│ ├── api
│ │ ├── __init__.py
│ │ ├── test_compact_types.py
│ │ └── test_operations.py
│ ├── bridge
│ │ ├── mock_responses.js
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ └── test_http_mcp_bridge.js
│ ├── conftest.py
│ ├── consolidation
│ │ ├── __init__.py
│ │ ├── conftest.py
│ │ ├── test_associations.py
│ │ ├── test_clustering.py
│ │ ├── test_compression.py
│ │ ├── test_consolidator.py
│ │ ├── test_decay.py
│ │ ├── test_forgetting.py
│ │ └── test_graph_modes.py
│ ├── contracts
│ │ └── api-specification.yml
│ ├── integration
│ │ ├── conftest.py
│ │ ├── HANDLER_COVERAGE_REPORT.md
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── test_all_memory_handlers.py
│ │ ├── test_api_key_fallback.py
│ │ ├── test_api_memories_chronological.py
│ │ ├── test_api_tag_time_search.py
│ │ ├── test_api_with_memory_service.py
│ │ ├── test_bridge_integration.js
│ │ ├── test_cli_interfaces.py
│ │ ├── test_cloudflare_connection.py
│ │ ├── test_concurrent_clients.py
│ │ ├── test_data_serialization_consistency.py
│ │ ├── test_http_server_startup.py
│ │ ├── test_mcp_memory.py
│ │ ├── test_mdns_integration.py
│ │ ├── test_oauth_basic_auth.py
│ │ ├── test_oauth_flow.py
│ │ ├── test_server_handlers.py
│ │ └── test_store_memory.py
│ ├── performance
│ │ ├── test_background_sync.py
│ │ └── test_hybrid_live.py
│ ├── README.md
│ ├── smithery
│ │ └── test_smithery.py
│ ├── sqlite
│ │ └── simple_sqlite_vec_test.py
│ ├── storage
│ │ ├── conftest.py
│ │ └── test_graph_storage.py
│ ├── test_client.py
│ ├── test_content_splitting.py
│ ├── test_database.py
│ ├── test_deberta_quality.py
│ ├── test_fallback_quality.py
│ ├── test_graph_traversal.py
│ ├── test_hybrid_cloudflare_limits.py
│ ├── test_hybrid_storage.py
│ ├── test_lightweight_onnx.py
│ ├── test_memory_ops.py
│ ├── test_memory_wrapper_cleanup.py
│ ├── test_quality_integration.py
│ ├── test_quality_system.py
│ ├── test_semantic_search.py
│ ├── test_sqlite_vec_storage.py
│ ├── test_time_parser.py
│ ├── test_timestamp_preservation.py
│ ├── timestamp
│ │ ├── test_hook_vs_manual_storage.py
│ │ ├── test_issue99_final_validation.py
│ │ ├── test_search_retrieval_inconsistency.py
│ │ ├── test_timestamp_issue.py
│ │ └── test_timestamp_simple.py
│ └── unit
│ ├── conftest.py
│ ├── test_cloudflare_storage.py
│ ├── test_csv_loader.py
│ ├── test_fastapi_dependencies.py
│ ├── test_import.py
│ ├── test_imports.py
│ ├── test_json_loader.py
│ ├── test_mdns_simple.py
│ ├── test_mdns.py
│ ├── test_memory_service.py
│ ├── test_memory.py
│ ├── test_semtools_loader.py
│ ├── test_storage_interface_compatibility.py
│ ├── test_tag_time_filtering.py
│ └── test_uv_no_pip_installer_fallback.py
├── tools
│ ├── docker
│ │ ├── DEPRECATED.md
│ │ ├── docker-compose.http.yml
│ │ ├── docker-compose.pythonpath.yml
│ │ ├── docker-compose.standalone.yml
│ │ ├── docker-compose.uv.yml
│ │ ├── docker-compose.yml
│ │ ├── docker-entrypoint-persistent.sh
│ │ ├── docker-entrypoint-unified.sh
│ │ ├── docker-entrypoint.sh
│ │ ├── Dockerfile
│ │ ├── Dockerfile.glama
│ │ ├── Dockerfile.slim
│ │ ├── README.md
│ │ └── test-docker-modes.sh
│ └── README.md
├── uv.lock
└── verify_compression.sh
```
# Files
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
```markdown
# Contributing to MCP Memory Service
Thank you for your interest in contributing to MCP Memory Service! 🎉
This project provides semantic memory and persistent storage for AI assistants through the Model Context Protocol. We welcome contributions of all kinds - from bug fixes and features to documentation and testing.
## Table of Contents
- [Code of Conduct](#code-of-conduct)
- [Ways to Contribute](#ways-to-contribute)
- [Getting Started](#getting-started)
- [Development Process](#development-process)
- [Coding Standards](#coding-standards)
- [Testing Requirements](#testing-requirements)
- [Documentation](#documentation)
- [Submitting Changes](#submitting-changes)
- [Reporting Issues](#reporting-issues)
- [Community & Support](#community--support)
- [Recognition](#recognition)
## Code of Conduct
We are committed to providing a welcoming and inclusive environment for all contributors. Please:
- Be respectful and considerate in all interactions
- Welcome newcomers and help them get started
- Focus on constructive criticism and collaborative problem-solving
- Respect differing viewpoints and experiences
- Avoid harassment, discrimination, or inappropriate behavior
## Ways to Contribute
### 🐛 Bug Reports
Help us identify and fix issues by reporting bugs with detailed information.
### ✨ Feature Requests
Suggest new features or improvements to existing functionality.
### 📝 Documentation
Improve README, Wiki pages, code comments, or API documentation.
### 🧪 Testing
Write tests, improve test coverage, or help with manual testing.
### 💻 Code Contributions
Fix bugs, implement features, or improve performance.
### 🌍 Translations
Help make the project accessible to more users (future goal).
### 💬 Community Support
Answer questions in Issues, Discussions, or help other users.
## Getting Started
### Prerequisites
- Python 3.10 or higher
- Git
- Platform-specific requirements:
  - **macOS**: Homebrew Python recommended for SQLite extension support
  - **Windows**: Visual Studio Build Tools for some dependencies
  - **Linux**: Build essentials package
### Setting Up Your Development Environment
1. **Fork the repository** on GitHub
2. **Clone your fork**:
```bash
git clone https://github.com/YOUR_USERNAME/mcp-memory-service.git
cd mcp-memory-service
```
3. **Install dependencies**:
```bash
python install.py
```
This will automatically detect your platform and install appropriate dependencies.
4. **Verify installation**:
```bash
python scripts/validation/verify_environment.py
```
5. **Run the service**:
```bash
uv run memory server
```
6. **Test with MCP Inspector** (optional):
```bash
npx @modelcontextprotocol/inspector uv run memory server
```
### Alternative: Docker Setup
For a containerized environment:
```bash
cd tools/docker
docker-compose up -d                             # MCP mode
docker-compose -f docker-compose.http.yml up -d  # HTTP API mode
```
## Development Process
### 1. Create a Feature Branch
```bash
git checkout -b feature/your-feature-name
# or
git checkout -b fix/issue-description
```
Use descriptive branch names:
- `feature/` for new features
- `fix/` for bug fixes
- `docs/` for documentation
- `test/` for test improvements
- `refactor/` for code refactoring
### 2. Make Your Changes
- Write clean, readable code
- Follow the coding standards (see below)
- Add/update tests as needed
- Update documentation if applicable
- Keep commits focused and atomic
### 3. Test Your Changes
```bash
# Run all tests
pytest tests/
# Run specific test file
pytest tests/test_memory_ops.py
# Run with coverage
pytest --cov=mcp_memory_service tests/
```
### 4. Commit Your Changes
Use semantic commit messages:
```bash
git commit -m "feat: add memory export functionality"
git commit -m "fix: resolve timezone handling in memory search"
git commit -m "docs: update installation guide for Windows"
git commit -m "test: add coverage for storage backends"
```
Format: `<type>: <description>`
Types:
- `feat`: New feature
- `fix`: Bug fix
- `docs`: Documentation changes
- `test`: Test additions or changes
- `refactor`: Code refactoring
- `perf`: Performance improvements
- `chore`: Maintenance tasks
### 5. Push to Your Fork
```bash
git push origin your-branch-name
```
### 6. Create a Pull Request
Open a PR from your fork to the main repository with:
- Clear title describing the change
- Description of what and why
- Reference to any related issues
- Screenshots/examples if applicable
## Coding Standards
### Python Style Guide
- Follow PEP 8 with these modifications:
- Line length: 88 characters (Black formatter default)
- Use double quotes for strings
- Use type hints for all function signatures
- Write descriptive variable and function names
- Add docstrings to all public functions/classes (Google style)
### Code Organization
```python
# Import order: standard library, then third-party, then local modules
import asyncio
from typing import Any, Dict, List, Optional  # type hints

# (third-party imports go here)

from mcp_memory_service.models import memory

# Async functions with type hints and a Google-style docstring
async def process_memory(content: str) -> Dict[str, Any]:
    """Process and store memory content.

    Args:
        content: The memory content to process

    Returns:
        Dictionary containing memory metadata
    """
    ...  # implementation
```
### Error Handling
- Use specific exception types
- Provide helpful error messages
- Log errors appropriately
- Never silently fail
```python
try:
    result = await storage.store(memory)
except StorageError as e:
    logger.error(f"Failed to store memory: {e}")
    raise MemoryServiceError(f"Storage operation failed: {e}") from e
```
## Testing Requirements
### Writing Tests
- Place tests in `tests/` directory
- Name test files with `test_` prefix
- Use descriptive test names
- Include both positive and negative test cases
- Mock external dependencies
Example test:
```python
import pytest

from mcp_memory_service.storage import SqliteVecStorage

@pytest.mark.asyncio
async def test_store_memory_success():
    """Test successful memory storage."""
    storage = SqliteVecStorage(":memory:")
    result = await storage.store("test content", tags=["test"])
    assert result is not None
    assert "hash" in result
```
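Since the guidelines call for mocking external dependencies, here is a minimal sketch using `unittest.mock.AsyncMock` (the storage interface shown is assumed for illustration):
```python
from unittest.mock import AsyncMock

import pytest

@pytest.mark.asyncio
async def test_store_memory_mocked_backend():
    """External storage calls can be mocked instead of hitting a real backend."""
    storage = AsyncMock()
    storage.store.return_value = {"hash": "abc123"}
    result = await storage.store("test content", tags=["test"])
    storage.store.assert_awaited_once()
    assert "hash" in result
```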
### Test Coverage
- Aim for >80% code coverage
- Focus on critical paths and edge cases
- Test error handling scenarios
- Include integration tests where appropriate
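A single command can enforce the coverage target locally (the threshold below mirrors the >80% goal above; the flags are provided by `pytest-cov`):
```bash
# Fail the run if coverage drops below the 80% target
pytest --cov=mcp_memory_service --cov-report=term-missing --cov-fail-under=80 tests/
```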
## Documentation
### Code Documentation
- Add docstrings to all public APIs
- Include type hints
- Provide usage examples in docstrings
- Keep comments concise and relevant
### Project Documentation
When adding features or making significant changes:
1. Update README.md if needed
2. Add/update Wiki pages for detailed guides
3. Update CHANGELOG.md following Keep a Changelog format
4. Update AGENTS.md or CLAUDE.md if development workflow changes
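For reference, a minimal entry in Keep a Changelog format looks like:
```markdown
## [Unreleased]
### Added
- Short description of the new feature (#123)
### Fixed
- Short description of the bug fix
```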
**Advanced Workflow Automation**:
- See [Context Provider Workflow Automation](https://github.com/doobidoo/mcp-memory-service/wiki/Context-Provider-Workflow-Automation) for automating development workflows with intelligent patterns
### API Documentation
- Document new MCP tools in `docs/api/tools.md`
- Include parameter descriptions and examples
- Note any breaking changes
## Submitting Changes
### Pull Request Guidelines
1. **PR Title**: Use semantic format (e.g., "feat: add batch memory operations")
2. **PR Description Template**:
```markdown
## Description
Brief description of changes
## Motivation
Why these changes are needed
## Changes
- List of specific changes
- Breaking changes (if any)
## Testing
- How you tested the changes
- Test coverage added
## Screenshots
(if applicable)
## Related Issues
Fixes #123
```
3. **PR Checklist**:
   - [ ] Tests pass locally
   - [ ] Code follows style guidelines
   - [ ] Documentation updated
   - [ ] CHANGELOG.md updated
   - [ ] No sensitive data exposed
### Review Process
- PRs require at least one review
- Address review feedback promptly
- Keep discussions focused and constructive
- Be patient - reviews may take a few days
## Reporting Issues
### Bug Reports
When reporting bugs, include:
1. **Environment**:
   - OS and version
   - Python version
   - MCP Memory Service version
   - Installation method (pip, Docker, source)
2. **Steps to Reproduce**:
   - Minimal code example
   - Exact commands run
   - Configuration used
3. **Expected vs Actual Behavior**:
   - What you expected to happen
   - What actually happened
   - Error messages/stack traces
4. **Additional Context**:
   - Screenshots if applicable
   - Relevant log output
   - Related issues
### Feature Requests
For feature requests, describe:
- The problem you're trying to solve
- Your proposed solution
- Alternative approaches considered
- Potential impact on existing functionality
## Community & Support
### Getting Help
- **Documentation**: Check the [Wiki](https://github.com/doobidoo/mcp-memory-service/wiki) first
- **Issues**: Search existing [issues](https://github.com/doobidoo/mcp-memory-service/issues) before creating new ones
- **Discussions**: Use [GitHub Discussions](https://github.com/doobidoo/mcp-memory-service/discussions) for questions
- **Response Time**: Maintainers typically respond within 2-3 days
### Communication Channels
- **GitHub Issues**: Bug reports and feature requests
- **GitHub Discussions**: General questions and community discussion
- **Pull Requests**: Code contributions and reviews
### For AI Agents
If you're an AI coding assistant, also check:
- [AGENTS.md](AGENTS.md) - Generic AI agent instructions
- [CLAUDE.md](CLAUDE.md) - Claude-specific guidelines
- [Context Provider Workflow Automation](https://github.com/doobidoo/mcp-memory-service/wiki/Context-Provider-Workflow-Automation) - Automate development workflows with intelligent patterns
## Recognition
We value all contributions! Contributors are:
- Listed in release notes for their contributions
- Mentioned in CHANGELOG.md entries
- Credited in commit messages when providing fixes/solutions
- Welcome to add themselves to a CONTRIBUTORS file (future)
### Types of Recognition
- 🐛 Bug reporters who provide detailed, reproducible issues
- 💻 Code contributors who submit PRs
- 📝 Documentation improvers
- 🧪 Test writers and reviewers
- 💬 Community helpers who support other users
- 🎨 UI/UX improvers (for dashboard contributions)
---
Thank you for contributing to MCP Memory Service! Your efforts help make AI assistants more capable and useful for everyone. 🚀
If you have questions not covered here, please open a [Discussion](https://github.com/doobidoo/mcp-memory-service/discussions) or check our [Wiki](https://github.com/doobidoo/mcp-memory-service/wiki).
```
--------------------------------------------------------------------------------
/CLAUDE.md:
--------------------------------------------------------------------------------
```markdown
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with this MCP Memory Service repository.
> **📝 Personal Customizations**: You can create `CLAUDE.local.md` (gitignored) for personal notes, custom workflows, or environment-specific instructions. This file contains shared project conventions.
> **Information Lookup**: Files first, memory second, user last. See [`.claude/directives/memory-first.md`](.claude/directives/memory-first.md) for strategy. Comprehensive project context stored in memory with tags `claude-code-reference`.
## 🔴 Critical Directives
**IMPORTANT**: Before working with this project, read:
- **`.claude/directives/memory-tagging.md`** - MANDATORY: Always tag memories with `mcp-memory-service` as first tag
- **`.claude/directives/README.md`** - Additional topic-specific directives
## ⚡ Quick Update & Restart (RECOMMENDED)
**ALWAYS use these scripts after git pull to update dependencies and restart server:**
```bash
# macOS/Linux - One command, <2 minutes
./scripts/update_and_restart.sh
# Windows PowerShell
.\scripts\service\windows\update_and_restart.ps1
```
**Why?** These scripts automate the complete update workflow:
- ✅ Git pull + auto-stash uncommitted changes
- ✅ Install dependencies (editable mode: `pip install -e .`)
- ✅ Restart HTTP server with version verification
- ✅ Health check (ensures new version is running)
**Without these scripts**, you risk running stale code (a common mistake: forgetting `pip install -e .` after a pull).
See [Essential Commands](#essential-commands) for options (--no-restart, --force).
## Overview
MCP Memory Service is a Model Context Protocol server providing semantic memory and persistent storage for Claude Desktop with SQLite-vec, Cloudflare, and Hybrid storage backends.
> **🆕 v8.76.0**: **Official Lite Distribution Support** - New `mcp-memory-service-lite` package with 90% size reduction (7.7GB → 805MB), automated dual publishing workflow, conditional transformers loading, and multi-protocol port detection fixes (HTTP/HTTPS health checks, cross-platform fallback chain: lsof → ss → netstat → ps). See [CHANGELOG.md](CHANGELOG.md) for full version history.
>
> **Note**: When releasing new versions, update this line with current version + brief description. Use `.claude/agents/github-release-manager.md` agent for complete release workflow.
## Essential Commands
**Most Used:**
- `./scripts/update_and_restart.sh` - Update & restart (ALWAYS after git pull)
- `curl http://127.0.0.1:8000/api/health` - Health check
- `bash scripts/pr/pre_pr_check.sh` - Pre-PR validation (MANDATORY)
- `curl -X POST http://127.0.0.1:8000/api/consolidation/trigger -H "Content-Type: application/json" -d '{"time_horizon":"weekly"}'` - Trigger consolidation
**Full command reference:** [scripts/README.md](scripts/README.md)
## Architecture
**Core Components:**
- **Server Layer**: MCP protocol with async handlers, global caches (`src/mcp_memory_service/server.py:1`)
- **Storage Backends**: SQLite-Vec (5ms reads), Cloudflare (edge), Hybrid (local + cloud sync)
- **Web Interface**: FastAPI dashboard at `http://127.0.0.1:8000/` with REST API
- **Document Ingestion**: PDF, DOCX, PPTX loaders (see [docs/document-ingestion.md](docs/document-ingestion.md))
- **Memory Hooks**: Natural Memory Triggers v7.1.3+ with 85%+ accuracy (see below)
**Utility Modules** (v8.61.0 - Phase 3 Refactoring):
- `utils/health_check.py` - Strategy Pattern for backend health checks (5 strategies)
- `utils/startup_orchestrator.py` - Orchestrator Pattern for server startup (3 orchestrators)
- `utils/directory_ingestion.py` - Processor Pattern for file ingestion (3 processors)
- `utils/quality_analytics.py` - Analyzer Pattern for quality distribution (3 analyzers)
**Key Patterns:**
- Async/await for I/O, type safety (Python 3.10+), platform hardware optimization (CUDA/MPS/DirectML/ROCm)
- Design Patterns: Strategy, Orchestrator, Processor, Analyzer (all complexity A-B grade)
## Document Ingestion
Supports PDF, DOCX, PPTX, TXT/MD with optional [semtools](https://github.com/run-llama/semtools) for enhanced quality.
```bash
claude /memory-ingest document.pdf --tags documentation
claude /memory-ingest-dir ./docs --tags knowledge-base
```
See [docs/document-ingestion.md](docs/document-ingestion.md) for full configuration and usage.
## Interactive Dashboard
Web interface at `http://127.0.0.1:8000/` with CRUD operations, semantic/tag/time search, real-time updates (SSE), mobile responsive. Performance: 25ms page load, <100ms search.
**API Endpoints:** `/api/search`, `/api/search/by-tag`, `/api/search/by-time`, `/api/events`, `/api/quality/*` (v8.45.0+)
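As an illustration only (the endpoint paths are from the list above, but the query parameters here are assumptions; consult the REST API docs for the actual request shape):
```bash
# Hypothetical parameters; check the API documentation for exact fields
curl "http://127.0.0.1:8000/api/search/by-tag?tags=mcp-memory-service"
```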
## Memory Quality System (v8.45.0+)
Local-first AI quality scoring (ONNX), zero-cost, privacy-preserving.
**Features:**
- Tier 1: Local ONNX (80-150ms CPU, $0 cost)
- Quality-boosted search: `0.7×semantic + 0.3×quality`
- Quality-based forgetting: High (365d), Medium (180d), Low (30-90d)
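A minimal sketch of the quality-boosted ranking formula above:
```python
def boosted_score(semantic: float, quality: float) -> float:
    """Combined ranking score: 0.7 x semantic similarity + 0.3 x quality."""
    return 0.7 * semantic + 0.3 * quality
```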
**Config:** `export MCP_QUALITY_SYSTEM_ENABLED=true`
→ Details: [`.claude/directives/quality-system-details.md`](.claude/directives/quality-system-details.md)
→ Guide: [docs/guides/memory-quality-guide.md](docs/guides/memory-quality-guide.md)
## Memory Consolidation System
Dream-inspired consolidation with automatic scheduling (v8.23.0+).
**Quick Start:**
```bash
curl -X POST http://127.0.0.1:8000/api/consolidation/trigger \
-H "Content-Type: application/json" -d '{"time_horizon":"weekly"}'
```
**Config:** `export MCP_CONSOLIDATION_ENABLED=true`
→ Details: [`.claude/directives/consolidation-details.md`](.claude/directives/consolidation-details.md)
→ Guide: [docs/guides/memory-consolidation-guide.md](docs/guides/memory-consolidation-guide.md)
## Environment Variables
**Essential Configuration:**
```bash
# Storage Backend (Hybrid is RECOMMENDED for production)
export MCP_MEMORY_STORAGE_BACKEND=hybrid # hybrid|cloudflare|sqlite_vec
# Cloudflare Configuration (REQUIRED for hybrid/cloudflare backends)
export CLOUDFLARE_API_TOKEN="your-token" # Required for Cloudflare backend
export CLOUDFLARE_ACCOUNT_ID="your-account" # Required for Cloudflare backend
export CLOUDFLARE_D1_DATABASE_ID="your-d1-id" # Required for Cloudflare backend
export CLOUDFLARE_VECTORIZE_INDEX="mcp-memory-index" # Required for Cloudflare backend
# Web Interface (Optional)
export MCP_HTTP_ENABLED=true # Enable HTTP server
export MCP_HTTPS_ENABLED=true # Enable HTTPS (production)
export MCP_API_KEY="$(openssl rand -base64 32)" # Generate secure API key
```
**Configuration Precedence:** Environment variables > .env file > Global Claude Config > defaults
**✅ Automatic Configuration Loading (v6.16.0+):** The service now automatically loads `.env` files and respects environment variable precedence. CLI defaults no longer override environment configuration.
**⚠️ Important:** When using hybrid or cloudflare backends, ensure Cloudflare credentials are properly configured. If health checks show "sqlite-vec" when you expect "cloudflare" or "hybrid", this indicates a configuration issue that needs to be resolved.
**Platform Support:** macOS (MPS/CPU), Windows (CUDA/DirectML/CPU), Linux (CUDA/ROCm/CPU)
## Claude Code Hooks Configuration 🆕
> **✅ Windows SessionStart Fixed** (Claude Code 2.0.76+): SessionStart hooks now work correctly on Windows. The subprocess lifecycle bug (#160) was fixed in Claude Code core. No workaround needed.
**Natural Memory Triggers v7.1.3** - 85%+ trigger accuracy, multi-tier processing (50ms → 150ms → 500ms)
**Installation:**
```bash
cd claude-hooks && python install_hooks.py --natural-triggers
# CLI Management
node ~/.claude/hooks/memory-mode-controller.js status
node ~/.claude/hooks/memory-mode-controller.js profile balanced
```
**Performance Profiles:**
- `speed_focused`: <100ms, minimal memory awareness
- `balanced`: <200ms, optimal for general development (recommended)
- `memory_aware`: <500ms, maximum context awareness
→ Complete configuration: [`.claude/directives/hooks-configuration.md`](.claude/directives/hooks-configuration.md)
## Storage Backends
| Backend | Performance | Use Case | Installation |
|---------|-------------|----------|--------------|
| **Hybrid** ⚡ | Fast (5ms read) | **🌟 Production (Recommended)** | `--storage-backend hybrid` |
| **Cloudflare** ☁️ | Network dependent | Cloud-only deployment | `--storage-backend cloudflare` |
| **SQLite-Vec** 🪶 | Fast (5ms read) | Development, single-user | `--storage-backend sqlite_vec` |
**Hybrid Backend Benefits:**
- 5ms read/write + multi-device sync + graceful offline operation
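For example, selecting the hybrid backend at install time (the flag comes from the table above; passing it to the repository's `install.py` is an assumed invocation):
```bash
python install.py --storage-backend hybrid
```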
**Database Lock Prevention (v8.9.0+):**
- After adding `MCP_MEMORY_SQLITE_PRAGMAS` to `.env`, **restart all servers**
- SQLite pragmas are per-connection, not global
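A sketch of what such a `.env` entry might look like (the pragma values below are assumptions; see the storage-backends directive for the supported list):
```bash
# Example values only (assumed); restart all servers after changing this
MCP_MEMORY_SQLITE_PRAGMAS=busy_timeout=15000,journal_mode=WAL
```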
→ Complete details: [`.claude/directives/storage-backends.md`](.claude/directives/storage-backends.md)
## Development Guidelines
**Read first:**
→ [`.claude/directives/development-setup.md`](.claude/directives/development-setup.md) - Editable install
→ [`.claude/directives/pr-workflow.md`](.claude/directives/pr-workflow.md) - Pre-PR checks
→ [`.claude/directives/refactoring-checklist.md`](.claude/directives/refactoring-checklist.md) - Refactoring safety
→ [`.claude/directives/version-management.md`](.claude/directives/version-management.md) - Release workflow
**Quick:**
- `pip install -e .` (dev mode)
- `bash scripts/pr/pre_pr_check.sh` (before PR, MANDATORY)
- Use github-release-manager agent for releases
- Tag memories with `mcp-memory-service` as first tag
## Code Quality Monitoring
**Three-layer strategy:**
1. **Pre-commit** (<5s) - Groq/Gemini complexity + security (blocks: complexity >8, any security issues)
2. **PR Quality Gate** (10-60s) - `quality_gate.sh --with-pyscn` (blocks: security, health <50)
3. **Periodic Review** (weekly) - pyscn analysis + trend tracking
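For example, layer 2 can be run manually (script path from the repository tree; any additional arguments it expects, such as a PR number, are not shown here):
```bash
bash scripts/pr/quality_gate.sh --with-pyscn
```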
**Health Score Thresholds:**
- <50: 🔴 Release blocker (cannot merge)
- 50-69: 🟡 Action required (refactor within 2 weeks)
- 70+: ✅ Continue development
→ Complete workflow: [`.claude/directives/code-quality-workflow.md`](.claude/directives/code-quality-workflow.md)
## Configuration Management
**Quick Validation:**
```bash
python scripts/validation/validate_configuration_complete.py # Comprehensive validation
python scripts/validation/diagnose_backend_config.py # Cloudflare diagnostics
```
**Configuration Hierarchy:**
- Global: `~/.claude.json` (authoritative)
- Project: `.env` file (Cloudflare credentials)
- **Avoid**: Local `.mcp.json` overrides
**Common Issues & Quick Fixes:**
| Issue | Quick Fix |
|-------|-----------|
| Wrong backend showing | `python scripts/validation/diagnose_backend_config.py` |
| Port mismatch (hooks timeout) | Verify same port in `~/.claude/hooks/config.json` and server (default: 8000) |
| Schema validation errors after PR merge | Run `/mcp` in Claude Code to reconnect with new schema |
| Accidental `data/memory.db` | Delete safely: `rm -rf data/` (gitignored) |
See [docs/troubleshooting/hooks-quick-reference.md](docs/troubleshooting/hooks-quick-reference.md) for comprehensive troubleshooting.
## Hook Troubleshooting
**SessionEnd Hooks:**
- Trigger on `/exit`, terminal close (NOT Ctrl+C)
- Require 100+ characters, confidence > 0.1
- Memory creation: topics, decisions, insights, code changes
**Windows SessionStart (Fixed in Claude Code 2.0.76+):**
- ✅ SessionStart hooks now work correctly on Windows
- The subprocess lifecycle bug (#160) was fixed in Claude Code core
See [docs/troubleshooting/hooks-quick-reference.md](docs/troubleshooting/hooks-quick-reference.md) for full troubleshooting guide.
## Agent Integrations
Workflow automation: `@agent github-release-manager`, `./scripts/utils/groq "task"`, `bash scripts/pr/auto_review.sh <PR>`
**Agents:** github-release-manager (releases), amp-bridge (refactoring), code-quality-guard (quality), gemini-pr-automator (PRs)
→ Workflows: [`.claude/directives/agents.md`](.claude/directives/agents.md)
> **For detailed troubleshooting, architecture, and deployment guides:**
> - **Backend Configuration Issues**: See [Wiki Troubleshooting Guide](https://github.com/doobidoo/mcp-memory-service/wiki/07-TROUBLESHOOTING#backend-configuration-issues) for comprehensive solutions to missing memories, environment variable issues, Cloudflare auth, hooks timeouts, and more
> - **Historical Context**: Retrieve memories tagged with `claude-code-reference`
> - **Quick Diagnostic**: Run `python scripts/validation/diagnose_backend_config.py`
```
--------------------------------------------------------------------------------
/claude-hooks/utilities/session-cache.json:
--------------------------------------------------------------------------------
```json
```
--------------------------------------------------------------------------------
/tests/consolidation/__init__.py:
--------------------------------------------------------------------------------
```python
# Consolidation tests module
```
--------------------------------------------------------------------------------
/archive/deployment-configs/empty_config.yml:
--------------------------------------------------------------------------------
```yaml
# Empty Litestream config
dbs: []
```
--------------------------------------------------------------------------------
/.metrics/baseline_mi_install_hooks.txt:
--------------------------------------------------------------------------------
```
claude-hooks/install_hooks.py - C (2.38)
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/_version.py:
--------------------------------------------------------------------------------
```python
"""Version information for MCP Memory Service."""
__version__ = "8.76.0"
```
--------------------------------------------------------------------------------
/scripts/run/run-with-uv.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
echo "Running MCP Memory Service with UV..."
# Resolve uv_wrapper.py relative to this script (it lives in scripts/utils/)
python "$(dirname "$0")/../utils/uv_wrapper.py" "$@"
```
--------------------------------------------------------------------------------
/scripts/linux/service_status.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
echo "MCP Memory Service Status:"
echo "-" | tr '-' '='
systemctl --user status mcp-memory
```
--------------------------------------------------------------------------------
/scripts/linux/view_logs.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
echo "Viewing MCP Memory Service logs (press Ctrl+C to exit)..."
journalctl --user -u mcp-memory -f
```
--------------------------------------------------------------------------------
/docs/statistics/data/activity_by_hour.csv:
--------------------------------------------------------------------------------
```
hour,commits
00,22
01,6
06,19
07,76
08,90
09,73
10,43
11,71
12,73
13,92
14,97
15,41
16,73
17,85
18,65
19,98
20,138
21,160
22,150
23,64
```
--------------------------------------------------------------------------------
/docs/statistics/data/contributors.csv:
--------------------------------------------------------------------------------
```
contributor,commits,percentage
Heinrich Krupp,1418,94.8%
zod,20,1.3%
Salih Ergüt,16,1.1%
3dyuval,10,0.7%
muxammadreza,8,0.5%
Henry Mao,6,0.4%
```
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
```python
"""
Test suite for MCP Memory Service.
This package contains all test modules for verifying the functionality
of the memory service components.
"""
```
--------------------------------------------------------------------------------
/docs/statistics/data/activity_by_day.csv:
--------------------------------------------------------------------------------
```
day_of_week,commits,percentage
Sunday,314,20.4%
Saturday,285,18.6%
Monday,271,17.6%
Friday,231,15.0%
Tuesday,177,11.5%
Thursday,131,8.5%
Wednesday,127,8.3%
```
--------------------------------------------------------------------------------
/docs/statistics/data/monthly_activity.csv:
--------------------------------------------------------------------------------
```
month,commits,releases
2024-12,55,1
2025-01,34,0
2025-02,2,0
2025-03,66,0
2025-04,102,0
2025-05,4,0
2025-06,36,0
2025-07,351,9
2025-08,330,64
2025-09,246,34
2025-10,310,65
```
--------------------------------------------------------------------------------
/scripts/linux/stop_service.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
echo "Stopping MCP Memory Service..."
systemctl --user stop mcp-memory
if [ $? -eq 0 ]; then
    echo "✅ Service stopped successfully!"
else
    echo "❌ Failed to stop service"
fi
```
--------------------------------------------------------------------------------
/scripts/linux/start_service.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
echo "Starting MCP Memory Service..."
systemctl --user start mcp-memory
if [ $? -eq 0 ]; then
    echo "✅ Service started successfully!"
else
    echo "❌ Failed to start service"
fi
```
--------------------------------------------------------------------------------
/archive/litestream-configs-v6.3.0/requirements-cloudflare.txt:
--------------------------------------------------------------------------------
```
# Additional dependencies for Cloudflare backend support
# These are installed automatically when using the cloudflare backend
# HTTP client for Cloudflare API calls
httpx>=0.24.0
# Optional: Cloudflare Python SDK (if available)
# cloudflare>=2.15.0
```
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
```
[pytest]
asyncio_mode = auto
testpaths = tests
python_files = test_*.py
python_classes = Test*
python_functions = test_*
markers =
    unit: unit tests
    integration: integration tests
    performance: performance tests
    asyncio: mark test as async
```
--------------------------------------------------------------------------------
/test_document.txt:
--------------------------------------------------------------------------------
```
This is a test document for MCP Memory Service document ingestion.
It contains some sample content to test the chunking and embedding functionality.
Features:
- Multiple paragraphs
- Some technical content
- Test data for verification
End of document.
```
--------------------------------------------------------------------------------
/archive/litestream-configs-v6.3.0/litestream_replica_simple.yml:
--------------------------------------------------------------------------------
```yaml
# Simple Litestream replica configuration
# Note: Litestream replicas typically push TO destinations, not pull FROM them
# For pulling from HTTP, we'll use restore commands instead
dbs:
  - path: /Users/hkr/Library/Application Support/mcp-memory/sqlite_vec.db
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/services/__init__.py:
--------------------------------------------------------------------------------
```python
"""
Services package for MCP Memory Service.
This package contains shared business logic services that provide
consistent behavior across different interfaces (API, MCP tools).
"""
from .memory_service import MemoryService, MemoryResult
__all__ = ["MemoryService", "MemoryResult"]
```
--------------------------------------------------------------------------------
/examples/claude-desktop-http-config.json:
--------------------------------------------------------------------------------
```json
{
  "mcpServers": {
    "memory": {
      "command": "node",
      "args": ["/path/to/mcp-memory-service/examples/http-mcp-bridge.js"],
      "env": {
        "MCP_MEMORY_HTTP_ENDPOINT": "http://your-server:8000/api",
        "MCP_MEMORY_API_KEY": "your-secure-api-key"
      }
    }
  }
}
```
--------------------------------------------------------------------------------
/archive/litestream-configs-v6.3.0/litestream_replica_config.yml:
--------------------------------------------------------------------------------
```yaml
# Litestream Replica Configuration for local macOS machine
# This configuration syncs from the remote master at narrowbox.local
dbs:
  - path: /Users/hkr/Library/Application Support/mcp-memory/sqlite_vec.db
    replicas:
      - type: file
        url: http://10.0.1.30:8080/mcp-memory
        sync-interval: 10s
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/embeddings/__init__.py:
--------------------------------------------------------------------------------
```python
"""Embedding generation modules for MCP Memory Service."""
from .onnx_embeddings import (
    ONNXEmbeddingModel,
    get_onnx_embedding_model,
    ONNX_AVAILABLE,
    TOKENIZERS_AVAILABLE
)

__all__ = [
    'ONNXEmbeddingModel',
    'get_onnx_embedding_model',
    'ONNX_AVAILABLE',
    'TOKENIZERS_AVAILABLE'
]
```
--------------------------------------------------------------------------------
/examples/config/claude_desktop_config.json:
--------------------------------------------------------------------------------
```json
{
"mcpServers": {
"memory": {
"command": "python",
"args": [
"-m",
"mcp_memory_service.server"
],
"env": {
"MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec",
"MCP_MEMORY_BACKUPS_PATH": "C:\\Users\\heinrich.krupp\\AppData\\Local\\mcp-memory"
}
}
}
}
```
--------------------------------------------------------------------------------
/archive/litestream-configs-v6.3.0/litestream_replica_config_fixed.yml:
--------------------------------------------------------------------------------
```yaml
# Litestream Replica Configuration for local macOS machine (FIXED)
# This configuration syncs from the remote master at 10.0.1.30:8080
dbs:
- path: /Users/hkr/Library/Application Support/mcp-memory/sqlite_vec.db
replicas:
- name: "remote-master"
type: "http"
url: http://10.0.1.30:8080/mcp-memory
sync-interval: 10s
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/quality/__init__.py:
--------------------------------------------------------------------------------
```python
"""Quality scoring system for memory evaluation."""
from .scorer import QualityScorer
from .onnx_ranker import ONNXRankerModel
from .ai_evaluator import QualityEvaluator
from .implicit_signals import ImplicitSignalsEvaluator
from .config import QualityConfig
__all__ = [
'QualityScorer',
'ONNXRankerModel',
'QualityEvaluator',
'ImplicitSignalsEvaluator',
'QualityConfig'
]
```
--------------------------------------------------------------------------------
/scripts/.claude/settings.local.json:
--------------------------------------------------------------------------------
```json
{
"permissions": {
"allow": [
"mcp__code-context__index_codebase",
"Bash(git commit:*)",
"Bash(gh pr edit:*)",
"Bash(gh api:*)",
"Bash(git checkout:*)",
"Bash(git pull:*)",
"Bash(cat:*)",
"Bash(git tag:*)",
"Bash(git push:*)",
"Bash(gh pr view:*)",
"Bash(uv lock:*)",
"Bash(git add:*)"
],
"deny": [],
"ask": []
}
}
```
--------------------------------------------------------------------------------
/scripts/testing/run_memory_test.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
set -e
# Activate virtual environment
source ./venv/bin/activate
# Set environment variables
export MCP_MEMORY_STORAGE_BACKEND="sqlite_vec"
export MCP_MEMORY_SQLITE_PATH="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec.db"
export MCP_MEMORY_BACKUPS_PATH="/Users/hkr/Library/Application Support/mcp-memory/backups"
export MCP_MEMORY_USE_ONNX="1"
# Run the memory server
python -m mcp_memory_service.server
```
--------------------------------------------------------------------------------
/scripts/service/update_service.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
echo "Updating MCP Memory Service configuration..."
# Copy the updated service file
sudo cp mcp-memory.service /etc/systemd/system/
# Set proper permissions
sudo chmod 644 /etc/systemd/system/mcp-memory.service
# Reload systemd daemon
sudo systemctl daemon-reload
echo "✅ Service updated successfully!"
echo ""
echo "Now try starting the service:"
echo " sudo systemctl start mcp-memory"
echo " sudo systemctl status mcp-memory"
```
--------------------------------------------------------------------------------
/scripts/pr/run_quality_checks.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# scripts/pr/run_quality_checks.sh - Run quality checks on a PR
# Wrapper for quality_gate.sh to maintain consistent naming in workflows
#
# Usage: bash scripts/pr/run_quality_checks.sh <PR_NUMBER>
set -e
PR_NUMBER=$1
if [ -z "$PR_NUMBER" ]; then
echo "Usage: $0 <PR_NUMBER>"
exit 1
fi
# Get script directory
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Run quality gate checks
exec "$SCRIPT_DIR/quality_gate.sh" "$PR_NUMBER"
```
--------------------------------------------------------------------------------
/tests/bridge/package.json:
--------------------------------------------------------------------------------
```json
{
"name": "mcp-bridge-tests",
"version": "1.0.0",
"description": "Unit tests for HTTP-MCP bridge",
"main": "test_http_mcp_bridge.js",
"scripts": {
"test": "mocha test_http_mcp_bridge.js --reporter spec",
"test:watch": "mocha test_http_mcp_bridge.js --reporter spec --watch"
},
"dependencies": {
"mocha": "^10.0.0",
"sinon": "^17.0.0"
},
"devDependencies": {},
"keywords": ["mcp", "bridge", "testing"],
"author": "",
"license": "Apache-2.0"
}
```
--------------------------------------------------------------------------------
/scripts/development/setup-git-merge-drivers.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Setup script for git merge drivers
# Run this once after cloning the repository
echo "Setting up git merge drivers for uv.lock..."
# Configure the uv.lock merge driver
git config merge.uv-lock-merge.driver './scripts/uv-lock-merge.sh %O %A %B %L %P'
git config merge.uv-lock-merge.name 'UV lock file merge driver'
# Make the merge script executable
chmod +x scripts/uv-lock-merge.sh
echo "✓ Git merge drivers configured successfully!"
echo " uv.lock conflicts will now be resolved automatically"
```
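A merge driver only applies to paths mapped to it in `.gitattributes`; this script assumes the repository already carries an entry along these lines (shown as an illustration, not a copy of the repo's actual file):
```
uv.lock merge=uv-lock-merge
```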
--------------------------------------------------------------------------------
/archive/litestream-configs-v6.3.0/litestream_master_config.yml:
--------------------------------------------------------------------------------
```yaml
# Litestream Master Configuration for narrowbox.local
# This configuration sets up the remote server as the master database
dbs:
- path: /home/user/.local/share/mcp-memory/sqlite_vec.db
replicas:
# Local file replica for serving via HTTP
- type: file
path: /var/www/litestream/mcp-memory
sync-interval: 10s
# Local backup
- type: file
path: /backup/litestream/mcp-memory
sync-interval: 1m
# Performance settings
checkpoint-interval: 30s
wal-retention: 10m
```
--------------------------------------------------------------------------------
/tests/integration/package.json:
--------------------------------------------------------------------------------
```json
{
"name": "mcp-integration-tests",
"version": "1.0.0",
"description": "Integration tests for HTTP-MCP bridge",
"main": "test_bridge_integration.js",
"scripts": {
"test": "mocha test_bridge_integration.js --reporter spec --timeout 10000",
"test:watch": "mocha test_bridge_integration.js --reporter spec --timeout 10000 --watch"
},
"dependencies": {
"mocha": "^10.0.0",
"sinon": "^17.0.0"
},
"devDependencies": {},
"keywords": ["mcp", "bridge", "integration", "testing"],
"author": "",
"license": "Apache-2.0"
}
```
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
```yaml
blank_issues_enabled: false
contact_links:
- name: 📚 Documentation & Wiki
url: https://github.com/doobidoo/mcp-memory-service/wiki
about: Check the Wiki for setup guides, troubleshooting, and advanced usage
- name: 💬 GitHub Discussions
url: https://github.com/doobidoo/mcp-memory-service/discussions
about: Ask questions, share ideas, or discuss general topics with the community
- name: 🔍 Search Existing Issues
url: https://github.com/doobidoo/mcp-memory-service/issues
about: Check if your issue has already been reported or solved
```
--------------------------------------------------------------------------------
/scripts/linux/uninstall_service.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
echo "This will uninstall MCP Memory Service."
read -p "Are you sure? (y/N): " confirm
if [[ ! "$confirm" =~ ^[Yy]$ ]]; then
exit 0
fi
echo "Stopping service..."
systemctl --user stop mcp-memory 2>/dev/null
systemctl --user disable mcp-memory 2>/dev/null
echo "Removing service files..."
if [ -f "$HOME/.config/systemd/user/mcp-memory.service" ]; then
rm -f "$HOME/.config/systemd/user/mcp-memory.service"
systemctl --user daemon-reload
else
sudo rm -f /etc/systemd/system/mcp-memory.service
sudo systemctl daemon-reload
fi
echo "✅ Service uninstalled"
```
--------------------------------------------------------------------------------
/archive/litestream-configs-v6.3.0/litestream_master_config_fixed.yml:
--------------------------------------------------------------------------------
```yaml
# Litestream Master Configuration for narrowbox.local (FIXED)
# This configuration sets up the remote server as the master database
dbs:
- path: /home/hkr/.local/share/mcp-memory/sqlite_vec.db
replicas:
# HTTP replica for serving to clients
- name: "http-replica"
type: file
path: /var/www/litestream/mcp-memory
sync-interval: 10s
# Local backup
- name: "backup-replica"
type: file
path: /backup/litestream/mcp-memory
sync-interval: 1m
# Performance settings
checkpoint-interval: 30s
wal-retention: 10m
```
--------------------------------------------------------------------------------
/tests/api/__init__.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for code execution API."""
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/web/api/__init__.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
API routes for the HTTP interface.
"""
```
--------------------------------------------------------------------------------
/tools/docker/docker-compose.pythonpath.yml:
--------------------------------------------------------------------------------
```yaml
services:
memory-service:
image: python:3.10-slim
working_dir: /app
stdin_open: true
tty: true
ports:
- "8000:8000"
volumes:
- .:/app
- ${CHROMA_DB_PATH:-$HOME/mcp-memory/chroma_db}:/app/chroma_db
- ${BACKUPS_PATH:-$HOME/mcp-memory/backups}:/app/backups
environment:
- MCP_MEMORY_CHROMA_PATH=/app/chroma_db
- MCP_MEMORY_BACKUPS_PATH=/app/backups
- LOG_LEVEL=INFO
- MAX_RESULTS_PER_QUERY=10
- SIMILARITY_THRESHOLD=0.7
- PYTHONPATH=/app/src:/app
- PYTHONUNBUFFERED=1
restart: unless-stopped
build:
context: .
dockerfile: Dockerfile
```
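These compose variants differ mainly in their PYTHONPATH, telemetry, and entrypoint settings; bringing one up follows the standard compose flow. A minimal sketch, assuming the relative `.:/app` mount resolves to the repository root in your setup:
```bash
# Build and start the service defined in this variant
docker compose -f tools/docker/docker-compose.pythonpath.yml up --build
```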
--------------------------------------------------------------------------------
/src/mcp_memory_service/models/__init__.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .memory import Memory, MemoryQueryResult
__all__ = ['Memory', 'MemoryQueryResult']
```
--------------------------------------------------------------------------------
/verify_compression.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
echo "=== Metadata Compression Verification ==="
echo
echo "1. Sync Status:"
curl -s http://127.0.0.1:8000/api/sync/status | python3 -c "import sys, json; d=json.load(sys.stdin); print(f\" Failed: {d['operations_failed']} (should be 0)\")"
echo
echo "2. Quality Distribution:"
curl -s http://127.0.0.1:8000/api/quality/distribution | python3 -c "import sys, json; d=json.load(sys.stdin); print(f\" ONNX scored: {d['provider_breakdown'].get('onnx_local', 0)}\")"
echo
echo "3. Recent Logs (compression activity):"
tail -20 /tmp/mcp-http-server.log | grep -i "compress\|too large" || echo " No compression warnings (good!)"
echo
echo "✅ Verification complete!"
```
--------------------------------------------------------------------------------
/tools/docker/docker-compose.uv.yml:
--------------------------------------------------------------------------------
```yaml
services:
memory-service:
image: python:3.10-slim
working_dir: /app
stdin_open: true
tty: true
ports:
- "8000:8000"
volumes:
- .:/app
- ${CHROMA_DB_PATH:-$HOME/mcp-memory/chroma_db}:/app/chroma_db
- ${BACKUPS_PATH:-$HOME/mcp-memory/backups}:/app/backups
environment:
- MCP_MEMORY_CHROMA_PATH=/app/chroma_db
- MCP_MEMORY_BACKUPS_PATH=/app/backups
- LOG_LEVEL=INFO
- MAX_RESULTS_PER_QUERY=10
- SIMILARITY_THRESHOLD=0.7
- PYTHONPATH=/app
- PYTHONUNBUFFERED=1
- UV_ACTIVE=1
- CHROMA_TELEMETRY_IMPL=none
- ANONYMIZED_TELEMETRY=false
restart: unless-stopped
build:
context: .
dockerfile: Dockerfile
```
--------------------------------------------------------------------------------
/scripts/sync/litestream/init_staging_db.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Initialize staging database for offline memory changes
STAGING_DB="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec_staging.db"
INIT_SQL="$(dirname "$0")/deployment/staging_db_init.sql"
echo "$(date): Initializing staging database..."
# Create directory if it doesn't exist
mkdir -p "$(dirname "$STAGING_DB")"
# Initialize database with schema
sqlite3 "$STAGING_DB" < "$INIT_SQL"
if [ $? -eq 0 ]; then
echo "$(date): Staging database initialized at: $STAGING_DB"
echo "$(date): Database size: $(du -h "$STAGING_DB" | cut -f1)"
else
echo "$(date): ERROR: Failed to initialize staging database"
exit 1
fi
# Set permissions
chmod 644 "$STAGING_DB"
echo "$(date): Staging database ready for use"
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/utils/__init__.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .hashing import generate_content_hash
from .document_processing import create_memory_from_chunk, _process_and_store_chunk
__all__ = ['generate_content_hash', 'create_memory_from_chunk', '_process_and_store_chunk']
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/backup/__init__.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Automatic backup module for MCP Memory Service.
Provides scheduled backups and backup management functionality.
"""
from .scheduler import BackupScheduler, BackupService
__all__ = ['BackupScheduler', 'BackupService']
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/cli/__init__.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Command Line Interface for MCP Memory Service
Provides CLI commands for document ingestion, memory management, and database operations.
"""
from .ingestion import add_ingestion_commands
__all__ = ['add_ingestion_commands']
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/web/__init__.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Web interface for MCP Memory Service.
Provides HTTP REST API and Server-Sent Events (SSE) interface
using FastAPI and SQLite-vec backend.
"""
# Import version from main package to maintain consistency
from .. import __version__
```
--------------------------------------------------------------------------------
/tools/docker/docker-compose.standalone.yml:
--------------------------------------------------------------------------------
```yaml
services:
memory-service:
image: python:3.10-slim
working_dir: /app
stdin_open: true
tty: true
ports:
- "8000:8000"
volumes:
- .:/app
- ${CHROMA_DB_PATH:-$HOME/mcp-memory/chroma_db}:/app/chroma_db
- ${BACKUPS_PATH:-$HOME/mcp-memory/backups}:/app/backups
environment:
- MCP_MEMORY_CHROMA_PATH=/app/chroma_db
- MCP_MEMORY_BACKUPS_PATH=/app/backups
- LOG_LEVEL=INFO
- MAX_RESULTS_PER_QUERY=10
- SIMILARITY_THRESHOLD=0.7
- PYTHONPATH=/app
- PYTHONUNBUFFERED=1
- UV_ACTIVE=1
- MCP_STANDALONE_MODE=1
- CHROMA_TELEMETRY_IMPL=none
- ANONYMIZED_TELEMETRY=false
restart: unless-stopped
build:
context: .
dockerfile: Dockerfile
entrypoint: ["/usr/local/bin/docker-entrypoint-persistent.sh"]
```
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
```yaml
# These are supported funding model platforms
# github: doobidoo # Uncomment when enrolled in GitHub Sponsors
# patreon: # Replace with a single Patreon username
# open_collective: # Replace with a single Open Collective username
ko_fi: doobidoo # Replace with a single Ko-fi username
# tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
# community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
# liberapay: # Replace with a single Liberapay username
# issuehunt: # Replace with a single IssueHunt username
# lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
custom: ['https://www.buymeacoffee.com/doobidoo', 'https://paypal.me/heinrichkrupp1'] # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
```
--------------------------------------------------------------------------------
/archive/deployment-configs/smithery.yaml:
--------------------------------------------------------------------------------
```yaml
# Smithery configuration file: https://smithery.ai/docs/config#smitheryyaml
startCommand:
type: stdio
configSchema:
# JSON Schema defining the configuration options for the MCP.
type: object
required:
- chromaDbPath
- backupsPath
properties:
chromaDbPath:
type: string
description: Path to ChromaDB storage.
backupsPath:
type: string
description: Path for backups.
# A function that produces the CLI command to start the MCP on stdio.
commandFunction: |-
(config) => ({
command: 'python',
args: ['-m', 'mcp_memory_service.server'],
env: {
MCP_MEMORY_CHROMA_PATH: config.chromaDbPath,
MCP_MEMORY_BACKUPS_PATH: config.backupsPath,
PYTHONUNBUFFERED: '1',
PYTORCH_ENABLE_MPS_FALLBACK: '1'
}
})
```
--------------------------------------------------------------------------------
/examples/claude_desktop_config_template.json:
--------------------------------------------------------------------------------
```json
{
"mcpServers": {
"memory": {
"_comment": "Recommended: Use Python module approach (most stable, no path dependencies)",
"command": "python",
"args": [
"-m",
"mcp_memory_service.server"
],
"_alternative_approaches": [
"Option 1 (UV): command='uv', args=['--directory', '${PROJECT_PATH}', 'run', 'memory', 'server']",
"Option 2 (New script path): command='python', args=['${PROJECT_PATH}/scripts/server/run_memory_server.py']",
"Option 3 (Legacy, shows migration notice): command='python', args=['${PROJECT_PATH}/scripts/run_memory_server.py']"
],
"env": {
"MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec",
"MCP_MEMORY_BACKUPS_PATH": "${USER_DATA_PATH}/mcp-memory/backups",
"PYTORCH_ENABLE_MPS_FALLBACK": "1",
"PYTORCH_CUDA_ALLOC_CONF": "max_split_size_mb:128"
}
}
}
}
```
--------------------------------------------------------------------------------
/scripts/server/start_http_server.bat:
--------------------------------------------------------------------------------
```
@echo off
REM Start the MCP Memory Service HTTP server in the background on Windows
echo Starting MCP Memory Service HTTP server...
REM Check if server is already running
uv run python scripts\server\check_http_server.py -q
if %errorlevel% == 0 (
echo HTTP server is already running!
uv run python scripts\server\check_http_server.py
exit /b 0
)
REM Start the server in a new window
start "MCP Memory HTTP Server" uv run python scripts\server\run_http_server.py
REM Wait up to 5 seconds for the server to start
FOR /L %%i IN (1,1,5) DO (
timeout /t 1 /nobreak >nul
uv run python scripts\server\check_http_server.py -q
REM "if not errorlevel 1" is evaluated at run time; %errorlevel% inside a
REM parenthesized FOR block expands only once, when the block is parsed.
if not errorlevel 1 (
echo.
echo [OK] HTTP server started successfully!
uv run python scripts\server\check_http_server.py
goto :eof
)
)
echo.
echo [WARN] Server did not start within 5 seconds. Check the server window for errors.
```
--------------------------------------------------------------------------------
/scripts/sync/litestream/sync_from_remote.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Sync script to pull latest database from remote master
DB_PATH="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec.db"
REMOTE_URL="http://10.0.1.30:8080/mcp-memory"
BACKUP_PATH="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec.db.backup"
echo "$(date): Starting sync from remote master..."
# Create backup of current database
if [ -f "$DB_PATH" ]; then
cp "$DB_PATH" "$BACKUP_PATH"
echo "$(date): Created backup at $BACKUP_PATH"
fi
# Restore from remote
litestream restore -o "$DB_PATH" "$REMOTE_URL"
if [ $? -eq 0 ]; then
echo "$(date): Successfully synced database from remote master"
# Remove backup on success
rm -f "$BACKUP_PATH"
else
echo "$(date): ERROR: Failed to sync from remote master"
# Restore backup on failure
if [ -f "$BACKUP_PATH" ]; then
mv "$BACKUP_PATH" "$DB_PATH"
echo "$(date): Restored backup"
fi
exit 1
fi
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/server/handlers/__init__.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Handler modules for MCP Memory Service.
Extracted from monolithic server_impl.py for better maintainability.
Each module contains related handler functions for specific functionality.
"""
from . import memory, consolidation, utility, documents, quality, graph
__all__ = ['memory', 'consolidation', 'utility', 'documents', 'quality', 'graph']
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/discovery/__init__.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
mDNS service discovery module for MCP Memory Service.
This module provides mDNS service advertisement and discovery capabilities
for the MCP Memory Service HTTP/HTTPS interface.
"""
from .mdns_service import ServiceAdvertiser, ServiceDiscovery
from .client import DiscoveryClient
__all__ = ['ServiceAdvertiser', 'ServiceDiscovery', 'DiscoveryClient']
```
--------------------------------------------------------------------------------
/scripts/development/uv-lock-merge.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Git merge driver for uv.lock files
# Automatically resolves conflicts and regenerates the lock file
# Arguments from git:
# %O = ancestor's version
# %A = current version
# %B = other version
# %L = conflict marker length
# %P = path to file
ANCESTOR="$1"
CURRENT="$2"
OTHER="$3"
MARKER_LENGTH="$4"
PATH="$5"
echo "Auto-resolving uv.lock conflict by regenerating lock file..."
# Accept the incoming version first (this resolves the conflict)
cp "$OTHER" "$PATH"
# Check if uv is available
if command -v uv >/dev/null 2>&1; then
echo "Running uv sync to regenerate lock file..."
# Regenerate the lock file based on pyproject.toml
uv sync --quiet
if [ $? -eq 0 ]; then
echo "✓ uv.lock regenerated successfully"
exit 0
else
echo "⚠ Warning: uv sync failed, using incoming version"
exit 0
fi
else
echo "⚠ Warning: uv not found, using incoming version"
exit 0
fi
```
--------------------------------------------------------------------------------
/.github/workflows/dockerfile-lint.yml:
--------------------------------------------------------------------------------
```yaml
name: Dockerfile Lint
on:
push:
paths:
- 'tools/docker/Dockerfile*'
- '.github/workflows/dockerfile-lint.yml'
pull_request:
paths:
- 'tools/docker/Dockerfile*'
- '.github/workflows/dockerfile-lint.yml'
jobs:
hadolint:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Lint Dockerfile (main)
uses: hadolint/[email protected]
with:
dockerfile: tools/docker/Dockerfile
failure-threshold: warning
- name: Lint Dockerfile.slim
uses: hadolint/[email protected]
with:
dockerfile: tools/docker/Dockerfile.slim
failure-threshold: warning
unused-args:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Check for unused Docker ARGs
run: bash scripts/ci/check_dockerfile_args.sh
```
--------------------------------------------------------------------------------
/archive/deployment/deploy_fastmcp_fixed.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
echo "🚀 Deploying Fixed FastMCP Server v4.0.0-alpha.1..."
# Stop current service
echo "⏹️ Stopping current service..."
sudo systemctl stop mcp-memory
# Install the fixed FastMCP service configuration
echo "📝 Installing fixed FastMCP service configuration..."
sudo cp /tmp/fastmcp-server-fixed.service /etc/systemd/system/mcp-memory.service
# Reload systemd daemon
echo "🔄 Reloading systemd daemon..."
sudo systemctl daemon-reload
# Start the FastMCP server
echo "▶️ Starting FastMCP server..."
sudo systemctl start mcp-memory
# Wait a moment for startup
sleep 3
# Check status
echo "🔍 Checking service status..."
sudo systemctl status mcp-memory --no-pager
echo ""
echo "📊 Service logs (last 10 lines):"
sudo journalctl -u mcp-memory -n 10 --no-pager
echo ""
echo "✅ FastMCP Server deployment complete!"
echo "🔗 Native MCP Protocol should be available on port 8000"
echo "📋 Monitor logs: sudo journalctl -u mcp-memory -f"
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/storage/migrations/008_add_graph_table.sql:
--------------------------------------------------------------------------------
```sql
-- Migration 008: Add graph-based memory associations table
-- Supports triple storage modes: memories_only (backward compat), dual_write (migration), graph_only (future)
-- Connection types: semantic (0.3-0.7), temporal (<24h), causal (explicit refs), thematic (shared tags)
CREATE TABLE IF NOT EXISTS memory_graph (
source_hash TEXT NOT NULL,
target_hash TEXT NOT NULL,
similarity REAL NOT NULL,
connection_types TEXT NOT NULL, -- JSON array: ["semantic", "temporal", "causal", "thematic"]
metadata TEXT, -- JSON object: {discovery_date, confidence, context}
created_at REAL NOT NULL,
PRIMARY KEY (source_hash, target_hash)
);
-- Optimize bidirectional traversal (A→B and B→A queries)
-- Note: PRIMARY KEY already creates an index on (source_hash, target_hash)
CREATE INDEX IF NOT EXISTS idx_graph_source ON memory_graph(source_hash);
CREATE INDEX IF NOT EXISTS idx_graph_target ON memory_graph(target_hash);
```
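Because each association is stored once per pair, a traversal has to look the hash up on both indexed columns. A minimal query sketch against this schema (the `:hash` parameter name is illustrative):
```sql
-- Neighbors of a memory, regardless of which side of the edge it sits on
SELECT target_hash AS neighbor, similarity, connection_types
FROM memory_graph WHERE source_hash = :hash
UNION ALL
SELECT source_hash AS neighbor, similarity, connection_types
FROM memory_graph WHERE target_hash = :hash;
```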
--------------------------------------------------------------------------------
/archive/development/test_fastmcp.py:
--------------------------------------------------------------------------------
```python
#!/usr/bin/env python3
"""Simple test of FastMCP server structure for memory service."""
import sys
import os
from pathlib import Path
# Add src to path
sys.path.insert(0, 'src')
from mcp.server.fastmcp import FastMCP
# Create a simple FastMCP server for testing
mcp = FastMCP("Test Memory Service")
@mcp.tool()
def test_store_memory(content: str, tags: list[str] = None) -> dict:
"""Test memory storage function."""
return {
"success": True,
"message": f"Stored: {content}",
"tags": tags or []
}
@mcp.tool()
def test_health() -> dict:
"""Test health check."""
return {
"status": "healthy",
"version": "4.0.0-alpha.1"
}
if __name__ == "__main__":
print("FastMCP Memory Service Test")
print("Server configured with basic tools")
print("Available tools:")
print("- test_store_memory")
print("- test_health")
print("\nTo run server: mcp.run(transport='streamable-http')")
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/sync/__init__.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Database synchronization module for MCP Memory Service.
This module provides tools for synchronizing SQLite-vec databases across
multiple machines using JSON export/import and Litestream replication.
"""
from .exporter import MemoryExporter
from .importer import MemoryImporter
from .litestream_config import LitestreamManager
__all__ = ['MemoryExporter', 'MemoryImporter', 'LitestreamManager']
```
--------------------------------------------------------------------------------
/start_http_server.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Load .env file if it exists
if [ -f .env ]; then
set -a
source .env
set +a
fi
export MCP_MEMORY_STORAGE_BACKEND=hybrid
export MCP_MEMORY_SQLITE_PATH="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec.db"
export MCP_HTTP_ENABLED=true
export MCP_OAUTH_ENABLED=false
# Credentials belong in .env (loaded above); placeholders here avoid
# committing real secrets and won't override values already set in .env.
export CLOUDFLARE_API_TOKEN="${CLOUDFLARE_API_TOKEN:-your-cloudflare-api-token}"
export CLOUDFLARE_ACCOUNT_ID="${CLOUDFLARE_ACCOUNT_ID:-your-account-id}"
export CLOUDFLARE_D1_DATABASE_ID="${CLOUDFLARE_D1_DATABASE_ID:-your-d1-database-id}"
export CLOUDFLARE_VECTORIZE_INDEX="mcp-memory-index"
cd /Users/hkr/Documents/GitHub/mcp-memory-service
# Detect Python command
if command -v python3 &> /dev/null; then
PYTHON_CMD=python3
elif command -v python &> /dev/null; then
PYTHON_CMD=python
else
echo "Error: Python not found in PATH"
exit 1
fi
# Use MCP_HTTP_PORT environment variable, default to 8000
PORT=${MCP_HTTP_PORT:-8000}
$PYTHON_CMD -m uvicorn mcp_memory_service.web.app:app --host 127.0.0.1 --port $PORT --reload
```
--------------------------------------------------------------------------------
/docs/images/dashboard-placeholder.md:
--------------------------------------------------------------------------------
```markdown
# Dashboard Screenshot Placeholder
This directory will contain screenshots of the MCP Memory Service dashboard.
## v3.3.0 Dashboard Features
The new dashboard includes:
- **Modern Design**: Gradient backgrounds with professional card layout
- **Live Statistics**: Real-time server metrics and memory counts
- **Interactive Endpoints**: Organized API documentation with hover effects
- **Tech Stack Badges**: Visual representation of FastAPI, SQLite-vec, PyTorch, etc.
- **Responsive Layout**: Works on desktop and mobile devices
- **Auto-Refresh**: Stats update every 30 seconds
## Access URLs
- Dashboard: http://localhost:8000
- mDNS: http://mcp-memory-service.local:8000
- API Docs: http://localhost:8000/api/docs
- ReDoc: http://localhost:8000/api/redoc
## Screenshot Instructions
To capture the dashboard:
1. Ensure the HTTP server is running
2. Open browser to http://localhost:8000
3. Wait for stats to load (shows actual memory count)
4. Take full-page screenshot
5. Save as `dashboard-v3.3.0.png` in this directory
```
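Step 1 can be verified from the shell before capturing; a quick check against the health endpoint referenced elsewhere in this repo (path assumed to match the running server):
```bash
curl -s http://localhost:8000/api/health
```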
--------------------------------------------------------------------------------
/tests/unit/test_import.py:
--------------------------------------------------------------------------------
```python
#!/usr/bin/env python3
"""
Test script to verify the memory service can be imported and run.
"""
import sys
import os
# Add the repository's src directory to the Python path.
# This file lives in tests/unit/, so src/ sits two levels up.
script_dir = os.path.dirname(os.path.abspath(__file__))
src_dir = os.path.abspath(os.path.join(script_dir, "..", "..", "src"))
sys.path.insert(0, src_dir)
try:
from mcp_memory_service.server import main
print("SUCCESS: Successfully imported mcp_memory_service.server.main")
# Test basic configuration
from mcp_memory_service.config import (
SERVER_NAME,
SERVER_VERSION,
STORAGE_BACKEND,
DATABASE_PATH
)
print(f"SUCCESS: Server name: {SERVER_NAME}")
print(f"SUCCESS: Server version: {SERVER_VERSION}")
print(f"SUCCESS: Storage backend: {STORAGE_BACKEND}")
print(f"SUCCESS: Database path: {DATABASE_PATH}")
print("SUCCESS: All imports successful - the memory service is ready to use!")
except ImportError as e:
print(f"ERROR: Import failed: {e}")
sys.exit(1)
except Exception as e:
print(f"ERROR: Error: {e}")
sys.exit(1)
```
--------------------------------------------------------------------------------
/archive/docs-removed-2025-08-23/development/CLEANUP_README.md:
--------------------------------------------------------------------------------
```markdown
# MCP-MEMORY-SERVICE Cleanup and Organization
This branch contains cleanup and reorganization changes for the MCP-MEMORY-SERVICE project.
## Changes Implemented
1. **Code Organization**
- Restructured test files into proper directories
- Organized documentation into a docs/ directory
- Archived old backup files
2. **Documentation Updates**
- Updated CHANGELOG.md with v1.2.0 entries
- Created comprehensive documentation structure
- Added READMEs for each directory
3. **Test Infrastructure**
- Created proper pytest configuration
- Added fixtures for common test scenarios
- Organized tests by type (unit, integration, performance)
## Running the Cleanup Script
To apply these changes, run:
```bash
cd C:\REPOSITORIES\mcp-memory-service
python scripts/cleanup_organize.py
```
## Testing on Different Hardware
After organization is complete, create a hardware testing branch:
```bash
git checkout -b test/hardware-validation
```
The changes have been tracked in the memory system with the tag `memory-driven-development`.
```
--------------------------------------------------------------------------------
/scripts/server/start_http_server.sh:
--------------------------------------------------------------------------------
```bash
#!/usr/bin/env bash
# Start the MCP Memory Service HTTP server in the background on Unix/macOS
set -e
echo "Starting MCP Memory Service HTTP server..."
# Check if server is already running
if uv run python scripts/server/check_http_server.py -q; then
echo "✅ HTTP server is already running!"
uv run python scripts/server/check_http_server.py -v
exit 0
fi
# Start the server in the background
nohup uv run python scripts/server/run_http_server.py > /tmp/mcp-http-server.log 2>&1 &
SERVER_PID=$!
echo "Server started with PID: $SERVER_PID"
echo "Logs available at: /tmp/mcp-http-server.log"
# Wait up to 5 seconds for the server to start
for i in {1..5}; do
if uv run python scripts/server/check_http_server.py -q; then
break
fi
sleep 1
done
# Check if it started successfully
if uv run python scripts/server/check_http_server.py -v; then
echo ""
echo "✅ HTTP server started successfully!"
echo "PID: $SERVER_PID"
else
echo ""
echo "⚠️ Server may still be starting... Check logs at /tmp/mcp-http-server.log"
fi
```
--------------------------------------------------------------------------------
/claude-hooks/simple-test.js:
--------------------------------------------------------------------------------
```javascript
#!/usr/bin/env node
const { AdaptivePatternDetector } = require('./utilities/adaptive-pattern-detector');
async function simpleTest() {
const detector = new AdaptivePatternDetector({ sensitivity: 0.7 });
const testCases = [
{ message: "What did we decide about the authentication approach?", shouldTrigger: true },
{ message: "Remind me how we handled user sessions", shouldTrigger: true },
{ message: "Remember when we discussed the database schema?", shouldTrigger: true },
{ message: "Just implementing a new feature", shouldTrigger: false }
];
for (const testCase of testCases) {
const result = await detector.detectPatterns(testCase.message);
const actualTrigger = result.triggerRecommendation;
console.log(`Message: "${testCase.message}"`);
console.log(`Expected: ${testCase.shouldTrigger}, Actual: ${actualTrigger}`);
console.log(`Confidence: ${result.confidence}`);
console.log(`Matches: ${result.matches.length}`);
console.log('---');
}
}
simpleTest().catch(console.error);
```
--------------------------------------------------------------------------------
/scripts/sync/litestream/sync_from_remote_noconfig.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Sync script to pull latest database from remote master (no config file)
DB_PATH="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec.db"
REMOTE_URL="http://10.0.1.30:8080/mcp-memory"
BACKUP_PATH="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec.db.backup"
echo "$(date): Starting sync from remote master..."
# Create backup of current database
if [ -f "$DB_PATH" ]; then
cp "$DB_PATH" "$BACKUP_PATH"
echo "$(date): Created backup at $BACKUP_PATH"
fi
# Restore from remote (no config file)
litestream restore -o "$DB_PATH" "$REMOTE_URL"
if [ $? -eq 0 ]; then
echo "$(date): Successfully synced database from remote master"
# Remove backup on success
rm -f "$BACKUP_PATH"
# Show database info
echo "$(date): Database size: $(du -h "$DB_PATH" | cut -f1)"
echo "$(date): Database modified: $(stat -f "%Sm" "$DB_PATH")"
else
echo "$(date): ERROR: Failed to sync from remote master"
# Restore backup on failure
if [ -f "$BACKUP_PATH" ]; then
mv "$BACKUP_PATH" "$DB_PATH"
echo "$(date): Restored backup"
fi
exit 1
fi
```
--------------------------------------------------------------------------------
/scripts/development/fix_mdns.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
echo "=== Fixing mDNS Configuration ==="
echo "1. Stopping any conflicting processes..."
# Kill the old process that might be interfering
pkill -f "/home/hkr/repositories/mcp-memory-service/.venv/bin/memory"
echo "2. Stopping systemd service..."
sudo systemctl stop mcp-memory
echo "3. Updating systemd service configuration..."
sudo cp mcp-memory.service /etc/systemd/system/
sudo chmod 644 /etc/systemd/system/mcp-memory.service
echo "4. Reloading systemd daemon..."
sudo systemctl daemon-reload
echo "5. Starting service with new configuration..."
sudo systemctl start mcp-memory
echo "6. Checking service status..."
sudo systemctl status mcp-memory --no-pager -l
echo ""
echo "7. Testing mDNS resolution..."
sleep 3
echo "Checking avahi browse:"
avahi-browse -t _http._tcp | grep memory
echo ""
echo "Testing memory.local resolution:"
avahi-resolve-host-name memory.local
echo ""
echo "Testing HTTPS access:"
curl -k -s https://memory.local:8000/api/health --connect-timeout 5 || echo "HTTPS test failed"
echo ""
echo "=== Fix Complete ==="
echo "If memory.local resolves and HTTPS works, you're all set!"
```
--------------------------------------------------------------------------------
/archive/deployment/deploy_mcp_v4.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Deploy FastAPI MCP Server v4.0.0-alpha.1
echo "🚀 Deploying FastAPI MCP Server v4.0.0-alpha.1..."
# Stop current service
echo "⏹️ Stopping current HTTP API service..."
sudo systemctl stop mcp-memory
# Update systemd service file
echo "📝 Updating systemd service configuration..."
sudo cp /tmp/mcp-memory-v4.service /etc/systemd/system/mcp-memory.service
# Reload systemd daemon
echo "🔄 Reloading systemd daemon..."
sudo systemctl daemon-reload
# Start the new MCP server
echo "▶️ Starting FastAPI MCP server..."
sudo systemctl start mcp-memory
# Check status
echo "🔍 Checking service status..."
sudo systemctl status mcp-memory --no-pager
echo ""
echo "✅ FastAPI MCP Server v4.0.0-alpha.1 deployment complete!"
echo ""
echo "🌐 Service Access:"
echo " - MCP Protocol: Available on port 8000"
echo " - Health Check: curl http://localhost:8000/health"
echo " - Service Logs: sudo journalctl -u mcp-memory -f"
echo ""
echo "🔧 Service Management:"
echo " - Status: sudo systemctl status mcp-memory"
echo " - Stop: sudo systemctl stop mcp-memory"
echo " - Start: sudo systemctl start mcp-memory"
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/storage/__init__.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .base import MemoryStorage
# Conditional imports based on available dependencies
__all__ = ['MemoryStorage']
try:
from .sqlite_vec import SqliteVecMemoryStorage
__all__.append('SqliteVecMemoryStorage')
except ImportError:
SqliteVecMemoryStorage = None
try:
from .cloudflare import CloudflareStorage
__all__.append('CloudflareStorage')
except ImportError:
CloudflareStorage = None
try:
from .hybrid import HybridMemoryStorage
__all__.append('HybridMemoryStorage')
except ImportError:
HybridMemoryStorage = None
```
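Since unavailable backends degrade to `None` rather than raising at import time, callers can feature-test before use. A minimal sketch assuming this package layout:
```python
from mcp_memory_service.storage import SqliteVecMemoryStorage

if SqliteVecMemoryStorage is None:
    # Optional sqlite-vec dependencies are not installed in this environment
    raise RuntimeError("sqlite_vec backend unavailable")
```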
--------------------------------------------------------------------------------
/.metrics/baseline_nesting_install_hooks.txt:
--------------------------------------------------------------------------------
```
BASELINE COMPLEXITY METRICS
====================================================================================================
Function: detect_claude_mcp_configuration
Location: Lines 198-236 (39 total)
Total Statements: 23
Control Flow:
- If statements: 3
- For loops: 1
- While loops: 0
- Try/Except blocks: 4
- With statements: 0
- Total branches: 8
Boolean Operators: 4
Maximum Nesting Depth: 5
Estimated Cyclomatic Complexity: 13
Function: _parse_mcp_get_output
Location: Lines 238-268 (31 total)
Total Statements: 24
Control Flow:
- If statements: 7
- For loops: 1
- While loops: 0
- Try/Except blocks: 2
- With statements: 0
- Total branches: 10
Boolean Operators: 5
Maximum Nesting Depth: 9
Estimated Cyclomatic Complexity: 16
Function: validate_mcp_prerequisites
Location: Lines 351-385 (35 total)
Total Statements: 23
Control Flow:
- If statements: 8
- For loops: 0
- While loops: 0
- Try/Except blocks: 0
- With statements: 0
- Total branches: 8
Boolean Operators: 12
Maximum Nesting Depth: 4
Estimated Cyclomatic Complexity: 21
```
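Across all three functions the estimate equals total branches + boolean operators + 1 (8 + 4 + 1 = 13, 10 + 5 + 1 = 16, 8 + 12 + 1 = 21), so the baseline tool appears to count each branch point and each boolean operator as one decision, per the standard cyclomatic-complexity approximation.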
--------------------------------------------------------------------------------
/scripts/backup/export_distributable_memories.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Export distributable reference memories for sharing across local network
# Usage: ./export_distributable_memories.sh [output_file]
OUTPUT_FILE="${1:-mcp_reference_memories_$(date +%Y%m%d).json}"
MCP_ENDPOINT="https://10.0.1.30:8443/mcp"
API_KEY="test-key-123"
echo "Exporting distributable reference memories..."
echo "Output file: $OUTPUT_FILE"
curl -k -s -X POST "$MCP_ENDPOINT" \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $API_KEY" \
-d '{
"jsonrpc": "2.0",
"id": 1,
"method": "tools/call",
"params": {
"name": "search_by_tag",
"arguments": {
"tags": ["distributable-reference"]
}
}
}' | jq -r '.result.content[0].text' > "$OUTPUT_FILE"
if [ $? -eq 0 ]; then
echo "✅ Export completed: $OUTPUT_FILE"
echo "📊 Memory count: $(cat "$OUTPUT_FILE" | jq '. | length' 2>/dev/null || echo "Unknown")"
echo ""
echo "To import to another MCP Memory Service:"
echo "1. Copy $OUTPUT_FILE to target machine"
echo "2. Use store_memory calls for each entry"
echo "3. Update CLAUDE.md with new memory hashes"
else
echo "❌ Export failed"
exit 1
fi
```
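The import side mirrors the export call with the `store_memory` tool; a hedged sketch for re-storing one exported entry (the argument names are assumptions, not verified against the server's tool schema):
```bash
curl -k -s -X POST "$MCP_ENDPOINT" \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer $API_KEY" \
  -d '{
    "jsonrpc": "2.0",
    "id": 2,
    "method": "tools/call",
    "params": {
      "name": "store_memory",
      "arguments": {
        "content": "example memory content",
        "tags": ["distributable-reference"]
      }
    }
  }'
```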
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
```yaml
name: Release (Manual)
on:
workflow_dispatch:
jobs:
release:
runs-on: ubuntu-latest
concurrency: release
permissions:
id-token: write
contents: write
actions: write
pull-requests: write
repository-projects: write
steps:
- uses: actions/checkout@v3 # would probably be better to use v4
with:
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.9' # setup-python installs its own interpreter, separate from the one python-semantic-release uses; that mismatch caused the earlier error
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install build hatchling python-semantic-release
- name: Verify build module installation
run: python -m pip show build
- name: Build package
run: python -m build
- name: Python Semantic Release
uses: python-semantic-release/[email protected]
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
verbosity: 2
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
--------------------------------------------------------------------------------
/COMMIT_MESSAGE.md:
--------------------------------------------------------------------------------
```markdown
fix: Support flexible MCP memory server naming conventions
The hook installer was hardcoded to check for a memory server named
exactly 'memory', but Claude Code allows users to configure MCP servers
with any name they choose. This caused false "standalone" detection even
when a memory MCP server was properly configured and connected.
Changes:
- Check multiple common memory server names (memory-service, memory,
mcp-memory-service, extended-memory)
- Fallback to 'claude mcp list' grep detection for any memory-related
server
- Support HTTP MCP server format (URL field instead of Command field)
- Update validation to accept http type and URL format
- Maintain backward compatibility with original 'memory' name
Fixes installation failures for users who configured their memory MCP
servers with descriptive names like 'memory-service' (common for HTTP
servers) or 'extended-memory' (older installations).
Testing:
- Verified with HTTP MCP server named 'memory-service'
- Confirmed backward compatibility with 'memory' name
- Tested fallback detection mechanism
- All test cases documented in TESTING_NOTES.md
Co-Authored-By: Claude Sonnet 4.5 <[email protected]>
```
--------------------------------------------------------------------------------
/scripts/installation/setup_backup_cron.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Setup automated backups for MCP Memory Service
# Creates cron jobs for regular SQLite-vec database backups
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BACKUP_SCRIPT="$SCRIPT_DIR/backup_sqlite_vec.sh"
# Check if backup script exists
if [[ ! -f "$BACKUP_SCRIPT" ]]; then
echo "Error: Backup script not found at $BACKUP_SCRIPT"
exit 1
fi
# Make sure backup script is executable
chmod +x "$BACKUP_SCRIPT"
# Create cron job entry
CRON_ENTRY="0 2 * * * $BACKUP_SCRIPT > /tmp/mcp-backup.log 2>&1"
# Check if cron job already exists
if crontab -l 2>/dev/null | grep -q "$BACKUP_SCRIPT"; then
echo "Backup cron job already exists. Current crontab:"
crontab -l | grep "$BACKUP_SCRIPT"
else
# Add cron job
(crontab -l 2>/dev/null || true; echo "$CRON_ENTRY") | crontab -
echo "Added daily backup cron job:"
echo "$CRON_ENTRY"
fi
echo ""
echo "Backup automation setup complete!"
echo "- Daily backups at 2:00 AM"
echo "- Backup script: $BACKUP_SCRIPT"
echo "- Log file: /tmp/mcp-backup.log"
echo ""
echo "To check cron jobs: crontab -l"
echo "To remove cron job: crontab -l | grep -v backup_sqlite_vec.sh | crontab -"
```
--------------------------------------------------------------------------------
/scripts/sync/litestream/setup_local_litestream.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Setup script for Litestream replica on local macOS machine
set -e
echo "🔧 Setting up Litestream replica on local macOS..."
# Copy configuration to system location
echo "⚙️ Installing Litestream configuration..."
sudo mkdir -p /usr/local/etc
sudo cp litestream_replica_config.yml /usr/local/etc/litestream.yml
# Create log directory
sudo mkdir -p /var/log
sudo touch /var/log/litestream.log
sudo chmod 644 /var/log/litestream.log
# Install LaunchDaemon
echo "🚀 Installing LaunchDaemon..."
sudo cp deployment/io.litestream.replication.plist /Library/LaunchDaemons/
# Set permissions
sudo chown root:wheel /Library/LaunchDaemons/io.litestream.replication.plist
sudo chmod 644 /Library/LaunchDaemons/io.litestream.replication.plist
echo "✅ Local Litestream setup completed!"
echo ""
echo "Next steps:"
echo "1. Load service: sudo launchctl load /Library/LaunchDaemons/io.litestream.replication.plist"
echo "2. Start service: sudo launchctl start io.litestream.replication"
echo "3. Check status: litestream replicas -config /usr/local/etc/litestream.yml"
echo ""
echo "⚠️ Before starting the replica service, make sure the master is running on narrowbox.local"
```
--------------------------------------------------------------------------------
/docs/technical/tag-storage.md:
--------------------------------------------------------------------------------
```markdown
# Tag Storage Procedure
## File Structure Overview
```
mcp_memory_service/
├── tests/
│ └── test_tag_storage.py # Integration tests
├── scripts/
│ ├── validate_memories.py # Validation script
│ └── migrate_tags.py # Migration script
```
## Execution Steps
1. **Run Initial Validation**
```bash
python scripts/validate_memories.py
```
- Generates validation report of current state
2. **Run Integration Tests**
```bash
python tests/test_tag_storage.py
```
- Verifies functionality
3. **Execute Migration**
```bash
python scripts/migrate_tags.py
```
The script will:
- Create a backup automatically
- Run validation check
- Ask for confirmation before proceeding
- Perform migration
- Verify the migration
4. **Post-Migration Validation**
```bash
python scripts/validate_memories.py
```
- Confirms successful migration
## Monitoring Requirements
- Keep backup files for at least 7 days
- Monitor logs for any tag-related errors
- Run validation script daily for the first week
- Check search functionality with various tag formats
## Rollback Process
If issues are detected, use:
```bash
python scripts/migrate_tags.py --rollback
```
```
--------------------------------------------------------------------------------
/scripts/maintenance/check_memory_types.py:
--------------------------------------------------------------------------------
```python
#!/usr/bin/env python3
"""Quick script to check memory types in local database."""
import sqlite3
from pathlib import Path
# Windows database path
db_path = Path.home() / "AppData/Local/mcp-memory/sqlite_vec.db"
if not db_path.exists():
print(f"❌ Database not found at: {db_path}")
exit(1)
conn = sqlite3.connect(db_path)
cursor = conn.cursor()
# Get memory type distribution
cursor.execute("""
SELECT memory_type, COUNT(*) as count
FROM memories
GROUP BY memory_type
ORDER BY count DESC
""")
results = cursor.fetchall()
total = sum(count for _, count in results)
print(f"\nMemory Type Distribution")
print("=" * 60)
print(f"Total memories: {total:,}")
print(f"Unique types: {len(results)}\n")
print(f"{'Memory Type':<40} {'Count':>8} {'%':>6}")
print("-" * 60)
for memory_type, count in results[:30]: # Show top 30
pct = (count / total) * 100 if total > 0 else 0
type_display = memory_type if memory_type else "(empty/NULL)"
print(f"{type_display:<40} {count:>8,} {pct:>5.1f}%")
if len(results) > 30:
remaining = len(results) - 30
remaining_count = sum(count for _, count in results[30:])
print(f"\n... and {remaining} more types ({remaining_count:,} memories)")
conn.close()
```
--------------------------------------------------------------------------------
/scripts/utils/list-collections.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from chromadb import HttpClient
def list_collections():
try:
# Connect to local ChromaDB
client = HttpClient(host='localhost', port=8000)
# List all collections
collections = client.list_collections()
print("\nFound Collections:")
print("------------------")
for collection in collections:
print(f"Name: {collection.name}")
print(f"Metadata: {collection.metadata}")
print(f"Count: {collection.count()}")
print("------------------")
except Exception as e:
print(f"Error connecting to local ChromaDB: {str(e)}")
if __name__ == "__main__":
list_collections()
```
--------------------------------------------------------------------------------
/tests/unit/conftest.py:
--------------------------------------------------------------------------------
```python
"""
Shared test fixtures and helpers for unit tests.
"""
import tempfile
from pathlib import Path
from typing import List, Any, Optional
async def extract_chunks_from_temp_file(
loader: Any,
filename: str,
content: str,
encoding: str = 'utf-8',
**extract_kwargs
) -> List[Any]:
"""
Helper to extract chunks from a temporary file.
Args:
loader: Loader instance (CSVLoader, JSONLoader, etc.)
filename: Name of the temporary file to create
content: Content to write to the file
encoding: File encoding (default: utf-8)
**extract_kwargs: Additional keyword arguments to pass to extract_chunks()
Returns:
List of extracted chunks
Example:
>>> loader = CSVLoader(chunk_size=1000, chunk_overlap=200)
>>> chunks = await extract_chunks_from_temp_file(
... loader,
... "test.csv",
... "name,age\\nJohn,25",
... delimiter=','
... )
"""
with tempfile.TemporaryDirectory() as tmpdir:
file_path = Path(tmpdir) / filename
file_path.write_text(content, encoding=encoding)
chunks = []
async for chunk in loader.extract_chunks(file_path, **extract_kwargs):
chunks.append(chunk)
return chunks
```
--------------------------------------------------------------------------------
/test_version_checker.js:
--------------------------------------------------------------------------------
```javascript
#!/usr/bin/env node
/**
* Test script for version-checker.js utility
*/
const { getVersionInfo, formatVersionDisplay } = require('./claude-hooks/utilities/version-checker');
const CONSOLE_COLORS = {
RESET: '\x1b[0m',
BRIGHT: '\x1b[1m',
DIM: '\x1b[2m',
CYAN: '\x1b[36m',
GREEN: '\x1b[32m',
YELLOW: '\x1b[33m',
GRAY: '\x1b[90m',
RED: '\x1b[31m'
};
async function test() {
console.log('Testing version-checker utility...\n');
const projectRoot = __dirname;
// Test with PyPI check
console.log('1. Testing with PyPI check enabled:');
const versionInfo = await getVersionInfo(projectRoot, { checkPyPI: true, timeout: 3000 });
console.log(' Raw version info:', JSON.stringify(versionInfo, null, 2));
const display = formatVersionDisplay(versionInfo, CONSOLE_COLORS);
console.log(' Formatted:', display);
console.log('\n2. Testing without PyPI check:');
const localOnly = await getVersionInfo(projectRoot, { checkPyPI: false });
console.log(' Raw version info:', JSON.stringify(localOnly, null, 2));
const localDisplay = formatVersionDisplay(localOnly, CONSOLE_COLORS);
console.log(' Formatted:', localDisplay);
console.log('\n✅ Test completed!');
}
test().catch(error => {
console.error('❌ Test failed:', error);
process.exit(1);
});
```
--------------------------------------------------------------------------------
/docs/deployment/production-guide.md:
--------------------------------------------------------------------------------
```markdown
# MCP Memory Service - Production Setup
## 🚀 Quick Start
This MCP Memory Service deployment is configured with the **consolidation system**, **mDNS auto-discovery**, **HTTPS**, and **automatic startup**.
### **Installation**
```bash
# 1. Install the service
bash install_service.sh
# 2. Update configuration (if needed)
./update_service.sh
# 3. Start the service
sudo systemctl start mcp-memory
```
### **Verification**
```bash
# Check service status
sudo systemctl status mcp-memory
# Test API health
curl -k https://localhost:8000/api/health
# Verify mDNS discovery
avahi-browse -t _mcp-memory._tcp
```
## 📋 **Service Details**
- **Service Name**: `memory._mcp-memory._tcp.local.`
- **HTTPS Address**: https://localhost:8000
- **API Key**: `mcp-0b1ccbde2197a08dcb12d41af4044be6`
- **Auto-Startup**: ✅ Enabled
- **Consolidation**: ✅ Active
- **mDNS Discovery**: ✅ Working
## 🛠️ **Management**
```bash
./service_control.sh start # Start service
./service_control.sh stop # Stop service
./service_control.sh status # Show status
./service_control.sh logs # View logs
./service_control.sh health # Test API
```
## 📖 **Documentation**
- **Complete Guide**: `COMPLETE_SETUP_GUIDE.md`
- **Service Files**: `mcp-memory.service`, management scripts
- **Archive**: `archive/setup-development/` (development files)
**✅ Ready for production use!**
```
--------------------------------------------------------------------------------
/claude-hooks/statusline.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Claude Code Status Line Script
# Displays session memory context in status line
# Format: 🧠 8 (5 recent) memories | 📊 12 commits
# Path to session cache file
CACHE_FILE="$HOME/.claude/hooks/utilities/session-cache.json"
# ANSI color codes for styling
CYAN='\033[36m'
GREEN='\033[32m'
GRAY='\033[90m'
RESET='\033[0m'
# Check if cache file exists
if [ ! -f "$CACHE_FILE" ]; then
# No cache file - session not started yet or hook failed
echo ""
exit 0
fi
# Read cache file and extract data
MEMORIES=$(jq -r '.memoriesLoaded // 0' "$CACHE_FILE" 2>/dev/null)
RECENT=$(jq -r '.recentCount // 0' "$CACHE_FILE" 2>/dev/null)
GIT_COMMITS=$(jq -r '.gitCommits // 0' "$CACHE_FILE" 2>/dev/null)
# Guard against jq errors or missing fields (all three values must be numeric,
# otherwise the -gt comparisons below would fail)
if ! [[ "$MEMORIES" =~ ^[0-9]+$ && "$RECENT" =~ ^[0-9]+$ && "$GIT_COMMITS" =~ ^[0-9]+$ ]]; then
    echo ""
    exit 0
fi
# Build status line output
STATUS=""
# Memory section
if [ "$MEMORIES" -gt 0 ]; then
if [ "$RECENT" -gt 0 ]; then
STATUS="${CYAN}🧠 ${MEMORIES}${RESET} ${GREEN}(${RECENT} recent)${RESET} memories"
else
STATUS="${CYAN}🧠 ${MEMORIES}${RESET} memories"
fi
fi
# Git section
if [ "$GIT_COMMITS" -gt 0 ]; then
if [ -n "$STATUS" ]; then
STATUS="${STATUS} ${GRAY}|${RESET} ${CYAN}📊 ${GIT_COMMITS} commits${RESET}"
else
STATUS="${CYAN}📊 ${GIT_COMMITS} commits${RESET}"
fi
fi
# Output first line becomes status line
echo -e "$STATUS"
```
--------------------------------------------------------------------------------
/.claude/directives/development-setup.md:
--------------------------------------------------------------------------------
```markdown
# Development Setup - Critical Guidelines
## Editable Install (MANDATORY)
**⚠️ ALWAYS use editable install** to avoid stale package issues:
```bash
# REQUIRED for development
pip install -e . # or: uv pip install -e .
# Verify
pip show mcp-memory-service | grep Location
# Should show: .../mcp-memory-service/src
# NOT: .../site-packages
```
**Why:** MCP servers load from `site-packages`, not source files. Without `-e`, source changes won't be reflected until reinstall.
**Common symptom**: Code shows v8.23.0 but server reports v8.5.3
## Development Workflow
1. Clone repo: `git clone https://github.com/doobidoo/mcp-memory-service.git`
2. Create venv: `python -m venv venv && source venv/bin/activate`
3. **Editable install**: `pip install -e .` ← CRITICAL STEP
4. Verify: `python -c "import mcp_memory_service; print(mcp_memory_service.__version__)"`
5. Start coding - changes take effect after server restart (no reinstall needed)
## Version Mismatch Detection
```bash
# Quick check script
python scripts/validation/check_dev_setup.py
# Manual verification (both should match)
grep '__version__' src/mcp_memory_service/__init__.py
python -c "import mcp_memory_service; print(mcp_memory_service.__version__)"
```
## Fix Stale Installation
```bash
pip uninstall mcp-memory-service
pip install -e .
# Restart MCP servers in Claude Code
# Run: /mcp
```
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/web/oauth/__init__.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
OAuth 2.1 Dynamic Client Registration implementation for MCP Memory Service.
Provides OAuth 2.1 DCR endpoints to enable Claude Code HTTP transport integration.
This module implements:
- RFC 8414: OAuth 2.0 Authorization Server Metadata
- RFC 7591: OAuth 2.0 Dynamic Client Registration Protocol
- OAuth 2.1 security requirements and best practices
Key features:
- Dynamic client registration for automated OAuth client setup
- JWT-based access tokens with proper validation
- Authorization code flow with PKCE support
- Client credentials flow for server-to-server authentication
- Comprehensive scope-based authorization
- Backward compatibility with existing API key authentication
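Example (illustrative): once the server is running, the RFC 8414 metadata
should be discoverable at the standard well-known path:
    curl http://localhost:8000/.well-known/oauth-authorization-server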
"""
__all__ = [
"discovery",
"models",
"registration",
"authorization",
"middleware",
"storage"
]
```
--------------------------------------------------------------------------------
/scripts/sync/litestream/setup_remote_litestream.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Setup script for Litestream master on remote server (narrowbox.local)
set -e
echo "🔧 Setting up Litestream master on remote server..."
# Install Litestream
echo "📦 Installing Litestream..."
curl -LsS https://github.com/benbjohnson/litestream/releases/latest/download/litestream-linux-amd64.tar.gz | tar -xzf -
sudo mv litestream /usr/local/bin/
sudo chmod +x /usr/local/bin/litestream
# Create directories
echo "📁 Creating directories..."
sudo mkdir -p /var/www/litestream/mcp-memory
sudo mkdir -p /backup/litestream/mcp-memory
# Set permissions
sudo chown -R www-data:www-data /var/www/litestream
sudo chmod -R 755 /var/www/litestream
# Copy configuration
echo "⚙️ Installing Litestream configuration..."
sudo cp litestream_master_config.yml /etc/litestream.yml
# Install systemd services
echo "🚀 Installing systemd services..."
sudo cp litestream.service /etc/systemd/system/
sudo cp litestream-http.service /etc/systemd/system/
# Reload systemd and enable services
sudo systemctl daemon-reload
sudo systemctl enable litestream.service
sudo systemctl enable litestream-http.service
echo "✅ Remote Litestream setup completed!"
echo ""
echo "Next steps:"
echo "1. Start services: sudo systemctl start litestream litestream-http"
echo "2. Check status: sudo systemctl status litestream litestream-http"
echo "3. Verify HTTP endpoint: curl http://localhost:8080/mcp-memory/"
```
--------------------------------------------------------------------------------
/tools/docker/docker-compose.yml:
--------------------------------------------------------------------------------
```yaml
version: '3.8'
# Docker Compose configuration for MCP protocol mode
# For use with MCP clients (Claude Desktop, VS Code extension, etc.)
# For HTTP/API mode, use docker-compose.http.yml instead
services:
mcp-memory-service:
build:
context: ../..
dockerfile: tools/docker/Dockerfile
# Required for MCP protocol communication
stdin_open: true
tty: true
volumes:
# Single data directory for all storage
- ./data:/app/data
# Model cache (prevents re-downloading models on each restart)
# Uncomment the following line to persist Hugging Face models
# - ${HOME}/.cache/huggingface:/root/.cache/huggingface
environment:
# Mode selection
- MCP_MODE=mcp
# Storage configuration
- MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
- MCP_MEMORY_SQLITE_PATH=/app/data/sqlite_vec.db
- MCP_MEMORY_BACKUPS_PATH=/app/data/backups
# Performance tuning
- LOG_LEVEL=${LOG_LEVEL:-INFO}
- MAX_RESULTS_PER_QUERY=10
- SIMILARITY_THRESHOLD=0.7
# Python configuration
- PYTHONUNBUFFERED=1
- PYTHONPATH=/app/src
# Offline mode (uncomment if models are pre-cached and network is restricted)
# - HF_HUB_OFFLINE=1
# - TRANSFORMERS_OFFLINE=1
# Use the unified entrypoint
entrypoint: ["/usr/local/bin/docker-entrypoint-unified.sh"]
restart: unless-stopped
```
--------------------------------------------------------------------------------
/scripts/testing/test-connection.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from chromadb import HttpClient
def test_connection(port=8000):
try:
# Try to connect to local ChromaDB
client = HttpClient(host='localhost', port=port)
# Try a simple operation
heartbeat = client.heartbeat()
print(f"Successfully connected to ChromaDB on port {port}")
print(f"Heartbeat: {heartbeat}")
# List collections
collections = client.list_collections()
print("\nFound collections:")
for collection in collections:
print(f"- {collection.name} (count: {collection.count()})")
except Exception as e:
print(f"Error connecting to ChromaDB on port {port}: {str(e)}")
if __name__ == "__main__":
# Try default port
test_connection()
# If the above fails, you might want to try other common ports:
# test_connection(8080)
# test_connection(9000)
```
--------------------------------------------------------------------------------
/docs/ROADMAP.md:
--------------------------------------------------------------------------------
```markdown
# Development Roadmap
**The official roadmap has moved to the Wiki for easier maintenance and community collaboration.**
📖 **[View Development Roadmap on Wiki](https://github.com/doobidoo/mcp-memory-service/wiki/13-Development-Roadmap)**
The Wiki version includes:
- ✅ Completed milestones (v8.0-v8.38)
- 🎯 Current focus (v8.39-v9.0 - Q1 2026)
- 🚀 Future enhancements (Q2 2026+)
- 🎯 Medium term vision (Q3-Q4 2026)
- 🌟 Long-term aspirations (2027+)
- 📊 Success metrics and KPIs
- 🤝 Community contribution opportunities
## Why the Wiki?
The Wiki provides several advantages for roadmap documentation:
- ✅ **Easier Updates**: No PR required for roadmap changes
- ✅ **Better Navigation**: Integrated with other wiki guides
- ✅ **Community Collaboration**: Lower barrier for community input
- ✅ **Rich Formatting**: Enhanced markdown features
- ✅ **Cleaner Repository**: Reduces noise in commit history
## For Active Development Tracking
The roadmap on the Wiki tracks strategic direction. For day-to-day development:
- **[GitHub Projects](https://github.com/doobidoo/mcp-memory-service/projects)** - Sprint planning and task boards
- **[Open Issues](https://github.com/doobidoo/mcp-memory-service/issues)** - Bug reports and feature requests
- **[Pull Requests](https://github.com/doobidoo/mcp-memory-service/pulls)** - Active code changes
- **[CHANGELOG.md](../CHANGELOG.md)** - Release history and completed features
---
**Maintainer**: @doobidoo
**Last Updated**: November 26, 2025
```
--------------------------------------------------------------------------------
/scripts/ci/check_dockerfile_args.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Check for unused ARG declarations in Dockerfiles
# Prevents issues like #313 where unused ARGs caused confusion
set -e
DOCKERFILES=(
"tools/docker/Dockerfile"
"tools/docker/Dockerfile.slim"
)
EXIT_CODE=0
echo "🔍 Checking for unused Docker ARGs..."
echo ""
for dockerfile in "${DOCKERFILES[@]}"; do
if [[ ! -f "$dockerfile" ]]; then
echo "⚠️ Skipping $dockerfile (not found)"
continue
fi
echo "📄 Checking $dockerfile"
# Extract ARG names (excluding built-in TARGETPLATFORM, BUILDPLATFORM, etc.)
args=$(grep -oP '(?<=^ARG )\w+' "$dockerfile" 2>/dev/null || true)
for arg in $args; do
# Skip built-in Docker ARGs that are auto-populated
case "$arg" in
TARGETPLATFORM|BUILDPLATFORM|TARGETOS|TARGETARCH|TARGETVARIANT)
continue
;;
esac
# Check if ARG is used anywhere (as $ARG or ${ARG} or ${ARG:-default})
if ! grep -qE "(\\\$$arg|\\$\\{$arg[}:])" "$dockerfile"; then
echo " ❌ Unused ARG: $arg"
EXIT_CODE=1
else
echo " ✅ Used ARG: $arg"
fi
done
echo ""
done
if [[ $EXIT_CODE -eq 0 ]]; then
echo "✅ All Docker ARGs are used correctly"
else
echo "❌ Found unused Docker ARGs - please remove them or use them"
echo ""
echo "Note: Unused ARGs can cause confusion and build issues."
echo "See Issue #313 for an example where unused PLATFORM arg"
echo "caused Apple Silicon builds to fail."
fi
exit $EXIT_CODE
```
--------------------------------------------------------------------------------
/scripts/installation/setup_claude_mcp.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Setup script for Claude Code MCP configuration
echo "🔧 Setting up MCP Memory Service for Claude Code..."
echo "=================================================="
# Get the absolute path to the repository (this script lives in scripts/installation/,
# so the repo root is two levels up)
REPO_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
VENV_PYTHON="$REPO_PATH/venv/bin/python"
echo "Repository path: $REPO_PATH"
echo "Python path: $VENV_PYTHON"
# Check if virtual environment exists
if [ ! -f "$VENV_PYTHON" ]; then
echo "❌ Virtual environment not found at: $VENV_PYTHON"
echo "Please run: python -m venv venv && source venv/bin/activate && pip install -r requirements.txt"
exit 1
fi
# Create MCP configuration
cat > "$REPO_PATH/mcp_server_config.json" << EOF
{
"mcpServers": {
"memory": {
"command": "$VENV_PYTHON",
"args": ["-m", "src.mcp_memory_service.server"],
"cwd": "$REPO_PATH",
"env": {
"MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec",
"PYTHONPATH": "$REPO_PATH/src"
}
}
}
}
EOF
echo "✅ Created MCP configuration: $REPO_PATH/mcp_server_config.json"
echo ""
echo "📋 Manual Configuration Steps:"
echo "1. Copy the configuration below"
echo "2. Add it to your Claude Code MCP settings"
echo ""
echo "Configuration to add:"
echo "====================="
cat "$REPO_PATH/mcp_server_config.json"
echo ""
echo "🚀 Alternative: Start server manually and use Claude Code normally"
echo " cd $REPO_PATH"
echo " source venv/bin/activate"
echo " export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec"
echo " python -m src.mcp_memory_service.server"
```
--------------------------------------------------------------------------------
/.claude/directives/version-management.md:
--------------------------------------------------------------------------------
```markdown
# Version Management - Release Workflow
## ⚠️ CRITICAL: Always Use github-release-manager Agent
**NEVER do manual releases** (major, minor, patch, or hotfixes). Manual workflows miss steps and are error-prone.
## Four-File Version Bump Procedure
1. Update `src/mcp_memory_service/__init__.py` (line 50: `__version__ = "X.Y.Z"`)
2. Update `pyproject.toml` (line 7: `version = "X.Y.Z"`)
3. Update `README.md` (line 19: Latest Release section)
4. Run `uv lock` to update dependency lock file
5. Commit all four files together
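A quick pre-commit consistency check (a minimal sketch built on the line numbers above):
```bash
# All version strings must agree before committing (uv.lock is regenerated by `uv lock`)
grep '__version__' src/mcp_memory_service/__init__.py
grep '^version' pyproject.toml
sed -n '19p' README.md   # Latest Release line
```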
## Release Workflow
```bash
# ALWAYS use the agent
@agent github-release-manager "Check if we need a release"
@agent github-release-manager "Create release for v8.20.0"
```
**Agent ensures:**
- README.md updates
- GitHub Release creation
- Proper issue tracking
- CHANGELOG.md formatting
- Workflow verification (Docker Publish, HTTP-MCP Bridge)
## Hotfix Workflow (Critical Bugs)
- **Speed target**: 8-10 minutes from bug report to release (achievable with AI assistance)
- **Process**: Fix → Test → Four-file bump → Commit → github-release-manager agent
- **Issue management**: Post detailed root cause analysis, don't close until user confirms fix works
- **Example**: v8.20.1 (8 minutes: bug report → fix → release → user notification)
## Why Agent-First?
**Manual v8.20.1** (❌):
- Forgot README.md update
- Incomplete GitHub Release
- Missed workflow verification
**With agent v8.20.1** (✅):
- All files updated
- Proper release created
- Complete documentation
**Lesson**: Always use agents, even for "simple" hotfixes
```
--------------------------------------------------------------------------------
/scripts/quality/rescore_deberta.py:
--------------------------------------------------------------------------------
```python
#!/usr/bin/env python3
"""Re-score all DeBERTa memories with corrected model."""
import asyncio
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent.parent / "src"))  # repo_root/src
# Use SQLite directly to avoid Cloudflare network timeouts
from mcp_memory_service.storage.sqlite_vec import SqliteVecMemoryStorage
from mcp_memory_service.config import SQLITE_VEC_PATH
from mcp_memory_service.quality.onnx_ranker import get_onnx_ranker_model
async def rescore():
print("Loading DeBERTa...")
deberta = get_onnx_ranker_model('nvidia-quality-classifier-deberta', 'auto')
print("Connecting to storage (SQLite-vec only, no network)...")
storage = SqliteVecMemoryStorage(SQLITE_VEC_PATH)
await storage.initialize()
print("Fetching memories...")
all_memories = await storage.get_all_memories()
to_rescore = [m for m in all_memories
if m.metadata and m.metadata.get('quality_provider') == 'onnx_deberta']
print(f"Re-scoring {len(to_rescore)} memories...")
for i, m in enumerate(to_rescore, 1):
new_score = deberta.score_quality("", m.content)
await storage.update_memory_metadata(
content_hash=m.content_hash,
updates={'quality_score': new_score}
)
if i % 100 == 0:
print(f" [{i:5d}/{len(to_rescore)}] Score: {new_score:.3f}")
print(f"\n✓ Re-scored {len(to_rescore)} memories")
print("Note: Changes saved to SQLite. Hybrid backend will sync to Cloudflare automatically.")
if __name__ == "__main__":
asyncio.run(rescore())
```
--------------------------------------------------------------------------------
/scripts/run_memory_server.py:
--------------------------------------------------------------------------------
```python
#!/usr/bin/env python3
"""
Backward compatibility redirect to new location (v6.17.0+).
This stub ensures existing Claude Desktop configurations continue working
after the v6.17.0 script reorganization. The actual script has moved to
scripts/server/run_memory_server.py.
For best stability, consider using one of these approaches instead:
1. python -m mcp_memory_service.server (recommended)
2. uv run memory server
3. scripts/server/run_memory_server.py (direct path)
"""
import sys
import os
# Add informational notice (not a warning to avoid alarming users)
print("[INFO] Note: scripts/run_memory_server.py has moved to scripts/server/run_memory_server.py", file=sys.stderr)
print("[INFO] Consider using 'python -m mcp_memory_service.server' for better stability", file=sys.stderr)
print("[INFO] See https://github.com/doobidoo/mcp-memory-service for migration guide", file=sys.stderr)
# Execute the relocated script
script_dir = os.path.dirname(os.path.abspath(__file__))
new_script = os.path.join(script_dir, "server", "run_memory_server.py")
if os.path.exists(new_script):
# Preserve the original __file__ context for the new script
global_vars = {
'__file__': new_script,
'__name__': '__main__',
'sys': sys,
'os': os
}
with open(new_script, 'r', encoding='utf-8') as f:
exec(compile(f.read(), new_script, 'exec'), global_vars)
else:
print(f"[ERROR] Could not find {new_script}", file=sys.stderr)
print("[ERROR] Please ensure you have the complete mcp-memory-service repository", file=sys.stderr)
sys.exit(1)
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/ingestion/__init__.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Document Ingestion Module
Provides functionality to side-load documents into the memory database,
supporting multiple formats including PDF, text, and structured data.
This module enables users to pre-populate the vector database with
documentation, knowledge bases, and other content for semantic retrieval.
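Example (an illustrative sketch; exact chunk handling depends on the storage layer):
    from pathlib import Path
    loader = get_loader_for_file(Path("manual.pdf"))
    async for chunk in loader.extract_chunks(Path("manual.pdf")):
        ...  # hand each DocumentChunk to the memory database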
"""
from .base import DocumentLoader, DocumentChunk, IngestionResult
from .chunker import TextChunker
from .registry import get_loader_for_file, register_loader, SUPPORTED_FORMATS, is_supported_file
# Import loaders to trigger registration
# Order matters! Import SemtoolsLoader first, then specialized loaders
# This allows specialized loaders to override if semtools is unavailable
from . import text_loader
from . import semtools_loader
from . import pdf_loader
from . import json_loader
from . import csv_loader
__all__ = [
'DocumentLoader',
'DocumentChunk',
'IngestionResult',
'TextChunker',
'get_loader_for_file',
'register_loader',
'SUPPORTED_FORMATS',
'is_supported_file'
]
```
--------------------------------------------------------------------------------
/scripts/run/start_sqlite_vec.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Quick start script for MCP Memory Service with SQLite-vec backend
echo "🚀 Starting MCP Memory Service with SQLite-vec backend..."
echo "=================================================="
# Check if virtual environment exists
if [ ! -d "venv" ]; then
echo "❌ Virtual environment not found. Please run setup first."
exit 1
fi
# Activate virtual environment
echo "📦 Activating virtual environment..."
source venv/bin/activate
# Set SQLite-vec backend
echo "🔧 Configuring SQLite-vec backend..."
export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
# Display configuration
echo "✅ Configuration:"
echo " Backend: $MCP_MEMORY_STORAGE_BACKEND"
echo " Database: ~/.local/share/mcp-memory/sqlite_vec.db"
echo " Python: $(which python)"
# Check key dependencies
echo ""
echo "🧪 Checking dependencies..."
python -c "
import sqlite_vec
import sentence_transformers
import mcp
print(' ✅ sqlite-vec available')
print(' ✅ sentence-transformers available')
print(' ✅ mcp available')
"
echo ""
echo "🎯 Ready! The MCP Memory Service is configured for sqlite-vec."
echo ""
echo "To start the server:"
echo " python -m src.mcp_memory_service.server"
echo ""
echo "🧪 Testing server startup..."
timeout 3 python -m src.mcp_memory_service.server 2>/dev/null || echo "✅ Server can start successfully!"
echo ""
echo "For Claude Code integration:"
echo " - The service will automatically use sqlite-vec"
echo " - Memory database: ~/.local/share/mcp-memory/sqlite_vec.db"
echo " - 75% less memory usage vs ChromaDB"
echo ""
echo "To test the setup:"
echo " python simple_sqlite_vec_test.py"
```
--------------------------------------------------------------------------------
/claude-hooks/debug-pattern-test.js:
--------------------------------------------------------------------------------
```javascript
#!/usr/bin/env node
/**
* Debug Pattern Detection
*/
const { AdaptivePatternDetector } = require('./utilities/adaptive-pattern-detector');
async function debugPatternDetection() {
console.log('🔍 Debugging Pattern Detection');
console.log('═'.repeat(50));
const detector = new AdaptivePatternDetector({ sensitivity: 0.7 });
const testMessage = "What did we decide about the authentication approach?";
console.log(`\nTesting message: "${testMessage}"`);
const result = await detector.detectPatterns(testMessage);
console.log('\nResults:');
console.log('- Matches found:', result.matches.length);
console.log('- Confidence:', result.confidence);
console.log('- Processing tier:', result.processingTier);
console.log('- Trigger recommendation:', result.triggerRecommendation);
if (result.matches.length > 0) {
console.log('\nMatches:');
result.matches.forEach((match, i) => {
console.log(` ${i + 1}. Category: ${match.category}`);
console.log(` Pattern: ${match.pattern}`);
console.log(` Confidence: ${match.confidence}`);
console.log(` Type: ${match.type}`);
});
}
// Test the instant patterns directly
console.log('\n🔍 Testing Instant Patterns Directly');
const instantMatches = detector.detectInstantPatterns(testMessage);
console.log('Instant matches:', instantMatches.length);
instantMatches.forEach((match, i) => {
console.log(` ${i + 1}. ${match.category}: ${match.confidence}`);
});
}
debugPatternDetection().catch(console.error);
```
--------------------------------------------------------------------------------
/docs/development/todo-tracker.md:
--------------------------------------------------------------------------------
```markdown
# TODO Tracker
**Last Updated:** 2025-11-08 10:25:25
**Scan Directory:** src
**Total TODOs:** 5
## Summary
| Priority | Count | Description |
|----------|-------|-------------|
| CRITICAL (P0) | 1 | Security, data corruption, blocking bugs |
| HIGH (P1) | 2 | Performance, user-facing, incomplete features |
| MEDIUM (P2) | 2 | Code quality, optimizations, technical debt |
| LOW (P3) | 0 | Documentation, cosmetic, nice-to-haves |
---
## CRITICAL (P0)
- `src/mcp_memory_service/web/api/analytics.py:625` - Period filtering is not implemented, leading to incorrect analytics data.
## HIGH (P1)
- `src/mcp_memory_service/storage/cloudflare.py:185` - Lack of a fallback for embedding generation makes the service vulnerable to external API failures.
- `src/mcp_memory_service/web/api/manage.py:231` - Inefficient queries can cause significant performance bottlenecks, especially with large datasets.
## MEDIUM (P2)
- `src/mcp_memory_service/web/api/documents.py:592` - Using a deprecated FastAPI event handler; should be migrated to the modern `lifespan` context manager to reduce technical debt.
- `src/mcp_memory_service/web/api/analytics.py:213` - The `storage.get_stats()` method is missing a data point, leading to API inconsistency.
## LOW (P3)
*(None in this list)*
---
## How to Address
1. **CRITICAL**: Address immediately, block releases if necessary
2. **HIGH**: Schedule for current/next sprint
3. **MEDIUM**: Add to backlog, address in refactoring sprints
4. **LOW**: Address opportunistically or when touching related code
## Updating This Tracker
Run: `bash scripts/maintenance/scan_todos.sh`
```
--------------------------------------------------------------------------------
/scripts/backup/backup_sqlite_vec.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# SQLite-vec Database Backup Script
# Creates timestamped backups of the SQLite-vec database
set -e
# Configuration
MEMORY_DIR="${MCP_MEMORY_BASE_DIR:-$HOME/.local/share/mcp-memory}"
BACKUP_DIR="$MEMORY_DIR/backups"
DATABASE_FILE="$MEMORY_DIR/sqlite_vec.db"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
BACKUP_NAME="sqlite_backup_$TIMESTAMP"
BACKUP_PATH="$BACKUP_DIR/$BACKUP_NAME"
# Check if database exists
if [[ ! -f "$DATABASE_FILE" ]]; then
echo "Error: SQLite database not found at $DATABASE_FILE"
exit 1
fi
# Create backup directory
mkdir -p "$BACKUP_PATH"
# Copy database files (main, WAL, and SHM files)
echo "Creating backup: $BACKUP_NAME"
cp "$DATABASE_FILE" "$BACKUP_PATH/" 2>/dev/null || true
cp "${DATABASE_FILE}-wal" "$BACKUP_PATH/" 2>/dev/null || true
cp "${DATABASE_FILE}-shm" "$BACKUP_PATH/" 2>/dev/null || true
# Get backup size
BACKUP_SIZE=$(du -sh "$BACKUP_PATH" | cut -f1)
# Count files backed up
FILE_COUNT=$(find "$BACKUP_PATH" -type f | wc -l)
# Create backup metadata
cat > "$BACKUP_PATH/backup_info.json" << EOF
{
"backup_name": "$BACKUP_NAME",
"timestamp": "$TIMESTAMP",
"source_database": "$DATABASE_FILE",
"backup_path": "$BACKUP_PATH",
"backup_size": "$BACKUP_SIZE",
"files_count": $FILE_COUNT,
"backend": "sqlite_vec",
"created_at": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
}
EOF
echo "Backup completed successfully:"
echo " Name: $BACKUP_NAME"
echo " Path: $BACKUP_PATH"
echo " Size: $BACKUP_SIZE"
echo " Files: $FILE_COUNT"
# Cleanup old backups (keep last 7 days)
find "$BACKUP_DIR" -name "sqlite_backup_*" -type d -mtime +7 -exec rm -rf {} \; 2>/dev/null || true
exit 0
```
--------------------------------------------------------------------------------
/.metrics/baseline_cc_install_hooks.txt:
--------------------------------------------------------------------------------
```
claude-hooks/install_hooks.py
F 1225:0 main - F (42)
M 857:4 HookInstaller.configure_claude_settings - D (30)
M 1038:4 HookInstaller.run_tests - C (17)
M 595:4 HookInstaller.install_basic_hooks - C (15)
M 1124:4 HookInstaller._cleanup_empty_directories - C (14)
M 238:4 HookInstaller._parse_mcp_get_output - C (12)
M 351:4 HookInstaller.validate_mcp_prerequisites - C (12)
M 783:4 HookInstaller.install_configuration - C (11)
M 198:4 HookInstaller.detect_claude_mcp_configuration - B (9)
M 721:4 HookInstaller.install_natural_triggers - B (9)
C 77:0 HookInstaller - B (7)
M 151:4 HookInstaller.check_prerequisites - B (7)
F 44:0 get_project_version - B (6)
M 284:4 HookInstaller._detect_python_path - B (6)
M 1169:4 HookInstaller.uninstall - B (6)
M 679:4 HookInstaller.install_auto_capture - A (5)
M 90:4 HookInstaller._detect_claude_hooks_directory - A (4)
M 574:4 HookInstaller.create_backup - A (4)
M 270:4 HookInstaller.detect_environment_type - A (3)
M 387:4 HookInstaller.generate_hooks_config_from_mcp - A (2)
C 67:0 Colors - A (1)
M 84:4 HookInstaller.__init__ - A (1)
M 129:4 HookInstaller.info - A (1)
M 133:4 HookInstaller.warn - A (1)
M 137:4 HookInstaller.error - A (1)
M 141:4 HookInstaller.success - A (1)
M 145:4 HookInstaller.header - A (1)
M 314:4 HookInstaller.configure_protocol_for_environment - A (1)
M 457:4 HookInstaller.generate_basic_config - A (1)
M 522:4 HookInstaller.enhance_config_for_natural_triggers - A (1)
30 blocks (classes, functions, methods) analyzed.
Average complexity: B (7.7)
```
--------------------------------------------------------------------------------
/docs/legacy/dual-protocol-hooks.md:
--------------------------------------------------------------------------------
```markdown
# Dual Protocol Memory Hooks (Legacy)
> **Note**: This feature has been superseded by Natural Memory Triggers v7.1.3+. This documentation is kept for reference only.
**Dual Protocol Memory Hooks** (v7.0.0+) provide intelligent memory awareness with automatic protocol detection:
## Configuration
```json
{
"memoryService": {
"protocol": "auto",
"preferredProtocol": "mcp",
"fallbackEnabled": true,
"http": {
"endpoint": "https://localhost:8443",
"apiKey": "your-api-key",
"healthCheckTimeout": 3000,
"useDetailedHealthCheck": true
},
"mcp": {
"serverCommand": ["uv", "run", "memory", "server", "-s", "cloudflare"],
"serverWorkingDir": "/Users/yourname/path/to/mcp-memory-service",
"connectionTimeout": 5000,
"toolCallTimeout": 10000
}
}
}
```
## Protocol Options
- `"auto"`: Smart detection (MCP → HTTP → Environment fallback)
- `"http"`: HTTP-only mode (web server at localhost:8443)
- `"mcp"`: MCP-only mode (direct server process)
## Benefits
- **Reliability**: Multiple connection methods ensure hooks always work
- **Performance**: MCP direct for speed, HTTP for stability
- **Flexibility**: Works with local development or remote deployments
- **Compatibility**: Full backward compatibility with existing configurations
## Migration to Natural Memory Triggers
If you're using Dual Protocol Hooks, consider migrating to Natural Memory Triggers v7.1.3+ which offers:
- 85%+ trigger accuracy
- Multi-tier performance optimization
- CLI management system
- Git-aware context integration
- Adaptive learning
See main CLAUDE.md for migration instructions.
```
--------------------------------------------------------------------------------
/.claude/directives/pr-workflow.md:
--------------------------------------------------------------------------------
```markdown
# PR Workflow - Mandatory Quality Checks
## 🚦 Before Creating PR (CRITICAL)
**⚠️ MANDATORY**: Run quality checks BEFORE creating PR to prevent multi-iteration review cycles.
### Recommended Workflow
```bash
# Step 1: Stage your changes
git add .
# Step 2: Run comprehensive pre-PR check (MANDATORY)
bash scripts/pr/pre_pr_check.sh
# Step 3: Only create PR if all checks pass
gh pr create --fill
# Step 4: Request Gemini review
gh pr comment <PR_NUMBER> --body "/gemini review"
```
### What pre_pr_check.sh Does
1. ✅ Runs `quality_gate.sh --staged --with-pyscn` (complexity ≤8, security scan, PEP 8)
2. ✅ Runs full test suite (`pytest tests/`)
3. ✅ Checks import ordering (PEP 8 compliance)
4. ✅ Detects debug code (print statements, breakpoints)
5. ✅ Validates docstring coverage
6. ✅ Reminds to use code-quality-guard agent
### Manual Option (if script unavailable)
```bash
# Run quality gate
bash scripts/pr/quality_gate.sh --staged --with-pyscn
# Run tests
pytest tests/
# Use code-quality-guard agent
@agent code-quality-guard "Analyze complexity and security for staged files"
```
### Why This Matters
- **PR #280 lesson**: 7 review iterations, 20 issues found across 7 cycles
- **Root cause**: Quality checks NOT run before PR creation
- **Prevention**: Mandatory pre-PR script catches issues early
- **Time saved**: ~30-60 min per PR vs multi-day review cycles
### PR Template Checklist
See `.github/PULL_REQUEST_TEMPLATE.md` for complete checklist including:
- [ ] Quality gate passed (complexity ≤8, no security issues)
- [ ] All tests passing locally
- [ ] Code-quality-guard agent used
- [ ] Self-reviewed on GitHub diff
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/server/__main__.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Entry point for running the server package as a module.
Allows running the server with:
python -m mcp_memory_service.server [args]
This is required for backward compatibility with CI/CD workflows
and Docker containers that use `python -m` invocation.
"""
import sys
import argparse
from . import main
from .._version import __version__
def run_with_args():
"""Handle command-line arguments before starting server."""
# Simple argument parsing for --version and --help
parser = argparse.ArgumentParser(
prog='python -m mcp_memory_service.server',
description='MCP Memory Service - Model Context Protocol Server',
add_help=True
)
parser.add_argument(
'--version',
action='version',
version=f'%(prog)s {__version__}'
)
# Parse known args to allow --version/--help while passing through other args
args, unknown = parser.parse_known_args()
# If we get here, no --version or --help was provided
# Start the server normally
main()
if __name__ == '__main__':
run_with_args()
```
--------------------------------------------------------------------------------
/tools/docker/docker-entrypoint-persistent.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Docker entrypoint script for MCP Memory Service - Persistent mode
# This script keeps the container running even when there's no active MCP client
set -e
echo "[INFO] Starting MCP Memory Service in Docker container (persistent mode)"
# Function to handle signals
handle_signal() {
echo "[INFO] Received signal, shutting down..."
if [ -n "$SERVER_PID" ]; then
kill -TERM $SERVER_PID 2>/dev/null || true
fi
exit 0
}
# Set up signal handlers
trap handle_signal SIGTERM SIGINT
# Create named pipes for stdio communication
FIFO_DIR="/tmp/mcp-memory-fifo"
mkdir -p "$FIFO_DIR"
STDIN_FIFO="$FIFO_DIR/stdin"
STDOUT_FIFO="$FIFO_DIR/stdout"
# Remove old pipes if they exist
rm -f "$STDIN_FIFO" "$STDOUT_FIFO"
# Create new named pipes
mkfifo "$STDIN_FIFO"
mkfifo "$STDOUT_FIFO"
echo "[INFO] Created named pipes for stdio communication"
# Start the server in the background with the named pipes
if [ "${UV_ACTIVE}" = "1" ]; then
echo "[INFO] Running with UV wrapper (persistent mode)"
python -u uv_wrapper.py < "$STDIN_FIFO" > "$STDOUT_FIFO" 2>&1 &
else
echo "[INFO] Running directly with Python (persistent mode)"
python -u -m mcp_memory_service.server < "$STDIN_FIFO" > "$STDOUT_FIFO" 2>&1 &
fi
SERVER_PID=$!
echo "[INFO] Server started with PID: $SERVER_PID"
# Keep the stdin pipe open to prevent the server from exiting
exec 3> "$STDIN_FIFO"
# Monitor the server process
while true; do
if ! kill -0 $SERVER_PID 2>/dev/null; then
echo "[ERROR] Server process exited unexpectedly"
exit 1
fi
# Send a keep-alive message every 30 seconds
echo "" >&3
sleep 30
done
```
--------------------------------------------------------------------------------
/examples/claude_desktop_config_windows.json:
--------------------------------------------------------------------------------
```json
{
"_comment": "Windows-specific MCP Memory Service configuration for Claude Desktop",
"_instructions": [
"Replace 'YOUR_USERNAME' with your actual Windows username",
"Replace 'C:\\REPOSITORIES\\mcp-memory-service' with your actual repository path",
"Supported backends: sqlite_vec, cloudflare, hybrid (ChromaDB removed in v8.0.0)"
],
"mcpServers": {
"memory": {
"command": "python",
"args": [
"C:/REPOSITORIES/mcp-memory-service/scripts/memory_offline.py"
],
"env": {
"PYTHONPATH": "C://REPOSITORIES//mcp-memory-service",
"_comment_backend_choice": "Choose one of the backend configurations below",
"_comment_sqlite_vec": "=== SQLite-vec Backend (Recommended for local storage) ===",
"MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec",
"MCP_MEMORY_SQLITE_PATH": "C:\\Users\\YOUR_USERNAME\\AppData\\Local\\mcp-memory\\memory_migrated.db",
"MCP_MEMORY_BACKUPS_PATH": "C:\\Users\\YOUR_USERNAME\\AppData\\Local\\mcp-memory\\backups",
"_comment_offline": "=== Offline Mode Configuration (prevents PyTorch downloads) ===",
"HF_HOME": "C:\\Users\\YOUR_USERNAME\\.cache\\huggingface",
"TRANSFORMERS_CACHE": "C:\\Users\\YOUR_USERNAME\\.cache\\huggingface\\transformers",
"SENTENCE_TRANSFORMERS_HOME": "C:\\Users\\YOUR_USERNAME\\.cache\\torch\\sentence_transformers",
"HF_HUB_OFFLINE": "1",
"TRANSFORMERS_OFFLINE": "1",
"_comment_performance": "=== Performance Settings ===",
"PYTORCH_ENABLE_MPS_FALLBACK": "1",
"PYTORCH_CUDA_ALLOC_CONF": "max_split_size_mb:128"
}
}
}
}
```
--------------------------------------------------------------------------------
/scripts/testing/simple_test.py:
--------------------------------------------------------------------------------
```python
#!/usr/bin/env python3
"""
Simple test to use Homebrew Python's sentence-transformers
"""
import os
import sys
import subprocess
# Set environment variables for testing
os.environ["MCP_MEMORY_STORAGE_BACKEND"] = "sqlite_vec"
os.environ["MCP_MEMORY_SQLITE_PATH"] = os.path.expanduser("~/Library/Application Support/mcp-memory/sqlite_vec.db")
os.environ["MCP_MEMORY_BACKUPS_PATH"] = os.path.expanduser("~/Library/Application Support/mcp-memory/backups")
os.environ["MCP_MEMORY_USE_ONNX"] = "1"
# Get the Homebrew Python path
result = subprocess.run(
['brew', '--prefix', 'pytorch'],
capture_output=True,
text=True,
check=True
)
pytorch_prefix = result.stdout.strip()
homebrew_python_path = f"{pytorch_prefix}/libexec/bin/python3"
print(f"Using Homebrew Python: {homebrew_python_path}")
# Run a simple test with the Homebrew Python
test_script = """
import torch
import sentence_transformers
import sys
print(f"Python: {sys.version}")
print(f"PyTorch: {torch.__version__}")
print(f"sentence-transformers: {sentence_transformers.__version__}")
# Load a model
model = sentence_transformers.SentenceTransformer('paraphrase-MiniLM-L3-v2')
print(f"Model loaded: {model}")
# Encode a test sentence
test_text = "This is a test sentence for encoding with Homebrew PyTorch"
embedding = model.encode([test_text])
print(f"Embedding shape: {embedding.shape}")
print("Test successful\!")
"""
# Run the test with Homebrew Python
result = subprocess.run(
[homebrew_python_path, "-c", test_script],
capture_output=True,
text=True
)
print("=== STDOUT ===")
print(result.stdout)
if result.stderr:
print("=== STDERR ===")
print(result.stderr)
```
--------------------------------------------------------------------------------
/scripts/utils/test_groq_bridge.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Test script for Groq bridge integration
# Demonstrates usage without requiring API key
set -e
echo "=== Groq Bridge Integration Test ==="
echo ""
# Check if groq package is installed
echo "1. Checking Python groq package..."
if python3 -c "import groq" 2>/dev/null; then
echo " ✓ groq package installed"
else
echo " ✗ groq package NOT installed"
echo ""
echo "To install: pip install groq"
echo "Or: uv pip install groq"
exit 1
fi
# Check if API key is set
echo ""
echo "2. Checking GROQ_API_KEY environment variable..."
if [ -z "$GROQ_API_KEY" ]; then
echo " ✗ GROQ_API_KEY not set"
echo ""
echo "To set: export GROQ_API_KEY='your-api-key-here'"
echo "Get your API key from: https://console.groq.com/keys"
echo ""
echo "Skipping API test (would require valid key)"
else
echo " ✓ GROQ_API_KEY configured"
# Test the bridge with a simple query
echo ""
echo "3. Testing Groq bridge with sample query..."
echo ""
python3 scripts/utils/groq_agent_bridge.py \
"Rate the complexity of this Python function on a scale of 1-10: def add(a, b): return a + b" \
--json
fi
echo ""
echo "=== Integration Test Complete ==="
echo ""
echo "Usage examples:"
echo ""
echo "# Complexity analysis"
echo "python scripts/utils/groq_agent_bridge.py \"Analyze complexity 1-10: \$(cat file.py)\""
echo ""
echo "# Security scan"
echo "python scripts/utils/groq_agent_bridge.py \"Check for security issues: \$(cat file.py)\" --json"
echo ""
echo "# With custom model and temperature"
echo "python scripts/utils/groq_agent_bridge.py \"Your prompt\" --model llama2-70b-4096 --temperature 0.3"
```
--------------------------------------------------------------------------------
/tools/docker/DEPRECATED.md:
--------------------------------------------------------------------------------
```markdown
# Deprecated Docker Files
The following Docker files are deprecated as of v5.0.4 and will be removed in v6.0.0:
## Deprecated Files
### 1. `docker-compose.standalone.yml`
- **Replaced by**: `docker-compose.http.yml`
- **Reason**: Confusing name, mixed ChromaDB/SQLite configs, incorrect entrypoint for HTTP mode
- **Migration**: Use `docker-compose.http.yml` for HTTP/API access
### 2. `docker-compose.uv.yml`
- **Replaced by**: UV is now built into the main Dockerfile
- **Reason**: UV support should be in the image, not a separate compose file
- **Migration**: UV is automatically available in all configurations
### 3. `docker-compose.pythonpath.yml`
- **Replaced by**: Fixed PYTHONPATH in main Dockerfile
- **Reason**: PYTHONPATH fix belongs in Dockerfile, not compose variant
- **Migration**: All compose files now have correct PYTHONPATH=/app/src
### 4. `docker-entrypoint-persistent.sh`
- **Replaced by**: `docker-entrypoint-unified.sh`
- **Reason**: Overcomplicated, doesn't support HTTP mode, named pipes unnecessary
- **Migration**: Use unified entrypoint with MCP_MODE environment variable
## New Simplified Structure
Use one of these two configurations:
1. **MCP Protocol Mode** (for Claude Desktop, VS Code):
```bash
docker-compose up -d
```
2. **HTTP/API Mode** (for web access, REST API):
```bash
docker-compose -f docker-compose.http.yml up -d
```
## Timeline
- **v5.0.4**: Files marked as deprecated, new structure introduced
- **v5.1.0**: Warning messages added when using deprecated files
- **v6.0.0**: Deprecated files removed
## Credits
Thanks to Joe Esposito for identifying the Docker setup issues that led to this simplification.
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/utils/hashing.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import json
from typing import Any, Dict, Optional
def generate_content_hash(content: str, metadata: Optional[Dict[str, Any]] = None) -> str:
"""
Generate a unique hash for content and metadata.
This improved version ensures consistent hashing by:
1. Normalizing content (strip whitespace, lowercase)
2. Sorting metadata keys
3. Using a consistent JSON serialization
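    Example (both calls yield the same hash after normalization):
        generate_content_hash("  Hello World  ") == generate_content_hash("hello world")  # True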
"""
# Normalize content
normalized_content = content.strip().lower()
# Create hash content with normalized content
hash_content = normalized_content
# Add metadata if present
if metadata:
# Filter out timestamp and dynamic fields
static_metadata = {
k: v for k, v in metadata.items()
if k not in ['timestamp', 'content_hash', 'embedding']
}
if static_metadata:
# Sort keys and use consistent JSON serialization
hash_content += json.dumps(static_metadata, sort_keys=True, ensure_ascii=True)
# Generate hash
return hashlib.sha256(hash_content.encode('utf-8')).hexdigest()
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/consolidation/__init__.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Dream-inspired memory consolidation system.
This module implements autonomous memory consolidation inspired by human cognitive
processes during sleep cycles, featuring exponential decay scoring, creative
association discovery, semantic compression, and controlled forgetting.
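Decay scoring follows the usual exponential form (an illustrative sketch; the
actual parameters live in ExponentialDecayCalculator):
    relevance(t) = base_importance * exp(-t / retention_period)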
"""
from .base import ConsolidationBase
from .decay import ExponentialDecayCalculator
from .associations import CreativeAssociationEngine
from .clustering import SemanticClusteringEngine
from .compression import SemanticCompressionEngine
from .forgetting import ControlledForgettingEngine
from .consolidator import DreamInspiredConsolidator
from .scheduler import ConsolidationScheduler
from .health import ConsolidationHealthMonitor, HealthStatus, HealthMetric, HealthAlert
__all__ = [
'ConsolidationBase',
'ExponentialDecayCalculator',
'CreativeAssociationEngine',
'SemanticClusteringEngine',
'SemanticCompressionEngine',
'ControlledForgettingEngine',
'DreamInspiredConsolidator',
'ConsolidationScheduler',
'ConsolidationHealthMonitor',
'HealthStatus',
'HealthMetric',
'HealthAlert'
]
```
--------------------------------------------------------------------------------
/tools/docker/docker-entrypoint.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Docker entrypoint script for MCP Memory Service
set -e
echo "[INFO] Starting MCP Memory Service in Docker container"
# Function to handle signals
handle_signal() {
echo "[INFO] Received signal, shutting down..."
if [ -n "$SERVER_PID" ]; then
kill -TERM $SERVER_PID 2>/dev/null || true
fi
exit 0
}
# Set up signal handlers
trap handle_signal SIGTERM SIGINT
# Function to keep stdin alive
keep_stdin_alive() {
while true; do
# Emit a newline every 30 seconds to keep the container's stdio pipe open
echo "" 2>/dev/null || break
sleep 30
done
}
# Check if running in standalone mode
if [ "${MCP_STANDALONE_MODE}" = "1" ]; then
echo "[INFO] Running in standalone mode"
exec /usr/local/bin/docker-entrypoint-persistent.sh "$@"
fi
# Check if UV_ACTIVE is set
if [ "${UV_ACTIVE}" = "1" ]; then
echo "[INFO] Running with UV wrapper"
# Start the keep-alive process in the background
keep_stdin_alive &
KEEPALIVE_PID=$!
# Run the server
python -u uv_wrapper.py "$@" &
SERVER_PID=$!
# Wait for the server process
wait $SERVER_PID
SERVER_EXIT_CODE=$?
# Clean up the keep-alive process
kill $KEEPALIVE_PID 2>/dev/null || true
exit $SERVER_EXIT_CODE
else
echo "[INFO] Running directly with Python"
# Start the keep-alive process in the background
keep_stdin_alive &
KEEPALIVE_PID=$!
# Run the server
python -u -m mcp_memory_service.server "$@" &
SERVER_PID=$!
# Wait for the server process
wait $SERVER_PID
SERVER_EXIT_CODE=$?
# Clean up the keep-alive process
kill $KEEPALIVE_PID 2>/dev/null || true
exit $SERVER_EXIT_CODE
fi
```
--------------------------------------------------------------------------------
/scripts/setup-lightweight.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# MCP Memory Service - Automated Setup
# Installs from Sundeepg98's fork with ONNX patches
#
# Usage: curl -sSL <gist-url> | bash
# Or: ./setup-mcp-memory.sh
set -e
echo "🔧 MCP Memory Service - Optimized Setup"
echo "========================================"
echo ""
# Check prerequisites
if ! command -v pipx &> /dev/null; then
echo "❌ pipx not found. Install with: pip install pipx"
exit 1
fi
# Uninstall existing if present
if pipx list | grep -q mcp-memory-service; then
echo "📦 Removing existing installation..."
pipx uninstall mcp-memory-service
fi
# Install from fork
echo "📥 Installing from Sundeepg98/mcp-memory-service fork..."
pipx install "git+https://github.com/Sundeepg98/mcp-memory-service.git"
# Create data directory
echo "📁 Creating data directories..."
mkdir -p ~/.local/share/mcp-memory
mkdir -p ~/.cache/mcp_memory/onnx_models
# Get the python path
PYTHON_PATH=$(pipx environment --value PIPX_HOME)/venvs/mcp-memory-service/bin/python
echo ""
echo "✅ Installation complete!"
echo ""
echo "📋 Next: Add this to ~/.claude/settings.json:"
echo ""
cat << 'EOF'
{
"mcpServers": {
"memory": {
"type": "stdio",
"command": "PYTHON_PATH_PLACEHOLDER",
"args": ["-m", "mcp_memory_service.server"],
"env": {
"MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec",
"MCP_QUALITY_BOOST_ENABLED": "true"
}
}
},
"env": {
"MCP_MEMORY_USE_ONNX": "true",
"MCP_CONSOLIDATION_ENABLED": "true"
}
}
EOF
echo ""
echo "Replace PYTHON_PATH_PLACEHOLDER with:"
echo " $PYTHON_PATH"
echo ""
echo "🔄 Then restart Claude Code"
echo ""
echo "📊 Disk usage: ~805MB (vs 7.7GB with transformers)"
echo "🤖 ONNX models will auto-download on first use (~255MB)"
```
--------------------------------------------------------------------------------
/scripts/sync/litestream/resolve_conflicts.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Simple conflict resolution helper
STAGING_DB="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec_staging.db"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
if [ ! -f "$STAGING_DB" ]; then
echo -e "${RED}No staging database found${NC}"
exit 1
fi
# Get conflicts
CONFLICTS=$(sqlite3 "$STAGING_DB" "
SELECT id, content, staged_at, conflict_status
FROM staged_memories
WHERE conflict_status IN ('detected', 'push_failed')
ORDER BY staged_at DESC;
")
if [ -z "$CONFLICTS" ]; then
echo -e "${GREEN}No conflicts to resolve${NC}"
exit 0
fi
echo -e "${YELLOW}Found conflicts to resolve:${NC}"
echo ""
echo "$CONFLICTS" | while IFS='|' read -r id content staged_at status; do
echo -e "${RED}Conflict: $status${NC}"
echo -e "Content: ${content:0:80}..."
echo -e "Staged: $staged_at"
echo -e "ID: $id"
echo ""
echo "Actions:"
echo " 1. Keep and retry push"
echo " 2. Delete (abandon change)"
echo " 3. Skip for now"
echo ""
read -p "Choose action (1/2/3): " action < /dev/tty  # read from the terminal: stdin inside this loop is the conflict-list pipe
case $action in
1)
sqlite3 "$STAGING_DB" "
UPDATE staged_memories
SET conflict_status = 'none'
WHERE id = '$id';
"
echo -e "${GREEN}Marked for retry${NC}"
;;
2)
sqlite3 "$STAGING_DB" "DELETE FROM staged_memories WHERE id = '$id';"
echo -e "${YELLOW}Deleted${NC}"
;;
3)
echo -e "${YELLOW}Skipped${NC}"
;;
*)
echo -e "${YELLOW}Invalid choice, skipped${NC}"
;;
esac
echo ""
done
echo -e "${GREEN}Conflict resolution completed${NC}"
```
--------------------------------------------------------------------------------
/examples/memory_export_template.json:
--------------------------------------------------------------------------------
```json
{
"export_metadata": {
"source_machine": "example-hostname",
"export_timestamp": "2025-08-21T12:00:00.000000",
"total_memories": 3,
"database_path": "/path/to/sqlite_vec.db",
"platform": "Linux",
"python_version": "3.11.0",
"include_embeddings": false,
"filter_tags": null,
"exporter_version": "6.2.4"
},
"memories": [
{
"content": "MCP Memory Service is a Model Context Protocol server that provides semantic memory and persistent storage capabilities for Claude Desktop using SQLite-vec and sentence transformers.",
"content_hash": "example-hash-1234567890abcdef",
"tags": ["documentation", "project-overview"],
"created_at": 1692633600.0,
"updated_at": 1692633600.0,
"memory_type": "note",
"metadata": {
"source": "example-machine",
"project": "mcp-memory-service"
}
},
{
"content": "Key development commands: `uv run memory` to start server, `pytest tests/` for testing, `python install.py` for setup.",
"content_hash": "example-hash-abcdef1234567890",
"tags": ["commands", "development"],
"created_at": 1692634200.0,
"updated_at": 1692634200.0,
"memory_type": "reference",
"metadata": {
"source": "example-machine",
"category": "quick-reference"
}
},
{
"content": "SQLite-vec backend is now the default storage backend (v6.0+) offering fast performance and single-file database storage.",
"content_hash": "example-hash-fedcba0987654321",
"tags": ["architecture", "backend", "sqlite-vec"],
"created_at": 1692634800.0,
"updated_at": 1692634800.0,
"memory_type": "architectural-decision",
"metadata": {
"source": "example-machine",
"version": "v6.0.0"
}
}
]
}
```
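A minimal reader for the export format above, using only the standard library (file name and printed fields follow the template; this is a sketch, not part of the exporter):
```python
import json
from datetime import datetime, timezone

# Load an export shaped like the template above
with open("memory_export_template.json") as f:
    export = json.load(f)

meta = export["export_metadata"]
print(f"{meta['total_memories']} memories exported from {meta['source_machine']}")

for mem in export["memories"]:
    # created_at is stored as a UNIX timestamp (float)
    created = datetime.fromtimestamp(mem["created_at"], tz=timezone.utc)
    print(f"- [{mem['memory_type']}] {created:%Y-%m-%d} tags={mem['tags']}")
```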
--------------------------------------------------------------------------------
/docs/mastery/local-setup-and-run.md:
--------------------------------------------------------------------------------
```markdown
# MCP Memory Service — Local Setup and Run
Follow these steps to run the service locally, switch storage backends, and validate functionality.
## 1) Install Dependencies
Using uv (recommended):
```
uv sync
```
Using pip:
```
python -m venv .venv
source .venv/bin/activate # Windows: .venv\Scripts\activate
pip install -e .
```
If using SQLite-vec backend (recommended):
```
uv add sqlite-vec sentence-transformers torch
# or
pip install sqlite-vec sentence-transformers torch
```
## 2) Choose Storage Backend
SQLite-vec (default):
```
export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
# optional custom DB path
export MCP_MEMORY_SQLITE_PATH="$HOME/.local/share/mcp-memory/sqlite_vec.db"
```
ChromaDB (deprecated):
```
export MCP_MEMORY_STORAGE_BACKEND=chroma
export MCP_MEMORY_CHROMA_PATH="$HOME/.local/share/mcp-memory/chroma_db"
```
Cloudflare:
```
export MCP_MEMORY_STORAGE_BACKEND=cloudflare
export CLOUDFLARE_API_TOKEN=...
export CLOUDFLARE_ACCOUNT_ID=...
export CLOUDFLARE_VECTORIZE_INDEX=...
export CLOUDFLARE_D1_DATABASE_ID=...
```
## 3) Run the Server
Stdio MCP server (integrates with Claude Desktop):
```
uv run memory server
```
FastMCP HTTP server (for Claude Code / remote):
```
uv run mcp-memory-server
```
Configure Claude Desktop example (~/.claude/config.json):
```
{
"mcpServers": {
"memory": {
"command": "uv",
"args": ["--directory", "/path/to/mcp-memory-service", "run", "memory", "server"],
"env": { "MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec" }
}
}
}
```
## 4) Verify Health and Basic Ops
CLI status:
```
uv run memory status
```
MCP tool flow (via client):
- store_memory → retrieve_memory → search_by_tag → delete_memory
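A minimal client sketch of that flow, assuming the official `mcp` Python SDK is installed (`pip install mcp`); the tool argument names are illustrative, so check the schemas the server reports via `list_tools()`:
```
import asyncio
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

async def main():
    # Launch the stdio server the same way the Claude Desktop config above does
    params = StdioServerParameters(
        command="uv",
        args=["--directory", "/path/to/mcp-memory-service", "run", "memory", "server"],
        env={"MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec"},
    )
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            # Argument names are illustrative; inspect the tool schemas for the real ones
            await session.call_tool("store_memory", {"content": "hello", "tags": ["smoke-test"]})
            result = await session.call_tool("retrieve_memory", {"query": "hello"})
            print(result)

asyncio.run(main())
```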
## 5) Run Tests
```
pytest -q
# or
uv run pytest -q
```
See also: `docs/mastery/testing-guide.md` and `docs/sqlite-vec-backend.md`.
```
--------------------------------------------------------------------------------
/docs/integrations.md:
--------------------------------------------------------------------------------
```markdown
# MCP Memory Service Integrations
This document catalogs tools, utilities, and integrations that extend the functionality of the MCP Memory Service.
## Official Integrations
### [MCP Memory Dashboard](https://github.com/doobidoo/mcp-memory-dashboard) *(still a work in progress)*
A web-based dashboard for viewing, searching, and managing your MCP Memory Service data. The dashboard allows you to:
- Browse and search memories
- View memory metadata and tags
- Delete unwanted memories
- Perform semantic searches
- Monitor system health
## Community Integrations
### [Claude Memory Context](https://github.com/doobidoo/claude-memory-context)
A utility that enables Claude to start each conversation with awareness of the topics and important memories stored in your MCP Memory Service.
This tool:
- Queries your MCP memory service for recent and important memories
- Extracts topics and content summaries
- Formats this information into a structured context section
- Updates Claude project instructions automatically
The utility leverages Claude's project instructions feature without requiring any modifications to the MCP protocol. It can be automated to run periodically, ensuring Claude always has access to your latest memories.
See the [Claude Memory Context repository](https://github.com/doobidoo/claude-memory-context) for installation and usage instructions.
---
## Adding Your Integration
If you've built a tool or integration for the MCP Memory Service, we'd love to include it here. Please submit a pull request that adds your project to this document with:
1. The name of your integration (with link to repository)
2. A brief description (2-3 sentences)
3. A list of key features
4. Any installation notes or special requirements
All listed integrations should be functional, documented, and actively maintained.
```
--------------------------------------------------------------------------------
/claude-hooks/config.template.json:
--------------------------------------------------------------------------------
```json
{
"memoryService": {
"endpoint": "https://your-server:8443",
"apiKey": "your-api-key-here",
"defaultTags": ["claude-code", "auto-generated"],
"maxMemoriesPerSession": 8,
"enableSessionConsolidation": true
},
"projectDetection": {
"gitRepository": true,
"packageFiles": ["package.json", "pyproject.toml", "Cargo.toml", "go.mod", "pom.xml"],
"frameworkDetection": true,
"languageDetection": true,
"confidenceThreshold": 0.3
},
"memoryScoring": {
"weights": {
"timeDecay": 0.3,
"tagRelevance": 0.4,
"contentRelevance": 0.2,
"typeBonus": 0.1
},
"minRelevanceScore": 0.3,
"timeDecayRate": 0.1
},
"contextFormatting": {
"includeProjectSummary": true,
"includeRelevanceScores": false,
"groupByCategory": true,
"maxContentLength": 200,
"includeTimestamps": true
},
"sessionAnalysis": {
"extractTopics": true,
"extractDecisions": true,
"extractInsights": true,
"extractCodeChanges": true,
"extractNextSteps": true,
"minSessionLength": 100,
"minConfidence": 0.1
},
"hooks": {
"sessionStart": {
"enabled": true,
"timeout": 10000,
"priority": "high"
},
"sessionEnd": {
"enabled": true,
"timeout": 15000,
"priority": "normal"
},
"topicChange": {
"enabled": false,
"timeout": 5000,
"priority": "low"
}
},
"output": {
"verbose": true,
"showMemoryDetails": false,
"showProjectDetails": true,
"showScoringDetails": false,
"cleanMode": false
},
"logging": {
"level": "info",
"enableDebug": false,
"logToFile": false,
"logFilePath": "./claude-hooks.log"
},
"permissionRequest": {
"enabled": true,
"autoApprove": true,
"customSafePatterns": [],
"customDestructivePatterns": [],
"logDecisions": false
}
}
```
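The `memoryScoring` section above blends four signals into one relevance score. A minimal sketch of how those weights might combine, assuming exponential time decay and inputs normalized to [0, 1] (illustrative, not the hooks' actual implementation):
```python
import math

def score_memory(age_days, tag_overlap, content_sim, type_bonus, cfg):
    """Blend scoring signals using the weights from config.template.json."""
    scoring = cfg["memoryScoring"]
    w = scoring["weights"]
    time_decay = math.exp(-scoring["timeDecayRate"] * age_days)  # assumed decay curve
    score = (w["timeDecay"] * time_decay
             + w["tagRelevance"] * tag_overlap
             + w["contentRelevance"] * content_sim
             + w["typeBonus"] * type_bonus)
    # Memories under the floor are dropped from the injected context
    return score if score >= scoring["minRelevanceScore"] else 0.0

cfg = {"memoryScoring": {"weights": {"timeDecay": 0.3, "tagRelevance": 0.4,
                                     "contentRelevance": 0.2, "typeBonus": 0.1},
                         "minRelevanceScore": 0.3, "timeDecayRate": 0.1}}
print(score_memory(age_days=7, tag_overlap=0.5, content_sim=0.6, type_bonus=0.0, cfg=cfg))
```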
--------------------------------------------------------------------------------
/tools/docker/docker-entrypoint-unified.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Unified Docker entrypoint script for MCP Memory Service
# Supports both MCP protocol mode and HTTP server mode
set -e
echo "[INFO] Starting MCP Memory Service in Docker container"
# Function to handle signals
handle_signal() {
echo "[INFO] Received signal, shutting down..."
if [ -n "$SERVER_PID" ]; then
kill -TERM $SERVER_PID 2>/dev/null || true
fi
exit 0
}
# Set up signal handlers
trap handle_signal SIGTERM SIGINT
# Determine mode based on environment variable
MODE="${MCP_MODE:-mcp}"
echo "[INFO] Running in $MODE mode"
if [ "$MODE" = "http" ] || [ "$MODE" = "api" ]; then
# HTTP Server Mode
echo "[INFO] Starting HTTP server with FastAPI/Uvicorn"
# Ensure we have the HTTP server file
if [ ! -f "/app/run_server.py" ]; then
echo "[ERROR] run_server.py not found. Please ensure it's copied in the Dockerfile"
exit 1
fi
# Start the HTTP server
exec python /app/run_server.py "$@"
elif [ "$MODE" = "mcp" ]; then
# MCP Protocol Mode (stdin/stdout)
echo "[INFO] Starting MCP protocol server (stdin/stdout communication)"
# Function to keep stdin alive
keep_stdin_alive() {
while true; do
# Emit a newline every 30 seconds to keep the container's stdio pipe open (echo writes to stdout, not the server's stdin)
echo "" 2>/dev/null || break
sleep 30
done
}
# Start the keep-alive process in the background
keep_stdin_alive &
KEEPALIVE_PID=$!
# Run the MCP server
python -u -m mcp_memory_service.server "$@" &
SERVER_PID=$!
# Wait for the server process
wait $SERVER_PID
SERVER_EXIT_CODE=$?
# Clean up the keep-alive process
kill $KEEPALIVE_PID 2>/dev/null || true
exit $SERVER_EXIT_CODE
else
echo "[ERROR] Unknown mode: $MODE. Use 'mcp' for protocol mode or 'http' for API mode"
exit 1
fi
```
--------------------------------------------------------------------------------
/archive/setup-development/setup_consolidation_mdns.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Setup script for MCP Memory Service with Consolidation and mDNS
echo "Setting up MCP Memory Service with Consolidation and mDNS HTTPS..."
# Enable consolidation system
export MCP_CONSOLIDATION_ENABLED=true
# Configure consolidation settings
export MCP_DECAY_ENABLED=true
export MCP_RETENTION_CRITICAL=365
export MCP_RETENTION_REFERENCE=180
export MCP_RETENTION_STANDARD=30
export MCP_RETENTION_TEMPORARY=7
export MCP_ASSOCIATIONS_ENABLED=true
export MCP_ASSOCIATION_MIN_SIMILARITY=0.3
export MCP_ASSOCIATION_MAX_SIMILARITY=0.7
export MCP_ASSOCIATION_MAX_PAIRS=100
export MCP_CLUSTERING_ENABLED=true
export MCP_CLUSTERING_MIN_SIZE=5
export MCP_CLUSTERING_ALGORITHM=dbscan
export MCP_COMPRESSION_ENABLED=true
export MCP_COMPRESSION_MAX_LENGTH=500
export MCP_COMPRESSION_PRESERVE_ORIGINALS=true
export MCP_FORGETTING_ENABLED=true
export MCP_FORGETTING_RELEVANCE_THRESHOLD=0.1
export MCP_FORGETTING_ACCESS_THRESHOLD=90
# Set consolidation schedule (cron-like)
export MCP_SCHEDULE_DAILY="02:00"
export MCP_SCHEDULE_WEEKLY="SUN 03:00"
export MCP_SCHEDULE_MONTHLY="01 04:00"
# Configure mDNS multi-client server with HTTPS
export MCP_MDNS_ENABLED=true
export MCP_MDNS_SERVICE_NAME="memory"
export MCP_HTTPS_ENABLED=true
# HTTP server configuration
export MCP_HTTP_ENABLED=true
export MCP_HTTP_HOST=0.0.0.0
export MCP_HTTP_PORT=8000
# Storage backend
export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
# API security
export MCP_API_KEY="$(openssl rand -base64 32)"
echo "Configuration set! Environment variables:"
echo "- Consolidation enabled: $MCP_CONSOLIDATION_ENABLED"
echo "- mDNS enabled: $MCP_MDNS_ENABLED"
echo "- HTTPS enabled: $MCP_HTTPS_ENABLED"
echo "- Service name: $MCP_MDNS_SERVICE_NAME"
echo "- API Key generated: [SET]"
echo ""
echo "Starting MCP Memory Service HTTP server..."
# Activate virtual environment and start the server
source venv/bin/activate && python scripts/run_http_server.py
```
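The retention tiers and `MCP_FORGETTING_*` thresholds above imply an eligibility check along these lines (assumed logic for illustration; the consolidator's actual rules live in the codebase):
```python
# Mirrors the MCP_RETENTION_* values from the script above
RETENTION_DAYS = {"critical": 365, "reference": 180, "standard": 30, "temporary": 7}

def eligible_for_forgetting(memory_type, age_days, relevance, days_since_access,
                            relevance_threshold=0.1, access_threshold=90):
    """A memory is a forgetting candidate only when it has outlived its
    retention tier, scores below the relevance threshold, and has not
    been accessed recently. Illustrative only."""
    past_retention = age_days > RETENTION_DAYS.get(memory_type, 30)
    return (past_retention
            and relevance < relevance_threshold
            and days_since_access > access_threshold)

print(eligible_for_forgetting("temporary", age_days=10, relevance=0.05, days_since_access=120))  # True
```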
--------------------------------------------------------------------------------
/scripts/server/memory_offline.py:
--------------------------------------------------------------------------------
```python
#!/usr/bin/env python3
"""
Memory service launcher with forced offline mode.
This script sets offline mode BEFORE importing anything else.
"""
import os
import platform
import sys
def setup_offline_mode():
"""Setup offline mode environment variables BEFORE any imports."""
print("Setting up offline mode...", file=sys.stderr)
# Force offline mode
os.environ['HF_HUB_OFFLINE'] = '1'
os.environ['TRANSFORMERS_OFFLINE'] = '1'
# Configure cache paths for Windows
username = os.environ.get('USERNAME', os.environ.get('USER', ''))
if platform.system() == "Windows" and username:
hf_home = f"C:\\Users\\{username}\\.cache\\huggingface"
transformers_cache = f"C:\\Users\\{username}\\.cache\\huggingface\\transformers"
sentence_transformers_home = f"C:\\Users\\{username}\\.cache\\torch\\sentence_transformers"
else:
hf_home = os.path.expanduser("~/.cache/huggingface")
transformers_cache = os.path.expanduser("~/.cache/huggingface/transformers")
sentence_transformers_home = os.path.expanduser("~/.cache/torch/sentence_transformers")
# Set cache paths
os.environ['HF_HOME'] = hf_home
os.environ['TRANSFORMERS_CACHE'] = transformers_cache
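# NOTE: recent transformers releases deprecate TRANSFORMERS_CACHE in favor of HF_HOME; both are set so older versions still pick up the cache path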
os.environ['SENTENCE_TRANSFORMERS_HOME'] = sentence_transformers_home
print(f"HF_HUB_OFFLINE: {os.environ.get('HF_HUB_OFFLINE')}", file=sys.stderr)
print(f"HF_HOME: {os.environ.get('HF_HOME')}", file=sys.stderr)
# Add src to Python path
src_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), 'src')  # repo root /src (this script lives in scripts/server/)
if src_path not in sys.path:
sys.path.insert(0, src_path)
if __name__ == "__main__":
# Setup offline mode FIRST
setup_offline_mode()
# Now import and run the memory server
print("Starting MCP Memory Service in offline mode...", file=sys.stderr)
from mcp_memory_service.server import main
main()
```
--------------------------------------------------------------------------------
/scripts/sync/litestream/staging_db_init.sql:
--------------------------------------------------------------------------------
```sql
-- Staging Database Schema for Offline Memory Changes
-- This database stores local changes when remote server is unavailable
-- Staged memories that need to be synchronized
CREATE TABLE IF NOT EXISTS staged_memories (
id TEXT PRIMARY KEY,
content TEXT NOT NULL,
content_hash TEXT NOT NULL,
tags TEXT, -- JSON array as string
metadata TEXT, -- JSON metadata as string
memory_type TEXT DEFAULT 'note',
operation TEXT NOT NULL CHECK (operation IN ('INSERT', 'UPDATE', 'DELETE')),
staged_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
original_created_at TIMESTAMP,
source_machine TEXT,
conflict_status TEXT DEFAULT 'none' CHECK (conflict_status IN ('none', 'detected', 'push_failed', 'resolved')) -- 'push_failed' is queried by resolve_conflicts.sh
);
-- Sync status tracking
CREATE TABLE IF NOT EXISTS sync_status (
key TEXT PRIMARY KEY,
value TEXT,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
-- Index for performance
CREATE INDEX IF NOT EXISTS idx_staged_memories_hash ON staged_memories(content_hash);
CREATE INDEX IF NOT EXISTS idx_staged_memories_staged_at ON staged_memories(staged_at);
CREATE INDEX IF NOT EXISTS idx_staged_memories_operation ON staged_memories(operation);
-- Initialize sync status
INSERT OR REPLACE INTO sync_status (key, value) VALUES
('last_remote_sync', ''),
('last_local_sync', ''),
('staging_version', '1.0'),
('total_staged_changes', '0');
-- Triggers to maintain staged changes count
CREATE TRIGGER IF NOT EXISTS update_staged_count_insert
AFTER INSERT ON staged_memories
BEGIN
UPDATE sync_status
SET value = CAST((CAST(value AS INTEGER) + 1) AS TEXT),
updated_at = CURRENT_TIMESTAMP
WHERE key = 'total_staged_changes';
END;
CREATE TRIGGER IF NOT EXISTS update_staged_count_delete
AFTER DELETE ON staged_memories
BEGIN
UPDATE sync_status
SET value = CAST((CAST(value AS INTEGER) - 1) AS TEXT),
updated_at = CURRENT_TIMESTAMP
WHERE key = 'total_staged_changes';
END;
```
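A quick stdlib check that the triggers keep `total_staged_changes` in step with the table (paths and values are illustrative):
```python
import sqlite3

conn = sqlite3.connect("sqlite_vec_staging.db")          # path is illustrative
conn.executescript(open("staging_db_init.sql").read())   # apply the schema above
conn.execute(
    "INSERT INTO staged_memories (id, content, content_hash, operation) VALUES (?, ?, ?, ?)",
    ("mem-1", "offline note", "hash-1", "INSERT"),
)
row = conn.execute(
    "SELECT value FROM sync_status WHERE key = 'total_staged_changes'"
).fetchone()
print(row[0])  # '1' -- maintained by the AFTER INSERT trigger
conn.commit()
```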
--------------------------------------------------------------------------------
/.github/workflows/claude-code-review.yml:
--------------------------------------------------------------------------------
```yaml
name: Claude Code Review
on:
pull_request:
types: [opened, synchronize]
# Optional: Only run on specific file changes
# paths:
# - "src/**/*.ts"
# - "src/**/*.tsx"
# - "src/**/*.js"
# - "src/**/*.jsx"
jobs:
claude-review:
# SECURITY: Only run for repository owner to prevent API credit abuse
if: github.event.pull_request.user.login == github.repository_owner
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write # Allow posting review comments
issues: write # Allow posting issue comments
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Run Claude Code Review
id: claude-review
uses: anthropics/claude-code-action@v1
with:
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
prompt: |
REPO: ${{ github.repository }}
PR NUMBER: ${{ github.event.pull_request.number }}
Please review this pull request and provide feedback on:
- Code quality and best practices
- Potential bugs or issues
- Performance considerations
- Security concerns
- Test coverage
Use the repository's CLAUDE.md for guidance on style and conventions. Be constructive and helpful in your feedback.
Use `gh pr comment` with your Bash tool to leave your review as a comment on the PR.
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
# or https://docs.claude.com/en/docs/claude-code/sdk#command-line for available options
claude_args: '--allowed-tools "Bash(gh issue view:*),Bash(gh search:*),Bash(gh issue list:*),Bash(gh pr comment:*),Bash(gh pr diff:*),Bash(gh pr view:*),Bash(gh pr list:*)"'
```
--------------------------------------------------------------------------------
/docs/guides/scripts.md:
--------------------------------------------------------------------------------
```markdown
# Scripts Documentation
This document provides an overview of the available scripts in the `scripts/` directory and their purposes.
## Essential Scripts
### Server Management
- `run_memory_server.py`: Main script to start the memory service server
```bash
python scripts/run_memory_server.py
```
### Environment Verification
- `verify_environment.py`: Verifies the installation environment and dependencies
```bash
python scripts/verify_environment.py
```
### Installation Testing
- `test_installation.py`: Tests the installation and basic functionality
```bash
python scripts/test_installation.py
```
### Memory Management
- `validate_memories.py`: Validates the integrity of stored memories
```bash
python scripts/validate_memories.py
```
- `repair_memories.py`: Repairs corrupted or invalid memories
```bash
python scripts/repair_memories.py
```
- `list-collections.py`: Lists all available memory collections
```bash
python scripts/list-collections.py
```
## Migration Scripts
- `mcp-migration.py`: Handles migration of MCP-related data
```bash
python scripts/mcp-migration.py
```
- `memory-migration.py`: Handles migration of memory data
```bash
python scripts/memory-migration.py
```
## Troubleshooting Scripts
- `verify_pytorch_windows.py`: Verifies PyTorch installation on Windows
```bash
python scripts/verify_pytorch_windows.py
```
- `verify_torch.py`: General PyTorch verification
```bash
python scripts/verify_torch.py
```
## Usage Notes
- Most scripts can be run directly with Python
- Some scripts may require specific environment variables to be set
- Always run verification scripts after installation or major updates
- Use migration scripts with caution and ensure backups are available
## Script Dependencies
- Python 3.10+
- Required packages listed in `requirements.txt`
- Some scripts may require additional dependencies listed in `requirements-migration.txt`
```
--------------------------------------------------------------------------------
/archive/setup-development/test_service.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Test script to debug service startup issues
echo "=== MCP Memory Service Debug Test ==="
# Set working directory
cd /home/hkr/repositories/mcp-memory-service
# Set environment variables (same as service)
export PATH=/home/hkr/repositories/mcp-memory-service/venv/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
export PYTHONPATH=/home/hkr/repositories/mcp-memory-service/src
export MCP_CONSOLIDATION_ENABLED=true
export MCP_MDNS_ENABLED=true
export MCP_HTTPS_ENABLED=true
export MCP_MDNS_SERVICE_NAME="MCP Memory"
export MCP_HTTP_ENABLED=true
export MCP_HTTP_HOST=0.0.0.0
export MCP_HTTP_PORT=8000
export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
export MCP_API_KEY=mcp-0b1ccbde2197a08dcb12d41af4044be6
echo "Working directory: $(pwd)"
echo "Python executable: $(which python)"
echo "Virtual env Python: /home/hkr/repositories/mcp-memory-service/venv/bin/python"
# Check if venv Python exists
if [ -f "/home/hkr/repositories/mcp-memory-service/venv/bin/python" ]; then
echo "✅ Virtual environment Python exists"
else
echo "❌ Virtual environment Python missing!"
exit 1
fi
# Check if run_http_server.py exists
if [ -f "/home/hkr/repositories/mcp-memory-service/scripts/run_http_server.py" ]; then
echo "✅ Server script exists"
else
echo "❌ Server script missing!"
exit 1
fi
# Test Python import
echo "=== Testing Python imports ==="
/home/hkr/repositories/mcp-memory-service/venv/bin/python -c "
import sys
sys.path.insert(0, '/home/hkr/repositories/mcp-memory-service/src')
try:
from mcp_memory_service.web.app import app
print('✅ Web app import successful')
except Exception as e:
print(f'❌ Web app import failed: {e}')
sys.exit(1)
"
echo "=== Testing server startup (5 seconds) ==="
timeout 5s /home/hkr/repositories/mcp-memory-service/venv/bin/python /home/hkr/repositories/mcp-memory-service/scripts/run_http_server.py || echo "Server test completed"
echo "=== Debug test finished ==="
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/web/dependencies.py:
--------------------------------------------------------------------------------
```python
# Copyright 2024 Heinrich Krupp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
FastAPI dependencies for the HTTP interface.
"""
import logging
from typing import Optional
from fastapi import HTTPException, Depends
from ..storage.base import MemoryStorage
from ..services.memory_service import MemoryService
logger = logging.getLogger(__name__)
# Global storage instance
_storage: Optional[MemoryStorage] = None
def set_storage(storage: MemoryStorage) -> None:
"""Set the global storage instance."""
global _storage
_storage = storage
def get_storage() -> MemoryStorage:
"""Get the global storage instance."""
if _storage is None:
raise HTTPException(status_code=503, detail="Storage not initialized")
return _storage
def get_memory_service(storage: MemoryStorage = Depends(get_storage)) -> MemoryService:
"""Get a MemoryService instance with the configured storage backend."""
return MemoryService(storage)
async def create_storage_backend() -> MemoryStorage:
"""
Create and initialize storage backend for web interface based on configuration.
Returns:
Initialized storage backend
"""
from ..config import DATABASE_PATH
from ..storage.factory import create_storage_instance
logger.info("Creating storage backend for web interface...")
# Use shared factory with DATABASE_PATH for web interface
return await create_storage_instance(DATABASE_PATH, server_type="http")
```
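A sketch of how a route consumes these dependencies (the path and response body are illustrative; real handlers call `MemoryService` methods not shown here):
```python
from fastapi import FastAPI, Depends

from mcp_memory_service.services.memory_service import MemoryService
from mcp_memory_service.web.dependencies import get_memory_service

app = FastAPI()

@app.get("/api/diagnostics/service")  # illustrative route
async def service_info(service: MemoryService = Depends(get_memory_service)):
    # Resolves only after set_storage() ran at startup; otherwise
    # get_storage() raises the 503 defined above.
    return {"service": type(service).__name__}
```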