# doobidoo/mcp-memory-service
This is page 3 of 62. Use http://codebase.md/doobidoo/mcp-memory-service?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .claude
│   ├── agents
│   │   ├── amp-bridge.md
│   │   ├── amp-pr-automator.md
│   │   ├── code-quality-guard.md
│   │   ├── gemini-pr-automator.md
│   │   └── github-release-manager.md
│   ├── commands
│   │   ├── README.md
│   │   ├── refactor-function
│   │   ├── refactor-function-prod
│   │   └── refactor-function.md
│   ├── consolidation-fix-handoff.md
│   ├── consolidation-hang-fix-summary.md
│   ├── directives
│   │   ├── agents.md
│   │   ├── code-quality-workflow.md
│   │   ├── consolidation-details.md
│   │   ├── development-setup.md
│   │   ├── hooks-configuration.md
│   │   ├── memory-first.md
│   │   ├── memory-tagging.md
│   │   ├── pr-workflow.md
│   │   ├── quality-system-details.md
│   │   ├── README.md
│   │   ├── refactoring-checklist.md
│   │   ├── storage-backends.md
│   │   └── version-management.md
│   ├── prompts
│   │   └── hybrid-cleanup-integration.md
│   ├── settings.local.json.backup
│   └── settings.local.json.local
├── .commit-message
├── .coveragerc
├── .dockerignore
├── .env.example
├── .env.sqlite.backup
├── .envnn#
├── .gitattributes
├── .github
│   ├── FUNDING.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   ├── feature_request.yml
│   │   └── performance_issue.yml
│   ├── pull_request_template.md
│   └── workflows
│       ├── bridge-tests.yml
│       ├── CACHE_FIX.md
│       ├── claude-branch-automation.yml
│       ├── claude-code-review.yml
│       ├── claude.yml
│       ├── cleanup-images.yml.disabled
│       ├── dev-setup-validation.yml
│       ├── docker-publish.yml
│       ├── dockerfile-lint.yml
│       ├── LATEST_FIXES.md
│       ├── main-optimized.yml.disabled
│       ├── main.yml
│       ├── publish-and-test.yml
│       ├── publish-dual.yml
│       ├── README_OPTIMIZATION.md
│       ├── release-tag.yml.disabled
│       ├── release.yml
│       ├── roadmap-review-reminder.yml
│       ├── SECRET_CONDITIONAL_FIX.md
│       └── WORKFLOW_FIXES.md
├── .gitignore
├── .mcp.json.backup
├── .mcp.json.template
├── .metrics
│   ├── baseline_cc_install_hooks.txt
│   ├── baseline_mi_install_hooks.txt
│   ├── baseline_nesting_install_hooks.txt
│   ├── BASELINE_REPORT.md
│   ├── COMPLEXITY_COMPARISON.txt
│   ├── QUICK_REFERENCE.txt
│   ├── README.md
│   ├── REFACTORED_BASELINE.md
│   ├── REFACTORING_COMPLETION_REPORT.md
│   └── TRACKING_TABLE.md
├── .pyscn
│   ├── .gitignore
│   └── reports
│       └── analyze_20251123_214224.html
├── AGENTS.md
├── ai-optimized-tool-descriptions.py
├── archive
│   ├── deployment
│   │   ├── deploy_fastmcp_fixed.sh
│   │   ├── deploy_http_with_mcp.sh
│   │   └── deploy_mcp_v4.sh
│   ├── deployment-configs
│   │   ├── empty_config.yml
│   │   └── smithery.yaml
│   ├── development
│   │   └── test_fastmcp.py
│   ├── docs-removed-2025-08-23
│   │   ├── authentication.md
│   │   ├── claude_integration.md
│   │   ├── claude-code-compatibility.md
│   │   ├── claude-code-integration.md
│   │   ├── claude-code-quickstart.md
│   │   ├── claude-desktop-setup.md
│   │   ├── complete-setup-guide.md
│   │   ├── database-synchronization.md
│   │   ├── development
│   │   │   ├── autonomous-memory-consolidation.md
│   │   │   ├── CLEANUP_PLAN.md
│   │   │   ├── CLEANUP_README.md
│   │   │   ├── CLEANUP_SUMMARY.md
│   │   │   ├── dream-inspired-memory-consolidation.md
│   │   │   ├── hybrid-slm-memory-consolidation.md
│   │   │   ├── mcp-milestone.md
│   │   │   ├── multi-client-architecture.md
│   │   │   ├── test-results.md
│   │   │   └── TIMESTAMP_FIX_SUMMARY.md
│   │   ├── distributed-sync.md
│   │   ├── invocation_guide.md
│   │   ├── macos-intel.md
│   │   ├── master-guide.md
│   │   ├── mcp-client-configuration.md
│   │   ├── multi-client-server.md
│   │   ├── service-installation.md
│   │   ├── sessions
│   │   │   └── MCP_ENHANCEMENT_SESSION_MEMORY_v4.1.0.md
│   │   ├── UBUNTU_SETUP.md
│   │   ├── ubuntu.md
│   │   ├── windows-setup.md
│   │   └── windows.md
│   ├── docs-root-cleanup-2025-08-23
│   │   ├── AWESOME_LIST_SUBMISSION.md
│   │   ├── CLOUDFLARE_IMPLEMENTATION.md
│   │   ├── DOCUMENTATION_ANALYSIS.md
│   │   ├── DOCUMENTATION_CLEANUP_PLAN.md
│   │   ├── DOCUMENTATION_CONSOLIDATION_COMPLETE.md
│   │   ├── LITESTREAM_SETUP_GUIDE.md
│   │   ├── lm_studio_system_prompt.md
│   │   ├── PYTORCH_DOWNLOAD_FIX.md
│   │   └── README-ORIGINAL-BACKUP.md
│   ├── investigations
│   │   └── MACOS_HOOKS_INVESTIGATION.md
│   ├── litestream-configs-v6.3.0
│   │   ├── install_service.sh
│   │   ├── litestream_master_config_fixed.yml
│   │   ├── litestream_master_config.yml
│   │   ├── litestream_replica_config_fixed.yml
│   │   ├── litestream_replica_config.yml
│   │   ├── litestream_replica_simple.yml
│   │   ├── litestream-http.service
│   │   ├── litestream.service
│   │   └── requirements-cloudflare.txt
│   ├── release-notes
│   │   └── release-notes-v7.1.4.md
│   └── setup-development
│       ├── README.md
│       ├── setup_consolidation_mdns.sh
│       ├── STARTUP_SETUP_GUIDE.md
│       └── test_service.sh
├── CHANGELOG-HISTORIC.md
├── CHANGELOG.md
├── claude_commands
│   ├── memory-context.md
│   ├── memory-health.md
│   ├── memory-ingest-dir.md
│   ├── memory-ingest.md
│   ├── memory-recall.md
│   ├── memory-search.md
│   ├── memory-store.md
│   ├── README.md
│   └── session-start.md
├── claude-hooks
│   ├── config.json
│   ├── config.template.json
│   ├── CONFIGURATION.md
│   ├── core
│   │   ├── auto-capture-hook.js
│   │   ├── auto-capture-hook.ps1
│   │   ├── memory-retrieval.js
│   │   ├── mid-conversation.js
│   │   ├── permission-request.js
│   │   ├── session-end.js
│   │   ├── session-start.js
│   │   └── topic-change.js
│   ├── debug-pattern-test.js
│   ├── install_claude_hooks_windows.ps1
│   ├── install_hooks.py
│   ├── memory-mode-controller.js
│   ├── MIGRATION.md
│   ├── README-AUTO-CAPTURE.md
│   ├── README-NATURAL-TRIGGERS.md
│   ├── README-PERMISSION-REQUEST.md
│   ├── README-phase2.md
│   ├── README.md
│   ├── simple-test.js
│   ├── statusline.sh
│   ├── test-adaptive-weights.js
│   ├── test-dual-protocol-hook.js
│   ├── test-mcp-hook.js
│   ├── test-natural-triggers.js
│   ├── test-recency-scoring.js
│   ├── tests
│   │   ├── integration-test.js
│   │   ├── phase2-integration-test.js
│   │   ├── test-code-execution.js
│   │   ├── test-cross-session.json
│   │   ├── test-permission-request.js
│   │   ├── test-session-tracking.json
│   │   └── test-threading.json
│   ├── utilities
│   │   ├── adaptive-pattern-detector.js
│   │   ├── auto-capture-patterns.js
│   │   ├── context-formatter.js
│   │   ├── context-shift-detector.js
│   │   ├── conversation-analyzer.js
│   │   ├── dynamic-context-updater.js
│   │   ├── git-analyzer.js
│   │   ├── mcp-client.js
│   │   ├── memory-client.js
│   │   ├── memory-scorer.js
│   │   ├── performance-manager.js
│   │   ├── project-detector.js
│   │   ├── session-cache.json
│   │   ├── session-tracker.js
│   │   ├── tiered-conversation-monitor.js
│   │   ├── user-override-detector.js
│   │   └── version-checker.js
│   └── WINDOWS-SESSIONSTART-BUG.md
├── CLAUDE.md
├── CODE_OF_CONDUCT.md
├── COMMIT_MESSAGE.md
├── CONTRIBUTING.md
├── Development-Sprint-November-2025.md
├── docs
│   ├── amp-cli-bridge.md
│   ├── api
│   │   ├── code-execution-interface.md
│   │   ├── memory-metadata-api.md
│   │   ├── PHASE1_IMPLEMENTATION_SUMMARY.md
│   │   ├── PHASE2_IMPLEMENTATION_SUMMARY.md
│   │   ├── PHASE2_REPORT.md
│   │   └── tag-standardization.md
│   ├── architecture
│   │   ├── graph-database-design.md
│   │   ├── search-enhancement-spec.md
│   │   └── search-examples.md
│   ├── architecture.md
│   ├── archive
│   │   └── obsolete-workflows
│   │       ├── load_memory_context.md
│   │       └── README.md
│   ├── assets
│   │   └── images
│   │       ├── dashboard-v3.3.0-preview.png
│   │       ├── memory-awareness-hooks-example.png
│   │       ├── project-infographic.svg
│   │       └── README.md
│   ├── CLAUDE_CODE_QUICK_REFERENCE.md
│   ├── cloudflare-setup.md
│   ├── demo-recording-script.md
│   ├── deployment
│   │   ├── docker.md
│   │   ├── dual-service.md
│   │   ├── production-guide.md
│   │   └── systemd-service.md
│   ├── development
│   │   ├── ai-agent-instructions.md
│   │   ├── code-quality
│   │   │   ├── phase-2a-completion.md
│   │   │   ├── phase-2a-handle-get-prompt.md
│   │   │   ├── phase-2a-index.md
│   │   │   ├── phase-2a-install-package.md
│   │   │   └── phase-2b-session-summary.md
│   │   ├── code-quality-workflow.md
│   │   ├── dashboard-workflow.md
│   │   ├── issue-management.md
│   │   ├── pr-280-post-mortem.md
│   │   ├── pr-review-guide.md
│   │   ├── refactoring-notes.md
│   │   ├── release-checklist.md
│   │   └── todo-tracker.md
│   ├── docker-optimized-build.md
│   ├── document-ingestion.md
│   ├── DOCUMENTATION_AUDIT.md
│   ├── enhancement-roadmap-issue-14.md
│   ├── examples
│   │   ├── analysis-scripts.js
│   │   ├── maintenance-session-example.md
│   │   ├── memory-distribution-chart.jsx
│   │   ├── quality-system-configs.md
│   │   └── tag-schema.json
│   ├── features
│   │   └── association-quality-boost.md
│   ├── first-time-setup.md
│   ├── glama-deployment.md
│   ├── guides
│   │   ├── advanced-command-examples.md
│   │   ├── chromadb-migration.md
│   │   ├── commands-vs-mcp-server.md
│   │   ├── mcp-enhancements.md
│   │   ├── mdns-service-discovery.md
│   │   ├── memory-consolidation-guide.md
│   │   ├── memory-quality-guide.md
│   │   ├── migration.md
│   │   ├── scripts.md
│   │   └── STORAGE_BACKENDS.md
│   ├── HOOK_IMPROVEMENTS.md
│   ├── hooks
│   │   └── phase2-code-execution-migration.md
│   ├── http-server-management.md
│   ├── ide-compatability.md
│   ├── IMAGE_RETENTION_POLICY.md
│   ├── images
│   │   ├── dashboard-placeholder.md
│   │   └── update-restart-demo.png
│   ├── implementation
│   │   ├── health_checks.md
│   │   └── performance.md
│   ├── IMPLEMENTATION_PLAN_HTTP_SSE.md
│   ├── integration
│   │   ├── homebrew.md
│   │   └── multi-client.md
│   ├── integrations
│   │   ├── gemini.md
│   │   ├── groq-bridge.md
│   │   ├── groq-integration-summary.md
│   │   └── groq-model-comparison.md
│   ├── integrations.md
│   ├── legacy
│   │   └── dual-protocol-hooks.md
│   ├── LIGHTWEIGHT_ONNX_SETUP.md
│   ├── LM_STUDIO_COMPATIBILITY.md
│   ├── maintenance
│   │   └── memory-maintenance.md
│   ├── mastery
│   │   ├── api-reference.md
│   │   ├── architecture-overview.md
│   │   ├── configuration-guide.md
│   │   ├── local-setup-and-run.md
│   │   ├── testing-guide.md
│   │   └── troubleshooting.md
│   ├── migration
│   │   ├── code-execution-api-quick-start.md
│   │   └── graph-migration-guide.md
│   ├── natural-memory-triggers
│   │   ├── cli-reference.md
│   │   ├── installation-guide.md
│   │   └── performance-optimization.md
│   ├── oauth-setup.md
│   ├── pr-graphql-integration.md
│   ├── quality-system-ui-implementation.md
│   ├── quick-setup-cloudflare-dual-environment.md
│   ├── README.md
│   ├── refactoring
│   │   └── phase-3-3-analysis.md
│   ├── releases
│   │   └── v8.72.0-testing.md
│   ├── remote-configuration-wiki-section.md
│   ├── research
│   │   ├── code-execution-interface-implementation.md
│   │   └── code-execution-interface-summary.md
│   ├── ROADMAP.md
│   ├── sqlite-vec-backend.md
│   ├── statistics
│   │   ├── charts
│   │   │   ├── activity_patterns.png
│   │   │   ├── contributors.png
│   │   │   ├── growth_trajectory.png
│   │   │   ├── monthly_activity.png
│   │   │   └── october_sprint.png
│   │   ├── data
│   │   │   ├── activity_by_day.csv
│   │   │   ├── activity_by_hour.csv
│   │   │   ├── contributors.csv
│   │   │   └── monthly_activity.csv
│   │   ├── generate_charts.py
│   │   └── REPOSITORY_STATISTICS.md
│   ├── technical
│   │   ├── development.md
│   │   ├── memory-migration.md
│   │   ├── migration-log.md
│   │   ├── sqlite-vec-embedding-fixes.md
│   │   └── tag-storage.md
│   ├── testing
│   │   └── regression-tests.md
│   ├── testing-cloudflare-backend.md
│   ├── troubleshooting
│   │   ├── cloudflare-api-token-setup.md
│   │   ├── cloudflare-authentication.md
│   │   ├── database-transfer-migration.md
│   │   ├── general.md
│   │   ├── hooks-quick-reference.md
│   │   ├── memory-management.md
│   │   ├── pr162-schema-caching-issue.md
│   │   ├── session-end-hooks.md
│   │   └── sync-issues.md
│   ├── tutorials
│   │   ├── advanced-techniques.md
│   │   ├── data-analysis.md
│   │   └── demo-session-walkthrough.md
│   ├── wiki-documentation-plan.md
│   └── wiki-Graph-Database-Architecture.md
├── examples
│   ├── claude_desktop_config_template.json
│   ├── claude_desktop_config_windows.json
│   ├── claude-desktop-http-config.json
│   ├── config
│   │   └── claude_desktop_config.json
│   ├── http-mcp-bridge.js
│   ├── memory_export_template.json
│   ├── README.md
│   ├── setup
│   │   └── setup_multi_client_complete.py
│   └── start_https_example.sh
├── IMPLEMENTATION_SUMMARY.md
├── install_service.py
├── install.py
├── LICENSE
├── NOTICE
├── PR_DESCRIPTION.md
├── pyproject-lite.toml
├── pyproject.toml
├── pytest.ini
├── README.md
├── release-notes-v8.61.0.md
├── run_server.py
├── scripts
│   ├── .claude
│   │   └── settings.local.json
│   ├── archive
│   │   └── check_missing_timestamps.py
│   ├── backup
│   │   ├── backup_memories.py
│   │   ├── backup_sqlite_vec.sh
│   │   ├── export_distributable_memories.sh
│   │   └── restore_memories.py
│   ├── benchmarks
│   │   ├── benchmark_code_execution_api.py
│   │   ├── benchmark_hybrid_sync.py
│   │   └── benchmark_server_caching.py
│   ├── ci
│   │   ├── check_dockerfile_args.sh
│   │   └── validate_imports.sh
│   ├── database
│   │   ├── analyze_sqlite_vec_db.py
│   │   ├── check_sqlite_vec_status.py
│   │   ├── db_health_check.py
│   │   └── simple_timestamp_check.py
│   ├── development
│   │   ├── debug_server_initialization.py
│   │   ├── find_orphaned_files.py
│   │   ├── fix_mdns.sh
│   │   ├── fix_sitecustomize.py
│   │   ├── remote_ingest.sh
│   │   ├── setup-git-merge-drivers.sh
│   │   ├── uv-lock-merge.sh
│   │   └── verify_hybrid_sync.py
│   ├── hooks
│   │   └── pre-commit
│   ├── installation
│   │   ├── install_linux_service.py
│   │   ├── install_macos_service.py
│   │   ├── install_uv.py
│   │   ├── install_windows_service.py
│   │   ├── install.py
│   │   ├── setup_backup_cron.sh
│   │   ├── setup_claude_mcp.sh
│   │   └── setup_cloudflare_resources.py
│   ├── linux
│   │   ├── service_status.sh
│   │   ├── start_service.sh
│   │   ├── stop_service.sh
│   │   ├── uninstall_service.sh
│   │   └── view_logs.sh
│   ├── maintenance
│   │   ├── add_project_tags.py
│   │   ├── apply_quality_boost_retroactively.py
│   │   ├── assign_memory_types.py
│   │   ├── auto_retag_memory_merge.py
│   │   ├── auto_retag_memory.py
│   │   ├── backfill_graph_table.py
│   │   ├── check_memory_types.py
│   │   ├── cleanup_association_memories_hybrid.py
│   │   ├── cleanup_association_memories.py
│   │   ├── cleanup_corrupted_encoding.py
│   │   ├── cleanup_low_quality.py
│   │   ├── cleanup_memories.py
│   │   ├── cleanup_organize.py
│   │   ├── consolidate_memory_types.py
│   │   ├── consolidation_mappings.json
│   │   ├── delete_orphaned_vectors_fixed.py
│   │   ├── delete_test_memories.py
│   │   ├── fast_cleanup_duplicates_with_tracking.sh
│   │   ├── find_all_duplicates.py
│   │   ├── find_cloudflare_duplicates.py
│   │   ├── find_duplicates.py
│   │   ├── memory-types.md
│   │   ├── README.md
│   │   ├── recover_timestamps_from_cloudflare.py
│   │   ├── regenerate_embeddings.py
│   │   ├── repair_malformed_tags.py
│   │   ├── repair_memories.py
│   │   ├── repair_sqlite_vec_embeddings.py
│   │   ├── repair_zero_embeddings.py
│   │   ├── restore_from_json_export.py
│   │   ├── retag_valuable_memories.py
│   │   ├── scan_todos.sh
│   │   ├── soft_delete_test_memories.py
│   │   └── sync_status.py
│   ├── migration
│   │   ├── cleanup_mcp_timestamps.py
│   │   ├── legacy
│   │   │   └── migrate_chroma_to_sqlite.py
│   │   ├── mcp-migration.py
│   │   ├── migrate_sqlite_vec_embeddings.py
│   │   ├── migrate_storage.py
│   │   ├── migrate_tags.py
│   │   ├── migrate_timestamps.py
│   │   ├── migrate_to_cloudflare.py
│   │   ├── migrate_to_sqlite_vec.py
│   │   ├── migrate_v5_enhanced.py
│   │   ├── TIMESTAMP_CLEANUP_README.md
│   │   └── verify_mcp_timestamps.py
│   ├── pr
│   │   ├── amp_collect_results.sh
│   │   ├── amp_detect_breaking_changes.sh
│   │   ├── amp_generate_tests.sh
│   │   ├── amp_pr_review.sh
│   │   ├── amp_quality_gate.sh
│   │   ├── amp_suggest_fixes.sh
│   │   ├── auto_review.sh
│   │   ├── detect_breaking_changes.sh
│   │   ├── generate_tests.sh
│   │   ├── lib
│   │   │   └── graphql_helpers.sh
│   │   ├── pre_pr_check.sh
│   │   ├── quality_gate.sh
│   │   ├── resolve_threads.sh
│   │   ├── run_pyscn_analysis.sh
│   │   ├── run_quality_checks_on_files.sh
│   │   ├── run_quality_checks.sh
│   │   ├── thread_status.sh
│   │   └── watch_reviews.sh
│   ├── quality
│   │   ├── bulk_evaluate_onnx.py
│   │   ├── check_test_scores.py
│   │   ├── debug_deberta_scoring.py
│   │   ├── export_deberta_onnx.py
│   │   ├── fix_dead_code_install.sh
│   │   ├── migrate_to_deberta.py
│   │   ├── phase1_dead_code_analysis.md
│   │   ├── phase2_complexity_analysis.md
│   │   ├── README_PHASE1.md
│   │   ├── README_PHASE2.md
│   │   ├── rescore_deberta.py
│   │   ├── rescore_fallback.py
│   │   ├── reset_onnx_scores.py
│   │   ├── track_pyscn_metrics.sh
│   │   └── weekly_quality_review.sh
│   ├── README.md
│   ├── run
│   │   ├── memory_wrapper_cleanup.ps1
│   │   ├── memory_wrapper_cleanup.py
│   │   ├── memory_wrapper_cleanup.sh
│   │   ├── README_CLEANUP_WRAPPER.md
│   │   ├── run_mcp_memory.sh
│   │   ├── run-with-uv.sh
│   │   └── start_sqlite_vec.sh
│   ├── run_memory_server.py
│   ├── server
│   │   ├── check_http_server.py
│   │   ├── check_server_health.py
│   │   ├── memory_offline.py
│   │   ├── preload_models.py
│   │   ├── run_http_server.py
│   │   ├── run_memory_server.py
│   │   ├── start_http_server.bat
│   │   └── start_http_server.sh
│   ├── service
│   │   ├── deploy_dual_services.sh
│   │   ├── http_server_manager.sh
│   │   ├── install_http_service.sh
│   │   ├── mcp-memory-http.service
│   │   ├── mcp-memory.service
│   │   ├── memory_service_manager.sh
│   │   ├── service_control.sh
│   │   ├── service_utils.py
│   │   ├── update_service.sh
│   │   └── windows
│   │       ├── add_watchdog_trigger.ps1
│   │       ├── install_scheduled_task.ps1
│   │       ├── manage_service.ps1
│   │       ├── run_http_server_background.ps1
│   │       ├── uninstall_scheduled_task.ps1
│   │       └── update_and_restart.ps1
│   ├── setup-lightweight.sh
│   ├── sync
│   │   ├── check_drift.py
│   │   ├── claude_sync_commands.py
│   │   ├── export_memories.py
│   │   ├── import_memories.py
│   │   ├── litestream
│   │   │   ├── apply_local_changes.sh
│   │   │   ├── enhanced_memory_store.sh
│   │   │   ├── init_staging_db.sh
│   │   │   ├── io.litestream.replication.plist
│   │   │   ├── manual_sync.sh
│   │   │   ├── memory_sync.sh
│   │   │   ├── pull_remote_changes.sh
│   │   │   ├── push_to_remote.sh
│   │   │   ├── README.md
│   │   │   ├── resolve_conflicts.sh
│   │   │   ├── setup_local_litestream.sh
│   │   │   ├── setup_remote_litestream.sh
│   │   │   ├── staging_db_init.sql
│   │   │   ├── stash_local_changes.sh
│   │   │   ├── sync_from_remote_noconfig.sh
│   │   │   └── sync_from_remote.sh
│   │   ├── README.md
│   │   ├── safe_cloudflare_update.sh
│   │   ├── sync_memory_backends.py
│   │   └── sync_now.py
│   ├── testing
│   │   ├── run_complete_test.py
│   │   ├── run_memory_test.sh
│   │   ├── simple_test.py
│   │   ├── test_cleanup_logic.py
│   │   ├── test_cloudflare_backend.py
│   │   ├── test_docker_functionality.py
│   │   ├── test_installation.py
│   │   ├── test_mdns.py
│   │   ├── test_memory_api.py
│   │   ├── test_memory_simple.py
│   │   ├── test_migration.py
│   │   ├── test_search_api.py
│   │   ├── test_sqlite_vec_embeddings.py
│   │   ├── test_sse_events.py
│   │   ├── test-connection.py
│   │   └── test-hook.js
│   ├── update_and_restart.sh
│   ├── utils
│   │   ├── claude_commands_utils.py
│   │   ├── detect_platform.py
│   │   ├── generate_personalized_claude_md.sh
│   │   ├── groq
│   │   ├── groq_agent_bridge.py
│   │   ├── list-collections.py
│   │   ├── memory_wrapper_uv.py
│   │   ├── query_memories.py
│   │   ├── README_detect_platform.md
│   │   ├── smithery_wrapper.py
│   │   ├── test_groq_bridge.sh
│   │   └── uv_wrapper.py
│   └── validation
│       ├── check_dev_setup.py
│       ├── check_documentation_links.py
│       ├── check_handler_coverage.py
│       ├── diagnose_backend_config.py
│       ├── validate_configuration_complete.py
│       ├── validate_graph_tools.py
│       ├── validate_memories.py
│       ├── validate_migration.py
│       ├── validate_timestamp_integrity.py
│       ├── verify_environment.py
│       ├── verify_pytorch_windows.py
│       └── verify_torch.py
├── SECURITY.md
├── selective_timestamp_recovery.py
├── SPONSORS.md
├── src
│   └── mcp_memory_service
│       ├── __init__.py
│       ├── _version.py
│       ├── api
│       │   ├── __init__.py
│       │   ├── client.py
│       │   ├── operations.py
│       │   ├── sync_wrapper.py
│       │   └── types.py
│       ├── backup
│       │   ├── __init__.py
│       │   └── scheduler.py
│       ├── cli
│       │   ├── __init__.py
│       │   ├── ingestion.py
│       │   ├── main.py
│       │   └── utils.py
│       ├── config.py
│       ├── consolidation
│       │   ├── __init__.py
│       │   ├── associations.py
│       │   ├── base.py
│       │   ├── clustering.py
│       │   ├── compression.py
│       │   ├── consolidator.py
│       │   ├── decay.py
│       │   ├── forgetting.py
│       │   ├── health.py
│       │   └── scheduler.py
│       ├── dependency_check.py
│       ├── discovery
│       │   ├── __init__.py
│       │   ├── client.py
│       │   └── mdns_service.py
│       ├── embeddings
│       │   ├── __init__.py
│       │   └── onnx_embeddings.py
│       ├── ingestion
│       │   ├── __init__.py
│       │   ├── base.py
│       │   ├── chunker.py
│       │   ├── csv_loader.py
│       │   ├── json_loader.py
│       │   ├── pdf_loader.py
│       │   ├── registry.py
│       │   ├── semtools_loader.py
│       │   └── text_loader.py
│       ├── lm_studio_compat.py
│       ├── mcp_server.py
│       ├── models
│       │   ├── __init__.py
│       │   └── memory.py
│       ├── quality
│       │   ├── __init__.py
│       │   ├── ai_evaluator.py
│       │   ├── async_scorer.py
│       │   ├── config.py
│       │   ├── implicit_signals.py
│       │   ├── metadata_codec.py
│       │   ├── onnx_ranker.py
│       │   └── scorer.py
│       ├── server
│       │   ├── __init__.py
│       │   ├── __main__.py
│       │   ├── cache_manager.py
│       │   ├── client_detection.py
│       │   ├── environment.py
│       │   ├── handlers
│       │   │   ├── __init__.py
│       │   │   ├── consolidation.py
│       │   │   ├── documents.py
│       │   │   ├── graph.py
│       │   │   ├── memory.py
│       │   │   ├── quality.py
│       │   │   └── utility.py
│       │   └── logging_config.py
│       ├── server_impl.py
│       ├── services
│       │   ├── __init__.py
│       │   └── memory_service.py
│       ├── storage
│       │   ├── __init__.py
│       │   ├── base.py
│       │   ├── cloudflare.py
│       │   ├── factory.py
│       │   ├── graph.py
│       │   ├── http_client.py
│       │   ├── hybrid.py
│       │   ├── migrations
│       │   │   └── 008_add_graph_table.sql
│       │   └── sqlite_vec.py
│       ├── sync
│       │   ├── __init__.py
│       │   ├── exporter.py
│       │   ├── importer.py
│       │   └── litestream_config.py
│       ├── utils
│       │   ├── __init__.py
│       │   ├── cache_manager.py
│       │   ├── content_splitter.py
│       │   ├── db_utils.py
│       │   ├── debug.py
│       │   ├── directory_ingestion.py
│       │   ├── document_processing.py
│       │   ├── gpu_detection.py
│       │   ├── hashing.py
│       │   ├── health_check.py
│       │   ├── http_server_manager.py
│       │   ├── port_detection.py
│       │   ├── quality_analytics.py
│       │   ├── startup_orchestrator.py
│       │   ├── system_detection.py
│       │   └── time_parser.py
│       └── web
│           ├── __init__.py
│           ├── api
│           │   ├── __init__.py
│           │   ├── analytics.py
│           │   ├── backup.py
│           │   ├── consolidation.py
│           │   ├── documents.py
│           │   ├── events.py
│           │   ├── health.py
│           │   ├── manage.py
│           │   ├── mcp.py
│           │   ├── memories.py
│           │   ├── quality.py
│           │   ├── search.py
│           │   └── sync.py
│           ├── app.py
│           ├── dependencies.py
│           ├── oauth
│           │   ├── __init__.py
│           │   ├── authorization.py
│           │   ├── discovery.py
│           │   ├── middleware.py
│           │   ├── models.py
│           │   ├── registration.py
│           │   └── storage.py
│           ├── sse.py
│           └── static
│               ├── app.js
│               ├── i18n
│               │   ├── de.json
│               │   ├── en.json
│               │   ├── es.json
│               │   ├── fr.json
│               │   ├── ja.json
│               │   ├── ko.json
│               │   └── zh.json
│               ├── index.html
│               ├── README.md
│               ├── sse_test.html
│               └── style.css
├── start_http_debug.bat
├── start_http_server.sh
├── test_document.txt
├── test_version_checker.js
├── TESTING_NOTES.md
├── tests
│   ├── __init__.py
│   ├── api
│   │   ├── __init__.py
│   │   ├── test_compact_types.py
│   │   └── test_operations.py
│   ├── bridge
│   │   ├── mock_responses.js
│   │   ├── package-lock.json
│   │   ├── package.json
│   │   └── test_http_mcp_bridge.js
│   ├── conftest.py
│   ├── consolidation
│   │   ├── __init__.py
│   │   ├── conftest.py
│   │   ├── test_associations.py
│   │   ├── test_clustering.py
│   │   ├── test_compression.py
│   │   ├── test_consolidator.py
│   │   ├── test_decay.py
│   │   ├── test_forgetting.py
│   │   └── test_graph_modes.py
│   ├── contracts
│   │   └── api-specification.yml
│   ├── integration
│   │   ├── conftest.py
│   │   ├── HANDLER_COVERAGE_REPORT.md
│   │   ├── package-lock.json
│   │   ├── package.json
│   │   ├── test_all_memory_handlers.py
│   │   ├── test_api_key_fallback.py
│   │   ├── test_api_memories_chronological.py
│   │   ├── test_api_tag_time_search.py
│   │   ├── test_api_with_memory_service.py
│   │   ├── test_bridge_integration.js
│   │   ├── test_cli_interfaces.py
│   │   ├── test_cloudflare_connection.py
│   │   ├── test_concurrent_clients.py
│   │   ├── test_data_serialization_consistency.py
│   │   ├── test_http_server_startup.py
│   │   ├── test_mcp_memory.py
│   │   ├── test_mdns_integration.py
│   │   ├── test_oauth_basic_auth.py
│   │   ├── test_oauth_flow.py
│   │   ├── test_server_handlers.py
│   │   └── test_store_memory.py
│   ├── performance
│   │   ├── test_background_sync.py
│   │   └── test_hybrid_live.py
│   ├── README.md
│   ├── smithery
│   │   └── test_smithery.py
│   ├── sqlite
│   │   └── simple_sqlite_vec_test.py
│   ├── storage
│   │   ├── conftest.py
│   │   └── test_graph_storage.py
│   ├── test_client.py
│   ├── test_content_splitting.py
│   ├── test_database.py
│   ├── test_deberta_quality.py
│   ├── test_fallback_quality.py
│   ├── test_graph_traversal.py
│   ├── test_hybrid_cloudflare_limits.py
│   ├── test_hybrid_storage.py
│   ├── test_lightweight_onnx.py
│   ├── test_memory_ops.py
│   ├── test_memory_wrapper_cleanup.py
│   ├── test_quality_integration.py
│   ├── test_quality_system.py
│   ├── test_semantic_search.py
│   ├── test_sqlite_vec_storage.py
│   ├── test_time_parser.py
│   ├── test_timestamp_preservation.py
│   ├── timestamp
│   │   ├── test_hook_vs_manual_storage.py
│   │   ├── test_issue99_final_validation.py
│   │   ├── test_search_retrieval_inconsistency.py
│   │   ├── test_timestamp_issue.py
│   │   └── test_timestamp_simple.py
│   └── unit
│       ├── conftest.py
│       ├── test_cloudflare_storage.py
│       ├── test_csv_loader.py
│       ├── test_fastapi_dependencies.py
│       ├── test_import.py
│       ├── test_imports.py
│       ├── test_json_loader.py
│       ├── test_mdns_simple.py
│       ├── test_mdns.py
│       ├── test_memory_service.py
│       ├── test_memory.py
│       ├── test_semtools_loader.py
│       ├── test_storage_interface_compatibility.py
│       ├── test_tag_time_filtering.py
│       └── test_uv_no_pip_installer_fallback.py
├── tools
│   ├── docker
│   │   ├── DEPRECATED.md
│   │   ├── docker-compose.http.yml
│   │   ├── docker-compose.pythonpath.yml
│   │   ├── docker-compose.standalone.yml
│   │   ├── docker-compose.uv.yml
│   │   ├── docker-compose.yml
│   │   ├── docker-entrypoint-persistent.sh
│   │   ├── docker-entrypoint-unified.sh
│   │   ├── docker-entrypoint.sh
│   │   ├── Dockerfile
│   │   ├── Dockerfile.glama
│   │   ├── Dockerfile.slim
│   │   ├── README.md
│   │   └── test-docker-modes.sh
│   └── README.md
├── uv.lock
└── verify_compression.sh
```

# Files

--------------------------------------------------------------------------------
/archive/deployment/deploy_mcp_v4.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | 
 3 | # Deploy FastAPI MCP Server v4.0.0-alpha.1
 4 | echo "🚀 Deploying FastAPI MCP Server v4.0.0-alpha.1..."
 5 | 
 6 | # Stop current service
 7 | echo "⏹️  Stopping current HTTP API service..."
 8 | sudo systemctl stop mcp-memory
 9 | 
10 | # Update systemd service file
11 | echo "📝 Updating systemd service configuration..."
12 | sudo cp /tmp/mcp-memory-v4.service /etc/systemd/system/mcp-memory.service
13 | 
14 | # Reload systemd daemon
15 | echo "🔄 Reloading systemd daemon..."
16 | sudo systemctl daemon-reload
17 | 
18 | # Start the new MCP server
19 | echo "▶️  Starting FastAPI MCP server..."
20 | sudo systemctl start mcp-memory
21 | 
22 | # Check status
23 | echo "🔍 Checking service status..."
24 | sudo systemctl status mcp-memory --no-pager
25 | 
26 | echo ""
27 | echo "✅ FastAPI MCP Server v4.0.0-alpha.1 deployment complete!"
28 | echo ""
29 | echo "🌐 Service Access:"
30 | echo "   - MCP Protocol: Available on port 8000"
31 | echo "   - Health Check: curl http://localhost:8000/health"
32 | echo "   - Service Logs: sudo journalctl -u mcp-memory -f"
33 | echo ""
34 | echo "🔧 Service Management:"
35 | echo "   - Status: sudo systemctl status mcp-memory"
36 | echo "   - Stop:   sudo systemctl stop mcp-memory"
37 | echo "   - Start:  sudo systemctl start mcp-memory"
```

--------------------------------------------------------------------------------
/src/mcp_memory_service/storage/__init__.py:
--------------------------------------------------------------------------------

```python
 1 | # Copyright 2024 Heinrich Krupp
 2 | #
 3 | # Licensed under the Apache License, Version 2.0 (the "License");
 4 | # you may not use this file except in compliance with the License.
 5 | # You may obtain a copy of the License at
 6 | #
 7 | #     http://www.apache.org/licenses/LICENSE-2.0
 8 | #
 9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | 
15 | from .base import MemoryStorage
16 | 
17 | # Conditional imports based on available dependencies
18 | __all__ = ['MemoryStorage']
19 | 
20 | try:
21 |     from .sqlite_vec import SqliteVecMemoryStorage
22 |     __all__.append('SqliteVecMemoryStorage')
23 | except ImportError:
24 |     SqliteVecMemoryStorage = None
25 | 
26 | try:
27 |     from .cloudflare import CloudflareStorage
28 |     __all__.append('CloudflareStorage')
29 | except ImportError:
30 |     CloudflareStorage = None
31 | 
32 | try:
33 |     from .hybrid import HybridMemoryStorage
34 |     __all__.append('HybridMemoryStorage')
35 | except ImportError:
36 |     HybridMemoryStorage = None
```
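
The conditional-import pattern above binds each unavailable backend name to `None` rather than raising, so callers can probe what is installed at runtime. A minimal sketch of that probe (the fallback order and error message are illustrative, not the project's actual selection logic):

```python
from mcp_memory_service import storage

# Pick the first backend whose optional dependencies imported successfully.
for name in ("HybridMemoryStorage", "SqliteVecMemoryStorage", "CloudflareStorage"):
    backend_cls = getattr(storage, name, None)
    if backend_cls is not None:
        print(f"Using backend: {name}")
        break
else:
    raise RuntimeError("No storage backend available; install the optional dependencies")
```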

--------------------------------------------------------------------------------
/.metrics/baseline_nesting_install_hooks.txt:
--------------------------------------------------------------------------------

```
 1 | BASELINE COMPLEXITY METRICS
 2 | ====================================================================================================
 3 | 
 4 | Function: detect_claude_mcp_configuration
 5 |   Location: Lines 198-236 (39 total)
 6 |   Total Statements: 23
 7 |   Control Flow:
 8 |     - If statements: 3
 9 |     - For loops: 1
10 |     - While loops: 0
11 |     - Try/Except blocks: 4
12 |     - With statements: 0
13 |     - Total branches: 8
14 |   Boolean Operators: 4
15 |   Maximum Nesting Depth: 5
16 |   Estimated Cyclomatic Complexity: 13
17 | 
18 | Function: _parse_mcp_get_output
19 |   Location: Lines 238-268 (31 total)
20 |   Total Statements: 24
21 |   Control Flow:
22 |     - If statements: 7
23 |     - For loops: 1
24 |     - While loops: 0
25 |     - Try/Except blocks: 2
26 |     - With statements: 0
27 |     - Total branches: 10
28 |   Boolean Operators: 5
29 |   Maximum Nesting Depth: 9
30 |   Estimated Cyclomatic Complexity: 16
31 | 
32 | Function: validate_mcp_prerequisites
33 |   Location: Lines 351-385 (35 total)
34 |   Total Statements: 23
35 |   Control Flow:
36 |     - If statements: 8
37 |     - For loops: 0
38 |     - While loops: 0
39 |     - Try/Except blocks: 0
40 |     - With statements: 0
41 |     - Total branches: 8
42 |   Boolean Operators: 12
43 |   Maximum Nesting Depth: 4
44 |   Estimated Cyclomatic Complexity: 21
45 | 
46 | 
```
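
The "Estimated Cyclomatic Complexity" values are consistent with a simple estimator: one base path plus total branches plus boolean operators (1 + 8 + 4 = 13, 1 + 10 + 5 = 16, 1 + 8 + 12 = 21). A sketch of that estimate, assuming this is indeed the formula behind the report:

```python
def estimate_cyclomatic_complexity(total_branches: int, boolean_operators: int) -> int:
    """Estimate CC as the single entry path plus each branch and boolean operator."""
    return 1 + total_branches + boolean_operators

assert estimate_cyclomatic_complexity(8, 4) == 13    # detect_claude_mcp_configuration
assert estimate_cyclomatic_complexity(10, 5) == 16   # _parse_mcp_get_output
assert estimate_cyclomatic_complexity(8, 12) == 21   # validate_mcp_prerequisites
```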

--------------------------------------------------------------------------------
/scripts/backup/export_distributable_memories.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | 
 3 | # Export distributable reference memories for sharing across local network
 4 | # Usage: ./export_distributable_memories.sh [output_file]
 5 | 
 6 | OUTPUT_FILE="${1:-mcp_reference_memories_$(date +%Y%m%d).json}"
 7 | MCP_ENDPOINT="https://10.0.1.30:8443/mcp"
 8 | API_KEY="test-key-123"
 9 | 
10 | echo "Exporting distributable reference memories..."
11 | echo "Output file: $OUTPUT_FILE"
12 | 
13 | curl -k -s -X POST "$MCP_ENDPOINT" \
14 |   -H "Content-Type: application/json" \
15 |   -H "Authorization: Bearer $API_KEY" \
16 |   -d '{
17 |     "jsonrpc": "2.0", 
18 |     "id": 1, 
19 |     "method": "tools/call", 
20 |     "params": {
21 |       "name": "search_by_tag", 
22 |       "arguments": {
23 |         "tags": ["distributable-reference"]
24 |       }
25 |     }
26 |   }' | jq -r '.result.content[0].text' > "$OUTPUT_FILE"
27 | 
28 | if [ $? -eq 0 ]; then
29 |     echo "✅ Export completed: $OUTPUT_FILE"
30 |     echo "📊 Memory count: $(jq '. | length' "$OUTPUT_FILE" 2>/dev/null || echo "Unknown")"
31 |     echo ""
32 |     echo "To import to another MCP Memory Service:"
33 |     echo "1. Copy $OUTPUT_FILE to target machine"
34 |     echo "2. Use store_memory calls for each entry"
35 |     echo "3. Update CLAUDE.md with new memory hashes"
36 | else
37 |     echo "❌ Export failed"
38 |     exit 1
39 | fi
```
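
The script's closing instructions say to replay the export through `store_memory` calls on the target machine. A hedged sketch of that import step, assuming the exported text parses as a JSON array and that each entry can be passed as the tool's `arguments` (the argument schema is an assumption, not verified against the tool definition):

```python
import json
import requests

MCP_ENDPOINT = "https://10.0.1.30:8443/mcp"  # same endpoint as the export script
API_KEY = "test-key-123"

with open("mcp_reference_memories_20250101.json") as f:  # hypothetical export file
    memories = json.load(f)

for i, entry in enumerate(memories, start=1):
    requests.post(
        MCP_ENDPOINT,
        headers={"Content-Type": "application/json",
                 "Authorization": f"Bearer {API_KEY}"},
        json={"jsonrpc": "2.0", "id": i, "method": "tools/call",
              "params": {"name": "store_memory", "arguments": entry}},
        verify=False,  # mirrors the export script's curl -k
    )
```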

--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------

```yaml
 1 | name: Release (Manual)
 2 | 
 3 | on:
 4 |   workflow_dispatch:
 5 | 
 6 | jobs:
 7 |   release:
 8 |     runs-on: ubuntu-latest
 9 |     concurrency: release
10 |     permissions:
11 |       id-token: write
12 |       contents: write
13 |       actions: write
14 |       pull-requests: write
15 |       repository-projects: write
16 | 
17 |     steps:
18 |     - uses: actions/checkout@v3 # would probably be better to use v4
19 |       with:
20 |         fetch-depth: 0
21 |         token: ${{ secrets.GITHUB_TOKEN }}
22 | 
23 |     - name: Set up Python
24 |       uses: actions/setup-python@v4
25 |       with:
26 |         python-version: '3.9' # this setup-python action uses a separate Python version from python-semantic-release; that's why we had the error
27 | 
28 |     - name: Install dependencies
29 |       run: |
30 |         python -m pip install --upgrade pip
31 |         python -m pip install build hatchling python-semantic-release
32 | 
33 |     - name: Verify build module installation
34 |       run: python -m pip show build
35 | 
36 |     - name: Build package
37 |       run: python -m build
38 | 
39 |     - name: Python Semantic Release
40 |       uses: python-semantic-release/[email protected]
41 |       with:
42 |         github_token: ${{ secrets.GITHUB_TOKEN }}
43 |         verbosity: 2
44 |       env:
45 |         GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
46 | 
```

--------------------------------------------------------------------------------
/COMMIT_MESSAGE.md:
--------------------------------------------------------------------------------

```markdown
 1 | fix: Support flexible MCP memory server naming conventions
 2 | 
 3 | The hook installer was hardcoded to check for a memory server named
 4 | exactly 'memory', but Claude Code allows users to configure MCP servers
 5 | with any name they choose. This caused false "standalone" detection even
 6 | when a memory MCP server was properly configured and connected.
 7 | 
 8 | Changes:
 9 | - Check multiple common memory server names (memory-service, memory,
10 |   mcp-memory-service, extended-memory)
11 | - Fall back to 'claude mcp list' grep detection for any memory-related
12 |   server
13 | - Support HTTP MCP server format (URL field instead of Command field)
14 | - Update validation to accept http type and URL format
15 | - Maintain backward compatibility with original 'memory' name
16 | 
17 | Fixes installation failures for users who configured their memory MCP
18 | servers with descriptive names like 'memory-service' (common for HTTP
19 | servers) or 'extended-memory' (older installations).
20 | 
21 | Testing:
22 | - Verified with HTTP MCP server named 'memory-service'
23 | - Confirmed backward compatibility with 'memory' name
24 | - Tested fallback detection mechanism
25 | - All test cases documented in TESTING_NOTES.md
26 | 
27 | Co-Authored-By: Claude Sonnet 4.5 <[email protected]>
28 | 
```

--------------------------------------------------------------------------------
/scripts/installation/setup_backup_cron.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # Setup automated backups for MCP Memory Service
 3 | # Creates cron jobs for regular SQLite-vec database backups
 4 | 
 5 | set -e
 6 | 
 7 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
 8 | BACKUP_SCRIPT="$SCRIPT_DIR/backup_sqlite_vec.sh"
 9 | 
10 | # Check if backup script exists
11 | if [[ ! -f "$BACKUP_SCRIPT" ]]; then
12 |     echo "Error: Backup script not found at $BACKUP_SCRIPT"
13 |     exit 1
14 | fi
15 | 
16 | # Make sure backup script is executable
17 | chmod +x "$BACKUP_SCRIPT"
18 | 
19 | # Create cron job entry
20 | CRON_ENTRY="0 2 * * * $BACKUP_SCRIPT > /tmp/mcp-backup.log 2>&1"
21 | 
22 | # Check if cron job already exists
23 | if crontab -l 2>/dev/null | grep -q "$BACKUP_SCRIPT"; then
24 |     echo "Backup cron job already exists. Current crontab:"
25 |     crontab -l | grep "$BACKUP_SCRIPT"
26 | else
27 |     # Add cron job
28 |     (crontab -l 2>/dev/null || true; echo "$CRON_ENTRY") | crontab -
29 |     echo "Added daily backup cron job:"
30 |     echo "$CRON_ENTRY"
31 | fi
32 | 
33 | echo ""
34 | echo "Backup automation setup complete!"
35 | echo "- Daily backups at 2:00 AM"
36 | echo "- Backup script: $BACKUP_SCRIPT"
37 | echo "- Log file: /tmp/mcp-backup.log"
38 | echo ""
39 | echo "To check cron jobs: crontab -l"
40 | echo "To remove cron job: crontab -l | grep -v backup_sqlite_vec.sh | crontab -"
```

--------------------------------------------------------------------------------
/scripts/sync/litestream/setup_local_litestream.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # Setup script for Litestream replica on local macOS machine
 3 | 
 4 | set -e
 5 | 
 6 | echo "🔧 Setting up Litestream replica on local macOS..."
 7 | 
 8 | # Copy configuration to system location
 9 | echo "⚙️ Installing Litestream configuration..."
10 | sudo mkdir -p /usr/local/etc
11 | sudo cp litestream_replica_config.yml /usr/local/etc/litestream.yml
12 | 
13 | # Create log directory
14 | sudo mkdir -p /var/log
15 | sudo touch /var/log/litestream.log
16 | sudo chmod 644 /var/log/litestream.log
17 | 
18 | # Install LaunchDaemon
19 | echo "🚀 Installing LaunchDaemon..."
20 | sudo cp deployment/io.litestream.replication.plist /Library/LaunchDaemons/
21 | 
22 | # Set permissions
23 | sudo chown root:wheel /Library/LaunchDaemons/io.litestream.replication.plist
24 | sudo chmod 644 /Library/LaunchDaemons/io.litestream.replication.plist
25 | 
26 | echo "✅ Local Litestream setup completed!"
27 | echo ""
28 | echo "Next steps:"
29 | echo "1. Load service: sudo launchctl load /Library/LaunchDaemons/io.litestream.replication.plist"
30 | echo "2. Start service: sudo launchctl start io.litestream.replication"
31 | echo "3. Check status: litestream replicas -config /usr/local/etc/litestream.yml"
32 | echo ""
33 | echo "⚠️  Before starting the replica service, make sure the master is running on narrowbox.local"
```

--------------------------------------------------------------------------------
/docs/technical/tag-storage.md:
--------------------------------------------------------------------------------

```markdown
 1 | # Tag Storage Procedure
 2 | 
 3 | ## File Structure Overview
 4 | ```
 5 | mcp_memory_service/
 6 | ├── tests/
 7 | │   └── test_tag_storage.py    # Integration tests
 8 | ├── scripts/
 9 | │   ├── validate_memories.py   # Validation script
10 | │   └── migrate_tags.py        # Migration script
11 | ```
12 | 
13 | ## Execution Steps
14 | 
15 | 1. **Run Initial Validation**
16 |    ```bash
17 |    python scripts/validate_memories.py
18 |    ```
19 |    - Generates validation report of current state
20 | 
21 | 2. **Run Integration Tests**
22 |    ```bash
23 |    python tests/test_tag_storage.py
24 |    ```
25 |    - Verifies functionality
26 | 
27 | 3. **Execute Migration**
28 |    ```bash
29 |    python scripts/migrate_tags.py
30 |    ```
31 |    The script will:
32 |    - Create a backup automatically
33 |    - Run validation check
34 |    - Ask for confirmation before proceeding
35 |    - Perform migration
36 |    - Verify the migration
37 | 
38 | 4. **Post-Migration Validation**
39 |    ```bash
40 |    python scripts/validate_memories.py
41 |    ```
42 |    - Confirms successful migration
43 | 
44 | ## Monitoring Requirements
45 | - Keep backup files for at least 7 days
46 | - Monitor logs for any tag-related errors
47 | - Run validation script daily for the first week
48 | - Check search functionality with various tag formats
49 | 
50 | ## Rollback Process
51 | If issues are detected, use:
52 | ```bash
53 | python scripts/migrate_tags.py --rollback
54 | ```
```
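
The four steps are sequential and each should gate the next. A minimal driver sketch under that assumption (the migration script itself still asks for confirmation interactively):

```python
import subprocess
import sys

STEPS = [
    ["python", "scripts/validate_memories.py"],  # 1. initial validation
    ["python", "tests/test_tag_storage.py"],     # 2. integration tests
    ["python", "scripts/migrate_tags.py"],       # 3. migration (backup + confirmation)
    ["python", "scripts/validate_memories.py"],  # 4. post-migration validation
]

for step in STEPS:
    if subprocess.run(step).returncode != 0:
        print(f"Step failed: {' '.join(step)}; consider --rollback", file=sys.stderr)
        sys.exit(1)
```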

--------------------------------------------------------------------------------
/scripts/maintenance/check_memory_types.py:
--------------------------------------------------------------------------------

```python
 1 | #!/usr/bin/env python3
 2 | """Quick script to check memory types in local database."""
 3 | import sqlite3
 4 | from pathlib import Path
 5 | 
 6 | # Windows database path
 7 | db_path = Path.home() / "AppData/Local/mcp-memory/sqlite_vec.db"
 8 | 
 9 | if not db_path.exists():
10 |     print(f"❌ Database not found at: {db_path}")
11 |     exit(1)
12 | 
13 | conn = sqlite3.connect(db_path)
14 | cursor = conn.cursor()
15 | 
16 | # Get memory type distribution
17 | cursor.execute("""
18 |     SELECT memory_type, COUNT(*) as count
19 |     FROM memories
20 |     GROUP BY memory_type
21 |     ORDER BY count DESC
22 | """)
23 | 
24 | results = cursor.fetchall()
25 | total = sum(count for _, count in results)
26 | 
27 | print(f"\nMemory Type Distribution")
28 | print("=" * 60)
29 | print(f"Total memories: {total:,}")
30 | print(f"Unique types: {len(results)}\n")
31 | 
32 | print(f"{'Memory Type':<40} {'Count':>8} {'%':>6}")
33 | print("-" * 60)
34 | 
35 | for memory_type, count in results[:30]:  # Show top 30
36 |     pct = (count / total) * 100 if total > 0 else 0
37 |     type_display = memory_type if memory_type else "(empty/NULL)"
38 |     print(f"{type_display:<40} {count:>8,} {pct:>5.1f}%")
39 | 
40 | if len(results) > 30:
41 |     remaining = len(results) - 30
42 |     remaining_count = sum(count for _, count in results[30:])
43 |     print(f"\n... and {remaining} more types ({remaining_count:,} memories)")
44 | 
45 | conn.close()
46 | 
```

--------------------------------------------------------------------------------
/scripts/utils/list-collections.py:
--------------------------------------------------------------------------------

```python
 1 | # Copyright 2024 Heinrich Krupp
 2 | #
 3 | # Licensed under the Apache License, Version 2.0 (the "License");
 4 | # you may not use this file except in compliance with the License.
 5 | # You may obtain a copy of the License at
 6 | #
 7 | #     http://www.apache.org/licenses/LICENSE-2.0
 8 | #
 9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | 
15 | from chromadb import HttpClient
16 | 
17 | def list_collections():
18 |     try:
19 |         # Connect to local ChromaDB
20 |         client = HttpClient(host='localhost', port=8000)
21 |         
22 |         # List all collections
23 |         collections = client.list_collections()
24 |         
25 |         print("\nFound Collections:")
26 |         print("------------------")
27 |         for collection in collections:
28 |             print(f"Name: {collection.name}")
29 |             print(f"Metadata: {collection.metadata}")
30 |             print(f"Count: {collection.count()}")
31 |             print("------------------")
32 |             
33 |     except Exception as e:
34 |         print(f"Error connecting to local ChromaDB: {str(e)}")
35 | 
36 | if __name__ == "__main__":
37 |     list_collections()
38 | 
```

--------------------------------------------------------------------------------
/tests/unit/conftest.py:
--------------------------------------------------------------------------------

```python
 1 | """
 2 | Shared test fixtures and helpers for unit tests.
 3 | """
 4 | 
 5 | import tempfile
 6 | from pathlib import Path
 7 | from typing import List, Any
 8 | 
 9 | 
10 | async def extract_chunks_from_temp_file(
11 |     loader: Any,
12 |     filename: str,
13 |     content: str,
14 |     encoding: str = 'utf-8',
15 |     **extract_kwargs
16 | ) -> List[Any]:
17 |     """
18 |     Helper to extract chunks from a temporary file.
19 | 
20 |     Args:
21 |         loader: Loader instance (CSVLoader, JSONLoader, etc.)
22 |         filename: Name of the temporary file to create
23 |         content: Content to write to the file
24 |         encoding: File encoding (default: utf-8)
25 |         **extract_kwargs: Additional keyword arguments to pass to extract_chunks()
26 | 
27 |     Returns:
28 |         List of extracted chunks
29 | 
30 |     Example:
31 |         >>> loader = CSVLoader(chunk_size=1000, chunk_overlap=200)
32 |         >>> chunks = await extract_chunks_from_temp_file(
33 |         ...     loader,
34 |         ...     "test.csv",
35 |         ...     "name,age\\nJohn,25",
36 |         ...     delimiter=','
37 |         ... )
38 |     """
39 |     with tempfile.TemporaryDirectory() as tmpdir:
40 |         file_path = Path(tmpdir) / filename
41 |         file_path.write_text(content, encoding=encoding)
42 | 
43 |         chunks = []
44 |         async for chunk in loader.extract_chunks(file_path, **extract_kwargs):
45 |             chunks.append(chunk)
46 | 
47 |         return chunks
48 | 
```
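
A minimal pytest usage sketch for the helper, assuming `pytest-asyncio` is installed and that `CSVLoader` lives at the import path suggested by the repository layout (both are assumptions):

```python
import pytest
from mcp_memory_service.ingestion.csv_loader import CSVLoader  # assumed path

from conftest import extract_chunks_from_temp_file

@pytest.mark.asyncio
async def test_csv_loader_extracts_chunks():
    loader = CSVLoader(chunk_size=1000, chunk_overlap=200)
    chunks = await extract_chunks_from_temp_file(
        loader, "test.csv", "name,age\nJohn,25", delimiter=','
    )
    assert len(chunks) > 0
```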

--------------------------------------------------------------------------------
/test_version_checker.js:
--------------------------------------------------------------------------------

```javascript
 1 | #!/usr/bin/env node
 2 | 
 3 | /**
 4 |  * Test script for version-checker.js utility
 5 |  */
 6 | 
 7 | const { getVersionInfo, formatVersionDisplay } = require('./claude-hooks/utilities/version-checker');
 8 | 
 9 | const CONSOLE_COLORS = {
10 |     RESET: '\x1b[0m',
11 |     BRIGHT: '\x1b[1m',
12 |     DIM: '\x1b[2m',
13 |     CYAN: '\x1b[36m',
14 |     GREEN: '\x1b[32m',
15 |     YELLOW: '\x1b[33m',
16 |     GRAY: '\x1b[90m',
17 |     RED: '\x1b[31m'
18 | };
19 | 
20 | async function test() {
21 |     console.log('Testing version-checker utility...\n');
22 | 
23 |     const projectRoot = __dirname;
24 | 
25 |     // Test with PyPI check
26 |     console.log('1. Testing with PyPI check enabled:');
27 |     const versionInfo = await getVersionInfo(projectRoot, { checkPyPI: true, timeout: 3000 });
28 |     console.log('   Raw version info:', JSON.stringify(versionInfo, null, 2));
29 |     const display = formatVersionDisplay(versionInfo, CONSOLE_COLORS);
30 |     console.log('   Formatted:', display);
31 | 
32 |     console.log('\n2. Testing without PyPI check:');
33 |     const localOnly = await getVersionInfo(projectRoot, { checkPyPI: false });
34 |     console.log('   Raw version info:', JSON.stringify(localOnly, null, 2));
35 |     const localDisplay = formatVersionDisplay(localOnly, CONSOLE_COLORS);
36 |     console.log('   Formatted:', localDisplay);
37 | 
38 |     console.log('\n✅ Test completed!');
39 | }
40 | 
41 | test().catch(error => {
42 |     console.error('❌ Test failed:', error);
43 |     process.exit(1);
44 | });
45 | 
```

--------------------------------------------------------------------------------
/docs/deployment/production-guide.md:
--------------------------------------------------------------------------------

```markdown
 1 | # MCP Memory Service - Production Setup
 2 | 
 3 | ## 🚀 Quick Start
 4 | 
 5 | This MCP Memory Service is configured with the **consolidation system**, **mDNS auto-discovery**, **HTTPS**, and **automatic startup**.
 6 | 
 7 | ### **Installation**
 8 | ```bash
 9 | # 1. Install the service
10 | bash install_service.sh
11 | 
12 | # 2. Update configuration (if needed)
13 | ./update_service.sh
14 | 
15 | # 3. Start the service
16 | sudo systemctl start mcp-memory
17 | ```
18 | 
19 | ### **Verification**
20 | ```bash
21 | # Check service status
22 | sudo systemctl status mcp-memory
23 | 
24 | # Test API health
25 | curl -k https://localhost:8000/api/health
26 | 
27 | # Verify mDNS discovery
28 | avahi-browse -t _mcp-memory._tcp
29 | ```
30 | 
31 | ## 📋 **Service Details**
32 | 
33 | - **Service Name**: `memory._mcp-memory._tcp.local.`
34 | - **HTTPS Address**: https://localhost:8000 
35 | - **API Key**: `mcp-0b1ccbde2197a08dcb12d41af4044be6`
36 | - **Auto-Startup**: ✅ Enabled
37 | - **Consolidation**: ✅ Active
38 | - **mDNS Discovery**: ✅ Working
39 | 
40 | ## 🛠️ **Management**
41 | 
42 | ```bash
43 | ./service_control.sh start     # Start service
44 | ./service_control.sh stop      # Stop service  
45 | ./service_control.sh status    # Show status
46 | ./service_control.sh logs      # View logs
47 | ./service_control.sh health    # Test API
48 | ```
49 | 
50 | ## 📖 **Documentation**
51 | 
52 | - **Complete Guide**: `COMPLETE_SETUP_GUIDE.md`
53 | - **Service Files**: `mcp-memory.service`, management scripts
54 | - **Archive**: `archive/setup-development/` (development files)
55 | 
56 | **✅ Ready for production use!**
```

--------------------------------------------------------------------------------
/claude-hooks/statusline.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | 
 3 | # Claude Code Status Line Script
 4 | # Displays session memory context in status line
 5 | # Format: 🧠 8 (5 recent) memories | 📊 12 commits
 6 | 
 7 | # Path to session cache file
 8 | CACHE_FILE="$HOME/.claude/hooks/utilities/session-cache.json"
 9 | 
10 | # ANSI color codes for styling
11 | CYAN='\033[36m'
12 | GREEN='\033[32m'
13 | GRAY='\033[90m'
14 | RESET='\033[0m'
15 | 
16 | # Check if cache file exists
17 | if [ ! -f "$CACHE_FILE" ]; then
18 |     # No cache file - session not started yet or hook failed
19 |     echo ""
20 |     exit 0
21 | fi
22 | 
23 | # Read cache file and extract data
24 | MEMORIES=$(jq -r '.memoriesLoaded // 0' "$CACHE_FILE" 2>/dev/null)
25 | RECENT=$(jq -r '.recentCount // 0' "$CACHE_FILE" 2>/dev/null)
26 | GIT_COMMITS=$(jq -r '.gitCommits // 0' "$CACHE_FILE" 2>/dev/null)
27 | 
28 | # Handle jq errors (a failed read leaves a non-numeric value)
29 | if ! [[ "$MEMORIES" =~ ^[0-9]+$ && "$RECENT" =~ ^[0-9]+$ && "$GIT_COMMITS" =~ ^[0-9]+$ ]]; then
30 |     echo ""
31 |     exit 0
32 | fi
33 | 
34 | # Build status line output
35 | STATUS=""
36 | 
37 | # Memory section
38 | if [ "$MEMORIES" -gt 0 ]; then
39 |     if [ "$RECENT" -gt 0 ]; then
40 |         STATUS="${CYAN}🧠 ${MEMORIES}${RESET} ${GREEN}(${RECENT} recent)${RESET} memories"
41 |     else
42 |         STATUS="${CYAN}🧠 ${MEMORIES}${RESET} memories"
43 |     fi
44 | fi
45 | 
46 | # Git section
47 | if [ "$GIT_COMMITS" -gt 0 ]; then
48 |     if [ -n "$STATUS" ]; then
49 |         STATUS="${STATUS} ${GRAY}|${RESET} ${CYAN}📊 ${GIT_COMMITS} commits${RESET}"
50 |     else
51 |         STATUS="${CYAN}📊 ${GIT_COMMITS} commits${RESET}"
52 |     fi
53 | fi
54 | 
55 | # Output the status (the first line printed becomes the status line)
56 | echo -e "$STATUS"
57 | 
```
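
The script only needs three numeric fields from the session cache. A small sketch that seeds a sample cache so the status line can be tested in isolation (values taken from the format example in the header comment):

```python
import json
from pathlib import Path

# Same path the status line script reads.
cache = Path.home() / ".claude/hooks/utilities/session-cache.json"
cache.parent.mkdir(parents=True, exist_ok=True)
cache.write_text(json.dumps({
    "memoriesLoaded": 8,  # rendered as "🧠 8"
    "recentCount": 5,     # rendered as "(5 recent)"
    "gitCommits": 12,     # rendered as "📊 12 commits"
}))
```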

--------------------------------------------------------------------------------
/.claude/directives/development-setup.md:
--------------------------------------------------------------------------------

```markdown
 1 | # Development Setup - Critical Guidelines
 2 | 
 3 | ## Editable Install (MANDATORY)
 4 | 
 5 | **⚠️ ALWAYS use editable install** to avoid stale package issues:
 6 | 
 7 | ```bash
 8 | # REQUIRED for development
 9 | pip install -e .  # or: uv pip install -e .
10 | 
11 | # Verify
12 | pip show mcp-memory-service | grep Location
13 | # Should show: .../mcp-memory-service/src
14 | # NOT: .../site-packages
15 | ```
16 | 
17 | **Why:** MCP servers load from `site-packages`, not source files. Without `-e`, source changes won't be reflected until reinstall.
18 | 
19 | **Common symptom**: Code shows v8.23.0 but server reports v8.5.3
20 | 
21 | ## Development Workflow
22 | 
23 | 1. Clone repo: `git clone https://github.com/doobidoo/mcp-memory-service.git`
24 | 2. Create venv: `python -m venv venv && source venv/bin/activate`
25 | 3. **Editable install**: `pip install -e .` ← CRITICAL STEP
26 | 4. Verify: `python -c "import mcp_memory_service; print(mcp_memory_service.__version__)"`
27 | 5. Start coding - changes take effect after server restart (no reinstall needed)
28 | 
29 | ## Version Mismatch Detection
30 | 
31 | ```bash
32 | # Quick check script
33 | python scripts/validation/check_dev_setup.py
34 | 
35 | # Manual verification (both should match)
36 | grep '__version__' src/mcp_memory_service/__init__.py
37 | python -c "import mcp_memory_service; print(mcp_memory_service.__version__)"
38 | ```
39 | 
40 | ## Fix Stale Installation
41 | 
42 | ```bash
43 | pip uninstall mcp-memory-service
44 | pip install -e .
45 | 
46 | # Restart MCP servers in Claude Code
47 | # Run: /mcp
48 | ```
49 | 
```

--------------------------------------------------------------------------------
/src/mcp_memory_service/web/oauth/__init__.py:
--------------------------------------------------------------------------------

```python
 1 | # Copyright 2024 Heinrich Krupp
 2 | #
 3 | # Licensed under the Apache License, Version 2.0 (the "License");
 4 | # you may not use this file except in compliance with the License.
 5 | # You may obtain a copy of the License at
 6 | #
 7 | #     http://www.apache.org/licenses/LICENSE-2.0
 8 | #
 9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | 
15 | """
16 | OAuth 2.1 Dynamic Client Registration implementation for MCP Memory Service.
17 | 
18 | Provides OAuth 2.1 DCR endpoints to enable Claude Code HTTP transport integration.
19 | 
20 | This module implements:
21 | - RFC 8414: OAuth 2.0 Authorization Server Metadata
22 | - RFC 7591: OAuth 2.0 Dynamic Client Registration Protocol
23 | - OAuth 2.1 security requirements and best practices
24 | 
25 | Key features:
26 | - Dynamic client registration for automated OAuth client setup
27 | - JWT-based access tokens with proper validation
28 | - Authorization code flow with PKCE support
29 | - Client credentials flow for server-to-server authentication
30 | - Comprehensive scope-based authorization
31 | - Backward compatibility with existing API key authentication
32 | """
33 | 
34 | __all__ = [
35 |     "discovery",
36 |     "models",
37 |     "registration",
38 |     "authorization",
39 |     "middleware",
40 |     "storage"
41 | ]
```
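
The discovery and registration flow described in the docstring can be exercised from the shell. A minimal sketch, assuming the server is reachable at `https://localhost:8443` with a self-signed certificate and serves its RFC 8414 metadata at the standard well-known path; the registration payload uses the basic RFC 7591 fields:

```bash
BASE="https://localhost:8443"

# RFC 8414: fetch authorization server metadata (-k tolerates self-signed TLS)
META=$(curl -sk "$BASE/.well-known/oauth-authorization-server")

# RFC 7591: register a client against the advertised endpoint
REG_URL=$(echo "$META" | jq -r '.registration_endpoint')
curl -sk -X POST "$REG_URL" \
  -H "Content-Type: application/json" \
  -d '{"client_name": "example-client", "redirect_uris": ["http://localhost:8123/callback"]}'
```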

--------------------------------------------------------------------------------
/scripts/sync/litestream/setup_remote_litestream.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # Setup script for Litestream master on remote server (narrowbox.local)
 3 | 
 4 | set -e
 5 | 
 6 | echo "🔧 Setting up Litestream master on remote server..."
 7 | 
 8 | # Install Litestream
 9 | echo "📦 Installing Litestream..."
10 | curl -LsS https://github.com/benbjohnson/litestream/releases/latest/download/litestream-linux-amd64.tar.gz | tar -xzf -
11 | sudo mv litestream /usr/local/bin/
12 | sudo chmod +x /usr/local/bin/litestream
13 | 
14 | # Create directories
15 | echo "📁 Creating directories..."
16 | sudo mkdir -p /var/www/litestream/mcp-memory
17 | sudo mkdir -p /backup/litestream/mcp-memory
18 | 
19 | # Set permissions
20 | sudo chown -R www-data:www-data /var/www/litestream
21 | sudo chmod -R 755 /var/www/litestream
22 | 
23 | # Copy configuration
24 | echo "⚙️ Installing Litestream configuration..."
25 | sudo cp litestream_master_config.yml /etc/litestream.yml
26 | 
27 | # Install systemd services
28 | echo "🚀 Installing systemd services..."
29 | sudo cp litestream.service /etc/systemd/system/
30 | sudo cp litestream-http.service /etc/systemd/system/
31 | 
32 | # Reload systemd and enable services
33 | sudo systemctl daemon-reload
34 | sudo systemctl enable litestream.service
35 | sudo systemctl enable litestream-http.service
36 | 
37 | echo "✅ Remote Litestream setup completed!"
38 | echo ""
39 | echo "Next steps:"
40 | echo "1. Start services: sudo systemctl start litestream litestream-http"
41 | echo "2. Check status: sudo systemctl status litestream litestream-http"
42 | echo "3. Verify HTTP endpoint: curl http://localhost:8080/mcp-memory/"
```

--------------------------------------------------------------------------------
/tools/docker/docker-compose.yml:
--------------------------------------------------------------------------------

```yaml
 1 | version: '3.8'
 2 | 
 3 | # Docker Compose configuration for MCP protocol mode
 4 | # For use with MCP clients (Claude Desktop, VS Code extension, etc.)
 5 | # For HTTP/API mode, use docker-compose.http.yml instead
 6 | 
 7 | services:
 8 |   mcp-memory-service:
 9 |     build:
10 |       context: ../..
11 |       dockerfile: tools/docker/Dockerfile
12 |     
13 |     # Required for MCP protocol communication
14 |     stdin_open: true
15 |     tty: true
16 |     
17 |     volumes:
18 |       # Single data directory for all storage
19 |       - ./data:/app/data
20 | 
21 |       # Model cache (prevents re-downloading models on each restart)
22 |       # Uncomment the following line to persist Hugging Face models
23 |       # - ${HOME}/.cache/huggingface:/root/.cache/huggingface
24 |     
25 |     environment:
26 |       # Mode selection
27 |       - MCP_MODE=mcp
28 |       
29 |       # Storage configuration
30 |       - MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
31 |       - MCP_MEMORY_SQLITE_PATH=/app/data/sqlite_vec.db
32 |       - MCP_MEMORY_BACKUPS_PATH=/app/data/backups
33 |       
34 |       # Performance tuning
35 |       - LOG_LEVEL=${LOG_LEVEL:-INFO}
36 |       - MAX_RESULTS_PER_QUERY=10
37 |       - SIMILARITY_THRESHOLD=0.7
38 |       
39 |       # Python configuration
40 |       - PYTHONUNBUFFERED=1
41 |       - PYTHONPATH=/app/src
42 | 
43 |       # Offline mode (uncomment if models are pre-cached and network is restricted)
44 |       # - HF_HUB_OFFLINE=1
45 |       # - TRANSFORMERS_OFFLINE=1
46 |     
47 |     # Use the unified entrypoint
48 |     entrypoint: ["/usr/local/bin/docker-entrypoint-unified.sh"]
49 |     
50 |     restart: unless-stopped
```
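
Typical usage from the repository root, assuming the Compose v2 CLI:

```bash
# Build the image and start the service in MCP protocol mode
docker compose -f tools/docker/docker-compose.yml build
docker compose -f tools/docker/docker-compose.yml up -d

# Follow logs to confirm the server is waiting for an MCP client
docker compose -f tools/docker/docker-compose.yml logs -f mcp-memory-service
```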

--------------------------------------------------------------------------------
/scripts/testing/test-connection.py:
--------------------------------------------------------------------------------

```python
 1 | # Copyright 2024 Heinrich Krupp
 2 | #
 3 | # Licensed under the Apache License, Version 2.0 (the "License");
 4 | # you may not use this file except in compliance with the License.
 5 | # You may obtain a copy of the License at
 6 | #
 7 | #     http://www.apache.org/licenses/LICENSE-2.0
 8 | #
 9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | 
15 | from chromadb import HttpClient
16 | 
17 | def test_connection(port=8000):
18 |     try:
19 |         # Try to connect to local ChromaDB
20 |         client = HttpClient(host='localhost', port=port)
21 |         # Try a simple operation
22 |         heartbeat = client.heartbeat()
23 |         print(f"Successfully connected to ChromaDB on port {port}")
24 |         print(f"Heartbeat: {heartbeat}")
25 |         
26 |         # List collections
27 |         collections = client.list_collections()
28 |         print("\nFound collections:")
29 |         for collection in collections:
30 |             print(f"- {collection.name} (count: {collection.count()})")
31 |         
32 |     except Exception as e:
33 |         print(f"Error connecting to ChromaDB on port {port}: {str(e)}")
34 | 
35 | if __name__ == "__main__":
36 |     # Try default port
37 |     test_connection()
38 |     
39 |     # If the above fails, you might want to try other common ports:
40 |     # test_connection(8080)
41 |     # test_connection(9000)
42 | 
```

--------------------------------------------------------------------------------
/docs/ROADMAP.md:
--------------------------------------------------------------------------------

```markdown
 1 | # Development Roadmap
 2 | 
 3 | **The official roadmap has moved to the Wiki for easier maintenance and community collaboration.**
 4 | 
 5 | 📖 **[View Development Roadmap on Wiki](https://github.com/doobidoo/mcp-memory-service/wiki/13-Development-Roadmap)**
 6 | 
 7 | The Wiki version includes:
 8 | - ✅ Completed milestones (v8.0-v8.38)
 9 | - 🎯 Current focus (v8.39-v9.0 - Q1 2026)
10 | - 🚀 Future enhancements (Q2 2026+)
11 | - 🎯 Medium term vision (Q3-Q4 2026)
12 | - 🌟 Long-term aspirations (2027+)
13 | - 📊 Success metrics and KPIs
14 | - 🤝 Community contribution opportunities
15 | 
16 | ## Why the Wiki?
17 | 
18 | The Wiki provides several advantages for roadmap documentation:
19 | - ✅ **Easier Updates**: No PR required for roadmap changes
20 | - ✅ **Better Navigation**: Integrated with other wiki guides
21 | - ✅ **Community Collaboration**: Lower barrier for community input
22 | - ✅ **Rich Formatting**: Enhanced markdown features
23 | - ✅ **Cleaner Repository**: Reduces noise in commit history
24 | 
25 | ## For Active Development Tracking
26 | 
27 | The roadmap on the Wiki tracks strategic direction. For day-to-day development:
28 | 
29 | - **[GitHub Projects](https://github.com/doobidoo/mcp-memory-service/projects)** - Sprint planning and task boards
30 | - **[Open Issues](https://github.com/doobidoo/mcp-memory-service/issues)** - Bug reports and feature requests
31 | - **[Pull Requests](https://github.com/doobidoo/mcp-memory-service/pulls)** - Active code changes
32 | - **[CHANGELOG.md](../CHANGELOG.md)** - Release history and completed features
33 | 
34 | ---
35 | 
36 | **Maintainer**: @doobidoo
37 | **Last Updated**: November 26, 2025
38 | 
```

--------------------------------------------------------------------------------
/scripts/ci/check_dockerfile_args.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # Check for unused ARG declarations in Dockerfiles
 3 | # Prevents issues like #313 where unused ARGs caused confusion
 4 | 
 5 | set -e
 6 | 
 7 | DOCKERFILES=(
 8 |     "tools/docker/Dockerfile"
 9 |     "tools/docker/Dockerfile.slim"
10 | )
11 | 
12 | EXIT_CODE=0
13 | 
14 | echo "🔍 Checking for unused Docker ARGs..."
15 | echo ""
16 | 
17 | for dockerfile in "${DOCKERFILES[@]}"; do
18 |     if [[ ! -f "$dockerfile" ]]; then
19 |         echo "⚠️  Skipping $dockerfile (not found)"
20 |         continue
21 |     fi
22 | 
23 |     echo "📄 Checking $dockerfile"
24 | 
25 |     # Extract ARG names (excluding built-in TARGETPLATFORM, BUILDPLATFORM, etc.)
26 |     args=$(grep -oP '(?<=^ARG )\w+' "$dockerfile" 2>/dev/null || true)
27 | 
28 |     for arg in $args; do
29 |         # Skip built-in Docker ARGs that are auto-populated
30 |         case "$arg" in
31 |             TARGETPLATFORM|BUILDPLATFORM|TARGETOS|TARGETARCH|TARGETVARIANT)
32 |                 continue
33 |                 ;;
34 |         esac
35 | 
36 |         # Check if ARG is used anywhere (as $ARG or ${ARG} or ${ARG:-default})
37 |         if ! grep -qE "(\\\$$arg|\\$\\{$arg[}:])" "$dockerfile"; then
38 |             echo "   ❌ Unused ARG: $arg"
39 |             EXIT_CODE=1
40 |         else
41 |             echo "   ✅ Used ARG: $arg"
42 |         fi
43 |     done
44 |     echo ""
45 | done
46 | 
47 | if [[ $EXIT_CODE -eq 0 ]]; then
48 |     echo "✅ All Docker ARGs are used correctly"
49 | else
50 |     echo "❌ Found unused Docker ARGs - please remove them or use them"
51 |     echo ""
52 |     echo "Note: Unused ARGs can cause confusion and build issues."
53 |     echo "See Issue #313 for an example where unused PLATFORM arg"
54 |     echo "caused Apple Silicon builds to fail."
55 | fi
56 | 
57 | exit $EXIT_CODE
58 | 
```

--------------------------------------------------------------------------------
/scripts/installation/setup_claude_mcp.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # Setup script for Claude Code MCP configuration
 3 | 
 4 | echo "🔧 Setting up MCP Memory Service for Claude Code..."
 5 | echo "=================================================="
 6 | 
 7 | # Get the absolute path to the repository
 8 | REPO_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"  # script lives in scripts/installation/
 9 | VENV_PYTHON="$REPO_PATH/venv/bin/python"
10 | 
11 | echo "Repository path: $REPO_PATH"
12 | echo "Python path: $VENV_PYTHON"
13 | 
14 | # Check if virtual environment exists
15 | if [ ! -f "$VENV_PYTHON" ]; then
16 |     echo "❌ Virtual environment not found at: $VENV_PYTHON"
17 |     echo "Please run: python -m venv venv && source venv/bin/activate && pip install -r requirements.txt"
18 |     exit 1
19 | fi
20 | 
21 | # Create MCP configuration
22 | cat > "$REPO_PATH/mcp_server_config.json" << EOF
23 | {
24 |   "mcpServers": {
25 |     "memory": {
26 |       "command": "$VENV_PYTHON",
27 |       "args": ["-m", "src.mcp_memory_service.server"],
28 |       "cwd": "$REPO_PATH",
29 |       "env": {
30 |         "MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec",
31 |         "PYTHONPATH": "$REPO_PATH/src"
32 |       }
33 |     }
34 |   }
35 | }
36 | EOF
37 | 
38 | echo "✅ Created MCP configuration: $REPO_PATH/mcp_server_config.json"
39 | echo ""
40 | echo "📋 Manual Configuration Steps:"
41 | echo "1. Copy the configuration below"
42 | echo "2. Add it to your Claude Code MCP settings"
43 | echo ""
44 | echo "Configuration to add:"
45 | echo "====================="
46 | cat "$REPO_PATH/mcp_server_config.json"
47 | echo ""
48 | echo "🚀 Alternative: Start server manually and use Claude Code normally"
49 | echo "   cd $REPO_PATH"
50 | echo "   source venv/bin/activate"
51 | echo "   export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec"
52 | echo "   python -m src.mcp_memory_service.server"
```

--------------------------------------------------------------------------------
/.claude/directives/version-management.md:
--------------------------------------------------------------------------------

```markdown
 1 | # Version Management - Release Workflow
 2 | 
 3 | ## ⚠️ CRITICAL: Always Use github-release-manager Agent
 4 | 
 5 | **NEVER do manual releases** (major, minor, patch, or hotfixes). Manual workflows miss steps and are error-prone.
 6 | 
 7 | ## Four-File Version Bump Procedure
 8 | 
 9 | 1. Update `src/mcp_memory_service/__init__.py` (line 50: `__version__ = "X.Y.Z"`)
10 | 2. Update `pyproject.toml` (line 7: `version = "X.Y.Z"`)
11 | 3. Update `README.md` (line 19: Latest Release section)
12 | 4. Run `uv lock` to update dependency lock file
13 | 5. Commit all four files together
14 | 
15 | ## Release Workflow
16 | 
17 | ```bash
18 | # ALWAYS use the agent
19 | @agent github-release-manager "Check if we need a release"
20 | @agent github-release-manager "Create release for v8.20.0"
21 | ```
22 | 
23 | **Agent ensures:**
24 | - README.md updates
25 | - GitHub Release creation
26 | - Proper issue tracking
27 | - CHANGELOG.md formatting
28 | - Workflow verification (Docker Publish, HTTP-MCP Bridge)
29 | 
30 | ## Hotfix Workflow (Critical Bugs)
31 | 
32 | - **Speed target**: 8-10 minutes from bug report to release (achievable with AI assistance)
33 | - **Process**: Fix → Test → Four-file bump → Commit → github-release-manager agent
34 | - **Issue management**: Post detailed root cause analysis, don't close until user confirms fix works
35 | - **Example**: v8.20.1 (8 minutes: bug report → fix → release → user notification)
36 | 
37 | ## Why Agent-First?
38 | 
39 | **Manual v8.20.1** (❌):
40 | - Forgot README.md update
41 | - Incomplete GitHub Release
42 | - Missed workflow verification
43 | 
44 | **With agent v8.20.1** (✅):
45 | - All files updated
46 | - Proper release created
47 | - Complete documentation
48 | 
49 | **Lesson**: Always use agents, even for "simple" hotfixes
50 | 
```
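
For orientation only, a hypothetical bash sketch of the four-file bump; the sed patterns assume the exact line formats quoted above and GNU sed, and the agent workflow remains the supported path:

```bash
NEW="8.21.0"  # placeholder version

# Files 1-2: version strings (patterns must match the documented lines)
sed -i 's/^__version__ = ".*"/__version__ = "'"$NEW"'"/' src/mcp_memory_service/__init__.py
sed -i '0,/^version = /s/^version = ".*"/version = "'"$NEW"'"/' pyproject.toml

# File 3: README.md "Latest Release" section is prose - edit by hand
# File 4: refresh the dependency lock
uv lock

git add src/mcp_memory_service/__init__.py pyproject.toml README.md uv.lock
git commit -m "chore: bump version to v$NEW"
```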

--------------------------------------------------------------------------------
/scripts/quality/rescore_deberta.py:
--------------------------------------------------------------------------------

```python
 1 | #!/usr/bin/env python3
 2 | """Re-score all DeBERTa memories with corrected model."""
 3 | import asyncio
 4 | import sys
 5 | from pathlib import Path
 6 | 
 7 | sys.path.insert(0, str(Path(__file__).parent.parent.parent))
 8 | 
 9 | # Use SQLite directly to avoid Cloudflare network timeouts
10 | from mcp_memory_service.storage.sqlite_vec import SqliteVecMemoryStorage
11 | from mcp_memory_service.config import SQLITE_VEC_PATH
12 | from mcp_memory_service.quality.onnx_ranker import get_onnx_ranker_model
13 | 
14 | async def rescore():
15 |     print("Loading DeBERTa...")
16 |     deberta = get_onnx_ranker_model('nvidia-quality-classifier-deberta', 'auto')
17 | 
18 |     print("Connecting to storage (SQLite-vec only, no network)...")
19 |     storage = SqliteVecMemoryStorage(SQLITE_VEC_PATH)
20 |     await storage.initialize()
21 | 
22 |     print("Fetching memories...")
23 |     all_memories = await storage.get_all_memories()
24 | 
25 |     to_rescore = [m for m in all_memories
26 |                   if m.metadata and m.metadata.get('quality_provider') == 'onnx_deberta']
27 | 
28 |     print(f"Re-scoring {len(to_rescore)} memories...")
29 | 
30 |     for i, m in enumerate(to_rescore, 1):
31 |         new_score = deberta.score_quality("", m.content)
32 |         await storage.update_memory_metadata(
33 |             content_hash=m.content_hash,
34 |             updates={'quality_score': new_score}
35 |         )
36 |         if i % 100 == 0:
37 |             print(f"  [{i:5d}/{len(to_rescore)}] Score: {new_score:.3f}")
38 | 
39 |     print(f"\n✓ Re-scored {len(to_rescore)} memories")
40 |     print("Note: Changes saved to SQLite. Hybrid backend will sync to Cloudflare automatically.")
41 | 
42 | if __name__ == "__main__":
43 |     asyncio.run(rescore())
44 | 
```

--------------------------------------------------------------------------------
/scripts/run_memory_server.py:
--------------------------------------------------------------------------------

```python
 1 | #!/usr/bin/env python3
 2 | """
 3 | Backward compatibility redirect to new location (v6.17.0+).
 4 | 
 5 | This stub ensures existing Claude Desktop configurations continue working
 6 | after the v6.17.0 script reorganization. The actual script has moved to
 7 | scripts/server/run_memory_server.py.
 8 | 
 9 | For best stability, consider using one of these approaches instead:
10 | 1. python -m mcp_memory_service.server (recommended)
11 | 2. uv run memory server
12 | 3. scripts/server/run_memory_server.py (direct path)
13 | """
14 | import sys
15 | import os
16 | 
17 | # Add informational notice (not a warning to avoid alarming users)
18 | print("[INFO] Note: scripts/run_memory_server.py has moved to scripts/server/run_memory_server.py", file=sys.stderr)
19 | print("[INFO] Consider using 'python -m mcp_memory_service.server' for better stability", file=sys.stderr)
20 | print("[INFO] See https://github.com/doobidoo/mcp-memory-service for migration guide", file=sys.stderr)
21 | 
22 | # Execute the relocated script
23 | script_dir = os.path.dirname(os.path.abspath(__file__))
24 | new_script = os.path.join(script_dir, "server", "run_memory_server.py")
25 | 
26 | if os.path.exists(new_script):
27 |     # Preserve the original __file__ context for the new script
28 |     global_vars = {
29 |         '__file__': new_script,
30 |         '__name__': '__main__',
31 |         'sys': sys,
32 |         'os': os
33 |     }
34 | 
35 |     with open(new_script, 'r', encoding='utf-8') as f:
36 |         exec(compile(f.read(), new_script, 'exec'), global_vars)
37 | else:
38 |     print(f"[ERROR] Could not find {new_script}", file=sys.stderr)
39 |     print("[ERROR] Please ensure you have the complete mcp-memory-service repository", file=sys.stderr)
40 |     sys.exit(1)
```

--------------------------------------------------------------------------------
/src/mcp_memory_service/ingestion/__init__.py:
--------------------------------------------------------------------------------

```python
 1 | # Copyright 2024 Heinrich Krupp
 2 | #
 3 | # Licensed under the Apache License, Version 2.0 (the "License");
 4 | # you may not use this file except in compliance with the License.
 5 | # You may obtain a copy of the License at
 6 | #
 7 | #     http://www.apache.org/licenses/LICENSE-2.0
 8 | #
 9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | 
15 | """
16 | Document Ingestion Module
17 | 
18 | Provides functionality to side-load documents into the memory database,
19 | supporting multiple formats including PDF, text, and structured data.
20 | 
21 | This module enables users to pre-populate the vector database with
22 | documentation, knowledge bases, and other content for semantic retrieval.
23 | """
24 | 
25 | from .base import DocumentLoader, DocumentChunk, IngestionResult
26 | from .chunker import TextChunker
27 | from .registry import get_loader_for_file, register_loader, SUPPORTED_FORMATS, is_supported_file
28 | 
29 | # Import loaders to trigger registration
30 | # Order matters! Import SemtoolsLoader first, then specialized loaders
31 | # This allows specialized loaders to override if semtools is unavailable
32 | from . import text_loader
33 | from . import semtools_loader
34 | from . import pdf_loader
35 | from . import json_loader
36 | from . import csv_loader
37 | 
38 | __all__ = [
39 |     'DocumentLoader',
40 |     'DocumentChunk', 
41 |     'IngestionResult',
42 |     'TextChunker',
43 |     'get_loader_for_file',
44 |     'register_loader',
45 |     'SUPPORTED_FORMATS',
46 |     'is_supported_file'
47 | ]
```
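
A quick way to see the import-time registration in effect; a sketch whose call signatures are assumptions (the registry helpers are only named, not specified, above):

```bash
python - <<'PY'
from mcp_memory_service.ingestion import SUPPORTED_FORMATS, get_loader_for_file

# Registered formats after the loader imports above have run
print("supported:", sorted(SUPPORTED_FORMATS))
# Assumed to accept a file path and return the matching loader (or None)
print("pdf loader:", get_loader_for_file("example.pdf"))
PY
```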

--------------------------------------------------------------------------------
/scripts/run/start_sqlite_vec.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # Quick start script for MCP Memory Service with SQLite-vec backend
 3 | 
 4 | echo "🚀 Starting MCP Memory Service with SQLite-vec backend..."
 5 | echo "=================================================="
 6 | 
 7 | # Check if virtual environment exists
 8 | if [ ! -d "venv" ]; then
 9 |     echo "❌ Virtual environment not found. Please run setup first."
10 |     exit 1
11 | fi
12 | 
13 | # Activate virtual environment
14 | echo "📦 Activating virtual environment..."
15 | source venv/bin/activate
16 | 
17 | # Set SQLite-vec backend
18 | echo "🔧 Configuring SQLite-vec backend..."
19 | export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
20 | 
21 | # Display configuration
22 | echo "✅ Configuration:"
23 | echo "   Backend: $MCP_MEMORY_STORAGE_BACKEND"
24 | echo "   Database: ~/.local/share/mcp-memory/sqlite_vec.db"
25 | echo "   Python: $(which python)"
26 | 
27 | # Check key dependencies
28 | echo ""
29 | echo "🧪 Checking dependencies..."
30 | python -c "
31 | import sqlite_vec
32 | import sentence_transformers
33 | import mcp
34 | print('   ✅ sqlite-vec available')
35 | print('   ✅ sentence-transformers available') 
36 | print('   ✅ mcp available')
37 | "
38 | 
39 | echo ""
40 | echo "🎯 Ready! The MCP Memory Service is configured for sqlite-vec."
41 | echo ""
42 | echo "To start the server:"
43 | echo "   python -m src.mcp_memory_service.server"
44 | echo ""
45 | echo "🧪 Testing server startup..."
46 | timeout 3 python -m src.mcp_memory_service.server 2>/dev/null; [ $? -eq 124 ] && echo "✅ Server can start successfully!" || echo "⚠️  Server exited early - check configuration"
47 | echo ""
48 | echo "For Claude Code integration:"
49 | echo "   - The service will automatically use sqlite-vec"
50 | echo "   - Memory database: ~/.local/share/mcp-memory/sqlite_vec.db" 
51 | echo "   - 75% less memory usage vs ChromaDB"
52 | echo ""
53 | echo "To test the setup:"
54 | echo "   python simple_sqlite_vec_test.py"
```

--------------------------------------------------------------------------------
/claude-hooks/debug-pattern-test.js:
--------------------------------------------------------------------------------

```javascript
 1 | #!/usr/bin/env node
 2 | 
 3 | /**
 4 |  * Debug Pattern Detection
 5 |  */
 6 | 
 7 | const { AdaptivePatternDetector } = require('./utilities/adaptive-pattern-detector');
 8 | 
 9 | async function debugPatternDetection() {
10 |     console.log('🔍 Debugging Pattern Detection');
11 |     console.log('═'.repeat(50));
12 | 
13 |     const detector = new AdaptivePatternDetector({ sensitivity: 0.7 });
14 | 
15 |     const testMessage = "What did we decide about the authentication approach?";
16 |     console.log(`\nTesting message: "${testMessage}"`);
17 | 
18 |     const result = await detector.detectPatterns(testMessage);
19 | 
20 |     console.log('\nResults:');
21 |     console.log('- Matches found:', result.matches.length);
22 |     console.log('- Confidence:', result.confidence);
23 |     console.log('- Processing tier:', result.processingTier);
24 |     console.log('- Trigger recommendation:', result.triggerRecommendation);
25 | 
26 |     if (result.matches.length > 0) {
27 |         console.log('\nMatches:');
28 |         result.matches.forEach((match, i) => {
29 |             console.log(`  ${i + 1}. Category: ${match.category}`);
30 |             console.log(`     Pattern: ${match.pattern}`);
31 |             console.log(`     Confidence: ${match.confidence}`);
32 |             console.log(`     Type: ${match.type}`);
33 |         });
34 |     }
35 | 
36 |     // Test the instant patterns directly
37 |     console.log('\n🔍 Testing Instant Patterns Directly');
38 |     const instantMatches = detector.detectInstantPatterns(testMessage);
39 |     console.log('Instant matches:', instantMatches.length);
40 |     instantMatches.forEach((match, i) => {
41 |         console.log(`  ${i + 1}. ${match.category}: ${match.confidence}`);
42 |     });
43 | }
44 | 
45 | debugPatternDetection().catch(console.error);
```

--------------------------------------------------------------------------------
/docs/development/todo-tracker.md:
--------------------------------------------------------------------------------

```markdown
 1 | # TODO Tracker
 2 | 
 3 | **Last Updated:** 2025-11-08 10:25:25
 4 | **Scan Directory:** src
 5 | **Total TODOs:** 5
 6 | 
 7 | ## Summary
 8 | 
 9 | | Priority | Count | Description |
10 | |----------|-------|-------------|
11 | | CRITICAL (P0) | 1 | Security, data corruption, blocking bugs |
12 | | HIGH (P1) | 2 | Performance, user-facing, incomplete features |
13 | | MEDIUM (P2) | 2 | Code quality, optimizations, technical debt |
14 | | LOW (P3) | 0 | Documentation, cosmetic, nice-to-haves |
15 | 
16 | 
17 | ---
18 | 
19 | ## CRITICAL (P0)
20 | - `src/mcp_memory_service/web/api/analytics.py:625` - Period filtering is not implemented, leading to incorrect analytics data.
21 | 
22 | ## HIGH (P1)
23 | - `src/mcp_memory_service/storage/cloudflare.py:185` - Lack of a fallback for embedding generation makes the service vulnerable to external API failures.
24 | - `src/mcp_memory_service/web/api/manage.py:231` - Inefficient queries can cause significant performance bottlenecks, especially with large datasets.
25 | 
26 | ## MEDIUM (P2)
27 | - `src/mcp_memory_service/web/api/documents.py:592` - Using a deprecated FastAPI event handler; should be migrated to the modern `lifespan` context manager to reduce technical debt.
28 | - `src/mcp_memory_service/web/api/analytics.py:213` - The `storage.get_stats()` method is missing a data point, leading to API inconsistency.
29 | 
30 | ## LOW (P3)
31 | *(None in this list)*
32 | 
33 | ---
34 | 
35 | ## How to Address
36 | 
37 | 1. **CRITICAL**: Address immediately, block releases if necessary
38 | 2. **HIGH**: Schedule for current/next sprint
39 | 3. **MEDIUM**: Add to backlog, address in refactoring sprints
40 | 4. **LOW**: Address opportunistically or when touching related code
41 | 
42 | ## Updating This Tracker
43 | 
44 | Run: `bash scripts/maintenance/scan_todos.sh`
45 | 
```

--------------------------------------------------------------------------------
/scripts/backup/backup_sqlite_vec.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # SQLite-vec Database Backup Script
 3 | # Creates timestamped backups of the SQLite-vec database
 4 | 
 5 | set -e
 6 | 
 7 | # Configuration
 8 | MEMORY_DIR="${MCP_MEMORY_BASE_DIR:-$HOME/.local/share/mcp-memory}"
 9 | BACKUP_DIR="$MEMORY_DIR/backups"
10 | DATABASE_FILE="$MEMORY_DIR/sqlite_vec.db"
11 | TIMESTAMP=$(date +%Y%m%d_%H%M%S)
12 | BACKUP_NAME="sqlite_backup_$TIMESTAMP"
13 | BACKUP_PATH="$BACKUP_DIR/$BACKUP_NAME"
14 | 
15 | # Check if database exists
16 | if [[ ! -f "$DATABASE_FILE" ]]; then
17 |     echo "Error: SQLite database not found at $DATABASE_FILE"
18 |     exit 1
19 | fi
20 | 
21 | # Create backup directory
22 | mkdir -p "$BACKUP_PATH"
23 | 
24 | # Copy database files (main, WAL, and SHM files)
25 | echo "Creating backup: $BACKUP_NAME"
26 | cp "$DATABASE_FILE" "$BACKUP_PATH/" 2>/dev/null || true
27 | cp "${DATABASE_FILE}-wal" "$BACKUP_PATH/" 2>/dev/null || true
28 | cp "${DATABASE_FILE}-shm" "$BACKUP_PATH/" 2>/dev/null || true
29 | 
30 | # Get backup size
31 | BACKUP_SIZE=$(du -sh "$BACKUP_PATH" | cut -f1)
32 | 
33 | # Count files backed up
34 | FILE_COUNT=$(find "$BACKUP_PATH" -type f | wc -l)
35 | 
36 | # Create backup metadata
37 | cat > "$BACKUP_PATH/backup_info.json" << EOF
38 | {
39 |   "backup_name": "$BACKUP_NAME",
40 |   "timestamp": "$TIMESTAMP",
41 |   "source_database": "$DATABASE_FILE",
42 |   "backup_path": "$BACKUP_PATH",
43 |   "backup_size": "$BACKUP_SIZE",
44 |   "files_count": $FILE_COUNT,
45 |   "backend": "sqlite_vec",
46 |   "created_at": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
47 | }
48 | EOF
49 | 
50 | echo "Backup completed successfully:"
51 | echo "  Name: $BACKUP_NAME"
52 | echo "  Path: $BACKUP_PATH"
53 | echo "  Size: $BACKUP_SIZE"
54 | echo "  Files: $FILE_COUNT"
55 | 
56 | # Cleanup old backups (keep last 7 days)
57 | find "$BACKUP_DIR" -name "sqlite_backup_*" -type d -mtime +7 -exec rm -rf {} \; 2>/dev/null || true
58 | 
59 | exit 0
```
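
Restoring is the mirror image; a hypothetical sketch (the backup name is a placeholder, and the MCP server should be stopped before copying):

```bash
MEMORY_DIR="${MCP_MEMORY_BASE_DIR:-$HOME/.local/share/mcp-memory}"
BACKUP="$MEMORY_DIR/backups/sqlite_backup_YYYYMMDD_HHMMSS"  # pick a real backup dir

# Copy back the main database plus any WAL/SHM files the backup captured
cp "$BACKUP"/sqlite_vec.db* "$MEMORY_DIR/"
```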

--------------------------------------------------------------------------------
/.metrics/baseline_cc_install_hooks.txt:
--------------------------------------------------------------------------------

```
 1 | claude-hooks/install_hooks.py
 2 |     F 1225:0 main - F (42)
 3 |     M 857:4 HookInstaller.configure_claude_settings - D (30)
 4 |     M 1038:4 HookInstaller.run_tests - C (17)
 5 |     M 595:4 HookInstaller.install_basic_hooks - C (15)
 6 |     M 1124:4 HookInstaller._cleanup_empty_directories - C (14)
 7 |     M 238:4 HookInstaller._parse_mcp_get_output - C (12)
 8 |     M 351:4 HookInstaller.validate_mcp_prerequisites - C (12)
 9 |     M 783:4 HookInstaller.install_configuration - C (11)
10 |     M 198:4 HookInstaller.detect_claude_mcp_configuration - B (9)
11 |     M 721:4 HookInstaller.install_natural_triggers - B (9)
12 |     C 77:0 HookInstaller - B (7)
13 |     M 151:4 HookInstaller.check_prerequisites - B (7)
14 |     F 44:0 get_project_version - B (6)
15 |     M 284:4 HookInstaller._detect_python_path - B (6)
16 |     M 1169:4 HookInstaller.uninstall - B (6)
17 |     M 679:4 HookInstaller.install_auto_capture - A (5)
18 |     M 90:4 HookInstaller._detect_claude_hooks_directory - A (4)
19 |     M 574:4 HookInstaller.create_backup - A (4)
20 |     M 270:4 HookInstaller.detect_environment_type - A (3)
21 |     M 387:4 HookInstaller.generate_hooks_config_from_mcp - A (2)
22 |     C 67:0 Colors - A (1)
23 |     M 84:4 HookInstaller.__init__ - A (1)
24 |     M 129:4 HookInstaller.info - A (1)
25 |     M 133:4 HookInstaller.warn - A (1)
26 |     M 137:4 HookInstaller.error - A (1)
27 |     M 141:4 HookInstaller.success - A (1)
28 |     M 145:4 HookInstaller.header - A (1)
29 |     M 314:4 HookInstaller.configure_protocol_for_environment - A (1)
30 |     M 457:4 HookInstaller.generate_basic_config - A (1)
31 |     M 522:4 HookInstaller.enhance_config_for_natural_triggers - A (1)
32 | 
33 | 30 blocks (classes, functions, methods) analyzed.
34 | Average complexity: B (7.7)
35 | 
```

--------------------------------------------------------------------------------
/docs/legacy/dual-protocol-hooks.md:
--------------------------------------------------------------------------------

```markdown
 1 | # Dual Protocol Memory Hooks (Legacy)
 2 | 
 3 | > **Note**: This feature has been superseded by Natural Memory Triggers v7.1.3+. This documentation is kept for reference only.
 4 | 
 5 | **Dual Protocol Memory Hooks** (v7.0.0+) provide intelligent memory awareness with automatic protocol detection:
 6 | 
 7 | ## Configuration
 8 | 
 9 | ```json
10 | {
11 |   "memoryService": {
12 |     "protocol": "auto",
13 |     "preferredProtocol": "mcp",
14 |     "fallbackEnabled": true,
15 |     "http": {
16 |       "endpoint": "https://localhost:8443",
17 |       "apiKey": "your-api-key",
18 |       "healthCheckTimeout": 3000,
19 |       "useDetailedHealthCheck": true
20 |     },
21 |     "mcp": {
22 |       "serverCommand": ["uv", "run", "memory", "server", "-s", "cloudflare"],
23 |       "serverWorkingDir": "/Users/yourname/path/to/mcp-memory-service",
24 |       "connectionTimeout": 5000,
25 |       "toolCallTimeout": 10000
26 |     }
27 |   }
28 | }
29 | ```
30 | 
31 | ## Protocol Options
32 | 
33 | - `"auto"`: Smart detection (MCP → HTTP → Environment fallback)
34 | - `"http"`: HTTP-only mode (web server at localhost:8443)
35 | - `"mcp"`: MCP-only mode (direct server process)
36 | 
37 | ## Benefits
38 | 
39 | - **Reliability**: Multiple connection methods ensure hooks always work
40 | - **Performance**: MCP direct for speed, HTTP for stability
41 | - **Flexibility**: Works with local development or remote deployments
42 | - **Compatibility**: Full backward compatibility with existing configurations
43 | 
44 | ## Migration to Natural Memory Triggers
45 | 
46 | If you're using Dual Protocol Hooks, consider migrating to Natural Memory Triggers v7.1.3+ which offers:
47 | - 85%+ trigger accuracy
48 | - Multi-tier performance optimization
49 | - CLI management system
50 | - Git-aware context integration
51 | - Adaptive learning
52 | 
53 | See main CLAUDE.md for migration instructions.
54 | 
```

--------------------------------------------------------------------------------
/.claude/directives/pr-workflow.md:
--------------------------------------------------------------------------------

```markdown
 1 | # PR Workflow - Mandatory Quality Checks
 2 | 
 3 | ## 🚦 Before Creating PR (CRITICAL)
 4 | 
 5 | **⚠️ MANDATORY**: Run quality checks BEFORE creating PR to prevent multi-iteration review cycles.
 6 | 
 7 | ### Recommended Workflow
 8 | 
 9 | ```bash
10 | # Step 1: Stage your changes
11 | git add .
12 | 
13 | # Step 2: Run comprehensive pre-PR check (MANDATORY)
14 | bash scripts/pr/pre_pr_check.sh
15 | 
16 | # Step 3: Only create PR if all checks pass
17 | gh pr create --fill
18 | 
19 | # Step 4: Request Gemini review
20 | gh pr comment <PR_NUMBER> --body "/gemini review"
21 | ```
22 | 
23 | ### What pre_pr_check.sh Does
24 | 
25 | 1. ✅ Runs `quality_gate.sh --staged --with-pyscn` (complexity ≤8, security scan, PEP 8)
26 | 2. ✅ Runs full test suite (`pytest tests/`)
27 | 3. ✅ Checks import ordering (PEP 8 compliance)
28 | 4. ✅ Detects debug code (print statements, breakpoints)
29 | 5. ✅ Validates docstring coverage
30 | 6. ✅ Reminds to use code-quality-guard agent
31 | 
32 | ### Manual Option (if script unavailable)
33 | 
34 | ```bash
35 | # Run quality gate
36 | bash scripts/pr/quality_gate.sh --staged --with-pyscn
37 | 
38 | # Run tests
39 | pytest tests/
40 | 
41 | # Use code-quality-guard agent
42 | @agent code-quality-guard "Analyze complexity and security for staged files"
43 | ```
44 | 
45 | ### Why This Matters
46 | 
47 | - **PR #280 lesson**: 7 review iterations, 20 issues found across 7 cycles
48 | - **Root cause**: Quality checks NOT run before PR creation
49 | - **Prevention**: Mandatory pre-PR script catches issues early
50 | - **Time saved**: ~30-60 min per PR vs multi-day review cycles
51 | 
52 | ### PR Template Checklist
53 | 
54 | See `.github/PULL_REQUEST_TEMPLATE.md` for complete checklist including:
55 | - [ ] Quality gate passed (complexity ≤8, no security issues)
56 | - [ ] All tests passing locally
57 | - [ ] Code-quality-guard agent used
58 | - [ ] Self-reviewed on GitHub diff
59 | 
```

--------------------------------------------------------------------------------
/src/mcp_memory_service/server/__main__.py:
--------------------------------------------------------------------------------

```python
 1 | # Copyright 2024 Heinrich Krupp
 2 | #
 3 | # Licensed under the Apache License, Version 2.0 (the "License");
 4 | # you may not use this file except in compliance with the License.
 5 | # You may obtain a copy of the License at
 6 | #
 7 | #     http://www.apache.org/licenses/LICENSE-2.0
 8 | #
 9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | 
15 | """
16 | Entry point for running the server package as a module.
17 | 
18 | Allows running the server with:
19 |     python -m mcp_memory_service.server [args]
20 | 
21 | This is required for backward compatibility with CI/CD workflows
22 | and Docker containers that use `python -m` invocation.
23 | """
24 | 
25 | import sys
26 | import argparse
27 | from . import main
28 | from .._version import __version__
29 | 
30 | 
31 | def run_with_args():
32 |     """Handle command-line arguments before starting server."""
33 |     # Simple argument parsing for --version and --help
34 |     parser = argparse.ArgumentParser(
35 |         prog='python -m mcp_memory_service.server',
36 |         description='MCP Memory Service - Model Context Protocol Server',
37 |         add_help=True
38 |     )
39 |     parser.add_argument(
40 |         '--version',
41 |         action='version',
42 |         version=f'%(prog)s {__version__}'
43 |     )
44 | 
45 |     # Parse known args to allow --version/--help while passing through other args
46 |     args, unknown = parser.parse_known_args()
47 | 
48 |     # If we get here, no --version or --help was provided
49 |     # Start the server normally
50 |     main()
51 | 
52 | 
53 | if __name__ == '__main__':
54 |     run_with_args()
55 | 
```

--------------------------------------------------------------------------------
/tools/docker/docker-entrypoint-persistent.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # Docker entrypoint script for MCP Memory Service - Persistent mode
 3 | # This script keeps the container running even when there's no active MCP client
 4 | 
 5 | set -e
 6 | 
 7 | echo "[INFO] Starting MCP Memory Service in Docker container (persistent mode)"
 8 | 
 9 | # Function to handle signals
10 | handle_signal() {
11 |     echo "[INFO] Received signal, shutting down..."
12 |     if [ -n "$SERVER_PID" ]; then
13 |         kill -TERM $SERVER_PID 2>/dev/null || true
14 |     fi
15 |     exit 0
16 | }
17 | 
18 | # Set up signal handlers
19 | trap handle_signal SIGTERM SIGINT
20 | 
21 | # Create named pipes for stdio communication
22 | FIFO_DIR="/tmp/mcp-memory-fifo"
23 | mkdir -p "$FIFO_DIR"
24 | STDIN_FIFO="$FIFO_DIR/stdin"
25 | STDOUT_FIFO="$FIFO_DIR/stdout"
26 | 
27 | # Remove old pipes if they exist
28 | rm -f "$STDIN_FIFO" "$STDOUT_FIFO"
29 | 
30 | # Create new named pipes
31 | mkfifo "$STDIN_FIFO"
32 | mkfifo "$STDOUT_FIFO"
33 | 
34 | echo "[INFO] Created named pipes for stdio communication"
35 | 
36 | # Start the server in the background with the named pipes
37 | if [ "${UV_ACTIVE}" = "1" ]; then
38 |     echo "[INFO] Running with UV wrapper (persistent mode)"
39 |     python -u uv_wrapper.py < "$STDIN_FIFO" > "$STDOUT_FIFO" 2>&1 &
40 | else
41 |     echo "[INFO] Running directly with Python (persistent mode)"
42 |     python -u -m mcp_memory_service.server < "$STDIN_FIFO" > "$STDOUT_FIFO" 2>&1 &
43 | fi
44 | 
45 | SERVER_PID=$!
46 | echo "[INFO] Server started with PID: $SERVER_PID"
47 | 
48 | # Keep the stdin pipe open to prevent the server from exiting
49 | exec 3> "$STDIN_FIFO"
50 | 
51 | # Monitor the server process
52 | while true; do
53 |     if ! kill -0 $SERVER_PID 2>/dev/null; then
54 |         echo "[ERROR] Server process exited unexpectedly"
55 |         exit 1
56 |     fi
57 |     
58 |     # Send a keep-alive message every 30 seconds
59 |     echo "" >&3
60 |     
61 |     sleep 30
62 | done
```

--------------------------------------------------------------------------------
/examples/claude_desktop_config_windows.json:
--------------------------------------------------------------------------------

```json
 1 | {
 2 |   "_comment": "Windows-specific MCP Memory Service configuration for Claude Desktop",
 3 |   "_instructions": [
 4 |     "Replace 'YOUR_USERNAME' with your actual Windows username",
 5 |     "Replace 'C:\\REPOSITORIES\\mcp-memory-service' with your actual repository path",
 6 |     "Supported backends: sqlite_vec, cloudflare, hybrid (ChromaDB removed in v8.0.0)"
 7 |   ],
 8 |   "mcpServers": {
 9 |     "memory": {
10 |       "command": "python", 
11 |       "args": [
12 |         "C:/REPOSITORIES/mcp-memory-service/scripts/memory_offline.py"
13 |       ],
14 |       "env": {
15 |         "PYTHONPATH": "C:/REPOSITORIES/mcp-memory-service",
16 |         "_comment_backend_choice": "Choose one of the backend configurations below",
17 |         
18 |         "_comment_sqlite_vec": "=== SQLite-vec Backend (Recommended for local storage) ===",
19 |         "MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec",
20 |         "MCP_MEMORY_SQLITE_PATH": "C:\\Users\\YOUR_USERNAME\\AppData\\Local\\mcp-memory\\memory_migrated.db",
21 |         "MCP_MEMORY_BACKUPS_PATH": "C:\\Users\\YOUR_USERNAME\\AppData\\Local\\mcp-memory\\backups",
22 |         
23 |         "_comment_offline": "=== Offline Mode Configuration (prevents PyTorch downloads) ===",
24 |         "HF_HOME": "C:\\Users\\YOUR_USERNAME\\.cache\\huggingface",
25 |         "TRANSFORMERS_CACHE": "C:\\Users\\YOUR_USERNAME\\.cache\\huggingface\\transformers",
26 |         "SENTENCE_TRANSFORMERS_HOME": "C:\\Users\\YOUR_USERNAME\\.cache\\torch\\sentence_transformers",
27 |         "HF_HUB_OFFLINE": "1",
28 |         "TRANSFORMERS_OFFLINE": "1",
29 |         
30 |         "_comment_performance": "=== Performance Settings ===",
31 |         "PYTORCH_ENABLE_MPS_FALLBACK": "1",
32 |         "PYTORCH_CUDA_ALLOC_CONF": "max_split_size_mb:128"
33 |       }
34 |     }
35 |   }
36 | }
```

--------------------------------------------------------------------------------
/scripts/testing/simple_test.py:
--------------------------------------------------------------------------------

```python
 1 | #!/usr/bin/env python3
 2 | """
 3 | Simple test to use Homebrew Python's sentence-transformers
 4 | """
 5 | import os
 6 | import sys
 7 | import subprocess
 8 | 
 9 | # Set environment variables for testing
10 | os.environ["MCP_MEMORY_STORAGE_BACKEND"] = "sqlite_vec"
11 | os.environ["MCP_MEMORY_SQLITE_PATH"] = os.path.expanduser("~/Library/Application Support/mcp-memory/sqlite_vec.db")
12 | os.environ["MCP_MEMORY_BACKUPS_PATH"] = os.path.expanduser("~/Library/Application Support/mcp-memory/backups")
13 | os.environ["MCP_MEMORY_USE_ONNX"] = "1"
14 | 
15 | # Get the Homebrew Python path
16 | result = subprocess.run(
17 |     ['brew', '--prefix', 'pytorch'],
18 |     capture_output=True,
19 |     text=True,
20 |     check=True
21 | )
22 | pytorch_prefix = result.stdout.strip()
23 | homebrew_python_path = f"{pytorch_prefix}/libexec/bin/python3"
24 | 
25 | print(f"Using Homebrew Python: {homebrew_python_path}")
26 | 
27 | # Run a simple test with the Homebrew Python
28 | test_script = """
29 | import torch
30 | import sentence_transformers
31 | import sys
32 | 
33 | print(f"Python: {sys.version}")
34 | print(f"PyTorch: {torch.__version__}")
35 | print(f"sentence-transformers: {sentence_transformers.__version__}")
36 | 
37 | # Load a model
38 | model = sentence_transformers.SentenceTransformer('paraphrase-MiniLM-L3-v2')
39 | print(f"Model loaded: {model}")
40 | 
41 | # Encode a test sentence
42 | test_text = "This is a test sentence for encoding with Homebrew PyTorch"
43 | embedding = model.encode([test_text])
44 | print(f"Embedding shape: {embedding.shape}")
45 | print("Test successful!")
46 | """
47 | 
48 | # Run the test with Homebrew Python
49 | result = subprocess.run(
50 |     [homebrew_python_path, "-c", test_script],
51 |     capture_output=True,
52 |     text=True
53 | )
54 | 
55 | print("=== STDOUT ===")
56 | print(result.stdout)
57 | 
58 | if result.stderr:
59 |     print("=== STDERR ===")
60 |     print(result.stderr)
61 | 
```

--------------------------------------------------------------------------------
/scripts/utils/test_groq_bridge.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # Test script for Groq bridge integration
 3 | # Demonstrates usage without requiring API key
 4 | 
 5 | set -e
 6 | 
 7 | echo "=== Groq Bridge Integration Test ==="
 8 | echo ""
 9 | 
10 | # Check if groq package is installed
11 | echo "1. Checking Python groq package..."
12 | if python3 -c "import groq" 2>/dev/null; then
13 |     echo "   ✓ groq package installed"
14 | else
15 |     echo "   ✗ groq package NOT installed"
16 |     echo ""
17 |     echo "To install: pip install groq"
18 |     echo "Or: uv pip install groq"
19 |     exit 1
20 | fi
21 | 
22 | # Check if API key is set
23 | echo ""
24 | echo "2. Checking GROQ_API_KEY environment variable..."
25 | if [ -z "$GROQ_API_KEY" ]; then
26 |     echo "   ✗ GROQ_API_KEY not set"
27 |     echo ""
28 |     echo "To set: export GROQ_API_KEY='your-api-key-here'"
29 |     echo "Get your API key from: https://console.groq.com/keys"
30 |     echo ""
31 |     echo "Skipping API test (would require valid key)"
32 | else
33 |     echo "   ✓ GROQ_API_KEY configured"
34 | 
35 |     # Test the bridge with a simple query
36 |     echo ""
37 |     echo "3. Testing Groq bridge with sample query..."
38 |     echo ""
39 | 
40 |     python3 scripts/utils/groq_agent_bridge.py \
41 |         "Rate the complexity of this Python function on a scale of 1-10: def add(a, b): return a + b" \
42 |         --json
43 | fi
44 | 
45 | echo ""
46 | echo "=== Integration Test Complete ==="
47 | echo ""
48 | echo "Usage examples:"
49 | echo ""
50 | echo "# Complexity analysis"
51 | echo "python scripts/utils/groq_agent_bridge.py \"Analyze complexity 1-10: \$(cat file.py)\""
52 | echo ""
53 | echo "# Security scan"
54 | echo "python scripts/utils/groq_agent_bridge.py \"Check for security issues: \$(cat file.py)\" --json"
55 | echo ""
56 | echo "# With custom model and temperature"
57 | echo "python scripts/utils/groq_agent_bridge.py \"Your prompt\" --model llama2-70b-4096 --temperature 0.3"
58 | 
```

--------------------------------------------------------------------------------
/tools/docker/DEPRECATED.md:
--------------------------------------------------------------------------------

```markdown
 1 | # Deprecated Docker Files
 2 | 
 3 | The following Docker files are deprecated as of v5.0.4 and will be removed in v6.0.0:
 4 | 
 5 | ## Deprecated Files
 6 | 
 7 | ### 1. `docker-compose.standalone.yml`
 8 | - **Replaced by**: `docker-compose.http.yml`
 9 | - **Reason**: Confusing name, mixed ChromaDB/SQLite configs, incorrect entrypoint for HTTP mode
10 | - **Migration**: Use `docker-compose.http.yml` for HTTP/API access
11 | 
12 | ### 2. `docker-compose.uv.yml`
13 | - **Replaced by**: UV is now built into the main Dockerfile
14 | - **Reason**: UV support should be in the image, not a separate compose file
15 | - **Migration**: UV is automatically available in all configurations
16 | 
17 | ### 3. `docker-compose.pythonpath.yml`
18 | - **Replaced by**: Fixed PYTHONPATH in main Dockerfile
19 | - **Reason**: PYTHONPATH fix belongs in Dockerfile, not compose variant
20 | - **Migration**: All compose files now have correct PYTHONPATH=/app/src
21 | 
22 | ### 4. `docker-entrypoint-persistent.sh`
23 | - **Replaced by**: `docker-entrypoint-unified.sh`
24 | - **Reason**: Overcomplicated, doesn't support HTTP mode, named pipes unnecessary
25 | - **Migration**: Use unified entrypoint with MCP_MODE environment variable
26 | 
27 | ## New Simplified Structure
28 | 
29 | Use one of these two configurations:
30 | 
31 | 1. **MCP Protocol Mode** (for Claude Desktop, VS Code):
32 |    ```bash
33 |    docker-compose up -d
34 |    ```
35 | 
36 | 2. **HTTP/API Mode** (for web access, REST API):
37 |    ```bash
38 |    docker-compose -f docker-compose.http.yml up -d
39 |    ```
40 | 
41 | ## Timeline
42 | 
43 | - **v5.0.4**: Files marked as deprecated, new structure introduced
44 | - **v5.1.0**: Warning messages added when using deprecated files
45 | - **v6.0.0**: Deprecated files removed
46 | 
47 | ## Credits
48 | 
49 | Thanks to Joe Esposito for identifying the Docker setup issues that led to this simplification.
```

--------------------------------------------------------------------------------
/src/mcp_memory_service/utils/hashing.py:
--------------------------------------------------------------------------------

```python
 1 | # Copyright 2024 Heinrich Krupp
 2 | #
 3 | # Licensed under the Apache License, Version 2.0 (the "License");
 4 | # you may not use this file except in compliance with the License.
 5 | # You may obtain a copy of the License at
 6 | #
 7 | #     http://www.apache.org/licenses/LICENSE-2.0
 8 | #
 9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | 
15 | import hashlib
16 | import json
17 | from typing import Any, Dict, Optional
18 | 
19 | def generate_content_hash(content: str, metadata: Optional[Dict[str, Any]] = None) -> str:
20 |     """
21 |     Generate a unique hash for content and metadata.
22 |     
23 |     This improved version ensures consistent hashing by:
24 |     1. Normalizing content (strip whitespace, lowercase)
25 |     2. Sorting metadata keys
26 |     3. Using a consistent JSON serialization
27 |     """
28 |     # Normalize content
29 |     normalized_content = content.strip().lower()
30 |     
31 |     # Create hash content with normalized content
32 |     hash_content = normalized_content
33 |     
34 |     # Add metadata if present
35 |     if metadata:
36 |         # Filter out timestamp and dynamic fields
37 |         static_metadata = {
38 |             k: v for k, v in metadata.items() 
39 |             if k not in ['timestamp', 'content_hash', 'embedding']
40 |         }
41 |         if static_metadata:
42 |             # Sort keys and use consistent JSON serialization
43 |             hash_content += json.dumps(static_metadata, sort_keys=True, ensure_ascii=True)
44 |     
45 |     # Generate hash
46 |     return hashlib.sha256(hash_content.encode('utf-8')).hexdigest()
```
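
The three normalization guarantees are easy to confirm: stripping/lowercasing, metadata key order, and exclusion of dynamic fields all leave the hash unchanged.

```bash
python - <<'PY'
from mcp_memory_service.utils.hashing import generate_content_hash

a = generate_content_hash("  Hello World  ", {"tags": "x", "source": "y"})
b = generate_content_hash("hello world", {"source": "y", "tags": "x", "timestamp": 123})
print(a == b)  # True: same normalized content, same static metadata
PY
```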

--------------------------------------------------------------------------------
/src/mcp_memory_service/consolidation/__init__.py:
--------------------------------------------------------------------------------

```python
 1 | # Copyright 2024 Heinrich Krupp
 2 | #
 3 | # Licensed under the Apache License, Version 2.0 (the "License");
 4 | # you may not use this file except in compliance with the License.
 5 | # You may obtain a copy of the License at
 6 | #
 7 | #     http://www.apache.org/licenses/LICENSE-2.0
 8 | #
 9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | 
15 | """
16 | Dream-inspired memory consolidation system.
17 | 
18 | This module implements autonomous memory consolidation inspired by human cognitive 
19 | processes during sleep cycles, featuring exponential decay scoring, creative 
20 | association discovery, semantic compression, and controlled forgetting.
21 | """
22 | 
23 | from .base import ConsolidationBase
24 | from .decay import ExponentialDecayCalculator
25 | from .associations import CreativeAssociationEngine
26 | from .clustering import SemanticClusteringEngine
27 | from .compression import SemanticCompressionEngine
28 | from .forgetting import ControlledForgettingEngine
29 | from .consolidator import DreamInspiredConsolidator
30 | from .scheduler import ConsolidationScheduler
31 | from .health import ConsolidationHealthMonitor, HealthStatus, HealthMetric, HealthAlert
32 | 
33 | __all__ = [
34 |     'ConsolidationBase',
35 |     'ExponentialDecayCalculator',
36 |     'CreativeAssociationEngine', 
37 |     'SemanticClusteringEngine',
38 |     'SemanticCompressionEngine',
39 |     'ControlledForgettingEngine',
40 |     'DreamInspiredConsolidator',
41 |     'ConsolidationScheduler',
42 |     'ConsolidationHealthMonitor',
43 |     'HealthStatus',
44 |     'HealthMetric',
45 |     'HealthAlert'
46 | ]
```

--------------------------------------------------------------------------------
/tools/docker/docker-entrypoint.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # Docker entrypoint script for MCP Memory Service
 3 | 
 4 | set -e
 5 | 
 6 | echo "[INFO] Starting MCP Memory Service in Docker container"
 7 | 
 8 | # Function to handle signals
 9 | handle_signal() {
10 |     echo "[INFO] Received signal, shutting down..."
11 |     if [ -n "$SERVER_PID" ]; then
12 |         kill -TERM $SERVER_PID 2>/dev/null || true
13 |     fi
14 |     exit 0
15 | }
16 | 
17 | # Set up signal handlers
18 | trap handle_signal SIGTERM SIGINT
19 | 
20 | # Function to keep stdin alive
21 | keep_stdin_alive() {
22 |     while true; do
23 |         # Send newline to stdin every 30 seconds to keep the pipe open
24 |         echo "" 2>/dev/null || break
25 |         sleep 30
26 |     done
27 | }
28 | 
29 | # Check if running in standalone mode
30 | if [ "${MCP_STANDALONE_MODE}" = "1" ]; then
31 |     echo "[INFO] Running in standalone mode"
32 |     exec /usr/local/bin/docker-entrypoint-persistent.sh "$@"
33 | fi
34 | 
35 | # Check if UV_ACTIVE is set
36 | if [ "${UV_ACTIVE}" = "1" ]; then
37 |     echo "[INFO] Running with UV wrapper"
38 |     # Start the keep-alive process in the background
39 |     keep_stdin_alive &
40 |     KEEPALIVE_PID=$!
41 |     
42 |     # Run the server
43 |     python -u uv_wrapper.py "$@" &
44 |     SERVER_PID=$!
45 |     
46 |     # Wait for the server process
47 |     wait $SERVER_PID
48 |     SERVER_EXIT_CODE=$?
49 |     
50 |     # Clean up the keep-alive process
51 |     kill $KEEPALIVE_PID 2>/dev/null || true
52 |     
53 |     exit $SERVER_EXIT_CODE
54 | else
55 |     echo "[INFO] Running directly with Python"
56 |     # Start the keep-alive process in the background
57 |     keep_stdin_alive &
58 |     KEEPALIVE_PID=$!
59 |     
60 |     # Run the server
61 |     python -u -m mcp_memory_service.server "$@" &
62 |     SERVER_PID=$!
63 |     
64 |     # Wait for the server process
65 |     wait $SERVER_PID
66 |     SERVER_EXIT_CODE=$?
67 |     
68 |     # Clean up the keep-alive process
69 |     kill $KEEPALIVE_PID 2>/dev/null || true
70 |     
71 |     exit $SERVER_EXIT_CODE
72 | fi
```

--------------------------------------------------------------------------------
/scripts/setup-lightweight.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # MCP Memory Service - Automated Setup
 3 | # Installs from Sundeepg98's fork with ONNX patches
 4 | #
 5 | # Usage: curl -sSL <gist-url> | bash
 6 | # Or: ./setup-mcp-memory.sh
 7 | 
 8 | set -e
 9 | 
10 | echo "🔧 MCP Memory Service - Optimized Setup"
11 | echo "========================================"
12 | echo ""
13 | 
14 | # Check prerequisites
15 | if ! command -v pipx &> /dev/null; then
16 |     echo "❌ pipx not found. Install with: pip install pipx"
17 |     exit 1
18 | fi
19 | 
20 | # Uninstall existing if present
21 | if pipx list | grep -q mcp-memory-service; then
22 |     echo "📦 Removing existing installation..."
23 |     pipx uninstall mcp-memory-service
24 | fi
25 | 
26 | # Install from fork
27 | echo "📥 Installing from Sundeepg98/mcp-memory-service fork..."
28 | pipx install "git+https://github.com/Sundeepg98/mcp-memory-service.git"
29 | 
30 | # Create data directory
31 | echo "📁 Creating data directories..."
32 | mkdir -p ~/.local/share/mcp-memory
33 | mkdir -p ~/.cache/mcp_memory/onnx_models
34 | 
35 | # Get the python path
36 | PYTHON_PATH=$(pipx environment --value PIPX_HOME)/venvs/mcp-memory-service/bin/python
37 | 
38 | echo ""
39 | echo "✅ Installation complete!"
40 | echo ""
41 | echo "📋 Next: Add this to ~/.claude/settings.json:"
42 | echo ""
43 | cat << 'EOF'
44 | {
45 |   "mcpServers": {
46 |     "memory": {
47 |       "type": "stdio",
48 |       "command": "PYTHON_PATH_PLACEHOLDER",
49 |       "args": ["-m", "mcp_memory_service.server"],
50 |       "env": {
51 |         "MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec",
52 |         "MCP_QUALITY_BOOST_ENABLED": "true"
53 |       }
54 |     }
55 |   },
56 |   "env": {
57 |     "MCP_MEMORY_USE_ONNX": "true",
58 |     "MCP_CONSOLIDATION_ENABLED": "true"
59 |   }
60 | }
61 | EOF
62 | echo ""
63 | echo "Replace PYTHON_PATH_PLACEHOLDER with:"
64 | echo "  $PYTHON_PATH"
65 | echo ""
66 | echo "🔄 Then restart Claude Code"
67 | echo ""
68 | echo "📊 Disk usage: ~805MB (vs 7.7GB with transformers)"
69 | echo "🤖 ONNX models will auto-download on first use (~255MB)"
70 | 
```
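
If you prefer to automate the final manual step, here is a small sketch, assuming the snippet above has already been pasted into `~/.claude/settings.json` (on macOS, use `sed -i ''` instead of `sed -i`):

```bash
PYTHON_PATH=$(pipx environment --value PIPX_HOME)/venvs/mcp-memory-service/bin/python
sed -i "s|PYTHON_PATH_PLACEHOLDER|$PYTHON_PATH|" ~/.claude/settings.json
```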

--------------------------------------------------------------------------------
/scripts/sync/litestream/resolve_conflicts.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # Simple conflict resolution helper
 3 | 
 4 | STAGING_DB="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec_staging.db"
 5 | 
 6 | # Colors for output
 7 | RED='\033[0;31m'
 8 | GREEN='\033[0;32m'
 9 | YELLOW='\033[1;33m'
10 | NC='\033[0m' # No Color
11 | 
12 | if [ ! -f "$STAGING_DB" ]; then
13 |     echo -e "${RED}No staging database found${NC}"
14 |     exit 1
15 | fi
16 | 
17 | # Get conflicts
18 | CONFLICTS=$(sqlite3 "$STAGING_DB" "
19 | SELECT id, content, staged_at, conflict_status 
20 | FROM staged_memories 
21 | WHERE conflict_status IN ('detected', 'push_failed')
22 | ORDER BY staged_at DESC;
23 | ")
24 | 
25 | if [ -z "$CONFLICTS" ]; then
26 |     echo -e "${GREEN}No conflicts to resolve${NC}"
27 |     exit 0
28 | fi
29 | 
30 | echo -e "${YELLOW}Found conflicts to resolve:${NC}"
31 | echo ""
32 | 
33 | echo "$CONFLICTS" | while IFS='|' read -r id content staged_at status; do
34 |     echo -e "${RED}Conflict: $status${NC}"
35 |     echo -e "Content: ${content:0:80}..."
36 |     echo -e "Staged: $staged_at"
37 |     echo -e "ID: $id"
38 |     echo ""
39 |     echo "Actions:"
40 |     echo "  1. Keep and retry push"
41 |     echo "  2. Delete (abandon change)"
42 |     echo "  3. Skip for now"
43 |     echo ""
44 |     # Read the choice from the terminal; plain stdin inside this loop is the piped conflict list
45 |     read -p "Choose action (1/2/3): " action < /dev/tty
46 |     
47 |     case $action in
48 |         1)
49 |             sqlite3 "$STAGING_DB" "
50 |             UPDATE staged_memories 
51 |             SET conflict_status = 'none' 
52 |             WHERE id = '$id';
53 |             "
54 |             echo -e "${GREEN}Marked for retry${NC}"
55 |             ;;
56 |         2)
57 |             sqlite3 "$STAGING_DB" "DELETE FROM staged_memories WHERE id = '$id';"
58 |             echo -e "${YELLOW}Deleted${NC}"
59 |             ;;
60 |         3)
61 |             echo -e "${YELLOW}Skipped${NC}"
62 |             ;;
63 |         *)
64 |             echo -e "${YELLOW}Invalid choice, skipped${NC}"
65 |             ;;
66 |     esac
67 |     echo ""
68 | done
69 | 
70 | echo -e "${GREEN}Conflict resolution completed${NC}"
```
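
For a read-only overview before running the interactive resolver, a one-liner against the same staging database (path generalized to `$HOME`):

```bash
STAGING_DB="$HOME/Library/Application Support/mcp-memory/sqlite_vec_staging.db"
sqlite3 "$STAGING_DB" \
  "SELECT conflict_status, COUNT(*) FROM staged_memories
   WHERE conflict_status != 'none'
   GROUP BY conflict_status;"
```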

--------------------------------------------------------------------------------
/examples/memory_export_template.json:
--------------------------------------------------------------------------------

```json
 1 | {
 2 |   "export_metadata": {
 3 |     "source_machine": "example-hostname",
 4 |     "export_timestamp": "2025-08-21T12:00:00.000000",
 5 |     "total_memories": 3,
 6 |     "database_path": "/path/to/sqlite_vec.db",
 7 |     "platform": "Linux",
 8 |     "python_version": "3.11.0",
 9 |     "include_embeddings": false,
10 |     "filter_tags": null,
11 |     "exporter_version": "6.2.4"
12 |   },
13 |   "memories": [
14 |     {
15 |       "content": "MCP Memory Service is a Model Context Protocol server that provides semantic memory and persistent storage capabilities for Claude Desktop using SQLite-vec and sentence transformers.",
16 |       "content_hash": "example-hash-1234567890abcdef",
17 |       "tags": ["documentation", "project-overview"],
18 |       "created_at": 1692633600.0,
19 |       "updated_at": 1692633600.0,
20 |       "memory_type": "note",
21 |       "metadata": {
22 |         "source": "example-machine",
23 |         "project": "mcp-memory-service"
24 |       }
25 |     },
26 |     {
27 |       "content": "Key development commands: `uv run memory` to start server, `pytest tests/` for testing, `python install.py` for setup.",
28 |       "content_hash": "example-hash-abcdef1234567890",
29 |       "tags": ["commands", "development"],
30 |       "created_at": 1692634200.0,
31 |       "updated_at": 1692634200.0,
32 |       "memory_type": "reference",
33 |       "metadata": {
34 |         "source": "example-machine",
35 |         "category": "quick-reference"
36 |       }
37 |     },
38 |     {
39 |       "content": "SQLite-vec backend is now the default storage backend (v6.0+) offering fast performance and single-file database storage.",
40 |       "content_hash": "example-hash-fedcba0987654321",
41 |       "tags": ["architecture", "backend", "sqlite-vec"],
42 |       "created_at": 1692634800.0,
43 |       "updated_at": 1692634800.0,
44 |       "memory_type": "architectural-decision",
45 |       "metadata": {
46 |         "source": "example-machine",
47 |         "version": "v6.0.0"
48 |       }
49 |     }
50 |   ]
51 | }
```
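
A few jq sanity checks against an export in this format (the filename `memory_export.json` is an example); the first two counts should agree:

```bash
jq '.export_metadata.total_memories' memory_export.json   # declared count
jq '.memories | length' memory_export.json                # actual count
jq -r '.memories[] | [.memory_type, (.tags | join(","))] | @tsv' memory_export.json
```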

--------------------------------------------------------------------------------
/docs/mastery/local-setup-and-run.md:
--------------------------------------------------------------------------------

```markdown
  1 | # MCP Memory Service — Local Setup and Run
  2 | 
  3 | Follow these steps to run the service locally, switch storage backends, and validate functionality.
  4 | 
  5 | ## 1) Install Dependencies
  6 | 
  7 | Using uv (recommended):
  8 | 
  9 | ```
 10 | uv sync
 11 | ```
 12 | 
 13 | Using pip:
 14 | 
 15 | ```
 16 | python -m venv .venv
 17 | source .venv/bin/activate  # Windows: .venv\Scripts\activate
 18 | pip install -e .
 19 | ```
 20 | 
 21 | If using SQLite-vec backend (recommended):
 22 | 
 23 | ```
 24 | uv add sqlite-vec sentence-transformers torch
 25 | # or
 26 | pip install sqlite-vec sentence-transformers torch
 27 | ```
 28 | 
 29 | ## 2) Choose Storage Backend
 30 | 
 31 | SQLite-vec (default):
 32 | 
 33 | ```
 34 | export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
 35 | # optional custom DB path
 36 | export MCP_MEMORY_SQLITE_PATH="$HOME/.local/share/mcp-memory/sqlite_vec.db"
 37 | ```
 38 | 
 39 | ChromaDB (deprecated):
 40 | 
 41 | ```
 42 | export MCP_MEMORY_STORAGE_BACKEND=chroma
 43 | export MCP_MEMORY_CHROMA_PATH="$HOME/.local/share/mcp-memory/chroma_db"
 44 | ```
 45 | 
 46 | Cloudflare:
 47 | 
 48 | ```
 49 | export MCP_MEMORY_STORAGE_BACKEND=cloudflare
 50 | export CLOUDFLARE_API_TOKEN=...
 51 | export CLOUDFLARE_ACCOUNT_ID=...
 52 | export CLOUDFLARE_VECTORIZE_INDEX=...
 53 | export CLOUDFLARE_D1_DATABASE_ID=...
 54 | ```
 55 | 
 56 | ## 3) Run the Server
 57 | 
 58 | Stdio MCP server (integrates with Claude Desktop):
 59 | 
 60 | ```
 61 | uv run memory server
 62 | ```
 63 | 
 64 | FastMCP HTTP server (for Claude Code / remote):
 65 | 
 66 | ```
 67 | uv run mcp-memory-server
 68 | ```
 69 | 
 70 | Example Claude Desktop configuration (~/.claude/config.json):
 71 | 
 72 | ```
 73 | {
 74 |   "mcpServers": {
 75 |     "memory": {
 76 |       "command": "uv",
 77 |       "args": ["--directory", "/path/to/mcp-memory-service", "run", "memory", "server"],
 78 |       "env": { "MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec" }
 79 |     }
 80 |   }
 81 | }
 82 | ```
 83 | 
 84 | ## 4) Verify Health and Basic Ops
 85 | 
 86 | CLI status:
 87 | 
 88 | ```
 89 | uv run memory status
 90 | ```
 91 | 
 92 | MCP tool flow (via client):
 93 | - store_memory → retrieve_memory → search_by_tag → delete_memory
 94 | 
 95 | ## 5) Run Tests
 96 | 
 97 | ```
 98 | pytest -q
 99 | # or
100 | uv run pytest -q
101 | ```
102 | 
103 | See also: `docs/mastery/testing-guide.md` and `docs/sqlite-vec-backend.md`.
104 | 
105 | 
```
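
For the HTTP mode, a quick liveness probe (port 8000 is the default used elsewhere in this repository; adjust if yours differs):

```bash
curl -fsS http://localhost:8000/api/health
```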

--------------------------------------------------------------------------------
/docs/integrations.md:
--------------------------------------------------------------------------------

```markdown
 1 | # MCP Memory Service Integrations
 2 | 
 3 | This document catalogs tools, utilities, and integrations that extend the functionality of the MCP Memory Service.
 4 | 
 5 | ## Official Integrations
 6 | 
 7 | ### [MCP Memory Dashboard](https://github.com/doobidoo/mcp-memory-dashboard) (work in progress)
 8 | 
 9 | A web-based dashboard for viewing, searching, and managing your MCP Memory Service data. The dashboard allows you to:
10 | - Browse and search memories
11 | - View memory metadata and tags
12 | - Delete unwanted memories
13 | - Perform semantic searches
14 | - Monitor system health
15 | 
16 | ## Community Integrations
17 | 
18 | ### [Claude Memory Context](https://github.com/doobidoo/claude-memory-context)
19 | 
20 | A utility that enables Claude to start each conversation with awareness of the topics and important memories stored in your MCP Memory Service.
21 | 
22 | This tool:
23 | - Queries your MCP memory service for recent and important memories
24 | - Extracts topics and content summaries
25 | - Formats this information into a structured context section
26 | - Updates Claude project instructions automatically
27 | 
28 | The utility leverages Claude's project instructions feature without requiring any modifications to the MCP protocol. It can be automated to run periodically, ensuring Claude always has access to your latest memories.
29 | 
30 | See the [Claude Memory Context repository](https://github.com/doobidoo/claude-memory-context) for installation and usage instructions.
31 | 
32 | ---
33 | 
34 | ## Adding Your Integration
35 | 
36 | If you've built a tool or integration for the MCP Memory Service, we'd love to include it here. Please submit a pull request that adds your project to this document with:
37 | 
38 | 1. The name of your integration (with link to repository)
39 | 2. A brief description (2-3 sentences)
40 | 3. A list of key features
41 | 4. Any installation notes or special requirements
42 | 
43 | All listed integrations should be functional, documented, and actively maintained.
44 | 
```

--------------------------------------------------------------------------------
/claude-hooks/config.template.json:
--------------------------------------------------------------------------------

```json
 1 | {
 2 |   "memoryService": {
 3 |     "endpoint": "https://your-server:8443",
 4 |     "apiKey": "your-api-key-here",
 5 |     "defaultTags": ["claude-code", "auto-generated"],
 6 |     "maxMemoriesPerSession": 8,
 7 |     "enableSessionConsolidation": true
 8 |   },
 9 |   "projectDetection": {
10 |     "gitRepository": true,
11 |     "packageFiles": ["package.json", "pyproject.toml", "Cargo.toml", "go.mod", "pom.xml"],
12 |     "frameworkDetection": true,
13 |     "languageDetection": true,
14 |     "confidenceThreshold": 0.3
15 |   },
16 |   "memoryScoring": {
17 |     "weights": {
18 |       "timeDecay": 0.3,
19 |       "tagRelevance": 0.4,
20 |       "contentRelevance": 0.2,
21 |       "typeBonus": 0.1
22 |     },
23 |     "minRelevanceScore": 0.3,
24 |     "timeDecayRate": 0.1
25 |   },
26 |   "contextFormatting": {
27 |     "includeProjectSummary": true,
28 |     "includeRelevanceScores": false,
29 |     "groupByCategory": true,
30 |     "maxContentLength": 200,
31 |     "includeTimestamps": true
32 |   },
33 |   "sessionAnalysis": {
34 |     "extractTopics": true,
35 |     "extractDecisions": true,
36 |     "extractInsights": true,
37 |     "extractCodeChanges": true,
38 |     "extractNextSteps": true,
39 |     "minSessionLength": 100,
40 |     "minConfidence": 0.1
41 |   },
42 |   "hooks": {
43 |     "sessionStart": {
44 |       "enabled": true,
45 |       "timeout": 10000,
46 |       "priority": "high"
47 |     },
48 |     "sessionEnd": {
49 |       "enabled": true,
50 |       "timeout": 15000,
51 |       "priority": "normal"
52 |     },
53 |     "topicChange": {
54 |       "enabled": false,
55 |       "timeout": 5000,
56 |       "priority": "low"
57 |     }
58 |   },
59 |   "output": {
60 |     "verbose": true,
61 |     "showMemoryDetails": false,
62 |     "showProjectDetails": true,
63 |     "showScoringDetails": false,
64 |     "cleanMode": false
65 |   },
66 |   "logging": {
67 |     "level": "info",
68 |     "enableDebug": false,
69 |     "logToFile": false,
70 |     "logFilePath": "./claude-hooks.log"
71 |   },
72 |   "permissionRequest": {
73 |     "enabled": true,
74 |     "autoApprove": true,
75 |     "customSafePatterns": [],
76 |     "customDestructivePatterns": [],
77 |     "logDecisions": false
78 |   }
79 | }
```
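
Two sanity checks for a filled-in copy of this template (the filename `config.json` is an assumption); the scoring weights appear designed to sum to 1.0:

```bash
jq '.memoryScoring.weights | add' config.json                      # expect 1.0
jq -e '.memoryService.apiKey != "your-api-key-here"' config.json   # fails while the placeholder remains
```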

--------------------------------------------------------------------------------
/tools/docker/docker-entrypoint-unified.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # Unified Docker entrypoint script for MCP Memory Service
 3 | # Supports both MCP protocol mode and HTTP server mode
 4 | 
 5 | set -e
 6 | 
 7 | echo "[INFO] Starting MCP Memory Service in Docker container"
 8 | 
 9 | # Function to handle signals
10 | handle_signal() {
11 |     echo "[INFO] Received signal, shutting down..."
12 |     if [ -n "$SERVER_PID" ]; then
13 |         kill -TERM $SERVER_PID 2>/dev/null || true
14 |     fi
15 |     exit 0
16 | }
17 | 
18 | # Set up signal handlers
19 | trap handle_signal SIGTERM SIGINT
20 | 
21 | # Determine mode based on environment variable
22 | MODE="${MCP_MODE:-mcp}"
23 | echo "[INFO] Running in $MODE mode"
24 | 
25 | if [ "$MODE" = "http" ] || [ "$MODE" = "api" ]; then
26 |     # HTTP Server Mode
27 |     echo "[INFO] Starting HTTP server with FastAPI/Uvicorn"
28 |     
29 |     # Ensure we have the HTTP server file
30 |     if [ ! -f "/app/run_server.py" ]; then
31 |         echo "[ERROR] run_server.py not found. Please ensure it's copied in the Dockerfile"
32 |         exit 1
33 |     fi
34 |     
35 |     # Start the HTTP server
36 |     exec python /app/run_server.py "$@"
37 |     
38 | elif [ "$MODE" = "mcp" ]; then
39 |     # MCP Protocol Mode (stdin/stdout)
40 |     echo "[INFO] Starting MCP protocol server (stdin/stdout communication)"
41 |     
42 |     # Function to keep stdin alive
43 |     keep_stdin_alive() {
44 |         while true; do
45 |             # Emit a newline every 30 seconds; break once the pipe closes
46 |             echo "" 2>/dev/null || break
47 |             sleep 30
48 |         done
49 |     }
50 |     
51 |     # Start the keep-alive process in the background
52 |     keep_stdin_alive &
53 |     KEEPALIVE_PID=$!
54 |     
55 |     # Run the MCP server
56 |     python -u -m mcp_memory_service.server "$@" &
57 |     SERVER_PID=$!
58 |     
59 |     # Wait for the server process
60 |     wait $SERVER_PID
61 |     SERVER_EXIT_CODE=$?
62 |     
63 |     # Clean up the keep-alive process
64 |     kill $KEEPALIVE_PID 2>/dev/null || true
65 |     
66 |     exit $SERVER_EXIT_CODE
67 |     
68 | else
69 |     echo "[ERROR] Unknown mode: $MODE. Use 'mcp' for protocol mode or 'http' for API mode"
70 |     exit 1
71 | fi
```
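
A usage sketch for the two modes (the image tag is a placeholder):

```bash
IMAGE=mcp-memory-service:local
docker run -i -e MCP_MODE=mcp "$IMAGE"               # stdio protocol; -i keeps stdin open
docker run -p 8000:8000 -e MCP_MODE=http "$IMAGE"    # HTTP API on port 8000
```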

--------------------------------------------------------------------------------
/archive/setup-development/setup_consolidation_mdns.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | 
 3 | # Setup script for MCP Memory Service with Consolidation and mDNS
 4 | echo "Setting up MCP Memory Service with Consolidation and mDNS HTTPS..."
 5 | 
 6 | # Enable consolidation system
 7 | export MCP_CONSOLIDATION_ENABLED=true
 8 | 
 9 | # Configure consolidation settings
10 | export MCP_DECAY_ENABLED=true
11 | export MCP_RETENTION_CRITICAL=365
12 | export MCP_RETENTION_REFERENCE=180
13 | export MCP_RETENTION_STANDARD=30
14 | export MCP_RETENTION_TEMPORARY=7
15 | 
16 | export MCP_ASSOCIATIONS_ENABLED=true
17 | export MCP_ASSOCIATION_MIN_SIMILARITY=0.3
18 | export MCP_ASSOCIATION_MAX_SIMILARITY=0.7
19 | export MCP_ASSOCIATION_MAX_PAIRS=100
20 | 
21 | export MCP_CLUSTERING_ENABLED=true
22 | export MCP_CLUSTERING_MIN_SIZE=5
23 | export MCP_CLUSTERING_ALGORITHM=dbscan
24 | 
25 | export MCP_COMPRESSION_ENABLED=true
26 | export MCP_COMPRESSION_MAX_LENGTH=500
27 | export MCP_COMPRESSION_PRESERVE_ORIGINALS=true
28 | 
29 | export MCP_FORGETTING_ENABLED=true
30 | export MCP_FORGETTING_RELEVANCE_THRESHOLD=0.1
31 | export MCP_FORGETTING_ACCESS_THRESHOLD=90
32 | 
33 | # Set consolidation schedule (cron-like)
34 | export MCP_SCHEDULE_DAILY="02:00"
35 | export MCP_SCHEDULE_WEEKLY="SUN 03:00"
36 | export MCP_SCHEDULE_MONTHLY="01 04:00"
37 | 
38 | # Configure mDNS multi-client server with HTTPS
39 | export MCP_MDNS_ENABLED=true
40 | export MCP_MDNS_SERVICE_NAME="memory"
41 | export MCP_HTTPS_ENABLED=true
42 | 
43 | # HTTP server configuration
44 | export MCP_HTTP_ENABLED=true
45 | export MCP_HTTP_HOST=0.0.0.0
46 | export MCP_HTTP_PORT=8000
47 | 
48 | # Storage backend
49 | export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
50 | 
51 | # API security
52 | export MCP_API_KEY="$(openssl rand -base64 32)"
53 | 
54 | echo "Configuration set! Environment variables:"
55 | echo "- Consolidation enabled: $MCP_CONSOLIDATION_ENABLED"
56 | echo "- mDNS enabled: $MCP_MDNS_ENABLED"
57 | echo "- HTTPS enabled: $MCP_HTTPS_ENABLED"
58 | echo "- Service name: $MCP_MDNS_SERVICE_NAME"
59 | echo "- API Key generated: [SET]"
60 | echo ""
61 | echo "Starting MCP Memory Service HTTP server..."
62 | 
63 | # Activate virtual environment and start the server
64 | source venv/bin/activate && python scripts/run_http_server.py
```

--------------------------------------------------------------------------------
/scripts/server/memory_offline.py:
--------------------------------------------------------------------------------

```python
 1 | #!/usr/bin/env python3
 2 | """
 3 | Memory service launcher with forced offline mode.
 4 | This script sets offline mode BEFORE importing anything else.
 5 | """
 6 | 
 7 | import os
 8 | import platform
 9 | import sys
10 | 
11 | def setup_offline_mode():
12 |     """Setup offline mode environment variables BEFORE any imports."""
13 |     print("Setting up offline mode...", file=sys.stderr)
14 |     
15 |     # Force offline mode
16 |     os.environ['HF_HUB_OFFLINE'] = '1'
17 |     os.environ['TRANSFORMERS_OFFLINE'] = '1'
18 |     
19 |     # Configure cache paths for Windows
20 |     username = os.environ.get('USERNAME', os.environ.get('USER', ''))
21 |     if platform.system() == "Windows" and username:
22 |         hf_home = f"C:\\Users\\{username}\\.cache\\huggingface"
23 |         transformers_cache = f"C:\\Users\\{username}\\.cache\\huggingface\\transformers"
24 |         sentence_transformers_home = f"C:\\Users\\{username}\\.cache\\torch\\sentence_transformers"
25 |     else:
26 |         hf_home = os.path.expanduser("~/.cache/huggingface")
27 |         transformers_cache = os.path.expanduser("~/.cache/huggingface/transformers")
28 |         sentence_transformers_home = os.path.expanduser("~/.cache/torch/sentence_transformers")
29 |     
30 |     # Set cache paths
31 |     os.environ['HF_HOME'] = hf_home
32 |     os.environ['TRANSFORMERS_CACHE'] = transformers_cache
33 |     os.environ['SENTENCE_TRANSFORMERS_HOME'] = sentence_transformers_home
34 |     
35 |     print(f"HF_HUB_OFFLINE: {os.environ.get('HF_HUB_OFFLINE')}", file=sys.stderr)
36 |     print(f"HF_HOME: {os.environ.get('HF_HOME')}", file=sys.stderr)
37 |     
38 |     # Add src to Python path
39 |     src_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'src')
40 |     if src_path not in sys.path:
41 |         sys.path.insert(0, src_path)
42 | 
43 | if __name__ == "__main__":
44 |     # Setup offline mode FIRST
45 |     setup_offline_mode()
46 |     
47 |     # Now import and run the memory server
48 |     print("Starting MCP Memory Service in offline mode...", file=sys.stderr)
49 |     from mcp_memory_service.server import main
50 |     main()
```
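
Offline mode only works if the embedding model is already cached, so a one-time online pre-download is needed first. The model name below is an assumption; substitute whatever your configuration uses:

```bash
# One-time, while online: populate the sentence-transformers cache
python -c "from sentence_transformers import SentenceTransformer; SentenceTransformer('all-MiniLM-L6-v2')"
# Then start fully offline
python scripts/server/memory_offline.py
```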

--------------------------------------------------------------------------------
/scripts/sync/litestream/staging_db_init.sql:
--------------------------------------------------------------------------------

```sql
 1 | -- Staging Database Schema for Offline Memory Changes
 2 | -- This database stores local changes when remote server is unavailable
 3 | 
 4 | -- Staged memories that need to be synchronized
 5 | CREATE TABLE IF NOT EXISTS staged_memories (
 6 |     id TEXT PRIMARY KEY,
 7 |     content TEXT NOT NULL,
 8 |     content_hash TEXT NOT NULL,
 9 |     tags TEXT, -- JSON array as string
10 |     metadata TEXT, -- JSON metadata as string
11 |     memory_type TEXT DEFAULT 'note',
12 |     operation TEXT NOT NULL CHECK (operation IN ('INSERT', 'UPDATE', 'DELETE')),
13 |     staged_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
14 |     original_created_at TIMESTAMP,
15 |     source_machine TEXT,
16 |     conflict_status TEXT DEFAULT 'none' CHECK (conflict_status IN ('none', 'detected', 'resolved', 'push_failed')) -- 'push_failed' is queried by resolve_conflicts.sh
17 | );
18 | 
19 | -- Sync status tracking
20 | CREATE TABLE IF NOT EXISTS sync_status (
21 |     key TEXT PRIMARY KEY,
22 |     value TEXT,
23 |     updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
24 | );
25 | 
26 | -- Index for performance
27 | CREATE INDEX IF NOT EXISTS idx_staged_memories_hash ON staged_memories(content_hash);
28 | CREATE INDEX IF NOT EXISTS idx_staged_memories_staged_at ON staged_memories(staged_at);
29 | CREATE INDEX IF NOT EXISTS idx_staged_memories_operation ON staged_memories(operation);
30 | 
31 | -- Initialize sync status
32 | INSERT OR REPLACE INTO sync_status (key, value) VALUES 
33 | ('last_remote_sync', ''),
34 | ('last_local_sync', ''),
35 | ('staging_version', '1.0'),
36 | ('total_staged_changes', '0');
37 | 
38 | -- Triggers to maintain staged changes count
39 | CREATE TRIGGER IF NOT EXISTS update_staged_count_insert
40 | AFTER INSERT ON staged_memories
41 | BEGIN
42 |     UPDATE sync_status 
43 |     SET value = CAST((CAST(value AS INTEGER) + 1) AS TEXT), 
44 |         updated_at = CURRENT_TIMESTAMP 
45 |     WHERE key = 'total_staged_changes';
46 | END;
47 | 
48 | CREATE TRIGGER IF NOT EXISTS update_staged_count_delete
49 | AFTER DELETE ON staged_memories
50 | BEGIN
51 |     UPDATE sync_status 
52 |     SET value = CAST((CAST(value AS INTEGER) - 1) AS TEXT), 
53 |         updated_at = CURRENT_TIMESTAMP 
54 |     WHERE key = 'total_staged_changes';
55 | END;
```
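
Initializing a staging database with this schema and inspecting the bookkeeping rows (the `staging.db` path is an example):

```bash
sqlite3 staging.db < scripts/sync/litestream/staging_db_init.sql
sqlite3 staging.db "SELECT key, value FROM sync_status;"
```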

--------------------------------------------------------------------------------
/.github/workflows/claude-code-review.yml:
--------------------------------------------------------------------------------

```yaml
 1 | name: Claude Code Review
 2 | 
 3 | on:
 4 |   pull_request:
 5 |     types: [opened, synchronize]
 6 |     # Optional: Only run on specific file changes
 7 |     # paths:
 8 |     #   - "src/**/*.ts"
 9 |     #   - "src/**/*.tsx"
10 |     #   - "src/**/*.js"
11 |     #   - "src/**/*.jsx"
12 | 
13 | jobs:
14 |   claude-review:
15 |     # SECURITY: Only run for repository owner to prevent API credit abuse
16 |     if: github.event.pull_request.user.login == github.repository_owner
17 | 
18 |     runs-on: ubuntu-latest
19 |     permissions:
20 |       contents: read
21 |       pull-requests: write  # Allow posting review comments
22 |       issues: write         # Allow posting issue comments
23 |       id-token: write
24 |     
25 |     steps:
26 |       - name: Checkout repository
27 |         uses: actions/checkout@v4
28 |         with:
29 |           fetch-depth: 1
30 | 
31 |       - name: Run Claude Code Review
32 |         id: claude-review
33 |         uses: anthropics/claude-code-action@v1
34 |         with:
35 |           claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
36 |           prompt: |
37 |             REPO: ${{ github.repository }}
38 |             PR NUMBER: ${{ github.event.pull_request.number }}
39 | 
40 |             Please review this pull request and provide feedback on:
41 |             - Code quality and best practices
42 |             - Potential bugs or issues
43 |             - Performance considerations
44 |             - Security concerns
45 |             - Test coverage
46 |             
47 |             Use the repository's CLAUDE.md for guidance on style and conventions. Be constructive and helpful in your feedback.
48 | 
49 |             Use `gh pr comment` with your Bash tool to leave your review as a comment on the PR.
50 |           
51 |           # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
52 |           # or https://docs.claude.com/en/docs/claude-code/sdk#command-line for available options
53 |           claude_args: '--allowed-tools "Bash(gh issue view:*),Bash(gh search:*),Bash(gh issue list:*),Bash(gh pr comment:*),Bash(gh pr diff:*),Bash(gh pr view:*),Bash(gh pr list:*)"'
54 | 
55 | 
```

--------------------------------------------------------------------------------
/docs/guides/scripts.md:
--------------------------------------------------------------------------------

```markdown
 1 | # Scripts Documentation
 2 | 
 3 | This document provides an overview of the available scripts in the `scripts/` directory and their purposes.
 4 | 
 5 | ## Essential Scripts
 6 | 
 7 | ### Server Management
 8 | - `run_memory_server.py`: Main script to start the memory service server
 9 |   ```bash
10 |   python scripts/run_memory_server.py
11 |   ```
12 | 
13 | ### Environment Verification
14 | - `verify_environment.py`: Verifies the installation environment and dependencies
15 |   ```bash
16 |   python scripts/verify_environment.py
17 |   ```
18 | 
19 | ### Installation Testing
20 | - `test_installation.py`: Tests the installation and basic functionality
21 |   ```bash
22 |   python scripts/test_installation.py
23 |   ```
24 | 
25 | ### Memory Management
26 | - `validate_memories.py`: Validates the integrity of stored memories
27 |   ```bash
28 |   python scripts/validate_memories.py
29 |   ```
30 | - `repair_memories.py`: Repairs corrupted or invalid memories
31 |   ```bash
32 |   python scripts/repair_memories.py
33 |   ```
34 | - `list-collections.py`: Lists all available memory collections
35 |   ```bash
36 |   python scripts/list-collections.py
37 |   ```
38 | 
39 | ## Migration Scripts
40 | - `mcp-migration.py`: Handles migration of MCP-related data
41 |   ```bash
42 |   python scripts/mcp-migration.py
43 |   ```
44 | - `memory-migration.py`: Handles migration of memory data
45 |   ```bash
46 |   python scripts/memory-migration.py
47 |   ```
48 | 
49 | ## Troubleshooting Scripts
50 | - `verify_pytorch_windows.py`: Verifies PyTorch installation on Windows
51 |   ```bash
52 |   python scripts/verify_pytorch_windows.py
53 |   ```
54 | - `verify_torch.py`: General PyTorch verification
55 |   ```bash
56 |   python scripts/verify_torch.py
57 |   ```
58 | 
59 | ## Usage Notes
60 | - Most scripts can be run directly with Python
61 | - Some scripts may require specific environment variables to be set
62 | - Always run verification scripts after installation or major updates
63 | - Use migration scripts with caution and ensure backups are available
64 | 
65 | ## Script Dependencies
66 | - Python 3.10+
67 | - Required packages listed in `requirements.txt`
68 | - Some scripts may require additional dependencies listed in `requirements-migration.txt` 
```

--------------------------------------------------------------------------------
/archive/setup-development/test_service.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | 
 3 | # Test script to debug service startup issues
 4 | echo "=== MCP Memory Service Debug Test ==="
 5 | 
 6 | # Set working directory
 7 | cd /home/hkr/repositories/mcp-memory-service
 8 | 
 9 | # Set environment variables (same as service)
10 | export PATH=/home/hkr/repositories/mcp-memory-service/venv/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
11 | export PYTHONPATH=/home/hkr/repositories/mcp-memory-service/src
12 | export MCP_CONSOLIDATION_ENABLED=true
13 | export MCP_MDNS_ENABLED=true
14 | export MCP_HTTPS_ENABLED=true
15 | export MCP_MDNS_SERVICE_NAME="MCP Memory"
16 | export MCP_HTTP_ENABLED=true
17 | export MCP_HTTP_HOST=0.0.0.0
18 | export MCP_HTTP_PORT=8000
19 | export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
20 | export MCP_API_KEY=mcp-0b1ccbde2197a08dcb12d41af4044be6
21 | 
22 | echo "Working directory: $(pwd)"
23 | echo "Python executable: $(which python)"
24 | echo "Virtual env Python: /home/hkr/repositories/mcp-memory-service/venv/bin/python"
25 | 
26 | # Check if venv Python exists
27 | if [ -f "/home/hkr/repositories/mcp-memory-service/venv/bin/python" ]; then
28 |     echo "✅ Virtual environment Python exists"
29 | else
30 |     echo "❌ Virtual environment Python missing!"
31 |     exit 1
32 | fi
33 | 
34 | # Check if run_http_server.py exists
35 | if [ -f "/home/hkr/repositories/mcp-memory-service/scripts/run_http_server.py" ]; then
36 |     echo "✅ Server script exists"
37 | else
38 |     echo "❌ Server script missing!"
39 |     exit 1
40 | fi
41 | 
42 | # Test Python import
43 | echo "=== Testing Python imports ==="
44 | /home/hkr/repositories/mcp-memory-service/venv/bin/python -c "
45 | import sys
46 | sys.path.insert(0, '/home/hkr/repositories/mcp-memory-service/src')
47 | try:
48 |     from mcp_memory_service.web.app import app
49 |     print('✅ Web app import successful')
50 | except Exception as e:
51 |     print(f'❌ Web app import failed: {e}')
52 |     sys.exit(1)
53 | "
54 | 
55 | echo "=== Testing server startup (5 seconds) ==="
56 | timeout 5s /home/hkr/repositories/mcp-memory-service/venv/bin/python /home/hkr/repositories/mcp-memory-service/scripts/run_http_server.py || echo "Server test completed"
57 | 
58 | echo "=== Debug test finished ==="
```

--------------------------------------------------------------------------------
/src/mcp_memory_service/web/dependencies.py:
--------------------------------------------------------------------------------

```python
 1 | # Copyright 2024 Heinrich Krupp
 2 | #
 3 | # Licensed under the Apache License, Version 2.0 (the "License");
 4 | # you may not use this file except in compliance with the License.
 5 | # You may obtain a copy of the License at
 6 | #
 7 | #     http://www.apache.org/licenses/LICENSE-2.0
 8 | #
 9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | 
15 | """
16 | FastAPI dependencies for the HTTP interface.
17 | """
18 | 
19 | import logging
20 | from typing import Optional
21 | from fastapi import HTTPException, Depends
22 | 
23 | from ..storage.base import MemoryStorage
24 | from ..services.memory_service import MemoryService
25 | 
26 | logger = logging.getLogger(__name__)
27 | 
28 | # Global storage instance
29 | _storage: Optional[MemoryStorage] = None
30 | 
31 | 
32 | def set_storage(storage: MemoryStorage) -> None:
33 |     """Set the global storage instance."""
34 |     global _storage
35 |     _storage = storage
36 | 
37 | 
38 | def get_storage() -> MemoryStorage:
39 |     """Get the global storage instance."""
40 |     if _storage is None:
41 |         raise HTTPException(status_code=503, detail="Storage not initialized")
42 |     return _storage
43 | 
44 | 
45 | def get_memory_service(storage: MemoryStorage = Depends(get_storage)) -> MemoryService:
46 |     """Get a MemoryService instance with the configured storage backend."""
47 |     return MemoryService(storage)
48 | 
49 | 
50 | 
51 | 
52 | async def create_storage_backend() -> MemoryStorage:
53 |     """
54 |     Create and initialize storage backend for web interface based on configuration.
55 | 
56 |     Returns:
57 |         Initialized storage backend
58 |     """
59 |     from ..config import DATABASE_PATH
60 |     from ..storage.factory import create_storage_instance
61 | 
62 |     logger.info("Creating storage backend for web interface...")
63 | 
64 |     # Use shared factory with DATABASE_PATH for web interface
65 |     return await create_storage_instance(DATABASE_PATH, server_type="http")
```
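
The practical effect of `get_storage()` is visible from outside: until `set_storage()` has been called (normally after `create_storage_backend()`), any route that resolves the dependency returns 503. A probe sketch, with the endpoint path as an assumption:

```bash
# Endpoint path is hypothetical; expect "503 Storage not initialized" before startup completes
curl -i http://localhost:8000/api/memories
```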

--------------------------------------------------------------------------------
/claude-hooks/test-mcp-hook.js:
--------------------------------------------------------------------------------

```javascript
 1 | #!/usr/bin/env node
 2 | 
 3 | /**
 4 |  * Test MCP-based Memory Hook
 5 |  * Tests the updated session-start hook with MCP protocol
 6 |  */
 7 | 
 8 | // The session-start module is required below, where its export shape is resolved.
 9 | 
10 | // Test configuration
11 | const testContext = {
12 |     workingDirectory: process.cwd(),
13 |     sessionId: 'mcp-test-session',
14 |     trigger: 'session-start',
15 |     userMessage: 'test memory hook with cloudflare backend',
16 |     injectSystemMessage: async (message) => {
17 |         console.log('\n' + '='.repeat(60));
18 |         console.log('🧠 MCP MEMORY CONTEXT INJECTION TEST');
19 |         console.log('='.repeat(60));
20 |         console.log(message);
21 |         console.log('='.repeat(60) + '\n');
22 |         return true;
23 |     }
24 | };
25 | 
26 | async function testMCPHook() {
27 |     console.log('🔧 Testing MCP Memory Hook...');
28 |     console.log(`📂 Working Directory: ${process.cwd()}`);
29 |     console.log(`🔧 Testing with Cloudflare backend configuration\n`);
30 | 
31 |     try {
32 |         await testContext.onSessionStart(testContext);
33 |         console.log('✅ MCP Hook test completed successfully');
34 |     } catch (error) {
35 |         console.error('❌ MCP Hook test failed:', error.message);
36 | 
37 |         // Don't show full stack trace in test mode
38 |         if (process.env.DEBUG) {
39 |             console.error(error.stack);
40 |         }
41 | 
42 |         // Test completed - hook should fail gracefully
43 |         console.log('✅ Hook failed gracefully as expected when MCP server unavailable');
44 |     }
45 | }
46 | 
47 | // Handle the onSessionStart function correctly
48 | const sessionStartModule = require('./core/session-start.js');
49 | if (sessionStartModule.handler) {
50 |     testContext.onSessionStart = sessionStartModule.handler;
51 | } else if (typeof sessionStartModule === 'function') {
52 |     testContext.onSessionStart = sessionStartModule;
53 | } else {
54 |     // Try direct export
55 |     testContext.onSessionStart = sessionStartModule.onSessionStart || sessionStartModule.default;
56 | }
57 | 
58 | if (!testContext.onSessionStart) {
59 |     console.error('❌ Could not find onSessionStart handler');
60 |     process.exit(1);
61 | }
62 | 
63 | // Run the test
64 | testMCPHook();
```

--------------------------------------------------------------------------------
/scripts/installation/install_uv.py:
--------------------------------------------------------------------------------

```python
 1 | #!/usr/bin/env python3
 2 | # Copyright 2024 Heinrich Krupp
 3 | #
 4 | # Licensed under the Apache License, Version 2.0 (the "License");
 5 | # you may not use this file except in compliance with the License.
 6 | # You may obtain a copy of the License at
 7 | #
 8 | #     http://www.apache.org/licenses/LICENSE-2.0
 9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | 
16 | """
17 | Script to install UV package manager
18 | """
19 | import os
20 | import sys
21 | import subprocess
22 | import platform
23 | 
24 | def main():
25 |     print("Installing UV package manager...")
26 |     
27 |     try:
28 |         # Install UV using pip
29 |         subprocess.check_call([
30 |             sys.executable, '-m', 'pip', 'install', 'uv'
31 |         ])
32 |         
33 |         print("UV installed successfully!")
34 |         print("You can now use UV for faster dependency management:")
35 |         print("  uv pip install -r requirements.txt")
36 |         
37 |         # Create shortcut script
38 |         system = platform.system().lower()
39 |         if system == "windows":
40 |             # Create .bat file for Windows
41 |             with open("uv-run.bat", "w") as f:
42 |                 f.write("@echo off\n")
43 |                 f.write("python -m uv run memory %*\n")
44 |             print("Created uv-run.bat shortcut")
45 |         else:
46 |             # Create shell script for Unix-like systems
47 |             with open("uv-run.sh", "w") as f:
48 |                 f.write("#!/bin/sh\n")
49 |                 f.write("python -m uv run memory \"$@\"\n")
50 |             
51 |             # Make it executable
52 |             try:
53 |                 os.chmod("uv-run.sh", 0o755)
54 |             except OSError:
55 |                 pass  # best effort; the user can chmod manually
56 |             print("Created uv-run.sh shortcut")
57 |         
58 |     except subprocess.SubprocessError as e:
59 |         print(f"Error installing UV: {e}")
60 |         sys.exit(1)
61 | 
62 | if __name__ == "__main__":
63 |     main()
64 | 
```

--------------------------------------------------------------------------------
/.claude/directives/memory-tagging.md:
--------------------------------------------------------------------------------

```markdown
 1 | # Memory Tagging Directive
 2 | 
 3 | ## CRITICAL: Always Tag Memories with Project Name
 4 | 
 5 | When storing memories manually for this project, **ALWAYS** include `mcp-memory-service` as the **first tag**.
 6 | 
 7 | ### Why This Matters
 8 | - Session-end hooks automatically add `projectContext.name` (line 222 in session-end.js)
 9 | - Manual storage has NO hook context - you must add the tag explicitly
10 | - Without the tag, memories are excluded from:
11 |   - SessionStart hook project context retrieval
12 |   - Tag-based searches
13 |   - Git-aware context integration
14 |   - Cross-PC sync queries
15 | 
16 | ### Correct Usage
17 | 
18 | ```bash
19 | # ✅ CORRECT - Always include project tag first
20 | claude /memory-store "architecture decision..." \
21 |   --tags "mcp-memory-service,architecture,graph-database"
22 | 
23 | # ✅ CORRECT - MCP tool with project tag
24 | store_memory(
25 |     content="configuration baseline...",
26 |     metadata={"tags": "mcp-memory-service,configuration,hybrid-backend"}
27 | )
28 | 
29 | # ❌ WRONG - Missing project tag
30 | claude /memory-store "bug fix..." --tags "bug-fix,troubleshooting"
31 | ```
32 | 
33 | ### Tag Priority Order (v8.48.2+)
34 | 
35 | 1. **Project identifier** - `mcp-memory-service` (REQUIRED)
36 | 2. **Content category** - `architecture`, `configuration`, `bug-fix`, `release`, etc.
37 | 3. **Specifics** - `graph-database`, `hybrid-backend`, `v8.51.0`, etc.
38 | 
39 | ### Standard Categories
40 | 
41 | | Category | Use Case | Example |
42 | |----------|----------|---------|
43 | | `architecture` | Design decisions, system structure | `mcp-memory-service,architecture,graph-database` |
44 | | `configuration` | Setup, environment, settings | `mcp-memory-service,configuration,multi-pc` |
45 | | `performance` | Optimization, benchmarks | `mcp-memory-service,performance,30x-improvement` |
46 | | `bug-fix` | Issue resolution | `mcp-memory-service,bug-fix,database-lock` |
47 | | `release` | Version management | `mcp-memory-service,release,v8.51.0` |
48 | | `documentation` | Guides, references | `mcp-memory-service,documentation,setup-guide` |
49 | 
50 | ## Enforcement
51 | 
52 | This directive is **mandatory** for all manual memory storage operations in this project.
53 | 
```

--------------------------------------------------------------------------------
/archive/litestream-configs-v6.3.0/install_service.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | 
 3 | # Install MCP Memory Service as a systemd service
 4 | echo "Installing MCP Memory Service as a systemd service..."
 5 | 
 6 | # Check if running as regular user (not root)
 7 | if [ "$EUID" -eq 0 ]; then
 8 |     echo "Error: Do not run this script as root. Run as your regular user."
 9 |     exit 1
10 | fi
11 | 
12 | # Get current user and working directory
13 | CURRENT_USER=$(whoami)
14 | CURRENT_DIR=$(pwd)
15 | SERVICE_FILE="deployment/mcp-memory.service"
16 | 
17 | echo "User: $CURRENT_USER"
18 | echo "Working directory: $CURRENT_DIR"
19 | 
20 | # Check if service file exists
21 | if [ ! -f "$SERVICE_FILE" ]; then
22 |     echo "Error: Service file $SERVICE_FILE not found!"
23 |     exit 1
24 | fi
25 | 
26 | # Generate a unique API key
27 | API_KEY="mcp-$(openssl rand -hex 16)"
28 | echo "Generated API key: $API_KEY"
29 | 
30 | # Update the service file with the actual API key
31 | sed -i "s/Environment=MCP_API_KEY=.*/Environment=MCP_API_KEY=$API_KEY/" "$SERVICE_FILE"
32 | 
33 | # Copy service file to systemd directory
34 | echo "Installing systemd service file..."
35 | sudo cp "$SERVICE_FILE" /etc/systemd/system/
36 | 
37 | # Set proper permissions
38 | sudo chmod 644 /etc/systemd/system/mcp-memory.service
39 | 
40 | # Reload systemd daemon
41 | echo "Reloading systemd daemon..."
42 | sudo systemctl daemon-reload
43 | 
44 | # Enable the service to start on boot
45 | echo "Enabling service for startup..."
46 | sudo systemctl enable mcp-memory.service
47 | 
48 | echo ""
49 | echo "✅ MCP Memory Service installed successfully!"
50 | echo ""
51 | echo "Commands to manage the service:"
52 | echo "  Start:   sudo systemctl start mcp-memory"
53 | echo "  Stop:    sudo systemctl stop mcp-memory"  
54 | echo "  Status:  sudo systemctl status mcp-memory"
55 | echo "  Logs:    sudo journalctl -u mcp-memory -f"
56 | echo "  Disable: sudo systemctl disable mcp-memory"
57 | echo ""
58 | echo "The service will now start automatically on system boot."
59 | echo "API Key: $API_KEY"
60 | echo ""
61 | echo "Service will be available at:"
62 | echo "  Dashboard: https://localhost:8000"
63 | echo "  API Docs:  https://localhost:8000/api/docs"
64 | echo "  Health:    https://localhost:8000/api/health"
65 | echo ""
66 | echo "To start the service now, run:"
67 | echo "  sudo systemctl start mcp-memory"
```

--------------------------------------------------------------------------------
/scripts/run/memory_wrapper_cleanup.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # MCP Memory Service Wrapper with Orphan Cleanup
 3 | # 
 4 | # Cleans up orphaned MCP memory processes before starting the server.
 5 | # Orphaned processes cause SQLite "database locked" errors.
 6 | #
 7 | # Usage in MCP config:
 8 | # {
 9 | #   "memory": {
10 | #     "command": "/path/to/mcp-memory-service/scripts/run/memory_wrapper_cleanup.sh",
11 | #     "args": [],
12 | #     "env": { ... }
13 | #   }
14 | # }
15 | 
16 | set -e
17 | 
18 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
19 | PROJECT_DIR="$(cd "$SCRIPT_DIR/../.." && pwd)"
20 | 
21 | log() {
22 |     echo "[mcp-memory-wrapper] $1" >&2
23 | }
24 | 
25 | # Find and kill orphaned MCP memory processes
26 | cleanup_orphans() {
27 |     local count=0
28 |     
29 |     for pid in $(pgrep -f "mcp-memory-service" 2>/dev/null || true); do
30 |         # Skip our own process tree
31 |         if [ "$pid" = "$$" ]; then
32 |             continue
33 |         fi
34 |         
35 |         # Get parent PID
36 |         local ppid=$(ps -o ppid= -p "$pid" 2>/dev/null | tr -d ' ')
37 |         
38 |         # ppid=1 means orphaned (parent is init/launchd)
39 |         if [ "$ppid" = "1" ]; then
40 |             log "Killing orphaned process: $pid"
41 |             kill -9 "$pid" 2>/dev/null || true
42 |             ((count++)) || true
43 |         fi
44 |     done
45 |     
46 |     if [ "$count" -gt 0 ]; then
47 |         log "Cleaned up $count orphaned process(es)"
48 |     else
49 |         log "No orphaned processes found"
50 |     fi
51 | }
52 | 
53 | # Find uv executable
54 | find_uv() {
55 |     if command -v uv &>/dev/null; then
56 |         echo "uv"
57 |     elif [ -x "$HOME/.local/bin/uv" ]; then
58 |         echo "$HOME/.local/bin/uv"
59 |     elif [ -x "$HOME/.cargo/bin/uv" ]; then
60 |         echo "$HOME/.cargo/bin/uv"
61 |     else
62 |         log "ERROR: uv not found. Install with: curl -LsSf https://astral.sh/uv/install.sh | sh"
63 |         exit 1
64 |     fi
65 | }
66 | 
67 | main() {
68 |     log "Starting ($(uname -s) $(uname -r))"
69 |     
70 |     # Step 1: Cleanup orphans
71 |     cleanup_orphans
72 |     
73 |     # Step 2: Start server
74 |     cd "$PROJECT_DIR"
75 |     
76 |     UV=$(find_uv)
77 |     log "Starting server with: $UV run memory"
78 |     
79 |     # exec replaces this shell - clean signal handling, no subprocess
80 |     exec "$UV" run memory "$@"
81 | }
82 | 
83 | main "$@"
84 | 
```

--------------------------------------------------------------------------------
/scripts/utils/query_memories.py:
--------------------------------------------------------------------------------

```python
 1 | #!/usr/bin/env python3
 2 | """Query memories from the SQLite database"""
 3 | 
 4 | import sqlite3
 5 | import json
 6 | import sys
 7 | 
 8 | def query_memories(tag_filter=None, query_text=None, limit=5):
 9 |     """Query memories from the database"""
10 |     conn = sqlite3.connect('/home/hkr/.local/share/mcp-memory/sqlite_vec.db')
11 |     cursor = conn.cursor()
12 |     
13 |     if tag_filter:
14 |         sql = "SELECT content, tags FROM memories WHERE tags LIKE ? LIMIT ?"
15 |         cursor.execute(sql, (f'%{tag_filter}%', limit))
16 |     elif query_text:
17 |         sql = "SELECT content, tags FROM memories WHERE content LIKE ? LIMIT ?"
18 |         cursor.execute(sql, (f'%{query_text}%', limit))
19 |     else:
20 |         sql = "SELECT content, tags FROM memories ORDER BY created_at DESC LIMIT ?"
21 |         cursor.execute(sql, (limit,))
22 |     
23 |     results = []
24 |     for row in cursor.fetchall():
25 |         content = row[0]
26 |         try:
27 |             tags = json.loads(row[1]) if row[1] else []
28 |         except (json.JSONDecodeError, TypeError):
29 |             # Tags might be stored differently
30 |             tags = row[1].split(',') if row[1] and isinstance(row[1], str) else []
31 |         results.append({
32 |             'content': content,
33 |             'tags': tags
34 |         })
35 |     
36 |     conn.close()
37 |     return results
38 | 
39 | if __name__ == "__main__":
40 |     # Get memories with specific tags
41 |     print("=== Searching for README sections ===\n")
42 |     
43 |     # Search for readme content
44 |     memories = query_memories(tag_filter="readme", limit=10)
45 |     
46 |     for i, memory in enumerate(memories, 1):
47 |         print(f"Memory {i}:")
48 |         print(f"Content (first 500 chars):\n{memory['content'][:500]}")
49 |         print(f"Tags: {', '.join(memory['tags'])}")
50 |         print("-" * 80)
51 |         print()
52 |     
53 |     # Search for specific content
54 |     print("\n=== Searching for Installation content ===\n")
55 |     memories = query_memories(query_text="installation", limit=5)
56 |     
57 |     for i, memory in enumerate(memories, 1):
58 |         print(f"Memory {i}:")
59 |         print(f"Content (first 500 chars):\n{memory['content'][:500]}")
60 |         print(f"Tags: {', '.join(memory['tags'])}")
61 |         print("-" * 80)
62 |         print()
```

--------------------------------------------------------------------------------
/archive/deployment/deploy_http_with_mcp.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | 
 3 | # Deploy HTTP Server with MCP endpoints (hybrid approach)
 4 | echo "🔄 Switching to HTTP server with MCP protocol support..."
 5 | 
 6 | # Create updated service file for hybrid approach
 7 | cat > /tmp/mcp-memory-hybrid.service << 'EOF'
 8 | [Unit]
 9 | Description=MCP Memory Service HTTP+MCP Hybrid v4.0.0-alpha.1
10 | Documentation=https://github.com/doobidoo/mcp-memory-service
11 | After=network.target network-online.target
12 | Wants=network-online.target
13 | 
14 | [Service]
15 | Type=simple
16 | User=hkr
17 | Group=hkr
18 | WorkingDirectory=/home/hkr/repositories/mcp-memory-service
19 | Environment=PATH=/home/hkr/repositories/mcp-memory-service/venv/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
20 | Environment=PYTHONPATH=/home/hkr/repositories/mcp-memory-service/src
21 | Environment=MCP_CONSOLIDATION_ENABLED=true
22 | Environment=MCP_MDNS_ENABLED=true
23 | Environment=MCP_HTTPS_ENABLED=false
24 | Environment=MCP_MDNS_SERVICE_NAME="MCP Memory Service - Hybrid"
25 | Environment=MCP_HTTP_ENABLED=true
26 | Environment=MCP_HTTP_HOST=0.0.0.0
27 | Environment=MCP_HTTP_PORT=8000
28 | Environment=MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
29 | Environment=MCP_API_KEY=test-key-123
30 | ExecStart=/home/hkr/repositories/mcp-memory-service/venv/bin/python /home/hkr/repositories/mcp-memory-service/scripts/run_http_server.py
31 | Restart=always
32 | RestartSec=10
33 | StandardOutput=journal
34 | StandardError=journal
35 | SyslogIdentifier=mcp-memory-service
36 | 
37 | [Install]
38 | WantedBy=multi-user.target
39 | EOF
40 | 
41 | # Install the hybrid service configuration
42 | echo "📝 Installing hybrid HTTP+MCP service configuration..."
43 | sudo cp /tmp/mcp-memory-hybrid.service /etc/systemd/system/mcp-memory.service
44 | 
45 | # Reload and start
46 | echo "🔄 Reloading systemd and starting hybrid service..."
47 | sudo systemctl daemon-reload
48 | sudo systemctl start mcp-memory
49 | 
50 | # Check status
51 | echo "🔍 Checking service status..."
52 | sudo systemctl status mcp-memory --no-pager
53 | 
54 | echo ""
55 | echo "✅ HTTP server with MCP protocol support is now running!"
56 | echo ""
57 | echo "🌐 Available Services:"
58 | echo "   - HTTP API: http://localhost:8000/api/*"
59 | echo "   - Dashboard: http://localhost:8000/"
60 | echo "   - Health: http://localhost:8000/api/health"
61 | echo ""
62 | echo "🔧 Next: Add MCP protocol endpoints to the HTTP server"
```

--------------------------------------------------------------------------------
/scripts/ci/validate_imports.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # Copyright 2024 Heinrich Krupp
 3 | #
 4 | # Licensed under the Apache License, Version 2.0 (the "License");
 5 | # you may not use this file except in compliance with the License.
 6 | # You may obtain a copy of the License at
 7 | #
 8 | #     http://www.apache.org/licenses/LICENSE-2.0
 9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | 
16 | set -euo pipefail
17 | 
18 | # Validate that all handlers can be imported without errors
19 | # Catches Issue #299 style bugs (ModuleNotFoundError, ImportError)
20 | #
21 | # Exit codes:
22 | #   0 - All imports successful
23 | #   1 - Import validation failed
24 | 
25 | echo "🔍 Validating handler imports..."
26 | 
27 | # Test all 17 memory handlers can be imported
28 | python3 -c "
29 | import sys
30 | import traceback
31 | 
32 | try:
33 |     from mcp_memory_service.server.handlers.memory import (
34 |         handle_store_memory,
35 |         handle_retrieve_memory,
36 |         handle_retrieve_with_quality_boost,
37 |         handle_search_by_tag,
38 |         handle_delete_memory,
39 |         handle_delete_by_tag,
40 |         handle_delete_by_tags,
41 |         handle_delete_by_all_tags,
42 |         handle_cleanup_duplicates,
43 |         handle_update_memory_metadata,
44 |         handle_debug_retrieve,
45 |         handle_exact_match_retrieve,
46 |         handle_get_raw_embedding,
47 |         handle_recall_memory,
48 |         handle_recall_by_timeframe,
49 |         handle_delete_by_timeframe,
50 |         handle_delete_before_date,
51 |     )
52 |     print('✅ All 17 handler imports successful')
53 |     sys.exit(0)
54 | except ImportError as e:
55 |     print(f'❌ Import validation failed: {e}', file=sys.stderr)
56 |     traceback.print_exc()
57 |     sys.exit(1)
58 | except Exception as e:
59 |     print(f'❌ Unexpected error during import: {e}', file=sys.stderr)
60 |     traceback.print_exc()
61 |     sys.exit(1)
62 | "
63 | 
64 | if [ $? -eq 0 ]; then
65 |     echo "✅ Handler import validation passed"
66 |     exit 0
67 | else
68 |     echo "❌ Handler import validation failed" >&2
69 |     echo "💡 This catches bugs like Issue #299 (relative import errors)" >&2
70 |     exit 1
71 | fi
72 | 
```

--------------------------------------------------------------------------------
/tools/docker/docker-compose.http.yml:
--------------------------------------------------------------------------------

```yaml
 1 | version: '3.8'
 2 | 
 3 | # Docker Compose configuration for HTTP/API mode
 4 | # Usage: docker-compose -f docker-compose.http.yml up -d
 5 | 
 6 | services:
 7 |   mcp-memory-service:
 8 |     build:
 9 |       context: ../..
10 |       dockerfile: tools/docker/Dockerfile
11 |     ports:
12 |       - "${HTTP_PORT:-8000}:8000"  # Map to different port if needed
13 |     volumes:
14 |       # Single data directory for all storage
15 |       - ./data:/app/data
16 | 
17 |       # Model cache (prevents re-downloading models on each restart)
18 |       # Uncomment the following line to persist Hugging Face models
19 |       # - ${HOME}/.cache/huggingface:/root/.cache/huggingface
20 | 
21 |       # Optional: mount local config
22 |       # - ./config:/app/config:ro
23 |     environment:
24 |       # Mode selection
25 |       - MCP_MODE=http
26 |       
27 |       # Storage configuration
28 |       - MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
29 |       - MCP_MEMORY_SQLITE_PATH=/app/data/sqlite_vec.db
30 |       - MCP_MEMORY_BACKUPS_PATH=/app/data/backups
31 |       
32 |       # HTTP configuration
33 |       - MCP_HTTP_PORT=8000
34 |       - MCP_HTTP_HOST=0.0.0.0
35 |       - MCP_API_KEY=${MCP_API_KEY:-your-secure-api-key-here}
36 |       
37 |       # Optional: HTTPS configuration
38 |       # - MCP_HTTPS_ENABLED=true
39 |       # - MCP_HTTPS_PORT=8443
40 |       # - MCP_SSL_CERT_FILE=/app/certs/cert.pem
41 |       # - MCP_SSL_KEY_FILE=/app/certs/key.pem
42 |       
43 |       # Performance tuning
44 |       - LOG_LEVEL=${LOG_LEVEL:-INFO}
45 |       - MAX_RESULTS_PER_QUERY=10
46 |       - SIMILARITY_THRESHOLD=0.7
47 |       
48 |       # Python configuration
49 |       - PYTHONUNBUFFERED=1
50 |       - PYTHONPATH=/app/src
51 | 
52 |       # Offline mode (uncomment if models are pre-cached and network is restricted)
53 |       # - HF_HUB_OFFLINE=1
54 |       # - TRANSFORMERS_OFFLINE=1
55 |     
56 |     # Use the unified entrypoint
57 |     entrypoint: ["/usr/local/bin/docker-entrypoint-unified.sh"]
58 |     
59 |     restart: unless-stopped
60 |     
61 |     healthcheck:
62 |       test: ["CMD", "curl", "-f", "http://localhost:8000/api/health"]
63 |       interval: 30s
64 |       timeout: 10s
65 |       retries: 3
66 |       start_period: 40s
67 |     
68 |     # Resource limits (optional, adjust as needed)
69 |     deploy:
70 |       resources:
71 |         limits:
72 |           cpus: '2.0'
73 |           memory: 2G
74 |         reservations:
75 |           cpus: '0.5'
76 |           memory: 512M
```
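
Bringing the stack up and hitting the same endpoint the healthcheck uses (the API key generation shown is just one option):

```bash
MCP_API_KEY=$(openssl rand -hex 16) docker-compose -f tools/docker/docker-compose.http.yml up -d
curl -fsS http://localhost:8000/api/health
```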

--------------------------------------------------------------------------------
/scripts/testing/test-hook.js:
--------------------------------------------------------------------------------

```javascript
 1 | #!/usr/bin/env node
 2 | 
 3 | /**
 4 |  * Test script for the enhanced session-start hook
 5 |  */
 6 | 
 7 | const path = require('path');
 8 | 
 9 | // Import the enhanced hook
10 | const sessionStartHook = require('../../claude-hooks/core/session-start.js');
11 | 
12 | async function testEnhancedHook() {
13 |     console.log('🧪 Testing Enhanced Session Start Hook\n');
14 |     
15 |     // Mock context for testing
16 |     const mockContext = {
17 |         workingDirectory: process.cwd(),
18 |         sessionId: 'test-session-' + Date.now(),
19 |         trigger: 'session-start',
20 |         userMessage: 'Help me understand the memory service improvements',
21 |         injectSystemMessage: async (message) => {
22 |             console.log('\n🎯 INJECTED CONTEXT:');
23 |             console.log('═'.repeat(60));
24 |             console.log(message);
25 |             console.log('═'.repeat(60));
26 |             return true;
27 |         }
28 |     };
29 |     
30 |     console.log(`📂 Testing in directory: ${mockContext.workingDirectory}`);
31 |     console.log(`🔍 Test query: "${mockContext.userMessage}"`);
32 |     console.log(`⚙️  Trigger: ${mockContext.trigger}\n`);
33 |     
34 |     try {
35 |         // Execute the enhanced hook
36 |         await sessionStartHook.handler(mockContext);
37 |         
38 |         console.log('\n✅ Hook execution completed successfully!');
39 |         console.log('\n📊 Expected improvements:');
40 |         console.log('   • Multi-phase memory retrieval (recent + important + fallback)');
41 |         console.log('   • Enhanced recency indicators (🕒 today, 📅 this week)');
42 |         console.log('   • Better semantic queries with git context');
43 |         console.log('   • Improved categorization with "Recent Work" section');
44 |         console.log('   • Configurable memory ratios and time windows');
45 |         
46 |     } catch (error) {
47 |         console.error('❌ Hook execution failed:', error.message);
48 |         console.error('Stack trace:', error.stack);
49 |     }
50 | }
51 | 
52 | // Run the test
53 | if (require.main === module) {
54 |     testEnhancedHook()
55 |         .then(() => {
56 |             console.log('\n🎉 Test completed');
57 |             process.exit(0);
58 |         })
59 |         .catch(error => {
60 |             console.error('\n💥 Test failed:', error.message);
61 |             process.exit(1);
62 |         });
63 | }
64 | 
65 | module.exports = { testEnhancedHook };
```

--------------------------------------------------------------------------------
/tests/integration/conftest.py:
--------------------------------------------------------------------------------

```python
 1 | # Copyright 2024 Heinrich Krupp
 2 | #
 3 | # Licensed under the Apache License, Version 2.0 (the "License");
 4 | # you may not use this file except in compliance with the License.
 5 | # You may obtain a copy of the License at
 6 | #
 7 | #     http://www.apache.org/licenses/LICENSE-2.0
 8 | #
 9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | 
15 | """
16 | Shared fixtures and configuration for integration tests.
17 | 
18 | This conftest.py disables authentication for the whole test session so that
19 | all integration tests can reach the FastAPI endpoints without credentials.
20 | 
21 | CRITICAL: Environment variables MUST be set BEFORE any mcp_memory_service
22 | modules are imported. This is because config.py reads env vars at module-level
23 | import time, not at runtime.
24 | 
25 | Why session-scoped autouse fixture works:
26 | - pytest loads conftest.py BEFORE importing test modules
27 | - autouse=True ensures this runs BEFORE any test collection
28 | - Session scope means it runs once at the very start
29 | - os.environ changes affect subsequent imports of config.py
30 | """
31 | 
32 | import os
33 | import pytest
34 | 
35 | 
36 | @pytest.fixture(scope="session", autouse=True)
37 | def disable_auth_for_integration_tests():
38 |     """
39 |     Disable authentication globally for all integration tests.
40 | 
41 |     Sets environment variables BEFORE any app imports happen.
42 |     This must be session-scoped and autouse=True to ensure it runs
43 |     before FastAPI app initialization.
44 | 
45 |     Technical note for uvx CI compatibility:
46 |     - config.py reads env vars at import time: OAUTH_ENABLED = os.getenv('MCP_OAUTH_ENABLED', True)
47 |     - If config.py is imported before this fixture, auth remains enabled
48 |     - Session-scope + autouse ensures this runs FIRST
49 |     - Works in both local pytest and uvx CI environments
50 |     """
51 |     # Set env vars BEFORE any imports
52 |     os.environ['MCP_API_KEY'] = ''
53 |     os.environ['MCP_OAUTH_ENABLED'] = 'false'
54 |     os.environ['MCP_ALLOW_ANONYMOUS_ACCESS'] = 'true'
55 | 
56 |     yield
57 | 
58 |     # Cleanup not needed - test session ends after this
59 | 
```

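The import-order constraint described in the docstring is easy to reproduce in isolation. A self-contained sketch (using a stand-in loader function rather than the real `config.py`) shows why setting the variable *after* import has no effect:

```python
# Self-contained sketch of the import-time pitfall the fixture guards
# against; load_config() stands in for importing the real config.py.
import os
from types import SimpleNamespace

def load_config() -> SimpleNamespace:
    # Env vars are read exactly once, here, like a module-level read.
    return SimpleNamespace(
        oauth_enabled=os.getenv("MCP_OAUTH_ENABLED", "true") != "false"
    )

config = load_config()                       # simulates "import config"
os.environ["MCP_OAUTH_ENABLED"] = "false"    # too late: already evaluated
assert config.oauth_enabled is True          # the value was frozen at load
```
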
--------------------------------------------------------------------------------
/tests/unit/test_imports.py:
--------------------------------------------------------------------------------

```python
 1 | # Copyright 2024 Heinrich Krupp
 2 | #
 3 | # Licensed under the Apache License, Version 2.0 (the "License");
 4 | # you may not use this file except in compliance with the License.
 5 | # You may obtain a copy of the License at
 6 | #
 7 | #     http://www.apache.org/licenses/LICENSE-2.0
 8 | #
 9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | 
15 | """
16 | Regression tests for import issues.
17 | 
18 | Ensures all required imports are present to prevent issues like the
19 | 'import time' bug fixed in v8.57.0 (Issue #295, Phase 1).
20 | """
21 | 
22 | import pytest
23 | 
24 | 
25 | def test_server_impl_imports():
26 |     """
27 |     Regression test for missing 'import time' bug (v8.57.0).
28 | 
29 |     Ensures server_impl.py has all required imports, particularly the
30 |     'time' module which was missing and caused NameError in 27+ tests.
31 | 
32 |     Related: PR #294, v8.57.0 Phase 1 fixes
33 |     """
34 |     # Read server_impl.py source to verify imports are present
35 |     import os
36 | 
37 |     server_impl_path = os.path.join(
38 |         os.path.dirname(__file__),
39 |         '../../src/mcp_memory_service/server_impl.py'
40 |     )
41 |     server_impl_path = os.path.abspath(server_impl_path)
42 | 
43 |     with open(server_impl_path, 'r') as f:
44 |         source = f.read()
45 | 
46 |     # Verify critical imports are present in source
47 |     assert 'import time' in source, "server_impl.py must import 'time' module"
48 |     assert 'import asyncio' in source, "server_impl.py must import 'asyncio'"
49 |     assert 'import logging' in source, "server_impl.py must import 'logging'"
50 |     assert 'import json' in source, "server_impl.py must import 'json'"
51 | 
52 | 
53 | def test_memory_service_imports():
54 |     """Ensure memory_service.py has all required imports."""
55 |     import mcp_memory_service.services.memory_service as ms
56 | 
57 |     # Verify critical imports
58 |     assert hasattr(ms, 'logging'), "memory_service.py must import 'logging'"
59 | 
60 |     # Verify model imports
61 |     from mcp_memory_service.models.memory import Memory, MemoryQueryResult
62 |     assert Memory is not None
63 |     assert MemoryQueryResult is not None
64 | 
```

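The substring checks above are deliberately simple, but note that `'import time' in source` would also be satisfied by `import timeit`. If stricter matching ever becomes necessary, an AST-based variant is a small step up; a sketch:

```python
# Sketch of a stricter check: parse the AST so "import timeit" cannot
# satisfy an assertion about "import time".
import ast

def top_level_imports(source: str) -> set[str]:
    """Collect module names from import / from-import statements."""
    names: set[str] = set()
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, ast.Import):
            names.update(alias.name for alias in node.names)
        elif isinstance(node, ast.ImportFrom) and node.module:
            names.add(node.module)
    return names

assert "time" in top_level_imports("import time\nimport asyncio")
assert "time" not in top_level_imports("import timeit")
```
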
--------------------------------------------------------------------------------
/examples/start_https_example.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # Example HTTPS startup script for MCP Memory Service
 3 | # Copy and customize this file for your deployment
 4 | #
 5 | # This example shows how to configure the MCP Memory Service with custom SSL certificates.
 6 | # For easy local development with trusted certificates, consider using mkcert:
 7 | # https://github.com/FiloSottile/mkcert
 8 | 
 9 | # Storage configuration
10 | export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
11 | 
12 | # API authentication - CHANGE THIS TO A SECURE KEY!
13 | # Generate a secure key with: openssl rand -base64 32
14 | export MCP_API_KEY="your-secure-api-key-here"
15 | 
16 | # HTTPS configuration with custom certificates
17 | export MCP_HTTPS_ENABLED=true
18 | export MCP_HTTPS_PORT=8443
19 | 
20 | # SSL Certificate paths - UPDATE THESE PATHS TO YOUR CERTIFICATES
21 | # 
22 | # For mkcert certificates (recommended for development):
23 | # 1. Install mkcert: https://github.com/FiloSottile/mkcert#installation
24 | # 2. Create local CA: mkcert -install
25 | # 3. Generate certificate: mkcert your-domain.local localhost 127.0.0.1
26 | # 4. Update paths below to point to generated certificate files
27 | #
28 | # Example paths:
29 | # export MCP_SSL_CERT_FILE="/path/to/your-domain.local+2.pem"
30 | # export MCP_SSL_KEY_FILE="/path/to/your-domain.local+2-key.pem"
31 | #
32 | # For production, use certificates from your certificate authority:
33 | export MCP_SSL_CERT_FILE="/path/to/your/certificate.pem"
34 | export MCP_SSL_KEY_FILE="/path/to/your/certificate-key.pem"
35 | 
36 | # Optional: Disable HTTP if only HTTPS is needed
37 | export MCP_HTTP_ENABLED=false
38 | export MCP_HTTP_PORT=8080
39 | 
40 | # mDNS service discovery
41 | export MCP_MDNS_ENABLED=true
42 | export MCP_MDNS_SERVICE_NAME="MCP Memory Service"
43 | 
44 | # Optional: Additional configuration
45 | # export MCP_MEMORY_INCLUDE_HOSTNAME=true
46 | # export MCP_CONSOLIDATION_ENABLED=false
47 | 
48 | echo "Starting MCP Memory Service with HTTPS on port $MCP_HTTPS_PORT"
49 | echo "Certificate: $MCP_SSL_CERT_FILE"
50 | echo "Private Key: $MCP_SSL_KEY_FILE"
51 | 
52 | # Change to script directory and start server
53 | cd "$(dirname "$0")/.."
54 | 
55 | # Check if virtual environment exists
56 | if [ ! -f ".venv/bin/python" ]; then
57 |     echo "Error: Virtual environment not found at .venv/"
58 |     echo "Please run: python -m venv .venv && source .venv/bin/activate && pip install -e ."
59 |     exit 1
60 | fi
61 | 
62 | # Start the server
63 | exec ./.venv/bin/python run_server.py
```

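The script recommends `openssl rand -base64 32` for generating `MCP_API_KEY`; if OpenSSL is unavailable, Python's standard library produces an equivalent key:

```python
# Equivalent of `openssl rand -base64 32` using only the standard library.
import base64
import secrets

print(base64.b64encode(secrets.token_bytes(32)).decode("ascii"))
```

Either way, the generated key belongs in the environment, not in version control.
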
--------------------------------------------------------------------------------
/docs/document-ingestion.md:
--------------------------------------------------------------------------------

```markdown
 1 | # Document Ingestion (v7.6.0+)
 2 | 
 3 | Enhanced document parsing with optional semtools integration for superior quality extraction.
 4 | 
 5 | ## Supported Formats
 6 | 
 7 | | Format | Native Parser | With Semtools | Quality |
 8 | |--------|--------------|---------------|---------|
 9 | | PDF | PyPDF2/pdfplumber | LlamaParse | Excellent (OCR, tables) |
10 | | DOCX | Not supported | LlamaParse | Excellent |
11 | | PPTX | Not supported | LlamaParse | Excellent |
12 | | TXT/MD | Built-in | N/A | Perfect |
13 | 
14 | ## Semtools Integration (Optional)
15 | 
16 | Install [semtools](https://github.com/run-llama/semtools) for enhanced document parsing:
17 | 
18 | ```bash
19 | # Install via npm (recommended)
20 | npm i -g @llamaindex/semtools
21 | 
22 | # Or via cargo
23 | cargo install semtools
24 | 
25 | # Optional: Configure LlamaParse API key for best quality
26 | export LLAMAPARSE_API_KEY="your-api-key"
27 | ```
28 | 
29 | ## Configuration
30 | 
31 | ```bash
32 | # Document chunking settings
33 | export MCP_DOCUMENT_CHUNK_SIZE=1000          # Characters per chunk
34 | export MCP_DOCUMENT_CHUNK_OVERLAP=200        # Overlap between chunks
35 | 
36 | # LlamaParse API key (optional, improves quality)
37 | export LLAMAPARSE_API_KEY="llx-..."
38 | ```
39 | 
40 | ## Usage Examples
41 | 
42 | ```bash
43 | # Ingest a single document
44 | claude /memory-ingest document.pdf --tags documentation
45 | 
46 | # Ingest directory
47 | claude /memory-ingest-dir ./docs --tags knowledge-base
48 | 
49 | # Via Python
50 | from mcp_memory_service.ingestion import get_loader_for_file
51 | 
52 | loader = get_loader_for_file(Path("document.pdf"))
53 | async for chunk in loader.extract_chunks(Path("document.pdf")):
54 |     await store_memory(chunk.content, tags=["doc"])
55 | ```
56 | 
57 | ## Features
58 | 
59 | - **Automatic format detection** - Selects best loader for each file
60 | - **Intelligent chunking** - Respects paragraph/sentence boundaries
61 | - **Metadata enrichment** - Preserves file info, extraction method, page numbers
62 | - **Graceful fallback** - Uses native parsers if semtools unavailable
63 | - **Progress tracking** - Reports chunks processed during ingestion
64 | 
65 | ## Performance Considerations
66 | 
67 | - LlamaParse provides superior quality but requires an API key and an internet connection
68 | - Native parsers work offline but may have lower extraction quality for complex documents
69 | - Chunk size affects retrieval granularity vs context completeness
70 | - Larger overlap improves continuity but increases storage
71 | 
```

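The chunk size and overlap settings trade retrieval granularity against storage, as noted above. A rough illustrative chunker (not the service's actual implementation) makes the arithmetic concrete: each window advances by `size - overlap` characters, so consecutive chunks share `overlap` characters:

```python
# Illustrative only -- not the service's actual chunker. Each window
# advances by (size - overlap), so adjacent chunks share `overlap` chars.
def chunk(text: str, size: int = 1000, overlap: int = 200):
    step = size - overlap
    for start in range(0, max(len(text) - overlap, 1), step):
        yield text[start:start + size]

pieces = list(chunk("x" * 2500))
assert len(pieces) == 3 and all(len(p) <= 1000 for p in pieces)
```
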
--------------------------------------------------------------------------------
/scripts/sync/litestream/manual_sync.sh:
--------------------------------------------------------------------------------

```bash
 1 | #!/bin/bash
 2 | # Manual sync using HTTP downloads (alternative to Litestream restore)
 3 | 
 4 | DB_PATH="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec.db"
 5 | REMOTE_BASE="http://narrowbox.local:8080/mcp-memory"
 6 | BACKUP_PATH="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec.db.backup"
 7 | TEMP_DIR="/tmp/litestream_manual_$$"
 8 | 
 9 | echo "$(date): Starting manual sync from remote master..."
10 | 
11 | # Create temporary directory
12 | mkdir -p "$TEMP_DIR"
13 | 
14 | # Get the latest generation ID
15 | GENERATION=$(curl -s "$REMOTE_BASE/generations/" | grep -o 'href="[^"]*/"' | sed 's/href="//;s/\/"//g' | head -1)
16 | 
17 | if [ -z "$GENERATION" ]; then
18 |     echo "$(date): ERROR: Could not determine generation ID"
19 |     exit 1
20 | fi
21 | 
22 | echo "$(date): Found generation: $GENERATION"
23 | 
24 | # Get the latest snapshot
25 | SNAPSHOT_URL="$REMOTE_BASE/generations/$GENERATION/snapshots/"
26 | SNAPSHOT_FILE=$(curl -s "$SNAPSHOT_URL" | grep -o 'href="[^"]*\.snapshot\.lz4"' | sed 's/href="//;s/"//g' | tail -1)
27 | 
28 | if [ -z "$SNAPSHOT_FILE" ]; then
29 |     echo "$(date): ERROR: Could not find snapshot file"
30 |     rm -rf "$TEMP_DIR"
31 |     exit 1
32 | fi
33 | 
34 | echo "$(date): Downloading snapshot: $SNAPSHOT_FILE"
35 | 
36 | # Download and decompress snapshot
37 | curl -s "$SNAPSHOT_URL$SNAPSHOT_FILE" -o "$TEMP_DIR/snapshot.lz4"
38 | 
39 | if command -v lz4 >/dev/null 2>&1; then
40 |     # Use lz4 if available
41 |     lz4 -d "$TEMP_DIR/snapshot.lz4" "$TEMP_DIR/database.db"
42 | else
43 |     echo "$(date): ERROR: lz4 command not found. Please install: brew install lz4"
44 |     rm -rf "$TEMP_DIR"
45 |     exit 1
46 | fi
47 | 
48 | # Backup current database
49 | if [ -f "$DB_PATH" ]; then
50 |     cp "$DB_PATH" "$BACKUP_PATH"
51 |     echo "$(date): Created backup at $BACKUP_PATH"
52 | fi
53 | 
54 | # Replace with new database
55 | cp "$TEMP_DIR/database.db" "$DB_PATH"
56 | 
57 | if [ $? -eq 0 ]; then
58 |     echo "$(date): Successfully synced database from remote master"
59 |     # Remove backup on success
60 |     rm -f "$BACKUP_PATH"
61 |     
62 |     # Show database info
63 |     echo "$(date): Database size: $(du -h "$DB_PATH" | cut -f1)"
64 |     echo "$(date): Database modified: $(stat -f "%Sm" "$DB_PATH")"
65 | else
66 |     echo "$(date): ERROR: Failed to replace database"
67 |     # Restore backup on failure
68 |     if [ -f "$BACKUP_PATH" ]; then
69 |         mv "$BACKUP_PATH" "$DB_PATH"
70 |         echo "$(date): Restored backup"
71 |     fi
72 |     rm -rf "$TEMP_DIR"
73 |     exit 1
74 | fi
75 | 
76 | # Cleanup
77 | rm -rf "$TEMP_DIR"
78 | echo "$(date): Manual sync completed successfully"
```
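
The backup-then-swap pattern in the script above (copy the live database aside, replace it, restore on failure) generalizes beyond Litestream. A compact Python sketch of the same safety net, with placeholder paths:

```python
# Sketch of the script's backup/swap/restore pattern (paths are placeholders).
import shutil
from pathlib import Path

def safe_replace(db: Path, new_db: Path) -> None:
    backup = db.with_name(db.name + ".backup")
    if db.exists():
        shutil.copy2(db, backup)            # keep a restore point
    try:
        shutil.copy2(new_db, db)            # swap in the freshly synced copy
        backup.unlink(missing_ok=True)      # success: drop the restore point
    except OSError:
        if backup.exists():
            shutil.move(backup, db)         # failure: roll back
        raise
```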