This is page 3 of 47. Use http://codebase.md/doobidoo/mcp-memory-service?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .claude
│ ├── agents
│ │ ├── amp-bridge.md
│ │ ├── amp-pr-automator.md
│ │ ├── code-quality-guard.md
│ │ ├── gemini-pr-automator.md
│ │ └── github-release-manager.md
│ ├── settings.local.json.backup
│ └── settings.local.json.local
├── .commit-message
├── .dockerignore
├── .env.example
├── .env.sqlite.backup
├── .envnn#
├── .gitattributes
├── .github
│ ├── FUNDING.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── bug_report.yml
│ │ ├── config.yml
│ │ ├── feature_request.yml
│ │ └── performance_issue.yml
│ ├── pull_request_template.md
│ └── workflows
│ ├── bridge-tests.yml
│ ├── CACHE_FIX.md
│ ├── claude-code-review.yml
│ ├── claude.yml
│ ├── cleanup-images.yml.disabled
│ ├── dev-setup-validation.yml
│ ├── docker-publish.yml
│ ├── LATEST_FIXES.md
│ ├── main-optimized.yml.disabled
│ ├── main.yml
│ ├── publish-and-test.yml
│ ├── README_OPTIMIZATION.md
│ ├── release-tag.yml.disabled
│ ├── release.yml
│ ├── roadmap-review-reminder.yml
│ ├── SECRET_CONDITIONAL_FIX.md
│ └── WORKFLOW_FIXES.md
├── .gitignore
├── .mcp.json.backup
├── .mcp.json.template
├── .pyscn
│ ├── .gitignore
│ └── reports
│ └── analyze_20251123_214224.html
├── AGENTS.md
├── archive
│ ├── deployment
│ │ ├── deploy_fastmcp_fixed.sh
│ │ ├── deploy_http_with_mcp.sh
│ │ └── deploy_mcp_v4.sh
│ ├── deployment-configs
│ │ ├── empty_config.yml
│ │ └── smithery.yaml
│ ├── development
│ │ └── test_fastmcp.py
│ ├── docs-removed-2025-08-23
│ │ ├── authentication.md
│ │ ├── claude_integration.md
│ │ ├── claude-code-compatibility.md
│ │ ├── claude-code-integration.md
│ │ ├── claude-code-quickstart.md
│ │ ├── claude-desktop-setup.md
│ │ ├── complete-setup-guide.md
│ │ ├── database-synchronization.md
│ │ ├── development
│ │ │ ├── autonomous-memory-consolidation.md
│ │ │ ├── CLEANUP_PLAN.md
│ │ │ ├── CLEANUP_README.md
│ │ │ ├── CLEANUP_SUMMARY.md
│ │ │ ├── dream-inspired-memory-consolidation.md
│ │ │ ├── hybrid-slm-memory-consolidation.md
│ │ │ ├── mcp-milestone.md
│ │ │ ├── multi-client-architecture.md
│ │ │ ├── test-results.md
│ │ │ └── TIMESTAMP_FIX_SUMMARY.md
│ │ ├── distributed-sync.md
│ │ ├── invocation_guide.md
│ │ ├── macos-intel.md
│ │ ├── master-guide.md
│ │ ├── mcp-client-configuration.md
│ │ ├── multi-client-server.md
│ │ ├── service-installation.md
│ │ ├── sessions
│ │ │ └── MCP_ENHANCEMENT_SESSION_MEMORY_v4.1.0.md
│ │ ├── UBUNTU_SETUP.md
│ │ ├── ubuntu.md
│ │ ├── windows-setup.md
│ │ └── windows.md
│ ├── docs-root-cleanup-2025-08-23
│ │ ├── AWESOME_LIST_SUBMISSION.md
│ │ ├── CLOUDFLARE_IMPLEMENTATION.md
│ │ ├── DOCUMENTATION_ANALYSIS.md
│ │ ├── DOCUMENTATION_CLEANUP_PLAN.md
│ │ ├── DOCUMENTATION_CONSOLIDATION_COMPLETE.md
│ │ ├── LITESTREAM_SETUP_GUIDE.md
│ │ ├── lm_studio_system_prompt.md
│ │ ├── PYTORCH_DOWNLOAD_FIX.md
│ │ └── README-ORIGINAL-BACKUP.md
│ ├── investigations
│ │ └── MACOS_HOOKS_INVESTIGATION.md
│ ├── litestream-configs-v6.3.0
│ │ ├── install_service.sh
│ │ ├── litestream_master_config_fixed.yml
│ │ ├── litestream_master_config.yml
│ │ ├── litestream_replica_config_fixed.yml
│ │ ├── litestream_replica_config.yml
│ │ ├── litestream_replica_simple.yml
│ │ ├── litestream-http.service
│ │ ├── litestream.service
│ │ └── requirements-cloudflare.txt
│ ├── release-notes
│ │ └── release-notes-v7.1.4.md
│ └── setup-development
│ ├── README.md
│ ├── setup_consolidation_mdns.sh
│ ├── STARTUP_SETUP_GUIDE.md
│ └── test_service.sh
├── CHANGELOG-HISTORIC.md
├── CHANGELOG.md
├── claude_commands
│ ├── memory-context.md
│ ├── memory-health.md
│ ├── memory-ingest-dir.md
│ ├── memory-ingest.md
│ ├── memory-recall.md
│ ├── memory-search.md
│ ├── memory-store.md
│ ├── README.md
│ └── session-start.md
├── claude-hooks
│ ├── config.json
│ ├── config.template.json
│ ├── CONFIGURATION.md
│ ├── core
│ │ ├── memory-retrieval.js
│ │ ├── mid-conversation.js
│ │ ├── session-end.js
│ │ ├── session-start.js
│ │ └── topic-change.js
│ ├── debug-pattern-test.js
│ ├── install_claude_hooks_windows.ps1
│ ├── install_hooks.py
│ ├── memory-mode-controller.js
│ ├── MIGRATION.md
│ ├── README-NATURAL-TRIGGERS.md
│ ├── README-phase2.md
│ ├── README.md
│ ├── simple-test.js
│ ├── statusline.sh
│ ├── test-adaptive-weights.js
│ ├── test-dual-protocol-hook.js
│ ├── test-mcp-hook.js
│ ├── test-natural-triggers.js
│ ├── test-recency-scoring.js
│ ├── tests
│ │ ├── integration-test.js
│ │ ├── phase2-integration-test.js
│ │ ├── test-code-execution.js
│ │ ├── test-cross-session.json
│ │ ├── test-session-tracking.json
│ │ └── test-threading.json
│ ├── utilities
│ │ ├── adaptive-pattern-detector.js
│ │ ├── context-formatter.js
│ │ ├── context-shift-detector.js
│ │ ├── conversation-analyzer.js
│ │ ├── dynamic-context-updater.js
│ │ ├── git-analyzer.js
│ │ ├── mcp-client.js
│ │ ├── memory-client.js
│ │ ├── memory-scorer.js
│ │ ├── performance-manager.js
│ │ ├── project-detector.js
│ │ ├── session-tracker.js
│ │ ├── tiered-conversation-monitor.js
│ │ └── version-checker.js
│ └── WINDOWS-SESSIONSTART-BUG.md
├── CLAUDE.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Development-Sprint-November-2025.md
├── docs
│ ├── amp-cli-bridge.md
│ ├── api
│ │ ├── code-execution-interface.md
│ │ ├── memory-metadata-api.md
│ │ ├── PHASE1_IMPLEMENTATION_SUMMARY.md
│ │ ├── PHASE2_IMPLEMENTATION_SUMMARY.md
│ │ ├── PHASE2_REPORT.md
│ │ └── tag-standardization.md
│ ├── architecture
│ │ ├── search-enhancement-spec.md
│ │ └── search-examples.md
│ ├── architecture.md
│ ├── archive
│ │ └── obsolete-workflows
│ │ ├── load_memory_context.md
│ │ └── README.md
│ ├── assets
│ │ └── images
│ │ ├── dashboard-v3.3.0-preview.png
│ │ ├── memory-awareness-hooks-example.png
│ │ ├── project-infographic.svg
│ │ └── README.md
│ ├── CLAUDE_CODE_QUICK_REFERENCE.md
│ ├── cloudflare-setup.md
│ ├── deployment
│ │ ├── docker.md
│ │ ├── dual-service.md
│ │ ├── production-guide.md
│ │ └── systemd-service.md
│ ├── development
│ │ ├── ai-agent-instructions.md
│ │ ├── code-quality
│ │ │ ├── phase-2a-completion.md
│ │ │ ├── phase-2a-handle-get-prompt.md
│ │ │ ├── phase-2a-index.md
│ │ │ ├── phase-2a-install-package.md
│ │ │ └── phase-2b-session-summary.md
│ │ ├── code-quality-workflow.md
│ │ ├── dashboard-workflow.md
│ │ ├── issue-management.md
│ │ ├── pr-review-guide.md
│ │ ├── refactoring-notes.md
│ │ ├── release-checklist.md
│ │ └── todo-tracker.md
│ ├── docker-optimized-build.md
│ ├── document-ingestion.md
│ ├── DOCUMENTATION_AUDIT.md
│ ├── enhancement-roadmap-issue-14.md
│ ├── examples
│ │ ├── analysis-scripts.js
│ │ ├── maintenance-session-example.md
│ │ ├── memory-distribution-chart.jsx
│ │ └── tag-schema.json
│ ├── first-time-setup.md
│ ├── glama-deployment.md
│ ├── guides
│ │ ├── advanced-command-examples.md
│ │ ├── chromadb-migration.md
│ │ ├── commands-vs-mcp-server.md
│ │ ├── mcp-enhancements.md
│ │ ├── mdns-service-discovery.md
│ │ ├── memory-consolidation-guide.md
│ │ ├── migration.md
│ │ ├── scripts.md
│ │ └── STORAGE_BACKENDS.md
│ ├── HOOK_IMPROVEMENTS.md
│ ├── hooks
│ │ └── phase2-code-execution-migration.md
│ ├── http-server-management.md
│ ├── ide-compatability.md
│ ├── IMAGE_RETENTION_POLICY.md
│ ├── images
│ │ └── dashboard-placeholder.md
│ ├── implementation
│ │ ├── health_checks.md
│ │ └── performance.md
│ ├── IMPLEMENTATION_PLAN_HTTP_SSE.md
│ ├── integration
│ │ ├── homebrew.md
│ │ └── multi-client.md
│ ├── integrations
│ │ ├── gemini.md
│ │ ├── groq-bridge.md
│ │ ├── groq-integration-summary.md
│ │ └── groq-model-comparison.md
│ ├── integrations.md
│ ├── legacy
│ │ └── dual-protocol-hooks.md
│ ├── LM_STUDIO_COMPATIBILITY.md
│ ├── maintenance
│ │ └── memory-maintenance.md
│ ├── mastery
│ │ ├── api-reference.md
│ │ ├── architecture-overview.md
│ │ ├── configuration-guide.md
│ │ ├── local-setup-and-run.md
│ │ ├── testing-guide.md
│ │ └── troubleshooting.md
│ ├── migration
│ │ └── code-execution-api-quick-start.md
│ ├── natural-memory-triggers
│ │ ├── cli-reference.md
│ │ ├── installation-guide.md
│ │ └── performance-optimization.md
│ ├── oauth-setup.md
│ ├── pr-graphql-integration.md
│ ├── quick-setup-cloudflare-dual-environment.md
│ ├── README.md
│ ├── remote-configuration-wiki-section.md
│ ├── research
│ │ ├── code-execution-interface-implementation.md
│ │ └── code-execution-interface-summary.md
│ ├── ROADMAP.md
│ ├── sqlite-vec-backend.md
│ ├── statistics
│ │ ├── charts
│ │ │ ├── activity_patterns.png
│ │ │ ├── contributors.png
│ │ │ ├── growth_trajectory.png
│ │ │ ├── monthly_activity.png
│ │ │ └── october_sprint.png
│ │ ├── data
│ │ │ ├── activity_by_day.csv
│ │ │ ├── activity_by_hour.csv
│ │ │ ├── contributors.csv
│ │ │ └── monthly_activity.csv
│ │ ├── generate_charts.py
│ │ └── REPOSITORY_STATISTICS.md
│ ├── technical
│ │ ├── development.md
│ │ ├── memory-migration.md
│ │ ├── migration-log.md
│ │ ├── sqlite-vec-embedding-fixes.md
│ │ └── tag-storage.md
│ ├── testing
│ │ └── regression-tests.md
│ ├── testing-cloudflare-backend.md
│ ├── troubleshooting
│ │ ├── cloudflare-api-token-setup.md
│ │ ├── cloudflare-authentication.md
│ │ ├── general.md
│ │ ├── hooks-quick-reference.md
│ │ ├── pr162-schema-caching-issue.md
│ │ ├── session-end-hooks.md
│ │ └── sync-issues.md
│ └── tutorials
│ ├── advanced-techniques.md
│ ├── data-analysis.md
│ └── demo-session-walkthrough.md
├── examples
│ ├── claude_desktop_config_template.json
│ ├── claude_desktop_config_windows.json
│ ├── claude-desktop-http-config.json
│ ├── config
│ │ └── claude_desktop_config.json
│ ├── http-mcp-bridge.js
│ ├── memory_export_template.json
│ ├── README.md
│ ├── setup
│ │ └── setup_multi_client_complete.py
│ └── start_https_example.sh
├── install_service.py
├── install.py
├── LICENSE
├── NOTICE
├── pyproject.toml
├── pytest.ini
├── README.md
├── run_server.py
├── scripts
│ ├── .claude
│ │ └── settings.local.json
│ ├── archive
│ │ └── check_missing_timestamps.py
│ ├── backup
│ │ ├── backup_memories.py
│ │ ├── backup_sqlite_vec.sh
│ │ ├── export_distributable_memories.sh
│ │ └── restore_memories.py
│ ├── benchmarks
│ │ ├── benchmark_code_execution_api.py
│ │ ├── benchmark_hybrid_sync.py
│ │ └── benchmark_server_caching.py
│ ├── database
│ │ ├── analyze_sqlite_vec_db.py
│ │ ├── check_sqlite_vec_status.py
│ │ ├── db_health_check.py
│ │ └── simple_timestamp_check.py
│ ├── development
│ │ ├── debug_server_initialization.py
│ │ ├── find_orphaned_files.py
│ │ ├── fix_mdns.sh
│ │ ├── fix_sitecustomize.py
│ │ ├── remote_ingest.sh
│ │ ├── setup-git-merge-drivers.sh
│ │ ├── uv-lock-merge.sh
│ │ └── verify_hybrid_sync.py
│ ├── hooks
│ │ └── pre-commit
│ ├── installation
│ │ ├── install_linux_service.py
│ │ ├── install_macos_service.py
│ │ ├── install_uv.py
│ │ ├── install_windows_service.py
│ │ ├── install.py
│ │ ├── setup_backup_cron.sh
│ │ ├── setup_claude_mcp.sh
│ │ └── setup_cloudflare_resources.py
│ ├── linux
│ │ ├── service_status.sh
│ │ ├── start_service.sh
│ │ ├── stop_service.sh
│ │ ├── uninstall_service.sh
│ │ └── view_logs.sh
│ ├── maintenance
│ │ ├── assign_memory_types.py
│ │ ├── check_memory_types.py
│ │ ├── cleanup_corrupted_encoding.py
│ │ ├── cleanup_memories.py
│ │ ├── cleanup_organize.py
│ │ ├── consolidate_memory_types.py
│ │ ├── consolidation_mappings.json
│ │ ├── delete_orphaned_vectors_fixed.py
│ │ ├── fast_cleanup_duplicates_with_tracking.sh
│ │ ├── find_all_duplicates.py
│ │ ├── find_cloudflare_duplicates.py
│ │ ├── find_duplicates.py
│ │ ├── memory-types.md
│ │ ├── README.md
│ │ ├── recover_timestamps_from_cloudflare.py
│ │ ├── regenerate_embeddings.py
│ │ ├── repair_malformed_tags.py
│ │ ├── repair_memories.py
│ │ ├── repair_sqlite_vec_embeddings.py
│ │ ├── repair_zero_embeddings.py
│ │ ├── restore_from_json_export.py
│ │ └── scan_todos.sh
│ ├── migration
│ │ ├── cleanup_mcp_timestamps.py
│ │ ├── legacy
│ │ │ └── migrate_chroma_to_sqlite.py
│ │ ├── mcp-migration.py
│ │ ├── migrate_sqlite_vec_embeddings.py
│ │ ├── migrate_storage.py
│ │ ├── migrate_tags.py
│ │ ├── migrate_timestamps.py
│ │ ├── migrate_to_cloudflare.py
│ │ ├── migrate_to_sqlite_vec.py
│ │ ├── migrate_v5_enhanced.py
│ │ ├── TIMESTAMP_CLEANUP_README.md
│ │ └── verify_mcp_timestamps.py
│ ├── pr
│ │ ├── amp_collect_results.sh
│ │ ├── amp_detect_breaking_changes.sh
│ │ ├── amp_generate_tests.sh
│ │ ├── amp_pr_review.sh
│ │ ├── amp_quality_gate.sh
│ │ ├── amp_suggest_fixes.sh
│ │ ├── auto_review.sh
│ │ ├── detect_breaking_changes.sh
│ │ ├── generate_tests.sh
│ │ ├── lib
│ │ │ └── graphql_helpers.sh
│ │ ├── quality_gate.sh
│ │ ├── resolve_threads.sh
│ │ ├── run_pyscn_analysis.sh
│ │ ├── run_quality_checks.sh
│ │ ├── thread_status.sh
│ │ └── watch_reviews.sh
│ ├── quality
│ │ ├── fix_dead_code_install.sh
│ │ ├── phase1_dead_code_analysis.md
│ │ ├── phase2_complexity_analysis.md
│ │ ├── README_PHASE1.md
│ │ ├── README_PHASE2.md
│ │ ├── track_pyscn_metrics.sh
│ │ └── weekly_quality_review.sh
│ ├── README.md
│ ├── run
│ │ ├── run_mcp_memory.sh
│ │ ├── run-with-uv.sh
│ │ └── start_sqlite_vec.sh
│ ├── run_memory_server.py
│ ├── server
│ │ ├── check_http_server.py
│ │ ├── check_server_health.py
│ │ ├── memory_offline.py
│ │ ├── preload_models.py
│ │ ├── run_http_server.py
│ │ ├── run_memory_server.py
│ │ ├── start_http_server.bat
│ │ └── start_http_server.sh
│ ├── service
│ │ ├── deploy_dual_services.sh
│ │ ├── install_http_service.sh
│ │ ├── mcp-memory-http.service
│ │ ├── mcp-memory.service
│ │ ├── memory_service_manager.sh
│ │ ├── service_control.sh
│ │ ├── service_utils.py
│ │ └── update_service.sh
│ ├── sync
│ │ ├── check_drift.py
│ │ ├── claude_sync_commands.py
│ │ ├── export_memories.py
│ │ ├── import_memories.py
│ │ ├── litestream
│ │ │ ├── apply_local_changes.sh
│ │ │ ├── enhanced_memory_store.sh
│ │ │ ├── init_staging_db.sh
│ │ │ ├── io.litestream.replication.plist
│ │ │ ├── manual_sync.sh
│ │ │ ├── memory_sync.sh
│ │ │ ├── pull_remote_changes.sh
│ │ │ ├── push_to_remote.sh
│ │ │ ├── README.md
│ │ │ ├── resolve_conflicts.sh
│ │ │ ├── setup_local_litestream.sh
│ │ │ ├── setup_remote_litestream.sh
│ │ │ ├── staging_db_init.sql
│ │ │ ├── stash_local_changes.sh
│ │ │ ├── sync_from_remote_noconfig.sh
│ │ │ └── sync_from_remote.sh
│ │ ├── README.md
│ │ ├── safe_cloudflare_update.sh
│ │ ├── sync_memory_backends.py
│ │ └── sync_now.py
│ ├── testing
│ │ ├── run_complete_test.py
│ │ ├── run_memory_test.sh
│ │ ├── simple_test.py
│ │ ├── test_cleanup_logic.py
│ │ ├── test_cloudflare_backend.py
│ │ ├── test_docker_functionality.py
│ │ ├── test_installation.py
│ │ ├── test_mdns.py
│ │ ├── test_memory_api.py
│ │ ├── test_memory_simple.py
│ │ ├── test_migration.py
│ │ ├── test_search_api.py
│ │ ├── test_sqlite_vec_embeddings.py
│ │ ├── test_sse_events.py
│ │ ├── test-connection.py
│ │ └── test-hook.js
│ ├── utils
│ │ ├── claude_commands_utils.py
│ │ ├── generate_personalized_claude_md.sh
│ │ ├── groq
│ │ ├── groq_agent_bridge.py
│ │ ├── list-collections.py
│ │ ├── memory_wrapper_uv.py
│ │ ├── query_memories.py
│ │ ├── smithery_wrapper.py
│ │ ├── test_groq_bridge.sh
│ │ └── uv_wrapper.py
│ └── validation
│ ├── check_dev_setup.py
│ ├── check_documentation_links.py
│ ├── diagnose_backend_config.py
│ ├── validate_configuration_complete.py
│ ├── validate_memories.py
│ ├── validate_migration.py
│ ├── validate_timestamp_integrity.py
│ ├── verify_environment.py
│ ├── verify_pytorch_windows.py
│ └── verify_torch.py
├── SECURITY.md
├── selective_timestamp_recovery.py
├── SPONSORS.md
├── src
│ └── mcp_memory_service
│ ├── __init__.py
│ ├── api
│ │ ├── __init__.py
│ │ ├── client.py
│ │ ├── operations.py
│ │ ├── sync_wrapper.py
│ │ └── types.py
│ ├── backup
│ │ ├── __init__.py
│ │ └── scheduler.py
│ ├── cli
│ │ ├── __init__.py
│ │ ├── ingestion.py
│ │ ├── main.py
│ │ └── utils.py
│ ├── config.py
│ ├── consolidation
│ │ ├── __init__.py
│ │ ├── associations.py
│ │ ├── base.py
│ │ ├── clustering.py
│ │ ├── compression.py
│ │ ├── consolidator.py
│ │ ├── decay.py
│ │ ├── forgetting.py
│ │ ├── health.py
│ │ └── scheduler.py
│ ├── dependency_check.py
│ ├── discovery
│ │ ├── __init__.py
│ │ ├── client.py
│ │ └── mdns_service.py
│ ├── embeddings
│ │ ├── __init__.py
│ │ └── onnx_embeddings.py
│ ├── ingestion
│ │ ├── __init__.py
│ │ ├── base.py
│ │ ├── chunker.py
│ │ ├── csv_loader.py
│ │ ├── json_loader.py
│ │ ├── pdf_loader.py
│ │ ├── registry.py
│ │ ├── semtools_loader.py
│ │ └── text_loader.py
│ ├── lm_studio_compat.py
│ ├── mcp_server.py
│ ├── models
│ │ ├── __init__.py
│ │ └── memory.py
│ ├── server.py
│ ├── services
│ │ ├── __init__.py
│ │ └── memory_service.py
│ ├── storage
│ │ ├── __init__.py
│ │ ├── base.py
│ │ ├── cloudflare.py
│ │ ├── factory.py
│ │ ├── http_client.py
│ │ ├── hybrid.py
│ │ └── sqlite_vec.py
│ ├── sync
│ │ ├── __init__.py
│ │ ├── exporter.py
│ │ ├── importer.py
│ │ └── litestream_config.py
│ ├── utils
│ │ ├── __init__.py
│ │ ├── cache_manager.py
│ │ ├── content_splitter.py
│ │ ├── db_utils.py
│ │ ├── debug.py
│ │ ├── document_processing.py
│ │ ├── gpu_detection.py
│ │ ├── hashing.py
│ │ ├── http_server_manager.py
│ │ ├── port_detection.py
│ │ ├── system_detection.py
│ │ └── time_parser.py
│ └── web
│ ├── __init__.py
│ ├── api
│ │ ├── __init__.py
│ │ ├── analytics.py
│ │ ├── backup.py
│ │ ├── consolidation.py
│ │ ├── documents.py
│ │ ├── events.py
│ │ ├── health.py
│ │ ├── manage.py
│ │ ├── mcp.py
│ │ ├── memories.py
│ │ ├── search.py
│ │ └── sync.py
│ ├── app.py
│ ├── dependencies.py
│ ├── oauth
│ │ ├── __init__.py
│ │ ├── authorization.py
│ │ ├── discovery.py
│ │ ├── middleware.py
│ │ ├── models.py
│ │ ├── registration.py
│ │ └── storage.py
│ ├── sse.py
│ └── static
│ ├── app.js
│ ├── index.html
│ ├── README.md
│ ├── sse_test.html
│ └── style.css
├── start_http_debug.bat
├── start_http_server.sh
├── test_document.txt
├── test_version_checker.js
├── tests
│ ├── __init__.py
│ ├── api
│ │ ├── __init__.py
│ │ ├── test_compact_types.py
│ │ └── test_operations.py
│ ├── bridge
│ │ ├── mock_responses.js
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ └── test_http_mcp_bridge.js
│ ├── conftest.py
│ ├── consolidation
│ │ ├── __init__.py
│ │ ├── conftest.py
│ │ ├── test_associations.py
│ │ ├── test_clustering.py
│ │ ├── test_compression.py
│ │ ├── test_consolidator.py
│ │ ├── test_decay.py
│ │ └── test_forgetting.py
│ ├── contracts
│ │ └── api-specification.yml
│ ├── integration
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── test_api_key_fallback.py
│ │ ├── test_api_memories_chronological.py
│ │ ├── test_api_tag_time_search.py
│ │ ├── test_api_with_memory_service.py
│ │ ├── test_bridge_integration.js
│ │ ├── test_cli_interfaces.py
│ │ ├── test_cloudflare_connection.py
│ │ ├── test_concurrent_clients.py
│ │ ├── test_data_serialization_consistency.py
│ │ ├── test_http_server_startup.py
│ │ ├── test_mcp_memory.py
│ │ ├── test_mdns_integration.py
│ │ ├── test_oauth_basic_auth.py
│ │ ├── test_oauth_flow.py
│ │ ├── test_server_handlers.py
│ │ └── test_store_memory.py
│ ├── performance
│ │ ├── test_background_sync.py
│ │ └── test_hybrid_live.py
│ ├── README.md
│ ├── smithery
│ │ └── test_smithery.py
│ ├── sqlite
│ │ └── simple_sqlite_vec_test.py
│ ├── test_client.py
│ ├── test_content_splitting.py
│ ├── test_database.py
│ ├── test_hybrid_cloudflare_limits.py
│ ├── test_hybrid_storage.py
│ ├── test_memory_ops.py
│ ├── test_semantic_search.py
│ ├── test_sqlite_vec_storage.py
│ ├── test_time_parser.py
│ ├── test_timestamp_preservation.py
│ ├── timestamp
│ │ ├── test_hook_vs_manual_storage.py
│ │ ├── test_issue99_final_validation.py
│ │ ├── test_search_retrieval_inconsistency.py
│ │ ├── test_timestamp_issue.py
│ │ └── test_timestamp_simple.py
│ └── unit
│ ├── conftest.py
│ ├── test_cloudflare_storage.py
│ ├── test_csv_loader.py
│ ├── test_fastapi_dependencies.py
│ ├── test_import.py
│ ├── test_json_loader.py
│ ├── test_mdns_simple.py
│ ├── test_mdns.py
│ ├── test_memory_service.py
│ ├── test_memory.py
│ ├── test_semtools_loader.py
│ ├── test_storage_interface_compatibility.py
│ └── test_tag_time_filtering.py
├── tools
│ ├── docker
│ │ ├── DEPRECATED.md
│ │ ├── docker-compose.http.yml
│ │ ├── docker-compose.pythonpath.yml
│ │ ├── docker-compose.standalone.yml
│ │ ├── docker-compose.uv.yml
│ │ ├── docker-compose.yml
│ │ ├── docker-entrypoint-persistent.sh
│ │ ├── docker-entrypoint-unified.sh
│ │ ├── docker-entrypoint.sh
│ │ ├── Dockerfile
│ │ ├── Dockerfile.glama
│ │ ├── Dockerfile.slim
│ │ ├── README.md
│ │ └── test-docker-modes.sh
│ └── README.md
└── uv.lock
```
# Files
--------------------------------------------------------------------------------
/src/mcp_memory_service/utils/hashing.py:
--------------------------------------------------------------------------------
```python
1 | # Copyright 2024 Heinrich Krupp
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | import hashlib
16 | import json
17 | from typing import Any, Dict, Optional
18 |
def generate_content_hash(content: str, metadata: Optional[Dict[str, Any]] = None) -> str:
    """
    Generate a deterministic SHA-256 hash for content plus static metadata.

    Consistent hashing is ensured by:
    1. Normalizing content (strip surrounding whitespace, lowercase)
    2. Excluding dynamic fields ('timestamp', 'content_hash', 'embedding')
       so re-stored copies of the same memory hash identically
    3. Serializing remaining metadata as JSON with sorted keys and
       ASCII-only escapes, so key order and encoding cannot vary

    Args:
        content: The memory content to hash.
        metadata: Optional metadata dict; dynamic fields are ignored.

    Returns:
        64-character hexadecimal SHA-256 digest.
    """
    # Normalize so whitespace/case variants of the same text collide on purpose
    normalized_content = content.strip().lower()

    hash_content = normalized_content

    if metadata:
        # Set literal gives O(1) membership tests (was a list scan per key)
        static_metadata = {
            k: v for k, v in metadata.items()
            if k not in {'timestamp', 'content_hash', 'embedding'}
        }
        if static_metadata:
            # sort_keys + ensure_ascii makes the serialization canonical
            hash_content += json.dumps(static_metadata, sort_keys=True, ensure_ascii=True)

    return hashlib.sha256(hash_content.encode('utf-8')).hexdigest()
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/consolidation/__init__.py:
--------------------------------------------------------------------------------
```python
1 | # Copyright 2024 Heinrich Krupp
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """
16 | Dream-inspired memory consolidation system.
17 |
18 | This module implements autonomous memory consolidation inspired by human cognitive
19 | processes during sleep cycles, featuring exponential decay scoring, creative
20 | association discovery, semantic compression, and controlled forgetting.
21 | """
22 |
23 | from .base import ConsolidationBase
24 | from .decay import ExponentialDecayCalculator
25 | from .associations import CreativeAssociationEngine
26 | from .clustering import SemanticClusteringEngine
27 | from .compression import SemanticCompressionEngine
28 | from .forgetting import ControlledForgettingEngine
29 | from .consolidator import DreamInspiredConsolidator
30 | from .scheduler import ConsolidationScheduler
31 | from .health import ConsolidationHealthMonitor, HealthStatus, HealthMetric, HealthAlert
32 |
33 | __all__ = [
34 | 'ConsolidationBase',
35 | 'ExponentialDecayCalculator',
36 | 'CreativeAssociationEngine',
37 | 'SemanticClusteringEngine',
38 | 'SemanticCompressionEngine',
39 | 'ControlledForgettingEngine',
40 | 'DreamInspiredConsolidator',
41 | 'ConsolidationScheduler',
42 | 'ConsolidationHealthMonitor',
43 | 'HealthStatus',
44 | 'HealthMetric',
45 | 'HealthAlert'
46 | ]
```
--------------------------------------------------------------------------------
/claude-hooks/config.template.json:
--------------------------------------------------------------------------------
```json
1 | {
2 | "memoryService": {
3 | "endpoint": "https://your-server:8443",
4 | "apiKey": "your-api-key-here",
5 | "defaultTags": ["claude-code", "auto-generated"],
6 | "maxMemoriesPerSession": 8,
7 | "enableSessionConsolidation": true
8 | },
9 | "projectDetection": {
10 | "gitRepository": true,
11 | "packageFiles": ["package.json", "pyproject.toml", "Cargo.toml", "go.mod", "pom.xml"],
12 | "frameworkDetection": true,
13 | "languageDetection": true,
14 | "confidenceThreshold": 0.3
15 | },
16 | "memoryScoring": {
17 | "weights": {
18 | "timeDecay": 0.3,
19 | "tagRelevance": 0.4,
20 | "contentRelevance": 0.2,
21 | "typeBonus": 0.1
22 | },
23 | "minRelevanceScore": 0.3,
24 | "timeDecayRate": 0.1
25 | },
26 | "contextFormatting": {
27 | "includeProjectSummary": true,
28 | "includeRelevanceScores": false,
29 | "groupByCategory": true,
30 | "maxContentLength": 200,
31 | "includeTimestamps": true
32 | },
33 | "sessionAnalysis": {
34 | "extractTopics": true,
35 | "extractDecisions": true,
36 | "extractInsights": true,
37 | "extractCodeChanges": true,
38 | "extractNextSteps": true,
39 | "minSessionLength": 100,
40 | "minConfidence": 0.1
41 | },
42 | "hooks": {
43 | "sessionStart": {
44 | "enabled": true,
45 | "timeout": 10000,
46 | "priority": "high"
47 | },
48 | "sessionEnd": {
49 | "enabled": true,
50 | "timeout": 15000,
51 | "priority": "normal"
52 | },
53 | "topicChange": {
54 | "enabled": false,
55 | "timeout": 5000,
56 | "priority": "low"
57 | }
58 | },
59 | "output": {
60 | "verbose": true,
61 | "showMemoryDetails": false,
62 | "showProjectDetails": true,
63 | "showScoringDetails": false,
64 | "cleanMode": false
65 | },
66 | "logging": {
67 | "level": "info",
68 | "enableDebug": false,
69 | "logToFile": false,
70 | "logFilePath": "./claude-hooks.log"
71 | }
72 | }
```
--------------------------------------------------------------------------------
/tools/docker/docker-entrypoint.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Docker entrypoint script for MCP Memory Service
#
# Launches the memory server (via the UV wrapper or directly, or delegates to
# the standalone entrypoint) while a background heartbeat loop keeps the
# container's stdio pipe from going idle.

set -e

echo "[INFO] Starting MCP Memory Service in Docker container"

# Forward termination signals to the server process and exit cleanly.
handle_signal() {
    echo "[INFO] Received signal, shutting down..."
    if [ -n "$SERVER_PID" ]; then
        kill -TERM $SERVER_PID 2>/dev/null || true
    fi
    exit 0
}

# Set up signal handlers
trap handle_signal SIGTERM SIGINT

# Emit a newline every 30 seconds.
# NOTE(review): despite the original "keep stdin alive" naming, this writes to
# the script's stdout (not the server's stdin); it serves as a heartbeat that
# keeps the container pipe open — confirm intent before renaming.
keep_stdin_alive() {
    while true; do
        echo "" 2>/dev/null || break
        sleep 30
    done
}

# Run "$@" as the server with the heartbeat loop in the background, wait for
# it, clean up the heartbeat, and exit with the server's exit code.
# (Factored out: both launch branches previously duplicated this sequence.)
run_with_keepalive() {
    keep_stdin_alive &
    KEEPALIVE_PID=$!

    "$@" &
    SERVER_PID=$!

    wait $SERVER_PID
    SERVER_EXIT_CODE=$?

    kill $KEEPALIVE_PID 2>/dev/null || true

    exit $SERVER_EXIT_CODE
}

# Standalone mode delegates entirely to the persistent entrypoint.
if [ "${MCP_STANDALONE_MODE}" = "1" ]; then
    echo "[INFO] Running in standalone mode"
    exec /usr/local/bin/docker-entrypoint-persistent.sh "$@"
fi

if [ "${UV_ACTIVE}" = "1" ]; then
    echo "[INFO] Running with UV wrapper"
    run_with_keepalive python -u uv_wrapper.py "$@"
else
    echo "[INFO] Running directly with Python"
    run_with_keepalive python -u -m mcp_memory_service.server "$@"
fi
```
--------------------------------------------------------------------------------
/scripts/sync/litestream/resolve_conflicts.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Simple conflict resolution helper
#
# Interactively walks through staged memories whose conflict_status is
# 'detected' or 'push_failed' and lets the user retry, delete, or skip each.

STAGING_DB="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec_staging.db"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

if [ ! -f "$STAGING_DB" ]; then
    echo -e "${RED}No staging database found${NC}"
    exit 1
fi

# Get conflicts
CONFLICTS=$(sqlite3 "$STAGING_DB" "
    SELECT id, content, staged_at, conflict_status
    FROM staged_memories
    WHERE conflict_status IN ('detected', 'push_failed')
    ORDER BY staged_at DESC;
")

if [ -z "$CONFLICTS" ]; then
    echo -e "${GREEN}No conflicts to resolve${NC}"
    exit 0
fi

echo -e "${YELLOW}Found conflicts to resolve:${NC}"
echo ""

# BUGFIX: feed the conflict list on fd 3 instead of piping it into the loop.
# With `echo "$CONFLICTS" | while read ...`, the loop's stdin IS the conflict
# list, so the interactive `read -p` below silently consumed the NEXT conflict
# record as the "user's answer" instead of reading from the terminal.
while IFS='|' read -r id content staged_at status <&3; do
    echo -e "${RED}Conflict: $status${NC}"
    echo -e "Content: ${content:0:80}..."
    echo -e "Staged: $staged_at"
    echo -e "ID: $id"
    echo ""
    echo "Actions:"
    echo "  1. Keep and retry push"
    echo "  2. Delete (abandon change)"
    echo "  3. Skip for now"
    echo ""

    # Reads from the terminal's stdin, which fd 3 above leaves untouched.
    read -p "Choose action (1/2/3): " action

    case $action in
        1)
            # NOTE(review): $id is interpolated into SQL; it originates from
            # this same local database, but escaping/parameterizing would be
            # safer if ids can ever contain quotes.
            sqlite3 "$STAGING_DB" "
                UPDATE staged_memories
                SET conflict_status = 'none'
                WHERE id = '$id';
            "
            echo -e "${GREEN}Marked for retry${NC}"
            ;;
        2)
            sqlite3 "$STAGING_DB" "DELETE FROM staged_memories WHERE id = '$id';"
            echo -e "${YELLOW}Deleted${NC}"
            ;;
        3)
            echo -e "${YELLOW}Skipped${NC}"
            ;;
        *)
            echo -e "${YELLOW}Invalid choice, skipped${NC}"
            ;;
    esac
    echo ""
done 3<<< "$CONFLICTS"

echo -e "${GREEN}Conflict resolution completed${NC}"
```
--------------------------------------------------------------------------------
/examples/memory_export_template.json:
--------------------------------------------------------------------------------
```json
1 | {
2 | "export_metadata": {
3 | "source_machine": "example-hostname",
4 | "export_timestamp": "2025-08-21T12:00:00.000000",
5 | "total_memories": 3,
6 | "database_path": "/path/to/sqlite_vec.db",
7 | "platform": "Linux",
8 | "python_version": "3.11.0",
9 | "include_embeddings": false,
10 | "filter_tags": null,
11 | "exporter_version": "6.2.4"
12 | },
13 | "memories": [
14 | {
15 | "content": "MCP Memory Service is a Model Context Protocol server that provides semantic memory and persistent storage capabilities for Claude Desktop using SQLite-vec and sentence transformers.",
16 | "content_hash": "example-hash-1234567890abcdef",
17 | "tags": ["documentation", "project-overview"],
18 | "created_at": 1692633600.0,
19 | "updated_at": 1692633600.0,
20 | "memory_type": "note",
21 | "metadata": {
22 | "source": "example-machine",
23 | "project": "mcp-memory-service"
24 | }
25 | },
26 | {
27 | "content": "Key development commands: `uv run memory` to start server, `pytest tests/` for testing, `python install.py` for setup.",
28 | "content_hash": "example-hash-abcdef1234567890",
29 | "tags": ["commands", "development"],
30 | "created_at": 1692634200.0,
31 | "updated_at": 1692634200.0,
32 | "memory_type": "reference",
33 | "metadata": {
34 | "source": "example-machine",
35 | "category": "quick-reference"
36 | }
37 | },
38 | {
39 | "content": "SQLite-vec backend is now the default storage backend (v6.0+) offering fast performance and single-file database storage.",
40 | "content_hash": "example-hash-fedcba0987654321",
41 | "tags": ["architecture", "backend", "sqlite-vec"],
42 | "created_at": 1692634800.0,
43 | "updated_at": 1692634800.0,
44 | "memory_type": "architectural-decision",
45 | "metadata": {
46 | "source": "example-machine",
47 | "version": "v6.0.0"
48 | }
49 | }
50 | ]
51 | }
```
--------------------------------------------------------------------------------
/docs/mastery/local-setup-and-run.md:
--------------------------------------------------------------------------------
```markdown
1 | # MCP Memory Service — Local Setup and Run
2 |
3 | Follow these steps to run the service locally, switch storage backends, and validate functionality.
4 |
5 | ## 1) Install Dependencies
6 |
7 | Using uv (recommended):
8 |
9 | ```
10 | uv sync
11 | ```
12 |
13 | Using pip:
14 |
15 | ```
16 | python -m venv .venv
17 | source .venv/bin/activate # Windows: .venv\Scripts\activate
18 | pip install -e .
19 | ```
20 |
21 | If using SQLite-vec backend (recommended):
22 |
23 | ```
24 | uv add sqlite-vec sentence-transformers torch
25 | # or
26 | pip install sqlite-vec sentence-transformers torch
27 | ```
28 |
29 | ## 2) Choose Storage Backend
30 |
31 | SQLite-vec (default):
32 |
33 | ```
34 | export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
35 | # optional custom DB path
36 | export MCP_MEMORY_SQLITE_PATH="$HOME/.local/share/mcp-memory/sqlite_vec.db"
37 | ```
38 |
39 | ChromaDB (deprecated):
40 |
41 | ```
42 | export MCP_MEMORY_STORAGE_BACKEND=chroma
43 | export MCP_MEMORY_CHROMA_PATH="$HOME/.local/share/mcp-memory/chroma_db"
44 | ```
45 |
46 | Cloudflare:
47 |
48 | ```
49 | export MCP_MEMORY_STORAGE_BACKEND=cloudflare
50 | export CLOUDFLARE_API_TOKEN=...
51 | export CLOUDFLARE_ACCOUNT_ID=...
52 | export CLOUDFLARE_VECTORIZE_INDEX=...
53 | export CLOUDFLARE_D1_DATABASE_ID=...
54 | ```
55 |
56 | ## 3) Run the Server
57 |
58 | Stdio MCP server (integrates with Claude Desktop):
59 |
60 | ```
61 | uv run memory server
62 | ```
63 |
64 | FastMCP HTTP server (for Claude Code / remote):
65 |
66 | ```
67 | uv run mcp-memory-server
68 | ```
69 |
70 | Configure Claude Desktop example (~/.claude/config.json):
71 |
72 | ```
73 | {
74 | "mcpServers": {
75 | "memory": {
76 | "command": "uv",
77 | "args": ["--directory", "/path/to/mcp-memory-service", "run", "memory", "server"],
78 | "env": { "MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec" }
79 | }
80 | }
81 | }
82 | ```
83 |
84 | ## 4) Verify Health and Basic Ops
85 |
86 | CLI status:
87 |
88 | ```
89 | uv run memory status
90 | ```
91 |
92 | MCP tool flow (via client):
93 | - store_memory → retrieve_memory → search_by_tag → delete_memory
94 |
95 | ## 5) Run Tests
96 |
97 | ```
98 | pytest -q
99 | # or
100 | uv run pytest -q
101 | ```
102 |
103 | See also: `docs/mastery/testing-guide.md` and `docs/sqlite-vec-backend.md`.
104 |
105 |
```
--------------------------------------------------------------------------------
/docs/integrations.md:
--------------------------------------------------------------------------------
```markdown
1 | # MCP Memory Service Integrations
2 |
3 | This document catalogs tools, utilities, and integrations that extend the functionality of the MCP Memory Service.
4 |
5 | ## Official Integrations
6 |
7 | ### [MCP Memory Dashboard](https://github.com/doobidoo/mcp-memory-dashboard) *(work in progress)*
8 |
9 | A web-based dashboard for viewing, searching, and managing your MCP Memory Service data. The dashboard allows you to:
10 | - Browse and search memories
11 | - View memory metadata and tags
12 | - Delete unwanted memories
13 | - Perform semantic searches
14 | - Monitor system health
15 |
16 | ## Community Integrations
17 |
18 | ### [Claude Memory Context](https://github.com/doobidoo/claude-memory-context)
19 |
20 | A utility that enables Claude to start each conversation with awareness of the topics and important memories stored in your MCP Memory Service.
21 |
22 | This tool:
23 | - Queries your MCP memory service for recent and important memories
24 | - Extracts topics and content summaries
25 | - Formats this information into a structured context section
26 | - Updates Claude project instructions automatically
27 |
28 | The utility leverages Claude's project instructions feature without requiring any modifications to the MCP protocol. It can be automated to run periodically, ensuring Claude always has access to your latest memories.
29 |
30 | See the [Claude Memory Context repository](https://github.com/doobidoo/claude-memory-context) for installation and usage instructions.
31 |
32 | ---
33 |
34 | ## Adding Your Integration
35 |
36 | If you've built a tool or integration for the MCP Memory Service, we'd love to include it here. Please submit a pull request that adds your project to this document with:
37 |
38 | 1. The name of your integration (with link to repository)
39 | 2. A brief description (2-3 sentences)
40 | 3. A list of key features
41 | 4. Any installation notes or special requirements
42 |
43 | All listed integrations should be functional, documented, and actively maintained.
44 |
```
--------------------------------------------------------------------------------
/tools/docker/docker-entrypoint-unified.sh:
--------------------------------------------------------------------------------
```bash
1 | #!/bin/bash
2 | # Unified Docker entrypoint script for MCP Memory Service
3 | # Supports both MCP protocol mode and HTTP server mode
4 | 
5 | set -e
6 | 
7 | echo "[INFO] Starting MCP Memory Service in Docker container"
8 | 
9 | # Function to handle signals
10 | handle_signal() {
11 |     echo "[INFO] Received signal, shutting down..."
12 |     if [ -n "$SERVER_PID" ]; then
13 |         kill -TERM $SERVER_PID 2>/dev/null || true
14 |     fi
15 |     exit 0
16 | }
17 | 
18 | # Set up signal handlers
19 | trap handle_signal SIGTERM SIGINT
20 | 
21 | # Determine mode based on environment variable (defaults to MCP stdio mode)
22 | MODE="${MCP_MODE:-mcp}"
23 | echo "[INFO] Running in $MODE mode"
24 | 
25 | if [ "$MODE" = "http" ] || [ "$MODE" = "api" ]; then
26 |     # HTTP Server Mode
27 |     echo "[INFO] Starting HTTP server with FastAPI/Uvicorn"
28 | 
29 |     # Ensure we have the HTTP server file
30 |     if [ ! -f "/app/run_server.py" ]; then
31 |         echo "[ERROR] run_server.py not found. Please ensure it's copied in the Dockerfile"
32 |         exit 1
33 |     fi
34 | 
35 |     # Start the HTTP server (exec replaces the shell so signals reach the server directly)
36 |     exec python /app/run_server.py "$@"
37 | 
38 | elif [ "$MODE" = "mcp" ]; then
39 |     # MCP Protocol Mode (stdin/stdout)
40 |     echo "[INFO] Starting MCP protocol server (stdin/stdout communication)"
41 | 
42 |     # Function to keep stdin alive
43 |     keep_stdin_alive() {
44 |         while true; do
45 |             # NOTE(review): this echo writes to the script's *stdout* (the MCP JSON-RPC channel), not the server's stdin — confirm it actually keeps stdin open and cannot corrupt protocol framing
46 |             echo "" 2>/dev/null || break
47 |             sleep 30
48 |         done
49 |     }
50 | 
51 |     # Start the keep-alive process in the background
52 |     keep_stdin_alive &
53 |     KEEPALIVE_PID=$!
54 | 
55 |     # Run the MCP server (-u: unbuffered stdio, so MCP messages are not delayed by Python's buffering)
56 |     python -u -m mcp_memory_service.server "$@" &
57 |     SERVER_PID=$!
58 | 
59 |     # Wait for the server process
60 |     wait $SERVER_PID
61 |     SERVER_EXIT_CODE=$?
62 | 
63 |     # Clean up the keep-alive process
64 |     kill $KEEPALIVE_PID 2>/dev/null || true
65 | 
66 |     exit $SERVER_EXIT_CODE
67 | 
68 | else
69 |     echo "[ERROR] Unknown mode: $MODE. Use 'mcp' for protocol mode or 'http' for API mode"
70 |     exit 1
71 | fi
```
--------------------------------------------------------------------------------
/archive/setup-development/setup_consolidation_mdns.sh:
--------------------------------------------------------------------------------
```bash
1 | #!/bin/bash
2 | 
3 | # Setup script for MCP Memory Service with Consolidation and mDNS
4 | echo "Setting up MCP Memory Service with Consolidation and mDNS HTTPS..."
5 | 
6 | # Enable consolidation system
7 | export MCP_CONSOLIDATION_ENABLED=true
8 | 
9 | # Configure consolidation settings (retention values are in days)
10 | export MCP_DECAY_ENABLED=true
11 | export MCP_RETENTION_CRITICAL=365
12 | export MCP_RETENTION_REFERENCE=180
13 | export MCP_RETENTION_STANDARD=30
14 | export MCP_RETENTION_TEMPORARY=7
15 | 
16 | export MCP_ASSOCIATIONS_ENABLED=true
17 | export MCP_ASSOCIATION_MIN_SIMILARITY=0.3
18 | export MCP_ASSOCIATION_MAX_SIMILARITY=0.7
19 | export MCP_ASSOCIATION_MAX_PAIRS=100
20 | 
21 | export MCP_CLUSTERING_ENABLED=true
22 | export MCP_CLUSTERING_MIN_SIZE=5
23 | export MCP_CLUSTERING_ALGORITHM=dbscan
24 | 
25 | export MCP_COMPRESSION_ENABLED=true
26 | export MCP_COMPRESSION_MAX_LENGTH=500
27 | export MCP_COMPRESSION_PRESERVE_ORIGINALS=true
28 | 
29 | export MCP_FORGETTING_ENABLED=true
30 | export MCP_FORGETTING_RELEVANCE_THRESHOLD=0.1
31 | export MCP_FORGETTING_ACCESS_THRESHOLD=90
32 | 
33 | # Set consolidation schedule (cron-like)
34 | export MCP_SCHEDULE_DAILY="02:00"
35 | export MCP_SCHEDULE_WEEKLY="SUN 03:00"
36 | export MCP_SCHEDULE_MONTHLY="01 04:00"  # presumably day-of-month 01 at 04:00 — TODO confirm expected format
37 | 
38 | # Configure mDNS multi-client server with HTTPS
39 | export MCP_MDNS_ENABLED=true
40 | export MCP_MDNS_SERVICE_NAME="memory"
41 | export MCP_HTTPS_ENABLED=true
42 | 
43 | # HTTP server configuration
44 | export MCP_HTTP_ENABLED=true
45 | export MCP_HTTP_HOST=0.0.0.0
46 | export MCP_HTTP_PORT=8000
47 | 
48 | # Storage backend
49 | export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
50 | 
51 | # API security: fresh random key each run; it is only echoed as "[SET]" below, so capture $MCP_API_KEY separately if clients must authenticate
52 | export MCP_API_KEY="$(openssl rand -base64 32)"
53 | 
54 | echo "Configuration set! Environment variables:"
55 | echo "- Consolidation enabled: $MCP_CONSOLIDATION_ENABLED"
56 | echo "- mDNS enabled: $MCP_MDNS_ENABLED"
57 | echo "- HTTPS enabled: $MCP_HTTPS_ENABLED"
58 | echo "- Service name: $MCP_MDNS_SERVICE_NAME"
59 | echo "- API Key generated: [SET]"
60 | echo ""
61 | echo "Starting MCP Memory Service HTTP server..."
62 | 
63 | # Activate virtual environment and start the server (assumes ./venv exists and the script is run from the repo root)
64 | source venv/bin/activate && python scripts/run_http_server.py
```
--------------------------------------------------------------------------------
/scripts/server/memory_offline.py:
--------------------------------------------------------------------------------
```python
1 | #!/usr/bin/env python3
2 | """
3 | Memory service launcher with forced offline mode.
4 | This script sets offline mode BEFORE importing anything else.
5 | """
6 |
7 | import os
8 | import platform
9 | import sys
10 |
11 | def setup_offline_mode():
12 | """Setup offline mode environment variables BEFORE any imports."""
13 | print("Setting up offline mode...", file=sys.stderr)
14 |
15 | # Force offline mode
16 | os.environ['HF_HUB_OFFLINE'] = '1'
17 | os.environ['TRANSFORMERS_OFFLINE'] = '1'
18 |
19 | # Configure cache paths for Windows
20 | username = os.environ.get('USERNAME', os.environ.get('USER', ''))
21 | if platform.system() == "Windows" and username:
22 | hf_home = f"C:\\Users\\{username}\\.cache\\huggingface"
23 | transformers_cache = f"C:\\Users\\{username}\\.cache\\huggingface\\transformers"
24 | sentence_transformers_home = f"C:\\Users\\{username}\\.cache\\torch\\sentence_transformers"
25 | else:
26 | hf_home = os.path.expanduser("~/.cache/huggingface")
27 | transformers_cache = os.path.expanduser("~/.cache/huggingface/transformers")
28 | sentence_transformers_home = os.path.expanduser("~/.cache/torch/sentence_transformers")
29 |
30 | # Set cache paths
31 | os.environ['HF_HOME'] = hf_home
32 | os.environ['TRANSFORMERS_CACHE'] = transformers_cache
33 | os.environ['SENTENCE_TRANSFORMERS_HOME'] = sentence_transformers_home
34 |
35 | print(f"HF_HUB_OFFLINE: {os.environ.get('HF_HUB_OFFLINE')}", file=sys.stderr)
36 | print(f"HF_HOME: {os.environ.get('HF_HOME')}", file=sys.stderr)
37 |
38 | # Add src to Python path
39 | src_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'src')
40 | if src_path not in sys.path:
41 | sys.path.insert(0, src_path)
42 |
43 | if __name__ == "__main__":
44 | # Setup offline mode FIRST
45 | setup_offline_mode()
46 |
47 | # Now import and run the memory server
48 | print("Starting MCP Memory Service in offline mode...", file=sys.stderr)
49 | from mcp_memory_service.server import main
50 | main()
```
--------------------------------------------------------------------------------
/scripts/sync/litestream/staging_db_init.sql:
--------------------------------------------------------------------------------
```sql
1 | -- Staging Database Schema for Offline Memory Changes
2 | -- This database stores local changes when remote server is unavailable
3 | 
4 | -- Staged memories that need to be synchronized
5 | CREATE TABLE IF NOT EXISTS staged_memories (
6 |     id TEXT PRIMARY KEY,
7 |     content TEXT NOT NULL,
8 |     content_hash TEXT NOT NULL,
9 |     tags TEXT,  -- JSON array as string
10 |     metadata TEXT,  -- JSON metadata as string
11 |     memory_type TEXT DEFAULT 'note',
12 |     operation TEXT NOT NULL CHECK (operation IN ('INSERT', 'UPDATE', 'DELETE')),
13 |     staged_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
14 |     original_created_at TIMESTAMP,
15 |     source_machine TEXT,
16 |     conflict_status TEXT DEFAULT 'none' CHECK (conflict_status IN ('none', 'detected', 'resolved'))
17 | );
18 | 
19 | -- Sync status tracking
20 | CREATE TABLE IF NOT EXISTS sync_status (
21 |     key TEXT PRIMARY KEY,
22 |     value TEXT,
23 |     updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
24 | );
25 | 
26 | -- Index for performance
27 | CREATE INDEX IF NOT EXISTS idx_staged_memories_hash ON staged_memories(content_hash);
28 | CREATE INDEX IF NOT EXISTS idx_staged_memories_staged_at ON staged_memories(staged_at);
29 | CREATE INDEX IF NOT EXISTS idx_staged_memories_operation ON staged_memories(operation);
30 | 
31 | -- Initialize sync status. INSERT OR IGNORE (not OR REPLACE) so that re-running
32 | -- this otherwise idempotent schema script does not wipe existing sync
33 | -- timestamps or the trigger-maintained total_staged_changes counter.
34 | INSERT OR IGNORE INTO sync_status (key, value) VALUES
35 |     ('last_remote_sync', ''),
36 |     ('last_local_sync', ''),
37 |     ('staging_version', '1.0'),
38 |     ('total_staged_changes', '0');
39 | 
40 | -- Triggers to maintain staged changes count
41 | CREATE TRIGGER IF NOT EXISTS update_staged_count_insert
42 | AFTER INSERT ON staged_memories
43 | BEGIN
44 |     UPDATE sync_status
45 |     SET value = CAST((CAST(value AS INTEGER) + 1) AS TEXT),
46 |         updated_at = CURRENT_TIMESTAMP
47 |     WHERE key = 'total_staged_changes';
48 | END;
49 | 
50 | CREATE TRIGGER IF NOT EXISTS update_staged_count_delete
51 | AFTER DELETE ON staged_memories
52 | BEGIN
53 |     UPDATE sync_status
54 |     SET value = CAST((CAST(value AS INTEGER) - 1) AS TEXT),
55 |         updated_at = CURRENT_TIMESTAMP
56 |     WHERE key = 'total_staged_changes';
57 | END;
```
--------------------------------------------------------------------------------
/.github/workflows/claude.yml:
--------------------------------------------------------------------------------
```yaml
1 | name: Claude Code
2 |
3 | on:
4 | issue_comment:
5 | types: [created]
6 | pull_request_review_comment:
7 | types: [created]
8 | issues:
9 | types: [opened, assigned]
10 | pull_request_review:
11 | types: [submitted]
12 |
13 | jobs:
14 | claude:
15 | if: |
16 | (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
17 | (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
18 | (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
19 | (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
20 | runs-on: ubuntu-latest
21 | permissions:
22 | contents: read
23 | pull-requests: read
24 | issues: read
25 | id-token: write
26 | actions: read # Required for Claude to read CI results on PRs
27 | steps:
28 | - name: Checkout repository
29 | uses: actions/checkout@v4
30 | with:
31 | fetch-depth: 1
32 |
33 | - name: Run Claude Code
34 | id: claude
35 | uses: anthropics/claude-code-action@v1
36 | with:
37 | claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
38 |
39 | # This is an optional setting that allows Claude to read CI results on PRs
40 | additional_permissions: |
41 | actions: read
42 |
43 | # Optional: Give a custom prompt to Claude. If this is not specified, Claude will perform the instructions specified in the comment that tagged it.
44 | # prompt: 'Update the pull request description to include a summary of changes.'
45 |
46 | # Optional: Add claude_args to customize behavior and configuration
47 | # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
48 | # or https://docs.claude.com/en/docs/claude-code/sdk#command-line for available options
49 | # claude_args: '--model claude-opus-4-1-20250805 --allowed-tools Bash(gh pr:*)'
50 |
51 |
```
--------------------------------------------------------------------------------
/docs/guides/scripts.md:
--------------------------------------------------------------------------------
```markdown
1 | # Scripts Documentation
2 |
3 | This document provides an overview of the available scripts in the `scripts/` directory and their purposes.
4 |
5 | ## Essential Scripts
6 |
7 | ### Server Management
8 | - `run_memory_server.py`: Main script to start the memory service server
9 | ```bash
10 | python scripts/run_memory_server.py
11 | ```
12 |
13 | ### Environment Verification
14 | - `verify_environment.py`: Verifies the installation environment and dependencies
15 | ```bash
16 | python scripts/verify_environment.py
17 | ```
18 |
19 | ### Installation Testing
20 | - `test_installation.py`: Tests the installation and basic functionality
21 | ```bash
22 | python scripts/test_installation.py
23 | ```
24 |
25 | ### Memory Management
26 | - `validate_memories.py`: Validates the integrity of stored memories
27 | ```bash
28 | python scripts/validate_memories.py
29 | ```
30 | - `repair_memories.py`: Repairs corrupted or invalid memories
31 | ```bash
32 | python scripts/repair_memories.py
33 | ```
34 | - `list-collections.py`: Lists all available memory collections
35 | ```bash
36 | python scripts/list-collections.py
37 | ```
38 |
39 | ## Migration Scripts
40 | - `mcp-migration.py`: Handles migration of MCP-related data
41 | ```bash
42 | python scripts/mcp-migration.py
43 | ```
44 | - `memory-migration.py`: Handles migration of memory data
45 | ```bash
46 | python scripts/memory-migration.py
47 | ```
48 |
49 | ## Troubleshooting Scripts
50 | - `verify_pytorch_windows.py`: Verifies PyTorch installation on Windows
51 | ```bash
52 | python scripts/verify_pytorch_windows.py
53 | ```
54 | - `verify_torch.py`: General PyTorch verification
55 | ```bash
56 | python scripts/verify_torch.py
57 | ```
58 |
59 | ## Usage Notes
60 | - Most scripts can be run directly with Python
61 | - Some scripts may require specific environment variables to be set
62 | - Always run verification scripts after installation or major updates
63 | - Use migration scripts with caution and ensure backups are available
64 |
65 | ## Script Dependencies
66 | - Python 3.10+
67 | - Required packages listed in `requirements.txt`
68 | - Some scripts may require additional dependencies listed in `requirements-migration.txt`
```
--------------------------------------------------------------------------------
/archive/setup-development/test_service.sh:
--------------------------------------------------------------------------------
```bash
1 | #!/bin/bash
2 | 
3 | # Test script to debug service startup issues
4 | echo "=== MCP Memory Service Debug Test ==="
5 | 
6 | # Set working directory (hardcoded to one developer's machine — debug-only script)
7 | cd /home/hkr/repositories/mcp-memory-service
8 | 
9 | # Set environment variables (same as service)
10 | export PATH=/home/hkr/repositories/mcp-memory-service/venv/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
11 | export PYTHONPATH=/home/hkr/repositories/mcp-memory-service/src
12 | export MCP_CONSOLIDATION_ENABLED=true
13 | export MCP_MDNS_ENABLED=true
14 | export MCP_HTTPS_ENABLED=true
15 | export MCP_MDNS_SERVICE_NAME="MCP Memory"
16 | export MCP_HTTP_ENABLED=true
17 | export MCP_HTTP_HOST=0.0.0.0
18 | export MCP_HTTP_PORT=8000
19 | export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
20 | export MCP_API_KEY=mcp-0b1ccbde2197a08dcb12d41af4044be6  # NOTE(review): hardcoded secret committed to the repo — rotate it and load from the environment instead
21 | 
22 | echo "Working directory: $(pwd)"
23 | echo "Python executable: $(which python)"
24 | echo "Virtual env Python: /home/hkr/repositories/mcp-memory-service/venv/bin/python"
25 | 
26 | # Check if venv Python exists
27 | if [ -f "/home/hkr/repositories/mcp-memory-service/venv/bin/python" ]; then
28 |     echo "✅ Virtual environment Python exists"
29 | else
30 |     echo "❌ Virtual environment Python missing!"
31 |     exit 1
32 | fi
33 | 
34 | # Check if run_http_server.py exists
35 | if [ -f "/home/hkr/repositories/mcp-memory-service/scripts/run_http_server.py" ]; then
36 |     echo "✅ Server script exists"
37 | else
38 |     echo "❌ Server script missing!"
39 |     exit 1
40 | fi
41 | 
42 | # Test Python import (inline Python exits non-zero if the web app cannot be imported)
43 | echo "=== Testing Python imports ==="
44 | /home/hkr/repositories/mcp-memory-service/venv/bin/python -c "
45 | import sys
46 | sys.path.insert(0, '/home/hkr/repositories/mcp-memory-service/src')
47 | try:
48 |     from mcp_memory_service.web.app import app
49 |     print('✅ Web app import successful')
50 | except Exception as e:
51 |     print(f'❌ Web app import failed: {e}')
52 |     sys.exit(1)
53 | "
54 | 
55 | echo "=== Testing server startup (5 seconds) ==="
56 | timeout 5s /home/hkr/repositories/mcp-memory-service/venv/bin/python /home/hkr/repositories/mcp-memory-service/scripts/run_http_server.py || echo "Server test completed"
57 | 
58 | echo "=== Debug test finished ==="
--------------------------------------------------------------------------------
/src/mcp_memory_service/web/dependencies.py:
--------------------------------------------------------------------------------
```python
1 | # Copyright 2024 Heinrich Krupp
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | 
15 | """
16 | FastAPI dependencies for the HTTP interface.
17 | """
18 | 
19 | import logging
20 | from typing import Optional
21 | from fastapi import HTTPException, Depends
22 | 
23 | from ..storage.base import MemoryStorage
24 | from ..services.memory_service import MemoryService
25 | 
26 | logger = logging.getLogger(__name__)
27 | 
28 | # Global storage instance: populated once at startup via set_storage(), read per-request by get_storage()
29 | _storage: Optional[MemoryStorage] = None
30 | 
31 | 
32 | def set_storage(storage: MemoryStorage) -> None:
33 |     """Set the global storage instance."""
34 |     global _storage
35 |     _storage = storage
36 | 
37 | 
38 | def get_storage() -> MemoryStorage:
39 |     """Get the global storage instance."""
40 |     if _storage is None:
41 |         raise HTTPException(status_code=503, detail="Storage not initialized")  # 503 until set_storage() has run during startup
42 |     return _storage
43 | 
44 | 
45 | def get_memory_service(storage: MemoryStorage = Depends(get_storage)) -> MemoryService:
46 |     """Get a MemoryService instance with the configured storage backend."""
47 |     return MemoryService(storage)  # fresh service per request, wrapping the shared storage instance
48 | 
49 | 
50 | 
51 | 
52 | async def create_storage_backend() -> MemoryStorage:
53 |     """
54 |     Create and initialize storage backend for web interface based on configuration.
55 | 
56 |     Returns:
57 |         Initialized storage backend
58 |     """
59 |     from ..config import DATABASE_PATH
60 |     from ..storage.factory import create_storage_instance
61 | 
62 |     logger.info("Creating storage backend for web interface...")
63 | 
64 |     # Use shared factory with DATABASE_PATH for web interface
65 |     return await create_storage_instance(DATABASE_PATH, server_type="http")
```
--------------------------------------------------------------------------------
/.github/workflows/claude-code-review.yml:
--------------------------------------------------------------------------------
```yaml
1 | name: Claude Code Review
2 |
3 | on:
4 | pull_request:
5 | types: [opened, synchronize]
6 | # Optional: Only run on specific file changes
7 | # paths:
8 | # - "src/**/*.ts"
9 | # - "src/**/*.tsx"
10 | # - "src/**/*.js"
11 | # - "src/**/*.jsx"
12 |
13 | jobs:
14 | claude-review:
15 | # Optional: Filter by PR author
16 | # if: |
17 | # github.event.pull_request.user.login == 'external-contributor' ||
18 | # github.event.pull_request.user.login == 'new-developer' ||
19 | # github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR'
20 |
21 | runs-on: ubuntu-latest
22 | permissions:
23 | contents: read
24 | pull-requests: read
25 | issues: read
26 | id-token: write
27 |
28 | steps:
29 | - name: Checkout repository
30 | uses: actions/checkout@v4
31 | with:
32 | fetch-depth: 1
33 |
34 | - name: Run Claude Code Review
35 | id: claude-review
36 | uses: anthropics/claude-code-action@v1
37 | with:
38 | claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
39 | prompt: |
40 | REPO: ${{ github.repository }}
41 | PR NUMBER: ${{ github.event.pull_request.number }}
42 |
43 | Please review this pull request and provide feedback on:
44 | - Code quality and best practices
45 | - Potential bugs or issues
46 | - Performance considerations
47 | - Security concerns
48 | - Test coverage
49 |
50 | Use the repository's CLAUDE.md for guidance on style and conventions. Be constructive and helpful in your feedback.
51 |
52 | Use `gh pr comment` with your Bash tool to leave your review as a comment on the PR.
53 |
54 | # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
55 | # or https://docs.claude.com/en/docs/claude-code/sdk#command-line for available options
56 | claude_args: '--allowed-tools "Bash(gh issue view:*),Bash(gh search:*),Bash(gh issue list:*),Bash(gh pr comment:*),Bash(gh pr diff:*),Bash(gh pr view:*),Bash(gh pr list:*)"'
57 |
58 |
```
--------------------------------------------------------------------------------
/claude-hooks/test-mcp-hook.js:
--------------------------------------------------------------------------------
```javascript
1 | #!/usr/bin/env node
2 |
3 | /**
4 | * Test MCP-based Memory Hook
5 | * Tests the updated session-start hook with MCP protocol
6 | */
7 |
8 | const { onSessionStart } = require('./core/session-start.js');
9 |
10 | // Test configuration
11 | const testContext = {
12 | workingDirectory: process.cwd(),
13 | sessionId: 'mcp-test-session',
14 | trigger: 'session-start',
15 | userMessage: 'test memory hook with cloudflare backend',
16 | injectSystemMessage: async (message) => {
17 | console.log('\n' + '='.repeat(60));
18 | console.log('🧠 MCP MEMORY CONTEXT INJECTION TEST');
19 | console.log('='.repeat(60));
20 | console.log(message);
21 | console.log('='.repeat(60) + '\n');
22 | return true;
23 | }
24 | };
25 |
26 | async function testMCPHook() {
27 | console.log('🔧 Testing MCP Memory Hook...');
28 | console.log(`📂 Working Directory: ${process.cwd()}`);
29 | console.log(`🔧 Testing with Cloudflare backend configuration\n`);
30 |
31 | try {
32 | await testContext.onSessionStart(testContext);
33 | console.log('✅ MCP Hook test completed successfully');
34 | } catch (error) {
35 | console.error('❌ MCP Hook test failed:', error.message);
36 |
37 | // Don't show full stack trace in test mode
38 | if (process.env.DEBUG) {
39 | console.error(error.stack);
40 | }
41 |
42 | // Test completed - hook should fail gracefully
43 | console.log('✅ Hook failed gracefully as expected when MCP server unavailable');
44 | }
45 | }
46 |
47 | // Handle the onSessionStart function correctly
48 | const sessionStartModule = require('./core/session-start.js');
49 | if (sessionStartModule.handler) {
50 | testContext.onSessionStart = sessionStartModule.handler;
51 | } else if (typeof sessionStartModule === 'function') {
52 | testContext.onSessionStart = sessionStartModule;
53 | } else {
54 | // Try direct export
55 | testContext.onSessionStart = sessionStartModule.onSessionStart || sessionStartModule.default;
56 | }
57 |
58 | if (!testContext.onSessionStart) {
59 | console.error('❌ Could not find onSessionStart handler');
60 | process.exit(1);
61 | }
62 |
63 | // Run the test
64 | testMCPHook();
```
--------------------------------------------------------------------------------
/scripts/installation/install_uv.py:
--------------------------------------------------------------------------------
```python
1 | #!/usr/bin/env python3
2 | # Copyright 2024 Heinrich Krupp
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | """
17 | Script to install UV package manager
18 | """
19 | import os
20 | import sys
21 | import subprocess
22 | import platform
23 |
24 | def main():
25 | print("Installing UV package manager...")
26 |
27 | try:
28 | # Install UV using pip
29 | subprocess.check_call([
30 | sys.executable, '-m', 'pip', 'install', 'uv'
31 | ])
32 |
33 | print("UV installed successfully!")
34 | print("You can now use UV for faster dependency management:")
35 | print(" uv pip install -r requirements.txt")
36 |
37 | # Create shortcut script
38 | system = platform.system().lower()
39 | if system == "windows":
40 | # Create .bat file for Windows
41 | with open("uv-run.bat", "w") as f:
42 | f.write(f"@echo off\n")
43 | f.write(f"python -m uv run memory %*\n")
44 | print("Created uv-run.bat shortcut")
45 | else:
46 | # Create shell script for Unix-like systems
47 | with open("uv-run.sh", "w") as f:
48 | f.write("#!/bin/sh\n")
49 | f.write("python -m uv run memory \"$@\"\n")
50 |
51 | # Make it executable
52 | try:
53 | os.chmod("uv-run.sh", 0o755)
54 | except:
55 | pass
56 | print("Created uv-run.sh shortcut")
57 |
58 | except subprocess.SubprocessError as e:
59 | print(f"Error installing UV: {e}")
60 | sys.exit(1)
61 |
62 | if __name__ == "__main__":
63 | main()
64 |
```
--------------------------------------------------------------------------------
/archive/litestream-configs-v6.3.0/install_service.sh:
--------------------------------------------------------------------------------
```bash
1 | #!/bin/bash
2 | 
3 | # Install MCP Memory Service as a systemd service
4 | echo "Installing MCP Memory Service as a systemd service..."
5 | 
6 | # Check if running as regular user (not root); privileged steps below use sudo explicitly
7 | if [ "$EUID" -eq 0 ]; then
8 |     echo "Error: Do not run this script as root. Run as your regular user."
9 |     exit 1
10 | fi
11 | 
12 | # Get current user and working directory
13 | CURRENT_USER=$(whoami)
14 | CURRENT_DIR=$(pwd)
15 | SERVICE_FILE="deployment/mcp-memory.service"
16 | 
17 | echo "User: $CURRENT_USER"
18 | echo "Working directory: $CURRENT_DIR"
19 | 
20 | # Check if service file exists
21 | if [ ! -f "$SERVICE_FILE" ]; then
22 |     echo "Error: Service file $SERVICE_FILE not found!"
23 |     exit 1
24 | fi
25 | 
26 | # Generate a unique API key
27 | API_KEY="mcp-$(openssl rand -hex 16)"
28 | echo "Generated API key: $API_KEY"
29 | 
30 | # Update the service file with the actual API key
31 | sed -i "s/Environment=MCP_API_KEY=.*/Environment=MCP_API_KEY=$API_KEY/" "$SERVICE_FILE"  # NOTE(review): rewrites the tracked service file in-place, so the secret lands in the working tree — consider a systemd drop-in or EnvironmentFile instead
32 | 
33 | # Copy service file to systemd directory
34 | echo "Installing systemd service file..."
35 | sudo cp "$SERVICE_FILE" /etc/systemd/system/
36 | 
37 | # Set proper permissions
38 | sudo chmod 644 /etc/systemd/system/mcp-memory.service
39 | 
40 | # Reload systemd daemon
41 | echo "Reloading systemd daemon..."
42 | sudo systemctl daemon-reload
43 | 
44 | # Enable the service to start on boot
45 | echo "Enabling service for startup..."
46 | sudo systemctl enable mcp-memory.service
47 | 
48 | echo ""
49 | echo "✅ MCP Memory Service installed successfully!"
50 | echo ""
51 | echo "Commands to manage the service:"
52 | echo "  Start:   sudo systemctl start mcp-memory"
53 | echo "  Stop:    sudo systemctl stop mcp-memory"
54 | echo "  Status:  sudo systemctl status mcp-memory"
55 | echo "  Logs:    sudo journalctl -u mcp-memory -f"
56 | echo "  Disable: sudo systemctl disable mcp-memory"
57 | echo ""
58 | echo "The service will now start automatically on system boot."
59 | echo "API Key: $API_KEY"
60 | echo ""
61 | echo "Service will be available at:"
62 | echo "  Dashboard: https://localhost:8000"
63 | echo "  API Docs:  https://localhost:8000/api/docs"
64 | echo "  Health:    https://localhost:8000/api/health"
65 | echo ""
66 | echo "To start the service now, run:"
67 | echo "  sudo systemctl start mcp-memory"
--------------------------------------------------------------------------------
/scripts/utils/query_memories.py:
--------------------------------------------------------------------------------
```python
1 | #!/usr/bin/env python3
2 | """Query memories from the SQLite database"""
3 |
4 | import sqlite3
5 | import json
6 | import sys
7 |
8 | def query_memories(tag_filter=None, query_text=None, limit=5):
9 | """Query memories from the database"""
10 | conn = sqlite3.connect('/home/hkr/.local/share/mcp-memory/sqlite_vec.db')
11 | cursor = conn.cursor()
12 |
13 | if tag_filter:
14 | sql = "SELECT content, tags FROM memories WHERE tags LIKE ? LIMIT ?"
15 | cursor.execute(sql, (f'%{tag_filter}%', limit))
16 | elif query_text:
17 | sql = "SELECT content, tags FROM memories WHERE content LIKE ? LIMIT ?"
18 | cursor.execute(sql, (f'%{query_text}%', limit))
19 | else:
20 | sql = "SELECT content, tags FROM memories ORDER BY created_at DESC LIMIT ?"
21 | cursor.execute(sql, (limit,))
22 |
23 | results = []
24 | for row in cursor.fetchall():
25 | content = row[0]
26 | try:
27 | tags = json.loads(row[1]) if row[1] else []
28 | except (json.JSONDecodeError, TypeError):
29 | # Tags might be stored differently
30 | tags = row[1].split(',') if row[1] and isinstance(row[1], str) else []
31 | results.append({
32 | 'content': content,
33 | 'tags': tags
34 | })
35 |
36 | conn.close()
37 | return results
38 |
39 | if __name__ == "__main__":
40 | # Get memories with specific tags
41 | print("=== Searching for README sections ===\n")
42 |
43 | # Search for readme content
44 | memories = query_memories(tag_filter="readme", limit=10)
45 |
46 | for i, memory in enumerate(memories, 1):
47 | print(f"Memory {i}:")
48 | print(f"Content (first 500 chars):\n{memory['content'][:500]}")
49 | print(f"Tags: {', '.join(memory['tags'])}")
50 | print("-" * 80)
51 | print()
52 |
53 | # Search for specific content
54 | print("\n=== Searching for Installation content ===\n")
55 | memories = query_memories(query_text="installation", limit=5)
56 |
57 | for i, memory in enumerate(memories, 1):
58 | print(f"Memory {i}:")
59 | print(f"Content (first 500 chars):\n{memory['content'][:500]}")
60 | print(f"Tags: {', '.join(memory['tags'])}")
61 | print("-" * 80)
62 | print()
```
--------------------------------------------------------------------------------
/archive/deployment/deploy_http_with_mcp.sh:
--------------------------------------------------------------------------------
```bash
1 | #!/bin/bash
2 |
3 | # Deploy HTTP Server with MCP endpoints (hybrid approach)
4 | echo "🔄 Switching to HTTP server with MCP protocol support..."
5 |
6 | # Create updated service file for hybrid approach
7 | cat > /tmp/mcp-memory-hybrid.service << 'EOF'
8 | [Unit]
9 | Description=MCP Memory Service HTTP+MCP Hybrid v4.0.0-alpha.1
10 | Documentation=https://github.com/doobidoo/mcp-memory-service
11 | After=network.target network-online.target
12 | Wants=network-online.target
13 |
14 | [Service]
15 | Type=simple
16 | User=hkr
17 | Group=hkr
18 | WorkingDirectory=/home/hkr/repositories/mcp-memory-service
19 | Environment=PATH=/home/hkr/repositories/mcp-memory-service/venv/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
20 | Environment=PYTHONPATH=/home/hkr/repositories/mcp-memory-service/src
21 | Environment=MCP_CONSOLIDATION_ENABLED=true
22 | Environment=MCP_MDNS_ENABLED=true
23 | Environment=MCP_HTTPS_ENABLED=false
24 | Environment=MCP_MDNS_SERVICE_NAME="MCP Memory Service - Hybrid"
25 | Environment=MCP_HTTP_ENABLED=true
26 | Environment=MCP_HTTP_HOST=0.0.0.0
27 | Environment=MCP_HTTP_PORT=8000
28 | Environment=MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
29 | Environment=MCP_API_KEY=test-key-123
30 | ExecStart=/home/hkr/repositories/mcp-memory-service/venv/bin/python /home/hkr/repositories/mcp-memory-service/scripts/run_http_server.py
31 | Restart=always
32 | RestartSec=10
33 | StandardOutput=journal
34 | StandardError=journal
35 | SyslogIdentifier=mcp-memory-service
36 |
37 | [Install]
38 | WantedBy=multi-user.target
39 | EOF
40 |
41 | # Install the hybrid service configuration
42 | echo "📝 Installing hybrid HTTP+MCP service configuration..."
43 | sudo cp /tmp/mcp-memory-hybrid.service /etc/systemd/system/mcp-memory.service
44 |
45 | # Reload and start
46 | echo "🔄 Reloading systemd and starting hybrid service..."
47 | sudo systemctl daemon-reload
48 | sudo systemctl start mcp-memory
49 |
50 | # Check status
51 | echo "🔍 Checking service status..."
52 | sudo systemctl status mcp-memory --no-pager
53 |
54 | echo ""
55 | echo "✅ HTTP server with MCP protocol support is now running!"
56 | echo ""
57 | echo "🌐 Available Services:"
58 | echo " - HTTP API: http://localhost:8000/api/*"
59 | echo " - Dashboard: http://localhost:8000/"
60 | echo " - Health: http://localhost:8000/api/health"
61 | echo ""
62 | echo "🔧 Next: Add MCP protocol endpoints to the HTTP server"
```
--------------------------------------------------------------------------------
/tools/docker/docker-compose.http.yml:
--------------------------------------------------------------------------------
```yaml
1 | version: '3.8'
2 |
3 | # Docker Compose configuration for HTTP/API mode
4 | # Usage: docker-compose -f docker-compose.http.yml up -d
5 |
6 | services:
7 | mcp-memory-service:
8 | build:
9 | context: ../..
10 | dockerfile: tools/docker/Dockerfile
11 | ports:
12 | - "${HTTP_PORT:-8000}:8000" # Map to different port if needed
13 | volumes:
14 | # Single data directory for all storage
15 | - ./data:/app/data
16 |
17 | # Model cache (prevents re-downloading models on each restart)
18 | # Uncomment the following line to persist Hugging Face models
19 | # - ${HOME}/.cache/huggingface:/root/.cache/huggingface
20 |
21 | # Optional: mount local config
22 | # - ./config:/app/config:ro
23 | environment:
24 | # Mode selection
25 | - MCP_MODE=http
26 |
27 | # Storage configuration
28 | - MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
29 | - MCP_MEMORY_SQLITE_PATH=/app/data/sqlite_vec.db
30 | - MCP_MEMORY_BACKUPS_PATH=/app/data/backups
31 |
32 | # HTTP configuration
33 | - MCP_HTTP_PORT=8000
34 | - MCP_HTTP_HOST=0.0.0.0
35 | - MCP_API_KEY=${MCP_API_KEY:-your-secure-api-key-here}
36 |
37 | # Optional: HTTPS configuration
38 | # - MCP_HTTPS_ENABLED=true
39 | # - MCP_HTTPS_PORT=8443
40 | # - MCP_SSL_CERT_FILE=/app/certs/cert.pem
41 | # - MCP_SSL_KEY_FILE=/app/certs/key.pem
42 |
43 | # Performance tuning
44 | - LOG_LEVEL=${LOG_LEVEL:-INFO}
45 | - MAX_RESULTS_PER_QUERY=10
46 | - SIMILARITY_THRESHOLD=0.7
47 |
48 | # Python configuration
49 | - PYTHONUNBUFFERED=1
50 | - PYTHONPATH=/app/src
51 |
52 | # Offline mode (uncomment if models are pre-cached and network is restricted)
53 | # - HF_HUB_OFFLINE=1
54 | # - TRANSFORMERS_OFFLINE=1
55 |
56 | # Use the unified entrypoint
57 | entrypoint: ["/usr/local/bin/docker-entrypoint-unified.sh"]
58 |
59 | restart: unless-stopped
60 |
61 | healthcheck:
62 | test: ["CMD", "curl", "-f", "http://localhost:8000/api/health"]
63 | interval: 30s
64 | timeout: 10s
65 | retries: 3
66 | start_period: 40s
67 |
68 | # Resource limits (optional, adjust as needed)
69 | deploy:
70 | resources:
71 | limits:
72 | cpus: '2.0'
73 | memory: 2G
74 | reservations:
75 | cpus: '0.5'
76 | memory: 512M
```
--------------------------------------------------------------------------------
/scripts/testing/test-hook.js:
--------------------------------------------------------------------------------
```javascript
1 | #!/usr/bin/env node
2 |
3 | /**
4 | * Test script for the enhanced session-start hook
5 | */
6 |
7 | const path = require('path');
8 |
9 | // Import the enhanced hook
10 | const sessionStartHook = require('../../claude-hooks/core/session-start.js');
11 |
12 | async function testEnhancedHook() {
13 | console.log('🧪 Testing Enhanced Session Start Hook\n');
14 |
15 | // Mock context for testing
16 | const mockContext = {
17 | workingDirectory: process.cwd(),
18 | sessionId: 'test-session-' + Date.now(),
19 | trigger: 'session-start',
20 | userMessage: 'Help me understand the memory service improvements',
21 | injectSystemMessage: async (message) => {
22 | console.log('\n🎯 INJECTED CONTEXT:');
23 | console.log('═'.repeat(60));
24 | console.log(message);
25 | console.log('═'.repeat(60));
26 | return true;
27 | }
28 | };
29 |
30 | console.log(`📂 Testing in directory: ${mockContext.workingDirectory}`);
31 | console.log(`🔍 Test query: "${mockContext.userMessage}"`);
32 | console.log(`⚙️ Trigger: ${mockContext.trigger}\n`);
33 |
34 | try {
35 | // Execute the enhanced hook
36 | await sessionStartHook.handler(mockContext);
37 |
38 | console.log('\n✅ Hook execution completed successfully!');
39 | console.log('\n📊 Expected improvements:');
40 | console.log(' • Multi-phase memory retrieval (recent + important + fallback)');
41 | console.log(' • Enhanced recency indicators (🕒 today, 📅 this week)');
42 | console.log(' • Better semantic queries with git context');
43 | console.log(' • Improved categorization with "Recent Work" section');
44 | console.log(' • Configurable memory ratios and time windows');
45 |
46 | } catch (error) {
47 | console.error('❌ Hook execution failed:', error.message);
48 | console.error('Stack trace:', error.stack);
49 | }
50 | }
51 |
52 | // Run the test
53 | if (require.main === module) {
54 | testEnhancedHook()
55 | .then(() => {
56 | console.log('\n🎉 Test completed');
57 | process.exit(0);
58 | })
59 | .catch(error => {
60 | console.error('\n💥 Test failed:', error.message);
61 | process.exit(1);
62 | });
63 | }
64 |
65 | module.exports = { testEnhancedHook };
```
--------------------------------------------------------------------------------
/examples/start_https_example.sh:
--------------------------------------------------------------------------------
```bash
1 | #!/bin/bash
2 | # Example HTTPS startup script for MCP Memory Service
3 | # Copy and customize this file for your deployment
4 | #
5 | # This example shows how to configure the MCP Memory Service with custom SSL certificates.
6 | # For easy local development with trusted certificates, consider using mkcert:
7 | # https://github.com/FiloSottile/mkcert
8 |
9 | # Storage configuration
10 | export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
11 |
12 | # API authentication - CHANGE THIS TO A SECURE KEY!
13 | # Generate a secure key with: openssl rand -base64 32
14 | export MCP_API_KEY="your-secure-api-key-here"
15 |
16 | # HTTPS configuration with custom certificates
17 | export MCP_HTTPS_ENABLED=true
18 | export MCP_HTTPS_PORT=8443
19 |
20 | # SSL Certificate paths - UPDATE THESE PATHS TO YOUR CERTIFICATES
21 | #
22 | # For mkcert certificates (recommended for development):
23 | # 1. Install mkcert: https://github.com/FiloSottile/mkcert#installation
24 | # 2. Create local CA: mkcert -install
25 | # 3. Generate certificate: mkcert your-domain.local localhost 127.0.0.1
26 | # 4. Update paths below to point to generated certificate files
27 | #
28 | # Example paths:
29 | # export MCP_SSL_CERT_FILE="/path/to/your-domain.local+2.pem"
30 | # export MCP_SSL_KEY_FILE="/path/to/your-domain.local+2-key.pem"
31 | #
32 | # For production, use certificates from your certificate authority:
33 | export MCP_SSL_CERT_FILE="/path/to/your/certificate.pem"
34 | export MCP_SSL_KEY_FILE="/path/to/your/certificate-key.pem"
35 |
36 | # Optional: Disable HTTP if only HTTPS is needed
37 | export MCP_HTTP_ENABLED=false
38 | export MCP_HTTP_PORT=8080
39 |
40 | # mDNS service discovery
41 | export MCP_MDNS_ENABLED=true
42 | export MCP_MDNS_SERVICE_NAME="MCP Memory Service"
43 |
44 | # Optional: Additional configuration
45 | # export MCP_MEMORY_INCLUDE_HOSTNAME=true
46 | # export MCP_CONSOLIDATION_ENABLED=false
47 |
48 | echo "Starting MCP Memory Service with HTTPS on port $MCP_HTTPS_PORT"
49 | echo "Certificate: $MCP_SSL_CERT_FILE"
50 | echo "Private Key: $MCP_SSL_KEY_FILE"
51 |
52 | # Change to the project root (parent of this script's directory) and start server
53 | cd "$(dirname "$0")/.."
54 |
55 | # Check if virtual environment exists
56 | if [ ! -f ".venv/bin/python" ]; then
57 | echo "Error: Virtual environment not found at .venv/"
58 | echo "Please run: python -m venv .venv && source .venv/bin/activate && pip install -e ."
59 | exit 1
60 | fi
61 |
62 | # Start the server
63 | exec ./.venv/bin/python run_server.py
```
--------------------------------------------------------------------------------
/docs/document-ingestion.md:
--------------------------------------------------------------------------------
```markdown
1 | # Document Ingestion (v7.6.0+)
2 |
3 | Enhanced document parsing with optional semtools integration for superior quality extraction.
4 |
5 | ## Supported Formats
6 |
7 | | Format | Native Parser | With Semtools | Quality |
8 | |--------|--------------|---------------|---------|
9 | | PDF | PyPDF2/pdfplumber | LlamaParse | Excellent (OCR, tables) |
10 | | DOCX | Not supported | LlamaParse | Excellent |
11 | | PPTX | Not supported | LlamaParse | Excellent |
12 | | TXT/MD | Built-in | N/A | Perfect |
13 |
14 | ## Semtools Integration (Optional)
15 |
16 | Install [semtools](https://github.com/run-llama/semtools) for enhanced document parsing:
17 |
18 | ```bash
19 | # Install via npm (recommended)
20 | npm i -g @llamaindex/semtools
21 |
22 | # Or via cargo
23 | cargo install semtools
24 |
25 | # Optional: Configure LlamaParse API key for best quality
26 | export LLAMAPARSE_API_KEY="your-api-key"
27 | ```
28 |
29 | ## Configuration
30 |
31 | ```bash
32 | # Document chunking settings
33 | export MCP_DOCUMENT_CHUNK_SIZE=1000 # Characters per chunk
34 | export MCP_DOCUMENT_CHUNK_OVERLAP=200 # Overlap between chunks
35 |
36 | # LlamaParse API key (optional, improves quality)
37 | export LLAMAPARSE_API_KEY="llx-..."
38 | ```
39 |
40 | ## Usage Examples
41 |
42 | ```bash
43 | # Ingest a single document
44 | claude /memory-ingest document.pdf --tags documentation
45 |
46 | # Ingest directory
47 | claude /memory-ingest-dir ./docs --tags knowledge-base
48 |
49 | # Via Python
50 | from mcp_memory_service.ingestion import get_loader_for_file
51 |
52 | loader = get_loader_for_file(Path("document.pdf"))
53 | async for chunk in loader.extract_chunks(Path("document.pdf")):
54 | await store_memory(chunk.content, tags=["doc"])
55 | ```
56 |
57 | ## Features
58 |
59 | - **Automatic format detection** - Selects best loader for each file
60 | - **Intelligent chunking** - Respects paragraph/sentence boundaries
61 | - **Metadata enrichment** - Preserves file info, extraction method, page numbers
62 | - **Graceful fallback** - Uses native parsers if semtools unavailable
63 | - **Progress tracking** - Reports chunks processed during ingestion
64 |
65 | ## Performance Considerations
66 |
67 | - LlamaParse provides superior quality but requires API key and internet connection
68 | - Native parsers work offline but may have lower extraction quality for complex documents
69 | - Chunk size affects retrieval granularity vs context completeness
70 | - Larger overlap improves continuity but increases storage
71 |
```
--------------------------------------------------------------------------------
/scripts/sync/litestream/manual_sync.sh:
--------------------------------------------------------------------------------
```bash
1 | #!/bin/bash
2 | # Manual sync using HTTP downloads (alternative to Litestream restore)
3 |
4 | DB_PATH="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec.db"
5 | REMOTE_BASE="http://narrowbox.local:8080/mcp-memory"
6 | BACKUP_PATH="/Users/hkr/Library/Application Support/mcp-memory/sqlite_vec.db.backup"
7 | TEMP_DIR="/tmp/litestream_manual_$$"
8 |
9 | echo "$(date): Starting manual sync from remote master..."
10 |
11 | # Create temporary directory
12 | mkdir -p "$TEMP_DIR"
13 |
14 | # Get the latest generation ID
15 | GENERATION=$(curl -s "$REMOTE_BASE/generations/" | grep -o 'href="[^"]*/"' | sed 's/href="//;s/\/"//g' | head -1)
16 |
17 | if [ -z "$GENERATION" ]; then
18 | echo "$(date): ERROR: Could not determine generation ID"
19 | exit 1
20 | fi
21 |
22 | echo "$(date): Found generation: $GENERATION"
23 |
24 | # Get the latest snapshot
25 | SNAPSHOT_URL="$REMOTE_BASE/generations/$GENERATION/snapshots/"
26 | SNAPSHOT_FILE=$(curl -s "$SNAPSHOT_URL" | grep -o 'href="[^"]*\.snapshot\.lz4"' | sed 's/href="//;s/"//g' | tail -1)
27 |
28 | if [ -z "$SNAPSHOT_FILE" ]; then
29 | echo "$(date): ERROR: Could not find snapshot file"
30 | rm -rf "$TEMP_DIR"
31 | exit 1
32 | fi
33 |
34 | echo "$(date): Downloading snapshot: $SNAPSHOT_FILE"
35 |
36 | # Download and decompress snapshot
37 | curl -s "$SNAPSHOT_URL$SNAPSHOT_FILE" -o "$TEMP_DIR/snapshot.lz4"
38 |
39 | if command -v lz4 >/dev/null 2>&1; then
40 | # Use lz4 if available
41 | lz4 -d "$TEMP_DIR/snapshot.lz4" "$TEMP_DIR/database.db"
42 | else
43 | echo "$(date): ERROR: lz4 command not found. Please install: brew install lz4"
44 | rm -rf "$TEMP_DIR"
45 | exit 1
46 | fi
47 |
48 | # Backup current database
49 | if [ -f "$DB_PATH" ]; then
50 | cp "$DB_PATH" "$BACKUP_PATH"
51 | echo "$(date): Created backup at $BACKUP_PATH"
52 | fi
53 |
54 | # Replace with new database
55 | cp "$TEMP_DIR/database.db" "$DB_PATH"
56 |
57 | if [ $? -eq 0 ]; then
58 | echo "$(date): Successfully synced database from remote master"
59 | # Remove backup on success
60 | rm -f "$BACKUP_PATH"
61 |
62 | # Show database info
63 | echo "$(date): Database size: $(du -h "$DB_PATH" | cut -f1)"
64 | echo "$(date): Database modified: $(stat -f "%Sm" "$DB_PATH")"
65 | else
66 | echo "$(date): ERROR: Failed to replace database"
67 | # Restore backup on failure
68 | if [ -f "$BACKUP_PATH" ]; then
69 | mv "$BACKUP_PATH" "$DB_PATH"
70 | echo "$(date): Restored backup"
71 | fi
72 | rm -rf "$TEMP_DIR"
73 | exit 1
74 | fi
75 |
76 | # Cleanup
77 | rm -rf "$TEMP_DIR"
78 | echo "$(date): Manual sync completed successfully"
```
--------------------------------------------------------------------------------
/docs/archive/obsolete-workflows/load_memory_context.md:
--------------------------------------------------------------------------------
```markdown
1 | # Memory Context Loading Prompt
2 |
3 | Use this prompt at the start of Claude Code sessions on machines in your local network:
4 |
5 | ---
6 |
7 | ## Prompt for Claude Code
8 |
9 | ```
10 | Load MCP Memory Service context for this project. Before we begin working, please retrieve and incorporate all stored knowledge about this codebase from my local memory service:
11 |
12 | **Memory Service Endpoint**: https://your-server-ip:8443/mcp
13 | **Authorization**: Bearer your-api-key
14 |
15 | Execute this command to load context:
16 | ```bash
17 | curl -k -s -X POST https://your-server-ip:8443/mcp \
18 | -H "Content-Type: application/json" \
19 | -H "Authorization: Bearer your-api-key" \
20 | -d '{"jsonrpc": "2.0", "id": 1, "method": "tools/call", "params": {"name": "retrieve_memory", "arguments": {"query": "claude-code-reference distributable-reference", "limit": 20}}}' \
21 | | jq -r '.result.content[0].text'
22 | ```
23 |
24 | This memory contains:
25 | - Complete project structure and architecture
26 | - All key development, testing, and deployment commands
27 | - Environment variables and configuration patterns
28 | - Recent changes including v5.0.2 ONNX implementation details
29 | - Issue management approaches and current project status
30 | - Testing practices and platform-specific optimizations
31 | - Remote service deployment and health monitoring
32 |
33 | After loading this context, you'll have comprehensive knowledge of the MCP Memory Service project equivalent to extensive codebase exploration, which will significantly reduce token usage and improve response accuracy.
34 |
35 | Please confirm successful context loading and summarize the key project information you've retrieved.
36 | ```
37 |
38 | ---
39 |
40 | ## Alternative Short Prompt
41 |
42 | For quick context loading:
43 |
44 | ```
45 | Load project context from memory service: curl -k -s -X POST https://your-server-ip:8443/mcp -H "Content-Type: application/json" -H "Authorization: Bearer your-api-key" -d '{"jsonrpc": "2.0", "id": 1, "method": "tools/call", "params": {"name": "retrieve_memory", "arguments": {"query": "claude-code-reference", "limit": 20}}}' | jq -r '.result.content[0].text'
46 |
47 | Incorporate this MCP Memory Service project knowledge before proceeding.
48 | ```
49 |
50 | ---
51 |
52 | ## Network Distribution
53 |
54 | 1. **Copy this prompt file** to other machines in your network
55 | 2. **Update IP address** if memory service moves
56 | 3. **Test connectivity** with: `curl -k -s https://your-server-ip:8443/api/health`
57 | 4. **Use at session start** for instant project context
58 |
59 | This eliminates repetitive codebase discovery across all your development machines.
```
--------------------------------------------------------------------------------
/scripts/service/service_control.sh:
--------------------------------------------------------------------------------
```bash
1 | #!/bin/bash
2 |
3 | # MCP Memory Service Control Script
4 | SERVICE_NAME="mcp-memory"
5 |
6 | case "$1" in
7 | start)
8 | echo "Starting MCP Memory Service..."
9 | sudo systemctl start $SERVICE_NAME
10 | sleep 2
11 | sudo systemctl status $SERVICE_NAME --no-pager
12 | ;;
13 | stop)
14 | echo "Stopping MCP Memory Service..."
15 | sudo systemctl stop $SERVICE_NAME
16 | sudo systemctl status $SERVICE_NAME --no-pager
17 | ;;
18 | restart)
19 | echo "Restarting MCP Memory Service..."
20 | sudo systemctl restart $SERVICE_NAME
21 | sleep 2
22 | sudo systemctl status $SERVICE_NAME --no-pager
23 | ;;
24 | status)
25 | sudo systemctl status $SERVICE_NAME --no-pager
26 | ;;
27 | logs)
28 | echo "Showing recent logs (Ctrl+C to exit)..."
29 | sudo journalctl -u $SERVICE_NAME -f
30 | ;;
31 | health)
32 | echo "Checking service health..."
33 | curl -k -s https://localhost:8000/api/health | jq '.' 2>/dev/null || curl -k -s https://localhost:8000/api/health
34 | ;;
35 | enable)
36 | echo "Enabling service for startup..."
37 | sudo systemctl enable $SERVICE_NAME
38 | echo "Service will start automatically on boot"
39 | ;;
40 | disable)
41 | echo "Disabling service from startup..."
42 | sudo systemctl disable $SERVICE_NAME
43 | echo "Service will not start automatically on boot"
44 | ;;
45 | install)
46 | echo "Installing service..."
47 | ./install_service.sh
48 | ;;
49 | uninstall)
50 | echo "Uninstalling service..."
51 | sudo systemctl stop $SERVICE_NAME 2>/dev/null
52 | sudo systemctl disable $SERVICE_NAME 2>/dev/null
53 | sudo rm -f /etc/systemd/system/$SERVICE_NAME.service
54 | sudo systemctl daemon-reload
55 | echo "Service uninstalled"
56 | ;;
57 | *)
58 | echo "Usage: $0 {start|stop|restart|status|logs|health|enable|disable|install|uninstall}"
59 | echo ""
60 | echo "Commands:"
61 | echo " start - Start the service"
62 | echo " stop - Stop the service"
63 | echo " restart - Restart the service"
64 | echo " status - Show service status"
65 | echo " logs - Show live service logs"
66 | echo " health - Check API health endpoint"
67 | echo " enable - Enable service for startup"
68 | echo " disable - Disable service from startup"
69 | echo " install - Install the systemd service"
70 | echo " uninstall - Remove the systemd service"
71 | exit 1
72 | ;;
73 | esac
```
--------------------------------------------------------------------------------
/tests/smithery/test_smithery.py:
--------------------------------------------------------------------------------
```python
1 | #!/usr/bin/env python3
2 | """
3 | Test script to verify Smithery configuration works correctly.
4 | This simulates how Smithery would invoke the service.
5 | """
6 | import os
7 | import sys
8 | import subprocess
9 | import tempfile
10 | import json
11 |
12 | def test_smithery_config():
13 | """Test the Smithery configuration by simulating the expected command."""
14 | print("Testing Smithery configuration...")
15 |
16 | # Create temporary paths for testing
17 | with tempfile.TemporaryDirectory() as temp_dir:
18 | chroma_path = os.path.join(temp_dir, "chroma_db")
19 | backups_path = os.path.join(temp_dir, "backups")
20 |
21 | # Create directories
22 | os.makedirs(chroma_path, exist_ok=True)
23 | os.makedirs(backups_path, exist_ok=True)
24 |
25 | # Set environment variables as Smithery would
26 | test_env = os.environ.copy()
27 | test_env.update({
28 | 'MCP_MEMORY_CHROMA_PATH': chroma_path,
29 | 'MCP_MEMORY_BACKUPS_PATH': backups_path,
30 | 'PYTHONUNBUFFERED': '1',
31 | 'PYTORCH_ENABLE_MPS_FALLBACK': '1'
32 | })
33 |
34 | # Command that Smithery would run
35 | cmd = [sys.executable, 'smithery_wrapper.py', '--version']
36 |
37 | print(f"Running command: {' '.join(cmd)}")
38 | print(f"Environment: {json.dumps({k: v for k, v in test_env.items() if k.startswith('MCP_') or k in ['PYTHONUNBUFFERED', 'PYTORCH_ENABLE_MPS_FALLBACK']}, indent=2)}")
39 |
40 | try:
41 | result = subprocess.run(
42 | cmd,
43 | env=test_env,
44 | capture_output=True,
45 | text=True,
46 | timeout=30
47 | )
48 |
49 | print(f"Return code: {result.returncode}")
50 | if result.stdout:
51 | print(f"STDOUT:\n{result.stdout}")
52 | if result.stderr:
53 | print(f"STDERR:\n{result.stderr}")
54 |
55 | if result.returncode == 0:
56 | print("✅ SUCCESS: Smithery configuration test passed!")
57 | return True
58 | else:
59 | print("❌ FAILED: Smithery configuration test failed!")
60 | return False
61 |
62 | except subprocess.TimeoutExpired:
63 | print("❌ FAILED: Command timed out")
64 | return False
65 | except Exception as e:
66 | print(f"❌ FAILED: Exception occurred: {e}")
67 | return False
68 |
69 | if __name__ == "__main__":
70 | success = test_smithery_config()
71 | sys.exit(0 if success else 1)
```
--------------------------------------------------------------------------------
/docs/integrations/groq-bridge.md:
--------------------------------------------------------------------------------
```markdown
1 | # Groq Agent Bridge - Requirements
2 |
3 | Install the required package:
4 |
5 | ```bash
6 | pip install groq
7 | # or
8 | uv pip install groq
9 | ```
10 |
11 | Set up your environment:
12 |
13 | ```bash
14 | export GROQ_API_KEY="your-api-key-here"
15 | ```
16 |
17 | ## Available Models
18 |
19 | The Groq bridge supports multiple high-performance models:
20 |
21 | | Model | Context | Best For | Speed |
22 | |-------|---------|----------|-------|
23 | | **llama-3.3-70b-versatile** | 128K | General purpose (default) | ~300ms |
24 | | **moonshotai/kimi-k2-instruct** | 256K | Agentic coding, tool calling | ~200ms |
25 | | **llama-3.1-8b-instant** | 128K | Fast, simple tasks | ~100ms |
26 |
27 | **Kimi K2 Features:**
28 | - 256K context window (largest on GroqCloud)
29 | - 1 trillion parameters (32B activated)
30 | - Excellent for front-end development and complex coding
31 | - Superior agentic intelligence and tool calling
32 | - 185 tokens/second throughput
33 |
34 | ## Usage Examples
35 |
36 | ### As a library from another AI agent:
37 |
38 | ```python
39 | from groq_agent_bridge import GroqAgentBridge
40 |
41 | # Initialize the bridge
42 | bridge = GroqAgentBridge()
43 |
44 | # Simple call
45 | response = bridge.call_model_raw("Explain quantum computing in simple terms")
46 | print(response)
47 |
48 | # Advanced call with options
49 | result = bridge.call_model(
50 | prompt="Generate Python code for a binary search tree",
51 | model="llama-3.3-70b-versatile",
52 | max_tokens=500,
53 | temperature=0.3,
54 | system_message="You are an expert Python programmer"
55 | )
56 | print(result)
57 | ```
58 |
59 | ### Command-line usage:
60 |
61 | ```bash
62 | # Simple usage (uses default llama-3.3-70b-versatile)
63 | ./scripts/utils/groq "What is machine learning?"
64 |
65 | # Use Kimi K2 for complex coding tasks
66 | ./scripts/utils/groq "Generate a React component with hooks" \
67 | --model "moonshotai/kimi-k2-instruct"
68 |
69 | # Fast simple queries with llama-3.1-8b-instant
70 | ./scripts/utils/groq "Rate complexity 1-10: def add(a,b): return a+b" \
71 | --model "llama-3.1-8b-instant"
72 |
73 | # Full options with default model
74 | ./scripts/utils/groq "Generate a SQL query" \
75 | --model "llama-3.3-70b-versatile" \
76 | --max-tokens 200 \
77 | --temperature 0.5 \
78 | --system "You are a database expert" \
79 | --json
80 | ```
81 |
82 | ### Integration with bash scripts:
83 |
84 | ```bash
85 | #!/bin/bash
86 | export GROQ_API_KEY="your-key"
87 |
88 | # Get response and save to file
89 | python groq_agent_bridge.py "Write a haiku about code" --temperature 0.9 > response.txt
90 |
91 | # JSON output for parsing
92 | json_response=$(python groq_agent_bridge.py "Explain REST APIs" --json)
93 | # Parse with jq or other tools
94 | ```
95 |
96 | This provides a completely non-interactive way for other AI agents to call Groq's models!
97 |
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/cli/utils.py:
--------------------------------------------------------------------------------
```python
1 | # Copyright 2024 Heinrich Krupp
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """
16 | CLI utilities for MCP Memory Service.
17 | """
18 |
19 | import os
20 | from typing import Optional
21 |
22 | from ..storage.base import MemoryStorage
23 |
24 |
25 | async def get_storage(backend: Optional[str] = None) -> MemoryStorage:
26 | """
27 | Get storage backend for CLI operations.
28 |
29 | Args:
30 |         backend: Storage backend name ('sqlite_vec' or 'cloudflare'); any other value raises ValueError
31 |
32 | Returns:
33 | Initialized storage backend
34 | """
35 | # Determine backend
36 | if backend is None:
37 | backend = os.getenv('MCP_MEMORY_STORAGE_BACKEND', 'sqlite_vec').lower()
38 |
39 | backend = backend.lower()
40 |
41 | if backend in ('sqlite_vec', 'sqlite-vec'):
42 | from ..storage.sqlite_vec import SqliteVecMemoryStorage
43 | from ..config import SQLITE_VEC_PATH
44 | storage = SqliteVecMemoryStorage(SQLITE_VEC_PATH)
45 | await storage.initialize()
46 | return storage
47 | elif backend == 'cloudflare':
48 | from ..storage.cloudflare import CloudflareStorage
49 | from ..config import (
50 | CLOUDFLARE_API_TOKEN, CLOUDFLARE_ACCOUNT_ID,
51 | CLOUDFLARE_VECTORIZE_INDEX, CLOUDFLARE_D1_DATABASE_ID,
52 | CLOUDFLARE_R2_BUCKET, CLOUDFLARE_EMBEDDING_MODEL,
53 | CLOUDFLARE_LARGE_CONTENT_THRESHOLD, CLOUDFLARE_MAX_RETRIES,
54 | CLOUDFLARE_BASE_DELAY
55 | )
56 | storage = CloudflareStorage(
57 | api_token=CLOUDFLARE_API_TOKEN,
58 | account_id=CLOUDFLARE_ACCOUNT_ID,
59 | vectorize_index=CLOUDFLARE_VECTORIZE_INDEX,
60 | d1_database_id=CLOUDFLARE_D1_DATABASE_ID,
61 | r2_bucket=CLOUDFLARE_R2_BUCKET,
62 | embedding_model=CLOUDFLARE_EMBEDDING_MODEL,
63 | large_content_threshold=CLOUDFLARE_LARGE_CONTENT_THRESHOLD,
64 | max_retries=CLOUDFLARE_MAX_RETRIES,
65 | base_delay=CLOUDFLARE_BASE_DELAY
66 | )
67 | await storage.initialize()
68 | return storage
69 | else:
70 | raise ValueError(f"Unsupported storage backend: {backend}")
```
--------------------------------------------------------------------------------
/scripts/migration/TIMESTAMP_CLEANUP_README.md:
--------------------------------------------------------------------------------
```markdown
1 | # MCP Memory Timestamp Cleanup Scripts
2 |
3 | ## Overview
4 |
5 | These scripts help clean up the timestamp mess in your MCP Memory ChromaDB database where multiple timestamp formats and fields have accumulated over time.
6 |
7 | ## Files
8 |
9 | 1. **`verify_mcp_timestamps.py`** - Verification script to check current timestamp state
10 | 2. **`cleanup_mcp_timestamps.py`** - Migration script to fix timestamp issues
11 |
12 | ## The Problem
13 |
14 | Your database has accumulated 8 different timestamp-related fields:
15 | - `timestamp` (integer) - Original design
16 | - `created_at` (float) - Duplicate data
17 | - `created_at_iso` (string) - ISO format duplicate
18 | - `timestamp_float` (float) - Another duplicate
19 | - `timestamp_str` (string) - String format duplicate
20 | - `updated_at` (float) - Update tracking
21 | - `updated_at_iso` (string) - Update tracking in ISO
22 | - `date` (generic) - Generic date field
23 |
24 | This causes:
25 | - 3x storage overhead for the same timestamp
26 | - Confusion about which field to use
27 | - Inconsistent data retrieval
28 |
29 | ## Usage
30 |
31 | ### Step 1: Verify Current State
32 |
33 | ```bash
34 | python3 scripts/migration/verify_mcp_timestamps.py
35 | ```
36 |
37 | This will show:
38 | - Total memories in database
39 | - Distribution of timestamp fields
40 | - Memories missing timestamps
41 | - Sample values showing the redundancy
42 | - Date ranges for each timestamp type
43 |
44 | ### Step 2: Run Migration
45 |
46 | ```bash
47 | python3 scripts/migration/cleanup_mcp_timestamps.py
48 | ```
49 |
50 | The migration will:
51 | 1. **Create a backup** of your database
52 | 2. **Standardize** all timestamps to integer format in the `timestamp` field
53 | 3. **Remove** all redundant timestamp fields
54 | 4. **Ensure** all memories have valid timestamps
55 | 5. **Optimize** the database with VACUUM
56 |
57 | ### Step 3: Verify Results
58 |
59 | ```bash
60 | python3 scripts/migration/verify_mcp_timestamps.py
61 | ```
62 |
63 | After migration, you should see:
64 | - Only one timestamp field (`timestamp`)
65 | - All memories have timestamps
66 | - Clean data structure
67 |
68 | ## Safety
69 |
70 | - The migration script **always creates a backup** before making changes
71 | - Backup location: `~/Library/Application Support/mcp-memory/chroma_db/chroma.sqlite3.backup_YYYYMMDD_HHMMSS`
72 | - If anything goes wrong, you can restore the backup
73 |
74 | ## Restoration (if needed)
75 |
76 | If you need to restore from backup:
77 |
78 | ```bash
79 | # Stop Claude Desktop first
80 | cp "/path/to/backup" "$HOME/Library/Application Support/mcp-memory/chroma_db/chroma.sqlite3"
81 | ```
82 |
83 | ## After Migration
84 |
85 | Update your MCP Memory Service code to only use the `timestamp` field (integer format) for all timestamp operations. This prevents the issue from recurring.
86 |
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/utils/http_server_manager.py:
--------------------------------------------------------------------------------
```python
1 | """HTTP Server Manager for MCP Memory Service multi-client coordination."""
2 |
3 | import asyncio
4 | import logging
5 | import os
6 | import subprocess
7 | import sys
8 | from pathlib import Path
9 | from typing import Optional
10 |
11 | logger = logging.getLogger(__name__)
12 |
13 |
14 | async def auto_start_http_server_if_needed() -> bool:
15 | """
16 | Auto-start HTTP server if needed for multi-client coordination.
17 |
18 | Returns:
19 | bool: True if server was started or already running, False if failed
20 | """
21 | try:
22 | # Check if HTTP auto-start is enabled
23 | if not os.getenv("MCP_MEMORY_HTTP_AUTO_START", "").lower() in ("true", "1"):
24 | logger.debug("HTTP auto-start not enabled")
25 | return False
26 |
27 | # Check if server is already running
28 | from ..utils.port_detection import is_port_in_use
29 | port = int(os.getenv("MCP_HTTP_PORT", "8000"))
30 |
31 | if await is_port_in_use("localhost", port):
32 | logger.info(f"HTTP server already running on port {port}")
33 | return True
34 |
35 | # Try to start the HTTP server
36 | logger.info(f"Starting HTTP server on port {port}")
37 |
38 | # Get the repository root
39 | repo_root = Path(__file__).parent.parent.parent.parent
40 |
41 | # Start the HTTP server as a background process
42 | cmd = [
43 | sys.executable, "-m", "src.mcp_memory_service.app",
44 | "--port", str(port),
45 | "--host", "localhost"
46 | ]
47 |
48 | process = subprocess.Popen(
49 | cmd,
50 | cwd=repo_root,
51 | stdout=subprocess.DEVNULL,
52 | stderr=subprocess.DEVNULL,
53 | start_new_session=True
54 | )
55 |
56 | # Wait a moment and check if the process started successfully
57 | await asyncio.sleep(1)
58 |
59 | if process.poll() is None: # Process is still running
60 | # Wait a bit more and check if port is now in use
61 | await asyncio.sleep(2)
62 | if await is_port_in_use("localhost", port):
63 | logger.info(f"Successfully started HTTP server on port {port}")
64 | return True
65 | else:
66 | logger.warning("HTTP server process started but port not in use")
67 | return False
68 | else:
69 | logger.warning(f"HTTP server process exited with code {process.returncode}")
70 | return False
71 |
72 | except Exception as e:
73 | logger.error(f"Failed to auto-start HTTP server: {e}")
74 | return False
```
--------------------------------------------------------------------------------
/archive/docs-removed-2025-08-23/claude_integration.md:
--------------------------------------------------------------------------------
```markdown
1 | # MCP Memory Service - Development Guidelines
2 |
3 | ## Commands
4 | - Run memory server: `python scripts/run_memory_server.py`
5 | - Run tests: `pytest tests/`
6 | - Run specific test: `pytest tests/test_memory_ops.py::test_store_memory -v`
7 | - Check environment: `python scripts/verify_environment_enhanced.py`
8 | - Windows installation: `python scripts/install_windows.py`
9 | - Build package: `python -m build`
10 |
11 | ## Installation Guidelines
12 | - Always install in a virtual environment: `python -m venv venv`
13 | - Use `install.py` for cross-platform installation
14 | - Windows requires special PyTorch installation with correct index URL:
15 | ```bash
16 | pip install torch==2.1.0 torchvision==0.16.0 torchaudio==2.1.0 --index-url https://download.pytorch.org/whl/cu118
17 | ```
18 | - For recursion errors, run: `python scripts/fix_sitecustomize.py`
19 |
20 | ## Memory Service Invocation
21 | - See the comprehensive [Invocation Guide](invocation_guide.md) for full details
22 | - Key trigger phrases:
23 | - **Storage**: "remember that", "remember this", "save to memory", "store in memory"
24 | - **Retrieval**: "do you remember", "recall", "retrieve from memory", "search your memory for"
25 | - **Tag-based**: "find memories with tag", "search for tag", "retrieve memories tagged"
26 | - **Deletion**: "forget", "delete from memory", "remove from memory"
27 |
28 | ## Code Style
29 | - Python 3.10+ with type hints
30 | - Use dataclasses for models (see `models/memory.py`)
31 | - Triple-quoted docstrings for modules and functions
32 | - Async/await pattern for all I/O operations
33 | - Error handling with specific exception types and informative messages
34 | - Logging with appropriate levels for different severity
35 | - Commit messages follow semantic release format: `type(scope): message`
36 |
37 | ## Project Structure
38 | - `src/mcp_memory_service/` - Core package code
39 | - `models/` - Data models
40 | - `storage/` - Database abstraction
41 | - `utils/` - Helper functions
42 | - `server.py` - MCP protocol implementation
43 | - `scripts/` - Utility scripts
44 | - `memory_wrapper.py` - Windows wrapper script
45 | - `install.py` - Cross-platform installation script
46 |
47 | ## Dependencies
48 | - ChromaDB (0.5.23) for vector database
49 | - sentence-transformers (>=2.2.2) for embeddings
50 | - PyTorch (platform-specific installation)
51 | - MCP protocol (>=1.0.0, <2.0.0) for client-server communication
52 |
53 | ## Troubleshooting
54 | - For Windows installation issues, use `scripts/install_windows.py`
55 | - Apple Silicon requires Python 3.10+ built for ARM64
56 | - CUDA issues: verify with `torch.cuda.is_available()`
57 | - For MCP protocol issues, check `server.py` for required methods
```
--------------------------------------------------------------------------------
/archive/investigations/MACOS_HOOKS_INVESTIGATION.md:
--------------------------------------------------------------------------------
```markdown
1 | # macOS Memory Hooks Investigation
2 |
3 | ## Issue
4 | Memory awareness hooks may work differently on macOS vs Linux when using MCP protocol.
5 |
6 | ## Current Linux Behavior (Manjaro)
7 | - **Problem**: Hooks try to spawn duplicate MCP server via `MCPClient(serverCommand)`
8 | - **Symptom**: Connection timeout when hooks execute
9 | - **Root Cause**: Claude Code already has MCP server on stdio, can't have two servers on same streams
10 | - **Current Workaround**: HTTP fallback (requires separate HTTP server on port 8443)
11 |
12 | ## Hypothesis: macOS May Work Differently
13 | User reports hooks work on macOS without HTTP fallback. Possible reasons:
14 | 1. macOS Claude Code may provide hooks access to existing MCP connection
15 | 2. Different process/stdio handling on macOS vs Linux
16 | 3. `useExistingServer: true` config may actually work on macOS
17 |
18 | ## Investigation Needed (On MacBook)
19 |
20 | ### Test 1: MCP-Only Configuration
21 | ```json
22 | {
23 | "memoryService": {
24 | "protocol": "mcp",
25 | "preferredProtocol": "mcp",
26 | "mcp": {
27 | "useExistingServer": true,
28 | "serverName": "memory"
29 | }
30 | }
31 | }
32 | ```
33 |
34 | **Expected on macOS (if hypothesis correct):**
35 | - ✅ Hooks connect successfully
36 | - ✅ No duplicate server spawned
37 | - ✅ Memory context injected on session start
38 |
39 | **Expected on Linux (current behavior):**
40 | - ❌ Connection timeout
41 | - ❌ Multiple server processes spawn
42 | - ❌ Fallback to HTTP needed
43 |
44 | ### Test 2: Check Memory Client Behavior
45 | 1. Run hook manually: `node ~/.claude/hooks/core/session-start.js`
46 | 2. Check process list: Does it spawn new `memory server` process?
47 | 3. Monitor connection: Does it timeout or succeed?
48 |
49 | ### Test 3: Platform Comparison
50 | ```bash
51 | # On macOS
52 | ps aux | grep "memory server" # How many instances?
53 | node ~/.claude/hooks/core/session-start.js # Does it work?
54 |
55 | # On Linux (current)
56 | ps aux | grep "memory server" # Multiple instances!
57 | node ~/.claude/hooks/core/session-start.js # Times out!
58 | ```
59 |
60 | ## Files to Check
61 | - `claude-hooks/utilities/memory-client.js` - MCP connection logic
62 | - `claude-hooks/utilities/mcp-client.js` - Server spawning code
63 | - `claude-hooks/install_hooks.py` - Config generation (line 268-273: useExistingServer)
64 |
65 | ## Next Steps
66 | 1. Test on MacBook with MCP-only config
67 | 2. If works on macOS: investigate platform-specific differences
68 | 3. Document proper cross-platform solution
69 | 4. Update hooks to work consistently on both platforms
70 |
71 | ## Current Status
72 | - **Linux**: Requires HTTP fallback (confirmed working)
73 | - **macOS**: TBD - needs verification
74 | - **Goal**: Understand why different, achieve consistent behavior
75 |
76 | ---
77 | Created: 2025-09-30
78 | Platform: Linux (Manjaro)
79 | Issue: Hooks/MCP connection conflict
80 |
```
--------------------------------------------------------------------------------
/scripts/service/deploy_dual_services.sh:
--------------------------------------------------------------------------------
```bash
1 | #!/bin/bash
2 |
3 | echo "🚀 Deploying Dual MCP Services with mDNS..."
4 | echo " - FastMCP Server (port 8000) for Claude Code MCP clients"
5 | echo " - HTTP Dashboard (port 8080) for web interface"
6 | echo " - mDNS enabled for both services"
7 | echo ""
8 |
9 | # Stop existing services
10 | echo "⏹️ Stopping existing services..."
11 | sudo systemctl stop mcp-memory 2>/dev/null || true
12 | sudo systemctl stop mcp-http-dashboard 2>/dev/null || true
13 |
14 | # Install FastMCP service with mDNS
15 | echo "📝 Installing FastMCP service (port 8000)..."
16 | sudo cp /tmp/fastmcp-server-with-mdns.service /etc/systemd/system/mcp-memory.service
17 |
18 | # Install HTTP Dashboard service
19 | echo "📝 Installing HTTP Dashboard service (port 8080)..."
20 | sudo cp /tmp/mcp-http-dashboard.service /etc/systemd/system/mcp-http-dashboard.service
21 |
22 | # Reload systemd
23 | echo "🔄 Reloading systemd daemon..."
24 | sudo systemctl daemon-reload
25 |
26 | # Enable both services
27 | echo "🔛 Enabling both services for startup..."
28 | sudo systemctl enable mcp-memory
29 | sudo systemctl enable mcp-http-dashboard
30 |
31 | # Start FastMCP service first
32 | echo "▶️ Starting FastMCP server (port 8000)..."
33 | sudo systemctl start mcp-memory
34 | sleep 2
35 |
36 | # Start HTTP Dashboard service
37 | echo "▶️ Starting HTTP Dashboard (port 8080)..."
38 | sudo systemctl start mcp-http-dashboard
39 | sleep 2
40 |
41 | # Check status of both services
42 | echo ""
43 | echo "🔍 Checking service status..."
44 | echo ""
45 | echo "=== FastMCP Server (port 8000) ==="
46 | sudo systemctl status mcp-memory --no-pager
47 | echo ""
48 | echo "=== HTTP Dashboard (port 8080) ==="
49 | sudo systemctl status mcp-http-dashboard --no-pager
50 |
51 | echo ""
52 | echo "📊 Port status:"
53 | ss -tlnp | grep -E ":800[08]"
54 |
55 | echo ""
56 | echo "🌐 mDNS Services (if avahi is installed):"
57 | avahi-browse -t _http._tcp 2>/dev/null | grep -E "(MCP|Memory)" || echo "No mDNS services found (avahi may not be installed)"
58 | avahi-browse -t _mcp._tcp 2>/dev/null | grep -E "(MCP|Memory)" || echo "No MCP mDNS services found"
59 |
60 | echo ""
61 | echo "✅ Dual service deployment complete!"
62 | echo ""
63 | echo "🔗 Available Services:"
64 | echo " - FastMCP Protocol: http://memory.local:8000/mcp (for Claude Code)"
65 | echo " - HTTP Dashboard: http://memory.local:8080/ (for web access)"
66 | echo " - API Endpoints: http://memory.local:8080/api/* (for curl/scripts)"
67 | echo ""
68 | echo "📋 Service Management:"
69 | echo " - FastMCP logs: sudo journalctl -u mcp-memory -f"
70 | echo " - Dashboard logs: sudo journalctl -u mcp-http-dashboard -f"
71 | echo " - Stop FastMCP: sudo systemctl stop mcp-memory"
72 | echo " - Stop Dashboard: sudo systemctl stop mcp-http-dashboard"
73 | echo ""
74 | echo "🔍 mDNS Discovery:"
75 | echo " - Browse services: avahi-browse -t _http._tcp"
76 | echo " - Browse MCP: avahi-browse -t _mcp._tcp"
```
--------------------------------------------------------------------------------
/archive/docs-root-cleanup-2025-08-23/PYTORCH_DOWNLOAD_FIX.md:
--------------------------------------------------------------------------------
```markdown
1 | # PyTorch Download Issue - FIXED! 🎉
2 |
3 | ## Problem
4 | Claude Desktop was downloading PyTorch models (230MB+) on every startup, even with offline environment variables set in the config.
5 |
6 | ## Root Cause
7 | The issue was that **UV package manager isolation** prevented environment variables from being properly inherited, and model downloads happened before our offline configuration could take effect.
8 |
9 | ## Solution Applied
10 |
11 | ### 1. Created Offline Launcher Script
12 | **File**: `scripts/memory_offline.py`
13 | - Sets offline environment variables **before any imports**
14 | - Configures cache paths for Windows
15 | - Bypasses UV isolation by running Python directly
16 |
17 | ### 2. Updated Claude Desktop Config
18 | **Your config now uses**:
19 | ```json
20 | {
21 | "command": "python",
22 | "args": ["C:/REPOSITORIES/mcp-memory-service/scripts/memory_offline.py"]
23 | }
24 | ```
25 |
26 | **Instead of**:
27 | ```json
28 | {
29 | "command": "uv",
30 | "args": ["--directory", "...", "run", "memory"]
31 | }
32 | ```
33 |
34 | ### 3. Added Code-Level Offline Setup
35 | **File**: `src/mcp_memory_service/__init__.py`
36 | - Added `setup_offline_mode()` function
37 | - Runs immediately when module is imported
38 | - Provides fallback offline configuration
39 |
40 | ## Test Results ✅
41 |
42 | **Before Fix**:
43 | ```
44 | 2025-08-11T19:04:48.249Z [memory] [info] Message from client: {...}
45 | Downloading torch (230.2MiB) ← PROBLEM
46 | 2025-08-11T19:05:48.151Z [memory] [info] Request timed out
47 | ```
48 |
49 | **After Fix**:
50 | ```
51 | Setting up offline mode...
52 | HF_HUB_OFFLINE: 1
53 | HF_HOME: C:\Users\heinrich.krupp\.cache\huggingface
54 | Starting MCP Memory Service in offline mode...
55 | [No download messages] ← FIXED!
56 | ```
57 |
58 | ## Files Modified
59 |
60 | 1. **Your Claude Desktop Config**: `%APPDATA%\Claude\claude_desktop_config.json`
61 | - Changed from UV to direct Python execution
62 | - Uses new offline launcher script
63 |
64 | 2. **New Offline Launcher**: `scripts/memory_offline.py`
65 | - Forces offline mode before any ML library imports
66 | - Configures Windows cache paths automatically
67 |
68 | 3. **Core Module Init**: `src/mcp_memory_service/__init__.py`
69 | - Added offline mode setup as backup
70 | - Runs on module import
71 |
72 | 4. **Sample Config**: `examples/claude_desktop_config_windows.json`
73 | - Updated for other users
74 | - Uses new launcher approach
75 |
76 | ## Impact
77 |
78 | ✅ **No more 230MB PyTorch downloads on startup**
79 | ✅ **Faster Claude Desktop initialization**
80 | ✅ **Uses existing cached models (434 memories preserved)**
81 | ✅ **SQLite-vec backend still working**
82 |
83 | ## For Other Users
84 |
85 | Use the updated `examples/claude_desktop_config_windows.json` template and:
86 | 1. Replace `C:/REPOSITORIES/mcp-memory-service` with your path
87 | 2. Replace `YOUR_USERNAME` with your Windows username
88 | 3. Use `python` command with `scripts/memory_offline.py`
89 |
90 | The stubborn PyTorch download issue is now **completely resolved**! 🎉
```
--------------------------------------------------------------------------------
/src/mcp_memory_service/__init__.py:
--------------------------------------------------------------------------------
```python
1 | # Copyright 2024 Heinrich Krupp
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """MCP Memory Service initialization."""
16 |
17 | # CRITICAL: Set offline mode BEFORE any other imports to prevent model downloads
18 | import os
19 | import platform
20 |
21 | # Force offline mode for HuggingFace models - this MUST be done before any ML library imports
22 | def setup_offline_mode():
23 | """Setup offline mode environment variables to prevent model downloads."""
24 | # Set offline environment variables
25 | os.environ['HF_HUB_OFFLINE'] = '1'
26 | os.environ['TRANSFORMERS_OFFLINE'] = '1'
27 |
28 | # Configure cache paths
29 | username = os.environ.get('USERNAME', os.environ.get('USER', ''))
30 | if platform.system() == "Windows" and username:
31 | default_hf_home = f"C:\\Users\\{username}\\.cache\\huggingface"
32 | default_transformers_cache = f"C:\\Users\\{username}\\.cache\\huggingface\\transformers"
33 | default_sentence_transformers_home = f"C:\\Users\\{username}\\.cache\\torch\\sentence_transformers"
34 | else:
35 | default_hf_home = os.path.expanduser("~/.cache/huggingface")
36 | default_transformers_cache = os.path.expanduser("~/.cache/huggingface/transformers")
37 | default_sentence_transformers_home = os.path.expanduser("~/.cache/torch/sentence_transformers")
38 |
39 | # Set cache paths if not already set
40 | if 'HF_HOME' not in os.environ:
41 | os.environ['HF_HOME'] = default_hf_home
42 | if 'TRANSFORMERS_CACHE' not in os.environ:
43 | os.environ['TRANSFORMERS_CACHE'] = default_transformers_cache
44 | if 'SENTENCE_TRANSFORMERS_HOME' not in os.environ:
45 | os.environ['SENTENCE_TRANSFORMERS_HOME'] = default_sentence_transformers_home
46 |
47 | # Setup offline mode immediately when this module is imported
48 | setup_offline_mode()
49 |
50 | __version__ = "8.42.0"
51 |
52 | from .models import Memory, MemoryQueryResult
53 | from .storage import MemoryStorage
54 | from .utils import generate_content_hash
55 |
56 | # Conditional imports
57 | __all__ = [
58 | 'Memory',
59 | 'MemoryQueryResult',
60 | 'MemoryStorage',
61 | 'generate_content_hash'
62 | ]
63 |
64 | # Import storage backends conditionally
65 | try:
66 | from .storage import SqliteVecMemoryStorage
67 | __all__.append('SqliteVecMemoryStorage')
68 | except ImportError:
69 | SqliteVecMemoryStorage = None
70 |
71 |
```
--------------------------------------------------------------------------------
/.github/workflows/CACHE_FIX.md:
--------------------------------------------------------------------------------
```markdown
1 | # Python Cache Configuration Fix
2 |
3 | ## Issue Identified
4 | **Date**: 2024-08-24
5 | **Problem**: GitHub Actions workflows failing at Python setup step
6 |
7 | ### Root Cause
8 | The `setup-python` action was configured with `cache: 'pip'` but couldn't find a `requirements.txt` file. The project uses `pyproject.toml` for dependency management instead.
9 |
10 | ### Error Message
11 | ```
12 | Error: No file in /home/runner/work/mcp-memory-service/mcp-memory-service matched to [**/requirements.txt], make sure you have checked out the target repository
13 | ```
14 |
15 | ## Solution Applied
16 |
17 | Added `cache-dependency-path: '**/pyproject.toml'` to all Python setup steps that use pip caching.
18 |
19 | ### Files Modified
20 |
21 | #### 1. `.github/workflows/main-optimized.yml`
22 | Fixed 2 instances:
23 | - Line 34-39: Release job Python setup
24 | - Line 112-117: Test job Python setup
25 |
26 | #### 2. `.github/workflows/cleanup-images.yml`
27 | Fixed 1 instance:
28 | - Line 95-100: Docker Hub cleanup job Python setup
29 |
30 | ### Before
31 | ```yaml
32 | - name: Set up Python
33 | uses: actions/setup-python@v4
34 | with:
35 | python-version: '3.11'
36 | cache: 'pip'
37 | # ❌ Missing cache-dependency-path causes failure
38 | ```
39 |
40 | ### After
41 | ```yaml
42 | - name: Set up Python
43 | uses: actions/setup-python@v4
44 | with:
45 | python-version: '3.11'
46 | cache: 'pip'
47 | cache-dependency-path: '**/pyproject.toml'
48 | # ✅ Explicitly tells setup-python where to find dependencies
49 | ```
50 |
51 | ## Benefits
52 |
53 | 1. **Immediate Fix**: Workflows will no longer fail at Python setup step
54 | 2. **Performance**: Dependencies are properly cached, reducing workflow execution time
55 | 3. **Compatibility**: Works with modern Python projects using `pyproject.toml` (PEP 621)
56 |
57 | ## Testing
58 |
59 | All modified workflows have been validated:
60 | - ✅ `main-optimized.yml` - Valid YAML syntax
61 | - ✅ `cleanup-images.yml` - Valid YAML syntax
62 |
63 | ## Background
64 |
65 | The `setup-python` action defaults to looking for `requirements.txt` when using pip cache. Since this project uses `pyproject.toml` for dependency management (following modern Python packaging standards), we need to explicitly specify the dependency file path.
66 |
67 | This is a known issue in the setup-python action:
68 | - Issue #502: Cache pip dependencies from pyproject.toml file
69 | - Issue #529: Change pip default cache path to include pyproject.toml
70 |
71 | ## Next Steps
72 |
73 | After pushing these changes:
74 | 1. Workflows should complete successfully
75 | 2. Monitor the Python setup steps to confirm caching works
76 | 3. Check workflow execution time improvements from proper caching
77 |
78 | ## Alternative Solutions (Not Applied)
79 |
80 | 1. **Remove caching**: Simply remove `cache: 'pip'` line (would work but slower)
81 | 2. **Create requirements.txt**: Generate from pyproject.toml (adds maintenance burden)
82 | 3. **Use uv directly**: Since project uses uv for package management (more complex change)
83 |
84 | Date: 2024-08-24
85 | Status: Fixed and ready for deployment
```
--------------------------------------------------------------------------------
/scripts/pr/amp_suggest_fixes.sh:
--------------------------------------------------------------------------------
```bash
1 | #!/bin/bash
2 | # scripts/pr/amp_suggest_fixes.sh - Generate fix suggestions using Amp CLI
3 | #
4 | # Usage: bash scripts/pr/amp_suggest_fixes.sh <PR_NUMBER>
5 | # Example: bash scripts/pr/amp_suggest_fixes.sh 215
6 |
7 | set -e
8 |
9 | PR_NUMBER=$1
10 |
11 | if [ -z "$PR_NUMBER" ]; then
12 | echo "Usage: $0 <PR_NUMBER>"
13 | exit 1
14 | fi
15 |
16 | if ! command -v gh &> /dev/null; then
17 | echo "Error: GitHub CLI (gh) is not installed"
18 | exit 1
19 | fi
20 |
21 | echo "=== Amp CLI Fix Suggestions for PR #$PR_NUMBER ==="
22 | echo ""
23 |
24 | # Ensure Amp directories exist
25 | mkdir -p .claude/amp/prompts/pending
26 | mkdir -p .claude/amp/responses/ready
27 |
28 | # Get repository
29 | REPO=$(gh repo view --json nameWithOwner -q .nameWithOwner 2>/dev/null || echo "doobidoo/mcp-memory-service")
30 |
31 | # Fetch review comments
32 | echo "Fetching review comments from PR #$PR_NUMBER..."
33 | review_comments=$(gh api "repos/$REPO/pulls/$PR_NUMBER/comments" | \
34 | jq -r '[.[] | select(.user.login | test("bot|gemini|claude"))] | .[] | "- \(.path):\(.line) - \(.body[0:200])"' | \
35 | head -50)
36 |
37 | if [ -z "$review_comments" ]; then
38 | echo "No review comments found."
39 | exit 0
40 | fi
41 |
42 | echo "Review Comments:"
43 | echo "$review_comments"
44 | echo ""
45 |
46 | # Get PR diff
47 | echo "Fetching PR diff..."
48 | pr_diff=$(gh pr diff $PR_NUMBER | head -500) # Limit to 500 lines to avoid token overflow
49 |
50 | # Generate UUID for fix suggestions task
51 | fixes_uuid=$(uuidgen 2>/dev/null || cat /proc/sys/kernel/random/uuid)
52 |
53 | echo "Creating Amp prompt for fix suggestions..."
54 |
55 | # Create fix suggestions prompt
56 | cat > .claude/amp/prompts/pending/fixes-${fixes_uuid}.json << EOF
57 | {
58 | "id": "${fixes_uuid}",
59 | "timestamp": "$(date -u +"%Y-%m-%dT%H:%M:%S.000Z")",
60 | "prompt": "Analyze these code review comments and suggest specific fixes. DO NOT auto-apply changes. Output format: For each issue, provide: 1) File path, 2) Issue description, 3) Suggested fix (code snippet or explanation), 4) Rationale. Focus on safe, non-breaking changes (formatting, type hints, error handling, variable naming, import organization).\n\nReview comments:\n${review_comments}\n\nPR diff (current code):\n${pr_diff}\n\nProvide actionable fix suggestions in markdown format.",
61 | "context": {
62 | "project": "mcp-memory-service",
63 | "task": "fix-suggestions",
64 | "pr_number": "${PR_NUMBER}"
65 | },
66 | "options": {
67 | "timeout": 180000,
68 | "format": "markdown"
69 | }
70 | }
71 | EOF
72 |
73 | echo "✅ Created Amp prompt for fix suggestions"
74 | echo ""
75 | echo "=== Run this Amp command ==="
76 | echo "amp @.claude/amp/prompts/pending/fixes-${fixes_uuid}.json"
77 | echo ""
78 | echo "=== Then collect the suggestions ==="
79 | echo "bash scripts/pr/amp_collect_results.sh --timeout 180 --uuids '${fixes_uuid}'"
80 | echo ""
81 |
82 | # Save UUID for later collection
83 | echo "${fixes_uuid}" > /tmp/amp_fix_suggestions_uuid_${PR_NUMBER}.txt
84 |
85 | echo "UUID saved to /tmp/amp_fix_suggestions_uuid_${PR_NUMBER}.txt for result collection"
86 |
```
--------------------------------------------------------------------------------
/docs/mastery/troubleshooting.md:
--------------------------------------------------------------------------------
```markdown
1 | # MCP Memory Service — Troubleshooting Guide
2 |
3 | Common issues and proven fixes when running locally or in CI.
4 |
5 | ## sqlite-vec Extension Loading Fails
6 |
7 | Symptoms:
8 |
9 | - Errors like: `SQLite extension loading not supported` or `enable_load_extension not available`.
10 | - `Failed to load sqlite-vec extension`.
11 |
12 | Causes:
13 |
14 | - Python’s `sqlite3` not compiled with loadable extensions (macOS system Python is a common culprit).
15 |
16 | Fixes:
17 |
18 | - macOS:
19 | - `brew install python` and use Homebrew Python.
20 | - Or install via pyenv with extensions: `PYTHON_CONFIGURE_OPTS='--enable-loadable-sqlite-extensions' pyenv install 3.12.x`.
21 | - Linux:
22 | - Install dev headers: `apt install python3-dev sqlite3` and ensure Python was built with `--enable-loadable-sqlite-extensions`.
23 | - Windows:
24 | - Prefer official python.org installer or conda distribution.
25 | - Alternative: switch backend: `export MCP_MEMORY_STORAGE_BACKEND=chromadb` (see migration notes).
26 |
27 | ## `sentence-transformers`/`torch` Not Available
28 |
29 | Symptoms:
30 |
31 | - Warnings about no embedding model; semantic search returns empty.
32 |
33 | Fixes:
34 |
35 | - Install ML deps: `pip install sentence-transformers torch` (or `uv add` equivalents).
36 | - For constrained environments, semantic search can still run once deps are installed; tag-based and metadata operations work without embeddings.
37 |
38 | ## First-Run Model Downloads
39 |
40 | Symptoms:
41 |
42 | - Warnings like: `Using TRANSFORMERS_CACHE is deprecated` or `No snapshots directory`.
43 |
44 | Status:
45 |
46 | - Expected on first run while downloading `all-MiniLM-L6-v2` (~25MB). Subsequent runs use cache.
47 |
48 | ## Cloudflare Backend Fails on Boot
49 |
50 | Symptoms:
51 |
52 | - Immediate exit with `Missing required environment variables for Cloudflare backend`.
53 |
54 | Fixes:
55 |
56 | - Set all required envs: `CLOUDFLARE_API_TOKEN`, `CLOUDFLARE_ACCOUNT_ID`, `CLOUDFLARE_VECTORIZE_INDEX`, `CLOUDFLARE_D1_DATABASE_ID`. Optional: `CLOUDFLARE_R2_BUCKET`.
57 | - Validate resources via Wrangler or dashboard; see `docs/cloudflare-setup.md`.
58 |
59 | ## Port/Coordination Conflicts
60 |
61 | Symptoms:
62 |
63 | - Multi-client mode cannot start HTTP server, or falls back to direct mode.
64 |
65 | Status/Fixes:
66 |
67 | - The server auto-detects: `http_client` (connect), `http_server` (start), else `direct` (WAL). If the coordination port is in use by another service, expect direct fallback; adjust port or stop the conflicting service.
68 |
69 | ## File Permission or Path Errors
70 |
71 | Symptoms:
72 |
73 | - Path write tests failing under `BASE_DIR` or backup directories.
74 |
75 | Fixes:
76 |
77 | - Ensure `MCP_MEMORY_BASE_DIR` points to a writable location; the service validates and creates directories and test-writes `.write_test` files with retries.
78 |
79 | ## Slow Queries or High CPU
80 |
81 | Checklist:
82 |
83 | - Ensure embeddings are available and model loaded once (warmup).
84 | - For low RAM or Windows CUDA:
85 | - `PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:128`
86 | - Reduce model cache sizes; see `configure_environment()` in `server.py`.
87 | - Tune SQLite pragmas via `MCP_MEMORY_SQLITE_PRAGMAS`.
88 |
89 |
```
--------------------------------------------------------------------------------
/scripts/server/check_http_server.py:
--------------------------------------------------------------------------------
```python
1 | #!/usr/bin/env python3
2 | """
3 | Check if the MCP Memory Service HTTP server is running.
4 |
5 | This script checks if the HTTP server is accessible and provides
6 | helpful feedback to users about how to start it if it's not running.
7 | """
8 |
9 | import sys
10 | import os
11 | from urllib.request import urlopen, Request
12 | from urllib.error import URLError, HTTPError
13 | import json
14 | import ssl
15 |
16 |
def check_http_server(verbose: bool = False) -> bool:
    """
    Check if the MCP Memory Service HTTP server is running.

    Builds the health endpoint from MCP_HTTPS_ENABLED / MCP_HTTP_PORT /
    MCP_HTTPS_PORT and performs a GET with a 3-second timeout.

    Args:
        verbose: If True, print detailed status messages

    Returns:
        bool: True if the server answered 200 with a JSON body, False otherwise
    """
    # Determine the endpoint from environment
    https_enabled = os.getenv('MCP_HTTPS_ENABLED', 'false').lower() == 'true'
    http_port = int(os.getenv('MCP_HTTP_PORT', '8000'))
    https_port = int(os.getenv('MCP_HTTPS_PORT', '8443'))

    if https_enabled:
        endpoint = f"https://localhost:{https_port}/api/health"
    else:
        endpoint = f"http://localhost:{http_port}/api/health"

    try:
        # SSL context that skips certificate verification so local
        # self-signed certificates don't cause a false negative.
        # urllib only consults the context for https:// URLs, so passing
        # it unconditionally is harmless for plain HTTP.
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE

        req = Request(endpoint)
        with urlopen(req, timeout=3, context=ctx) as response:
            if response.status == 200:
                data = json.loads(response.read().decode('utf-8'))
                if verbose:
                    print("[OK] HTTP server is running")
                    print(f"   Version: {data.get('version', 'unknown')}")
                    print(f"   Endpoint: {endpoint}")
                    print(f"   Status: {data.get('status', 'unknown')}")
                return True
            if verbose:
                print(f"[WARN] HTTP server responded with status {response.status}")
            return False
    # URLError already covers HTTPError (its subclass); JSONDecodeError
    # handles a 200 response whose body is not valid JSON.
    except (URLError, json.JSONDecodeError) as e:
        if verbose:
            print("[ERROR] HTTP server is NOT running")
            print("\nTo start the HTTP server, run:")
            print("  uv run python scripts/server/run_http_server.py")
            print("\n  Or for HTTPS:")
            print("  MCP_HTTPS_ENABLED=true uv run python scripts/server/run_http_server.py")
            print(f"\nError: {str(e)}")
        return False
66 |
67 |
def main():
    """Main entry point for CLI usage."""
    import argparse

    parser = argparse.ArgumentParser(
        description="Check if MCP Memory Service HTTP server is running"
    )
    parser.add_argument(
        "-q", "--quiet",
        action="store_true",
        help="Only return exit code (0=running, 1=not running), no output."
    )
    args = parser.parse_args()

    # Exit 0 when the server answers, 1 otherwise; quiet mode suppresses output.
    sys.exit(0 if check_http_server(verbose=not args.quiet) else 1)
85 |
86 |
87 | if __name__ == "__main__":
88 | main()
89 |
```
--------------------------------------------------------------------------------
/claude_commands/memory-ingest.md:
--------------------------------------------------------------------------------
```markdown
1 | # memory-ingest
2 |
3 | Ingest a document file into the MCP Memory Service database.
4 |
5 | ## Usage
6 |
7 | ```
8 | claude /memory-ingest <file_path> [--tags TAG1,TAG2] [--chunk-size SIZE] [--chunk-overlap OVERLAP] [--memory-type TYPE]
9 | ```
10 |
11 | ## Parameters
12 |
13 | - `file_path`: Path to the document file to ingest (required)
14 | - `--tags`: Comma-separated list of tags to apply to all memories created from this document
15 | - `--chunk-size`: Target size for text chunks in characters (default: 1000)
16 | - `--chunk-overlap`: Characters to overlap between chunks (default: 200)
17 | - `--memory-type`: Type label for created memories (default: "document")
18 |
19 | ## Supported Formats
20 |
21 | - PDF files (.pdf)
22 | - Text files (.txt, .md, .markdown, .rst)
23 | - JSON files (.json)
24 |
25 | ## Implementation
26 |
27 | I need to upload the document to the MCP Memory Service HTTP API endpoint and monitor the progress.
28 |
29 | First, let me check if the service is running and get the correct endpoint:
30 |
31 | ```bash
32 | # Check if the service is running on default port
33 | curl -s http://localhost:8080/api/health || echo "Service not running on 8080"
34 |
35 | # Or check common alternative ports
36 | curl -s http://localhost:8443/api/health || echo "Service not running on 8443"
37 | ```
38 |
39 | Assuming the service is running (adjust the URL as needed), I'll upload the document:
40 |
41 | ```bash
42 | # Upload the document with specified parameters
43 | curl -X POST "http://localhost:8080/api/documents/upload" \
44 |   -F "file=@$FILE_PATH" \
45 |   -F "tags=$TAGS" \
46 |   -F "chunk_size=$CHUNK_SIZE" \
47 |   -F "chunk_overlap=$CHUNK_OVERLAP" \
48 |   -F "memory_type=$MEMORY_TYPE"
49 |
50 | ```
51 |
52 | Then I'll monitor the upload progress:
53 |
54 | ```bash
55 | # Monitor progress (replace UPLOAD_ID with the ID from the upload response)
56 | curl -s "http://localhost:8080/api/documents/status/UPLOAD_ID"
57 | ```
58 |
59 | ## Examples
60 |
61 | ```
62 | # Ingest a PDF with tags
63 | claude /memory-ingest manual.pdf --tags documentation,reference
64 |
65 | # Ingest a markdown file with custom chunking
66 | claude /memory-ingest README.md --chunk-size 1500 --chunk-overlap 300 --tags project,readme
67 |
68 | # Ingest a document as reference material
69 | claude /memory-ingest api-docs.json --tags api,reference --memory-type reference
70 | ```
71 |
72 | ## Actual Execution Steps
73 |
74 | When you run this command, I will:
75 |
76 | 1. **Validate the file exists** and check if it's a supported format
77 | 2. **Determine the service endpoint** (try localhost:8080, then 8443)
78 | 3. **Upload the file** using the documents API endpoint with your specified parameters
79 | 4. **Monitor progress** and show real-time updates
80 | 5. **Report results** including chunks created and any errors
81 |
82 | The document will be automatically parsed, chunked, and stored as searchable memories in your MCP Memory Service database.
83 |
84 | ## Notes
85 |
86 | - The document will be automatically parsed and chunked for optimal retrieval
87 | - Each chunk becomes a separate memory entry with semantic embeddings
88 | - Progress will be displayed during ingestion
89 | - Failed chunks will be reported but won't stop the overall process
90 |
```
--------------------------------------------------------------------------------
/archive/docs-removed-2025-08-23/development/mcp-milestone.md:
--------------------------------------------------------------------------------
```markdown
1 | # MCP Memory Service v4.0.0-beta.1 - Major Milestone Achievement
2 |
3 | **Date**: August 4, 2025
4 | **Status**: 🚀 **Mission Accomplished**
5 |
6 | ## Project Evolution Complete
7 |
8 | Successfully transitioned MCP Memory Service from experimental local-only service to **production-ready remote memory infrastructure** with native MCP protocol support.
9 |
10 | ## Technical Achievements
11 |
12 | ### 1. Release Management ✅
13 | - **v4.0.0-beta.1** beta release completed
14 | - Fixed Docker CI/CD workflows (main.yml and publish-and-test.yml)
15 | - GitHub Release created with comprehensive notes
16 | - Repository cleanup (3 obsolete branches removed)
17 |
18 | ### 2. GitHub Issues Resolved ✅
19 | - **Issue #71**: Remote Memory Service access - **FULLY RESOLVED** via FastAPI MCP integration
20 | - **Issue #72**: Node.js Bridge SSL issues - **SUPERSEDED** (bridge deprecated in favor of native protocol)
21 |
22 | ### 3. MCP Protocol Compliance ✅
23 | Applied critical refactorings from fellow AI Coder:
24 | - **Flexible ID Validation**: `Optional[Union[str, int]]` supporting both string and integer IDs
25 | - **Dual Route Handling**: Both `/mcp` and `/mcp/` endpoints to prevent 307 redirects
26 | - **Content Hash Generation**: Proper `generate_content_hash()` implementation
27 |
28 | ### 4. Infrastructure Deployment ✅
29 | - **Remote Server**: Successfully deployed at `your-server-ip:8000`
30 | - **Backend**: SQLite-vec (1.7MB database, 384-dimensional embeddings)
31 | - **Model**: all-MiniLM-L6-v2 loaded and operational
32 | - **Existing Data**: 65 memories already stored
33 | - **API Coverage**: Full MCP protocol + REST API + Dashboard
34 |
35 | ## Strategic Impact
36 |
37 | This represents the **successful completion of architectural evolution** from:
38 | - ❌ Local-only experimental service
39 | - ✅ Production-ready remote memory infrastructure
40 |
41 | **Key Benefits Achieved**:
42 | 1. **Cross-Device Access**: Claude Code can connect from any device
43 | 2. **Protocol Compliance**: Standard MCP JSON-RPC 2.0 implementation
44 | 3. **Scalable Architecture**: Dual-service design (HTTP + MCP)
45 | 4. **Robust CI/CD**: Automated testing and deployment pipeline
46 |
47 | ## Verification
48 |
49 | **MCP Protocol Test Results**:
50 | ```bash
51 | # Health check successful
52 | curl -X POST http://your-server-ip:8000/mcp \
53 | -H "Content-Type: application/json" \
54 | -d '{"jsonrpc":"2.0","id":1,"method":"tools/call","params":{"name":"check_database_health"}}'
55 |
56 | # Response: {"status":"healthy","statistics":{"total_memories":65,"embedding_model":"all-MiniLM-L6-v2"}}
57 | ```
58 |
59 | **Available Endpoints**:
60 | - 🔧 **MCP Protocol**: `http://your-server-ip:8000/mcp`
61 | - 📊 **Dashboard**: `http://your-server-ip:8000/`
62 | - 📚 **API Docs**: `http://your-server-ip:8000/api/docs`
63 |
64 | ## Next Steps
65 |
66 | - Monitor beta feedback for v4.0.0 stable release
67 | - Continue remote memory service operation
68 | - Support Claude Code integrations across devices
69 |
70 | ---
71 |
72 | **This milestone marks the successful transformation of MCP Memory Service into a fully operational, remotely accessible, protocol-compliant memory infrastructure ready for production use.** 🎉
```
--------------------------------------------------------------------------------
/scripts/maintenance/memory-types.md:
--------------------------------------------------------------------------------
```markdown
1 | # Memory Type Taxonomy (Updated Nov 2025)
2 |
3 | Database consolidated from 342 fragmented types to 128 organized types. Use these **24 core types** for all new memories.
4 |
5 | ## Content Types
6 | - `note` - General notes, observations, summaries
7 | - `reference` - Reference materials, knowledge base entries
8 | - `document` - Formal documents, code snippets
9 | - `guide` - How-to guides, tutorials, troubleshooting guides
10 |
11 | ## Activity Types
12 | - `session` - Work sessions, development sessions
13 | - `implementation` - Implementation work, integrations
14 | - `analysis` - Analysis, reports, investigations
15 | - `troubleshooting` - Problem-solving, debugging
16 | - `test` - Testing activities, validation
17 |
18 | ## Artifact Types
19 | - `fix` - Bug fixes, corrections
20 | - `feature` - New features, enhancements
21 | - `release` - Releases, release notes
22 | - `deployment` - Deployments, deployment records
23 |
24 | ## Progress Types
25 | - `milestone` - Milestones, completions, achievements
26 | - `status` - Status updates, progress reports
27 |
28 | ## Infrastructure Types
29 | - `configuration` - Configurations, setups, settings
30 | - `infrastructure` - Infrastructure changes, system updates
31 | - `process` - Processes, workflows, procedures
32 | - `security` - Security-related memories
33 | - `architecture` - Architecture decisions, design patterns
34 |
35 | ## Other Types
36 | - `documentation` - Documentation artifacts
37 | - `solution` - Solutions, resolutions
38 | - `achievement` - Accomplishments, successes
39 |
40 | ## Usage Guidelines
41 |
42 | ### Avoid Creating New Type Variations
43 |
44 | **DON'T** create variations like:
45 | - `bug-fix`, `bugfix`, `technical-fix` → Use `fix`
46 | - `technical-solution`, `project-solution` → Use `solution`
47 | - `project-implementation` → Use `implementation`
48 | - `technical-note` → Use `note`
49 |
50 | ### Avoid Redundant Prefixes
51 |
52 | Remove unnecessary qualifiers:
53 | - `project-*` → Use base type
54 | - `technical-*` → Use base type
55 | - `development-*` → Use base type
56 |
57 | ### Cleanup Commands
58 |
59 | ```bash
60 | # Preview type consolidation
61 | python scripts/maintenance/consolidate_memory_types.py --dry-run
62 |
63 | # Execute type consolidation
64 | python scripts/maintenance/consolidate_memory_types.py
65 |
66 | # Check type distribution
67 | python scripts/maintenance/check_memory_types.py
68 |
69 | # Assign types to untyped memories
70 | python scripts/maintenance/assign_memory_types.py --dry-run
71 | python scripts/maintenance/assign_memory_types.py
72 | ```
73 |
74 | ## Consolidation Rules
75 |
76 | The consolidation script applies these transformations:
77 |
78 | 1. **Fix variants** → `fix`: bug-fix, bugfix, technical-fix, etc.
79 | 2. **Implementation variants** → `implementation`: integrations, project-implementation, etc.
80 | 3. **Solution variants** → `solution`: technical-solution, project-solution, etc.
81 | 4. **Note variants** → `note`: technical-note, development-note, etc.
82 | 5. **Remove redundant prefixes**: project-, technical-, development-
83 |
84 | ## Benefits of Standardization
85 |
86 | - Improved search and retrieval accuracy
87 | - Better tag-based filtering
88 | - Reduced database fragmentation
89 | - Easier memory type analytics
90 | - Consistent memory organization
91 |
```
--------------------------------------------------------------------------------
/tools/docker/test-docker-modes.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Test script to verify both Docker modes work correctly

set -e

echo "==================================="
echo "Docker Setup Test Script"
echo "==================================="

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Print a check result. Usage: print_status <exit-code> <message>
# Returns non-zero on failure so `set -e` stops the script AFTER the
# failure has been reported.
print_status() {
    if [ $1 -eq 0 ]; then
        echo -e "${GREEN}✓${NC} $2"
    else
        echo -e "${RED}✗${NC} $2"
        return 1
    fi
}

# Change to docker directory
cd "$(dirname "$0")"

# NOTE: each check captures its exit code with `|| rc=$?` instead of
# running bare and reading `$?` afterwards. Under `set -e` a bare failing
# command (or pipeline) would abort the script before print_status could
# report which check failed.

echo ""
echo "1. Building Docker image..."
rc=0; docker-compose build --quiet || rc=$?
print_status $rc "Docker image built successfully"

echo ""
echo "2. Testing MCP Protocol Mode..."
echo "   Starting container in MCP mode..."
docker-compose up -d
sleep 5

# Check if container is running
rc=0; docker-compose ps | grep -q "Up" || rc=$?
print_status $rc "MCP mode container is running"

# Check logs for correct mode
rc=0; docker-compose logs 2>&1 | grep -q "Running in mcp mode" || rc=$?
print_status $rc "Container started in MCP mode"

# Stop MCP mode
docker-compose down
echo ""

echo "3. Testing HTTP API Mode..."
echo "   Starting container in HTTP mode..."
docker-compose -f docker-compose.http.yml up -d
sleep 10

# Check if container is running
rc=0; docker-compose -f docker-compose.http.yml ps | grep -q "Up" || rc=$?
print_status $rc "HTTP mode container is running"

# Check logs for Uvicorn
rc=0; docker-compose -f docker-compose.http.yml logs 2>&1 | grep -q "Uvicorn\|FastAPI\|HTTP" || rc=$?
print_status $rc "HTTP server started (Uvicorn/FastAPI)"

# Test health endpoint (curl failure is tolerated; we inspect the HTTP code)
HTTP_RESPONSE=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:8000/api/health 2>/dev/null || echo "000")
if [ "$HTTP_RESPONSE" = "200" ]; then
    print_status 0 "Health endpoint responding (HTTP $HTTP_RESPONSE)"
else
    print_status 1 "Health endpoint not responding (HTTP $HTTP_RESPONSE)"
fi

# Test with API key (either a success or an auth error proves the route exists)
API_TEST=$(curl -s -X POST http://localhost:8000/api/memories \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer your-secure-api-key-here" \
  -d '{"content": "Docker test memory", "tags": ["test"]}' 2>/dev/null | grep -q "success\|unauthorized" && echo "ok" || echo "fail")

if [ "$API_TEST" = "ok" ]; then
    print_status 0 "API endpoint accessible"
else
    print_status 1 "API endpoint not accessible"
fi

# Stop HTTP mode
docker-compose -f docker-compose.http.yml down

echo ""
echo "==================================="
echo "Test Summary:"
echo "==================================="
echo -e "${GREEN}✓${NC} All critical fixes from Joe applied:"
echo "  - PYTHONPATH=/app/src"
echo "  - run_server.py copied"
echo "  - Embedding models pre-downloaded"
echo ""
echo -e "${GREEN}✓${NC} Simplified Docker structure:"
echo "  - Unified entrypoint for both modes"
echo "  - Clear MCP vs HTTP separation"
echo "  - Single Dockerfile for all modes"
echo ""
echo -e "${YELLOW}Note:${NC} Deprecated files marked in DEPRECATED.md"
echo ""
echo "Docker setup is ready for use!"
--------------------------------------------------------------------------------
/scripts/server/preload_models.py:
--------------------------------------------------------------------------------
```python
1 | #!/usr/bin/env python3
2 | """
3 | Pre-load sentence-transformers models to avoid startup delays.
4 |
5 | This script downloads and caches the default embedding models used by
6 | MCP Memory Service so they don't need to be downloaded during server startup,
7 | which can cause timeout errors in Claude Desktop.
8 | """
9 |
10 | import sys
11 | import os
12 |
def preload_sentence_transformers():
    """Download and cache the default embedding model, then smoke-test it.

    Returns:
        True if the model loaded and produced an embedding,
        False if sentence-transformers is unavailable or loading failed.
    """
    try:
        print("[INFO] Pre-loading sentence-transformers models...")
        from sentence_transformers import SentenceTransformer

        # Default model used by the memory service
        default_model = "all-MiniLM-L6-v2"
        print(f"[INFO] Downloading and caching model: {default_model}")
        st_model = SentenceTransformer(default_model)
        print(f"[OK] Model loaded successfully on device: {st_model.device}")

        # Encode one sample sentence to confirm the model actually works.
        print("[INFO] Testing model functionality...")
        sample = "This is a test sentence for embedding."
        vector = st_model.encode(sample)
        print(f"[OK] Model test successful - embedding shape: {vector.shape}")
        return True

    except ImportError:
        print("[WARNING] sentence-transformers not available - skipping model preload")
        return False
    except Exception as e:
        print(f"[ERROR] Error preloading model: {e}")
        return False
40 |
def check_cache_status():
    """Report whether an embedding model already exists in a local cache.

    Scans the HuggingFace hub cache and the legacy torch
    sentence-transformers cache for entries that look like the default
    model. Unreadable cache directories are skipped silently.

    Returns:
        True if a matching cached model was found, False otherwise.
    """
    candidate_dirs = (
        os.path.expanduser("~/.cache/huggingface/hub"),
        os.path.expanduser("~/.cache/torch/sentence_transformers"),
    )

    for cache_dir in candidate_dirs:
        if not os.path.exists(cache_dir):
            continue
        try:
            entries = os.listdir(cache_dir)
        except (OSError, PermissionError):
            continue
        for entry in entries:
            lowered = entry.lower()
            if 'sentence-transformers' in lowered or 'minilm' in lowered:
                print(f"[OK] Found cached model: {entry}")
                return True

    print("[INFO] No cached models found")
    return False
61 |
def main():
    """Entry point: report cache status, then preload models if needed."""
    print("MCP Memory Service - Model Preloader")
    print("=" * 50)

    # Step 1: skip the download entirely when a cached model is found.
    print("\n[1] Checking cache status...")
    if check_cache_status():
        print("[OK] Models are already cached - no download needed")
        return True

    # Step 2: download, cache, and smoke-test the model.
    print("\n[2] Preloading models...")
    preloaded = preload_sentence_transformers()

    if preloaded:
        print("\n[OK] Model preloading complete!")
        print("[INFO] MCP Memory Service should now start without downloading models")
    else:
        print("\n[WARNING] Model preloading failed - server may need to download during startup")

    return preloaded
85 |
86 | if __name__ == "__main__":
87 | success = main()
88 | sys.exit(0 if success else 1)
```
--------------------------------------------------------------------------------
/docs/mastery/testing-guide.md:
--------------------------------------------------------------------------------
```markdown
1 | # MCP Memory Service — Testing Guide
2 |
3 | This guide explains how to run, understand, and extend the test suites.
4 |
5 | ## Prerequisites
6 |
7 | - Python ≥ 3.10 (3.12 recommended; 3.13 may lack prebuilt `sqlite-vec` wheels).
8 | - Install dependencies (uv recommended):
9 | - `uv sync` (respects `pyproject.toml` and `uv.lock`), or
10 | - `pip install -e .` plus extras as needed.
11 | - For SQLite-vec tests:
12 | - `sqlite-vec` and `sentence-transformers`/`torch` should be installed.
13 | - On some OS/Python combinations, sqlite extension loading must be supported (see Troubleshooting).
14 |
15 | ## Test Layout
16 |
17 | - `tests/README.md`: overview.
18 | - Categories:
19 | - Unit: `tests/unit/` (e.g., tags, mdns, cloudflare stubs).
20 | - Integration: `tests/integration/` (cross-component flows).
21 | - Performance: `tests/performance/`.
22 | - Backend-specific: top-level tests like `test_sqlite_vec_storage.py`, `test_time_parser.py`, `test_memory_ops.py`.
23 |
24 | ## Running Tests
25 |
26 | Run all:
27 |
28 | ```
29 | pytest
30 | ```
31 |
32 | Category:
33 |
34 | ```
35 | pytest tests/unit/
36 | pytest tests/integration/
37 | pytest tests/performance/
38 | ```
39 |
40 | Single file or test:
41 |
42 | ```
43 | pytest tests/test_sqlite_vec_storage.py::TestSqliteVecStorage::test_store_memory -q
44 | ```
45 |
46 | With uv:
47 |
48 | ```
49 | uv run pytest -q
50 | ```
51 |
52 | ## Important Behaviors and Skips
53 |
54 | - SQLite-vec tests are marked to skip when `sqlite-vec` is unavailable:
55 | - See `pytestmark = pytest.mark.skipif(not SQLITE_VEC_AVAILABLE, ...)` in `tests/test_sqlite_vec_storage.py`.
56 | - Some tests simulate no-embedding scenarios by patching `SENTENCE_TRANSFORMERS_AVAILABLE=False` to validate fallback code paths.
57 | - Temp directories isolate database files; connections are closed in teardown.
58 |
59 | ## Coverage of Key Areas
60 |
61 | - Storage CRUD and vector search (`test_sqlite_vec_storage.py`).
62 | - Time parsing and timestamp recall (`test_time_parser.py`, `test_timestamp_recall.py`).
63 | - Tag and metadata semantics (`test_tag_storage.py`, `unit/test_tags.py`).
64 | - Health checks and database init (`test_database.py`).
65 | - Cloudflare adapters have unit-level coverage stubbing network (`unit/test_cloudflare_storage.py`).
66 |
67 | ## Writing New Tests
68 |
69 | - Prefer async `pytest.mark.asyncio` for storage APIs.
70 | - Use `tempfile.mkdtemp()` for per-test DB paths.
71 | - Use `src.mcp_memory_service.models.memory.Memory` and `generate_content_hash` helpers.
72 | - For backend-specific behavior, keep tests colocated with backend tests and gate with availability flags.
73 | - For MCP tool surface tests, prefer FastMCP server (`mcp_server.py`) in isolated processes or with `lifespan` context.
74 |
75 | ## Local MCP/Service Tests
76 |
77 | Run stdio server:
78 |
79 | ```
80 | uv run memory server
81 | ```
82 |
83 | Run FastMCP HTTP server:
84 |
85 | ```
86 | uv run mcp-memory-server
87 | ```
88 |
89 | Use any MCP client (Claude Desktop/Code) and exercise tools: store, retrieve, search_by_tag, delete, health.
90 |
91 | ## Debugging and Logs
92 |
93 | - Set `LOG_LEVEL=INFO` for more verbosity.
94 | - For Claude Desktop: stdout is suppressed to preserve JSON; inspect stderr/warnings. LM Studio prints diagnostics to stdout.
95 | - Common sqlite-vec errors print actionable remediation (see Troubleshooting).
96 |
97 |
```
--------------------------------------------------------------------------------
/scripts/service/install_http_service.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Install MCP Memory HTTP Service for systemd
#
# Installs scripts/service/mcp-memory-http.service either as a per-user
# systemd unit (no sudo) or as a system-wide unit (requires sudo), after
# verifying that the project's .env file and virtualenv exist.

set -e

# Resolve paths relative to this script so it works from any CWD.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="$(cd "$SCRIPT_DIR/../.." && pwd)"
SERVICE_FILE="$SCRIPT_DIR/mcp-memory-http.service"
SERVICE_NAME="mcp-memory-http.service"

echo "MCP Memory HTTP Service Installation"
echo "===================================="
echo ""

# Check if service file exists
if [ ! -f "$SERVICE_FILE" ]; then
    echo "❌ Service file not found: $SERVICE_FILE"
    exit 1
fi

# Check if .env exists (the unit file loads configuration from it)
if [ ! -f "$PROJECT_DIR/.env" ]; then
    echo "❌ .env file not found: $PROJECT_DIR/.env"
    echo "Please create .env file with your configuration"
    exit 1
fi

# Check if venv exists (the unit runs the server from this virtualenv)
if [ ! -d "$PROJECT_DIR/venv" ]; then
    echo "❌ Virtual environment not found: $PROJECT_DIR/venv"
    echo "Please run: python -m venv venv && source venv/bin/activate && pip install -e ."
    exit 1
fi

# Install as user service (recommended) or system service
echo "Installation Options:"
echo "1. User service (recommended) - runs as your user, no sudo needed"
echo "2. System service - runs at boot, requires sudo"
read -p "Select [1/2]: " choice

case $choice in
    1)
        # User service: installs under ~/.config/systemd/user and is
        # managed with `systemctl --user`; no root privileges required.
        SERVICE_DIR="$HOME/.config/systemd/user"
        mkdir -p "$SERVICE_DIR"

        echo "Installing user service to: $SERVICE_DIR/$SERVICE_NAME"
        cp "$SERVICE_FILE" "$SERVICE_DIR/$SERVICE_NAME"

        # Reload systemd so the new unit is picked up
        systemctl --user daemon-reload

        echo ""
        echo "✅ Service installed successfully!"
        echo ""
        echo "To start the service:"
        echo "  systemctl --user start $SERVICE_NAME"
        echo ""
        echo "To enable auto-start on login:"
        echo "  systemctl --user enable $SERVICE_NAME"
        echo "  loginctl enable-linger $USER  # Required for auto-start"
        echo ""
        echo "To check status:"
        echo "  systemctl --user status $SERVICE_NAME"
        echo ""
        echo "To view logs:"
        echo "  journalctl --user -u $SERVICE_NAME -f"
        ;;

    2)
        # System service: installs under /etc/systemd/system, starts at
        # boot, and therefore requires root.
        if [ "$EUID" -ne 0 ]; then
            echo "❌ System service installation requires sudo"
            echo "Please run: sudo $0"
            exit 1
        fi

        SERVICE_DIR="/etc/systemd/system"
        echo "Installing system service to: $SERVICE_DIR/$SERVICE_NAME"
        cp "$SERVICE_FILE" "$SERVICE_DIR/$SERVICE_NAME"

        # Reload systemd so the new unit is picked up
        systemctl daemon-reload

        echo ""
        echo "✅ Service installed successfully!"
        echo ""
        echo "To start the service:"
        echo "  sudo systemctl start $SERVICE_NAME"
        echo ""
        echo "To enable auto-start on boot:"
        echo "  sudo systemctl enable $SERVICE_NAME"
        echo ""
        echo "To check status:"
        echo "  sudo systemctl status $SERVICE_NAME"
        echo ""
        echo "To view logs:"
        echo "  sudo journalctl -u $SERVICE_NAME -f"
        ;;

    *)
        # Anything other than 1 or 2 aborts the installation.
        echo "❌ Invalid choice"
        exit 1
        ;;
esac
```
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
```markdown
1 | # Pull Request
2 |
3 | ## Description
4 |
5 | <!-- Provide a clear and concise description of the changes -->
6 |
7 | ## Motivation
8 |
9 | <!-- Explain why these changes are needed and what problem they solve -->
10 |
11 | ## Type of Change
12 |
13 | <!-- Check all that apply -->
14 |
15 | - [ ] 🐛 Bug fix (non-breaking change that fixes an issue)
16 | - [ ] ✨ New feature (non-breaking change that adds functionality)
17 | - [ ] 💥 Breaking change (fix or feature that would cause existing functionality to not work as expected)
18 | - [ ] 📝 Documentation update
19 | - [ ] 🧪 Test improvement
20 | - [ ] ♻️ Code refactoring (no functional changes)
21 | - [ ] ⚡ Performance improvement
22 | - [ ] 🔧 Configuration change
23 | - [ ] 🎨 UI/UX improvement
24 |
25 | ## Changes
26 |
27 | <!-- List the specific changes made in this PR -->
28 |
29 | -
30 | -
31 | -
32 |
33 | **Breaking Changes** (if any):
34 | <!-- Describe any breaking changes and migration steps -->
35 |
36 | -
37 |
38 | ## Testing
39 |
40 | ### How Has This Been Tested?
41 |
42 | <!-- Describe the tests you ran to verify your changes -->
43 |
44 | - [ ] Unit tests
45 | - [ ] Integration tests
46 | - [ ] Manual testing
47 | - [ ] MCP Inspector validation
48 |
49 | **Test Configuration**:
50 | - Python version:
51 | - OS:
52 | - Storage backend:
53 | - Installation method:
54 |
55 | ### Test Coverage
56 |
57 | <!-- Describe the test coverage added or modified -->
58 |
59 | - [ ] Added new tests
60 | - [ ] Updated existing tests
61 | - [ ] Test coverage maintained/improved
62 |
63 | ## Related Issues
64 |
65 | <!-- Link related issues using keywords: Fixes #123, Closes #456, Relates to #789 -->
66 |
67 | Fixes #
68 | Closes #
69 | Relates to #
70 |
71 | ## Screenshots
72 |
73 | <!-- If applicable, add screenshots to help explain your changes -->
74 |
75 | ## Documentation
76 |
77 | <!-- Check all that apply -->
78 |
79 | - [ ] Updated README.md
80 | - [ ] Updated CLAUDE.md
81 | - [ ] Updated AGENTS.md
82 | - [ ] Updated CHANGELOG.md
83 | - [ ] Updated Wiki pages
84 | - [ ] Updated code comments/docstrings
85 | - [ ] Added API documentation
86 | - [ ] No documentation needed
87 |
88 | ## Pre-submission Checklist
89 |
90 | <!-- Check all boxes before submitting -->
91 |
92 | - [ ] ✅ My code follows the project's coding standards (PEP 8, type hints)
93 | - [ ] ✅ I have performed a self-review of my code
94 | - [ ] ✅ I have commented my code, particularly in hard-to-understand areas
95 | - [ ] ✅ I have made corresponding changes to the documentation
96 | - [ ] ✅ My changes generate no new warnings
97 | - [ ] ✅ I have added tests that prove my fix is effective or that my feature works
98 | - [ ] ✅ New and existing unit tests pass locally with my changes
99 | - [ ] ✅ Any dependent changes have been merged and published
100 | - [ ] ✅ I have updated CHANGELOG.md following [Keep a Changelog](https://keepachangelog.com/) format
101 | - [ ] ✅ I have checked that no sensitive data is exposed (API keys, tokens, passwords)
102 | - [ ] ✅ I have verified this works with all supported storage backends (if applicable)
103 |
104 | ## Additional Notes
105 |
106 | <!-- Any additional information, context, or notes for reviewers -->
107 |
108 | ---
109 |
110 | **For Reviewers**:
111 | - Review checklist: See [PR Review Guide](https://github.com/doobidoo/mcp-memory-service/wiki/PR-Review-Guide)
112 | - Consider testing with Gemini Code Assist for comprehensive review
113 | - Verify CHANGELOG.md entry is present and correctly formatted
114 | - Check documentation accuracy and completeness
115 |
```
--------------------------------------------------------------------------------
/.github/workflows/WORKFLOW_FIXES.md:
--------------------------------------------------------------------------------
```markdown
1 | # Workflow Fixes Applied
2 |
3 | ## Issues Identified and Fixed
4 |
5 | ### 1. Cleanup Images Workflow (`cleanup-images.yml`)
6 |
7 | **Issues:**
8 | - Referenced non-existent workflows in `workflow_run` trigger
9 | - Used incorrect action versions (`@v5` instead of `@v4`)
10 | - Incorrect account type (`org` instead of `personal`)
11 | - Missing error handling for optional steps
12 | - No validation for Docker Hub credentials
13 |
14 | **Fixes Applied:**
15 | - Updated workflow references to match actual workflow names
16 | - Downgraded action versions to stable versions (`@v4`, `@v1`)
17 | - Changed account type to `personal` for personal GitHub account
18 | - Added `continue-on-error: true` for optional cleanup steps
19 | - Added credential validation and conditional Docker Hub cleanup
20 | - Added informative messages when cleanup is skipped
21 |
22 | ### 2. Main Optimized Workflow (`main-optimized.yml`)
23 |
24 | **Issues:**
25 | - Complex matrix strategy with indirect secret access
26 | - No handling for missing Docker Hub credentials
27 | - Potential authentication failures for Docker registries
28 |
29 | **Fixes Applied:**
30 | - Simplified login steps with explicit registry conditions
31 | - Added conditional Docker Hub login based on credential availability
32 | - Added skip message when Docker Hub credentials are missing
33 | - Improved error handling for registry authentication
34 |
35 | ## Changes Made
36 |
37 | ### cleanup-images.yml
38 | ```yaml
39 | # Before
40 | workflow_run:
41 | workflows: ["Release (Tags) - Optimized", "Main CI/CD Pipeline - Optimized"]
42 |
43 | uses: actions/delete-package-versions@v5
44 | account-type: org
45 |
46 | # After
47 | workflow_run:
48 | workflows: ["Main CI/CD Pipeline", "Docker Publish (Tags)", "Publish and Test (Tags)"]
49 |
50 | uses: actions/delete-package-versions@v4
51 | account-type: personal
52 | continue-on-error: true
53 | ```
54 |
55 | ### main-optimized.yml
56 | ```yaml
57 | # Before
58 | username: ${{ matrix.username_secret == '_github_actor' && github.actor || secrets[matrix.username_secret] }}
59 |
60 | # After
61 | - name: Log in to Docker Hub
62 | if: matrix.registry == 'docker.io' && secrets.DOCKER_USERNAME && secrets.DOCKER_PASSWORD
63 | - name: Log in to GitHub Container Registry
64 | if: matrix.registry == 'ghcr.io'
65 | ```
66 |
67 | ## Safety Improvements
68 |
69 | 1. **Graceful Degradation**: Workflows now continue even if optional steps fail
70 | 2. **Credential Validation**: Proper checking for required secrets before use
71 | 3. **Clear Messaging**: Users are informed when steps are skipped
72 | 4. **Error Isolation**: Failures in one cleanup job don't affect others
73 |
74 | ## Testing Recommendations
75 |
76 | 1. **Manual Trigger Test**: Test cleanup workflow with dry-run mode
77 | 2. **Credential Scenarios**: Test with and without Docker Hub credentials
78 | 3. **Registry Cleanup**: Verify GHCR cleanup works independently
79 | 4. **Workflow Dependencies**: Ensure workflow triggers work correctly
80 |
81 | ## Expected Behavior
82 |
83 | - **With Full Credentials**: Both GHCR and Docker Hub cleanup run
84 | - **Without Docker Credentials**: Only GHCR cleanup runs, Docker Hub skipped
85 | - **Action Failures**: Individual cleanup steps may fail but workflow continues
86 | - **No Images to Clean**: Workflows complete successfully with no actions
87 |
88 | Date: 2024-08-24
89 | Status: Applied and ready for testing
```
--------------------------------------------------------------------------------
/tests/test_semantic_search.py:
--------------------------------------------------------------------------------
```python
1 | """
2 | MCP Memory Service
3 | Copyright (c) 2024 Heinrich Krupp
4 | Licensed under the MIT License. See LICENSE file in the project root for full license text.
5 | """
6 | """
7 | Test semantic search functionality of the MCP Memory Service.
8 | """
9 | import pytest
10 | import pytest_asyncio
11 | import asyncio
12 | from mcp.server import Server
13 | from mcp.server.models import InitializationOptions
14 |
@pytest_asyncio.fixture
async def memory_server():
    """Provide an initialized memory server for tests, shutting it down afterwards."""
    instance = Server("test-memory")
    options = InitializationOptions(
        server_name="test-memory",
        server_version="0.1.0"
    )
    await instance.initialize(options)
    yield instance
    await instance.shutdown()
25 |
@pytest.mark.asyncio
async def test_semantic_similarity(memory_server):
    """Test semantic similarity scoring."""
    # Two fox-related memories plus one unrelated distractor.
    fox_and_friends = (
        "The quick brown fox jumps over the lazy dog",
        "A fast auburn fox leaps above a sleepy canine",
        "A cat chases a mouse",
    )
    for text in fox_and_friends:
        await memory_server.store_memory(content=text)

    # Retrieve with a paraphrased query; threshold 0.0 keeps every scored hit.
    results = await memory_server.debug_retrieve(
        query="swift red fox jumping over sleeping dog",
        n_results=2,
        similarity_threshold=0.0
    )

    # The two fox memories should rank ahead of the unrelated one.
    assert len(results) >= 2
    assert all("fox" in hit for hit in results[:2])
50 |
@pytest.mark.asyncio
async def test_similarity_threshold(memory_server):
    """Test similarity threshold filtering."""
    stored = "Python is a programming language"
    await memory_server.store_memory(content=stored)

    # A semantically unrelated query should not clear a 0.8 threshold.
    unrelated = await memory_server.debug_retrieve(
        query="Recipe for chocolate cake",
        similarity_threshold=0.8
    )

    # No results above threshold
    assert len(unrelated) == 0
65 |
@pytest.mark.asyncio
async def test_exact_match(memory_server):
    """Test exact match retrieval."""
    test_content = "This is an exact match test"
    await memory_server.store_memory(content=test_content)

    matches = await memory_server.exact_match_retrieve(content=test_content)

    # Exactly one stored memory should match, byte-for-byte.
    assert len(matches) == 1
    assert matches[0] == test_content
78 |
@pytest.mark.asyncio
async def test_semantic_ordering(memory_server):
    """Test that results are ordered by semantic similarity."""
    # Two AI-related memories and one distractor of low relevance.
    for text in (
        "Machine learning is a subset of artificial intelligence",
        "Deep learning uses neural networks",
        "A bicycle has two wheels",
    ):
        await memory_server.store_memory(content=text)

    results = await memory_server.debug_retrieve(
        query="What is AI and machine learning?",
        n_results=3,
        similarity_threshold=0.0
    )

    # The most relevant memory should come first; the distractor should not.
    top_hit = results[0].lower()
    assert "machine learning" in top_hit
    assert "bicycle" not in top_hit
```
--------------------------------------------------------------------------------
/scripts/sync/claude_sync_commands.py:
--------------------------------------------------------------------------------
```python
1 | #!/usr/bin/env python3
2 | # Copyright 2024 Heinrich Krupp
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | """
17 | Claude command wrapper for memory sync operations.
18 | Provides convenient commands for managing dual memory backends.
19 | """
20 | import sys
21 | import asyncio
22 | import subprocess
23 | from pathlib import Path
24 |
25 | SYNC_SCRIPT = Path(__file__).parent / "sync_memory_backends.py"
26 |
def run_sync_command(args):
    """Run the sync script with the given arguments and return its exit code."""
    completed = subprocess.run(
        [sys.executable, str(SYNC_SCRIPT), *args],
        capture_output=True,
        text=True,
    )

    # Relay the child's output so the caller sees it on the usual streams.
    if completed.stdout:
        print(completed.stdout.strip())
    if completed.stderr:
        print(completed.stderr.strip(), file=sys.stderr)

    return completed.returncode
38 |
def memory_sync_status():
    """Show memory sync status."""
    # Read-only query; delegate straight to the sync script.
    status_args = ['--status']
    return run_sync_command(status_args)
42 |
def memory_sync_backup():
    """Backup Cloudflare memories to SQLite-vec."""
    print("Backing up Cloudflare memories to SQLite-vec...")
    backup_args = ['--direction', 'cf-to-sqlite']
    return run_sync_command(backup_args)
47 |
def memory_sync_restore():
    """Restore SQLite-vec memories to Cloudflare."""
    print("Restoring SQLite-vec memories to Cloudflare...")
    restore_args = ['--direction', 'sqlite-to-cf']
    return run_sync_command(restore_args)
52 |
def memory_sync_bidirectional():
    """Perform bidirectional sync."""
    print("Performing bidirectional sync...")
    sync_args = ['--direction', 'bidirectional']
    return run_sync_command(sync_args)
57 |
def memory_sync_dry_run():
    """Show what would be synced without making changes."""
    print("Dry run - showing what would be synced:")
    dry_run_args = ['--dry-run']
    return run_sync_command(dry_run_args)
62 |
def show_usage():
    """Show usage information."""
    # One print call keeps the help text together; output bytes are unchanged.
    lines = [
        "Usage: python claude_sync_commands.py <command>",
        "Commands:",
        "  status   - Show sync status",
        "  backup   - Backup Cloudflare → SQLite-vec",
        "  restore  - Restore SQLite-vec → Cloudflare",
        "  sync     - Bidirectional sync",
        "  dry-run  - Show what would be synced",
    ]
    print("\n".join(lines))
72 |
if __name__ == "__main__":
    # Map CLI verbs to handlers; a dict stays flat as commands are added.
    commands = {
        "status": memory_sync_status,
        "backup": memory_sync_backup,
        "restore": memory_sync_restore,
        "sync": memory_sync_bidirectional,
        "dry-run": memory_sync_dry_run,
    }

    if len(sys.argv) < 2:
        show_usage()
        sys.exit(1)

    command = sys.argv[1]
    handler = commands.get(command)

    if handler is None:
        print(f"Unknown command: {command}")
        show_usage()
        sys.exit(1)

    sys.exit(handler())
```
--------------------------------------------------------------------------------
/scripts/utils/memory_wrapper_uv.py:
--------------------------------------------------------------------------------
```python
1 | #!/usr/bin/env python3
2 | """
3 | UV-specific memory wrapper for MCP Memory Service
4 | This wrapper is specifically designed for UV-based installations.
5 | """
6 | import os
7 | import sys
8 | import subprocess
9 | import traceback
10 |
11 | # Set environment variables for better cross-platform compatibility
12 | os.environ["PYTORCH_ENABLE_MPS_FALLBACK"] = "1"
13 | os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "max_split_size_mb:128"
14 |
def print_info(text):
    """Write an [INFO]-prefixed line to stderr, flushing immediately."""
    sys.stderr.write(f"[INFO] {text}\n")
    sys.stderr.flush()
18 |
def print_error(text):
    """Write an [ERROR]-prefixed line to stderr, flushing immediately."""
    sys.stderr.write(f"[ERROR] {text}\n")
    sys.stderr.flush()
22 |
def print_success(text):
    """Write a [SUCCESS]-prefixed line to stderr, flushing immediately."""
    sys.stderr.write(f"[SUCCESS] {text}\n")
    sys.stderr.flush()
26 |
def _run_direct_fallback():
    """Import and run the memory server in-process (used when UV fails).

    Exits with code 1 if the server cannot be imported or crashes.
    """
    try:
        # Locate the package sources. This script lives in scripts/utils/, so
        # the repository's src/ directory is two levels up; the original code
        # only checked a sibling src/, which does not exist at that location.
        # Both candidates are tried to also cover colocated installs.
        script_dir = os.path.dirname(os.path.abspath(__file__))
        for src_dir in (
            os.path.join(script_dir, "src"),
            os.path.abspath(os.path.join(script_dir, "..", "..", "src")),
        ):
            if os.path.exists(src_dir):
                sys.path.insert(0, src_dir)
                break

        # Import and run the server
        from mcp_memory_service.server import main as server_main
        server_main()

    except ImportError as import_error:
        print_error(f"Failed to import memory service: {import_error}")
        sys.exit(1)
    except Exception as fallback_error:
        print_error(f"Fallback execution failed: {fallback_error}")
        traceback.print_exc(file=sys.stderr)
        sys.exit(1)


def main():
    """Main entry point for UV-based memory service.

    Launches the service via ``python -m uv run memory``; if that fails,
    falls back to importing and running the server directly in-process.
    """
    print_info("Starting MCP Memory Service with UV...")

    # Log ChromaDB path if provided via environment variables
    if "MCP_MEMORY_CHROMA_PATH" in os.environ:
        print_info(f"Using ChromaDB path: {os.environ['MCP_MEMORY_CHROMA_PATH']}")

    # Log backups path if provided via environment variables
    if "MCP_MEMORY_BACKUPS_PATH" in os.environ:
        print_info(f"Using backups path: {os.environ['MCP_MEMORY_BACKUPS_PATH']}")

    try:
        # Use UV to run the memory service
        cmd = [sys.executable, '-m', 'uv', 'run', 'memory']
        cmd.extend(sys.argv[1:])  # Pass through any additional arguments

        print_info(f"Running command: {' '.join(cmd)}")

        # check=True raises CalledProcessError on any non-zero exit, which the
        # handler below turns into a direct-execution fallback. On success the
        # return code is necessarily 0, so exit with 0 explicitly. (The
        # original exited with result.returncode, which was unreachable for
        # non-zero codes because check=True raised first.)
        subprocess.run(cmd, check=True)
        sys.exit(0)

    except subprocess.SubprocessError as e:
        print_error(f"UV run failed: {e}")
        print_info("Falling back to direct module execution...")
        _run_direct_fallback()

    except Exception as e:
        print_error(f"Error running memory service: {e}")
        traceback.print_exc(file=sys.stderr)
        sys.exit(1)
79 |
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Ctrl-C is a normal way to stop the service; exit cleanly.
        print_info("Shutting down gracefully...")
        sys.exit(0)
    except Exception as unhandled:
        print_error(f"Unhandled exception: {unhandled}")
        traceback.print_exc(file=sys.stderr)
        sys.exit(1)
90 |
```