This is page 59 of 62. Use http://codebase.md/doobidoo/mcp-memory-service?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .claude
│ ├── agents
│ │ ├── amp-bridge.md
│ │ ├── amp-pr-automator.md
│ │ ├── code-quality-guard.md
│ │ ├── gemini-pr-automator.md
│ │ └── github-release-manager.md
│ ├── commands
│ │ ├── README.md
│ │ ├── refactor-function
│ │ ├── refactor-function-prod
│ │ └── refactor-function.md
│ ├── consolidation-fix-handoff.md
│ ├── consolidation-hang-fix-summary.md
│ ├── directives
│ │ ├── agents.md
│ │ ├── code-quality-workflow.md
│ │ ├── consolidation-details.md
│ │ ├── development-setup.md
│ │ ├── hooks-configuration.md
│ │ ├── memory-first.md
│ │ ├── memory-tagging.md
│ │ ├── pr-workflow.md
│ │ ├── quality-system-details.md
│ │ ├── README.md
│ │ ├── refactoring-checklist.md
│ │ ├── storage-backends.md
│ │ └── version-management.md
│ ├── prompts
│ │ └── hybrid-cleanup-integration.md
│ ├── settings.local.json.backup
│ └── settings.local.json.local
├── .commit-message
├── .coveragerc
├── .dockerignore
├── .env.example
├── .env.sqlite.backup
├── .envnn#
├── .gitattributes
├── .github
│ ├── FUNDING.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── bug_report.yml
│ │ ├── config.yml
│ │ ├── feature_request.yml
│ │ └── performance_issue.yml
│ ├── pull_request_template.md
│ └── workflows
│ ├── bridge-tests.yml
│ ├── CACHE_FIX.md
│ ├── claude-branch-automation.yml
│ ├── claude-code-review.yml
│ ├── claude.yml
│ ├── cleanup-images.yml.disabled
│ ├── dev-setup-validation.yml
│ ├── docker-publish.yml
│ ├── dockerfile-lint.yml
│ ├── LATEST_FIXES.md
│ ├── main-optimized.yml.disabled
│ ├── main.yml
│ ├── publish-and-test.yml
│ ├── publish-dual.yml
│ ├── README_OPTIMIZATION.md
│ ├── release-tag.yml.disabled
│ ├── release.yml
│ ├── roadmap-review-reminder.yml
│ ├── SECRET_CONDITIONAL_FIX.md
│ └── WORKFLOW_FIXES.md
├── .gitignore
├── .mcp.json.backup
├── .mcp.json.template
├── .metrics
│ ├── baseline_cc_install_hooks.txt
│ ├── baseline_mi_install_hooks.txt
│ ├── baseline_nesting_install_hooks.txt
│ ├── BASELINE_REPORT.md
│ ├── COMPLEXITY_COMPARISON.txt
│ ├── QUICK_REFERENCE.txt
│ ├── README.md
│ ├── REFACTORED_BASELINE.md
│ ├── REFACTORING_COMPLETION_REPORT.md
│ └── TRACKING_TABLE.md
├── .pyscn
│ ├── .gitignore
│ └── reports
│ └── analyze_20251123_214224.html
├── AGENTS.md
├── ai-optimized-tool-descriptions.py
├── archive
│ ├── deployment
│ │ ├── deploy_fastmcp_fixed.sh
│ │ ├── deploy_http_with_mcp.sh
│ │ └── deploy_mcp_v4.sh
│ ├── deployment-configs
│ │ ├── empty_config.yml
│ │ └── smithery.yaml
│ ├── development
│ │ └── test_fastmcp.py
│ ├── docs-removed-2025-08-23
│ │ ├── authentication.md
│ │ ├── claude_integration.md
│ │ ├── claude-code-compatibility.md
│ │ ├── claude-code-integration.md
│ │ ├── claude-code-quickstart.md
│ │ ├── claude-desktop-setup.md
│ │ ├── complete-setup-guide.md
│ │ ├── database-synchronization.md
│ │ ├── development
│ │ │ ├── autonomous-memory-consolidation.md
│ │ │ ├── CLEANUP_PLAN.md
│ │ │ ├── CLEANUP_README.md
│ │ │ ├── CLEANUP_SUMMARY.md
│ │ │ ├── dream-inspired-memory-consolidation.md
│ │ │ ├── hybrid-slm-memory-consolidation.md
│ │ │ ├── mcp-milestone.md
│ │ │ ├── multi-client-architecture.md
│ │ │ ├── test-results.md
│ │ │ └── TIMESTAMP_FIX_SUMMARY.md
│ │ ├── distributed-sync.md
│ │ ├── invocation_guide.md
│ │ ├── macos-intel.md
│ │ ├── master-guide.md
│ │ ├── mcp-client-configuration.md
│ │ ├── multi-client-server.md
│ │ ├── service-installation.md
│ │ ├── sessions
│ │ │ └── MCP_ENHANCEMENT_SESSION_MEMORY_v4.1.0.md
│ │ ├── UBUNTU_SETUP.md
│ │ ├── ubuntu.md
│ │ ├── windows-setup.md
│ │ └── windows.md
│ ├── docs-root-cleanup-2025-08-23
│ │ ├── AWESOME_LIST_SUBMISSION.md
│ │ ├── CLOUDFLARE_IMPLEMENTATION.md
│ │ ├── DOCUMENTATION_ANALYSIS.md
│ │ ├── DOCUMENTATION_CLEANUP_PLAN.md
│ │ ├── DOCUMENTATION_CONSOLIDATION_COMPLETE.md
│ │ ├── LITESTREAM_SETUP_GUIDE.md
│ │ ├── lm_studio_system_prompt.md
│ │ ├── PYTORCH_DOWNLOAD_FIX.md
│ │ └── README-ORIGINAL-BACKUP.md
│ ├── investigations
│ │ └── MACOS_HOOKS_INVESTIGATION.md
│ ├── litestream-configs-v6.3.0
│ │ ├── install_service.sh
│ │ ├── litestream_master_config_fixed.yml
│ │ ├── litestream_master_config.yml
│ │ ├── litestream_replica_config_fixed.yml
│ │ ├── litestream_replica_config.yml
│ │ ├── litestream_replica_simple.yml
│ │ ├── litestream-http.service
│ │ ├── litestream.service
│ │ └── requirements-cloudflare.txt
│ ├── release-notes
│ │ └── release-notes-v7.1.4.md
│ └── setup-development
│ ├── README.md
│ ├── setup_consolidation_mdns.sh
│ ├── STARTUP_SETUP_GUIDE.md
│ └── test_service.sh
├── CHANGELOG-HISTORIC.md
├── CHANGELOG.md
├── claude_commands
│ ├── memory-context.md
│ ├── memory-health.md
│ ├── memory-ingest-dir.md
│ ├── memory-ingest.md
│ ├── memory-recall.md
│ ├── memory-search.md
│ ├── memory-store.md
│ ├── README.md
│ └── session-start.md
├── claude-hooks
│ ├── config.json
│ ├── config.template.json
│ ├── CONFIGURATION.md
│ ├── core
│ │ ├── auto-capture-hook.js
│ │ ├── auto-capture-hook.ps1
│ │ ├── memory-retrieval.js
│ │ ├── mid-conversation.js
│ │ ├── permission-request.js
│ │ ├── session-end.js
│ │ ├── session-start.js
│ │ └── topic-change.js
│ ├── debug-pattern-test.js
│ ├── install_claude_hooks_windows.ps1
│ ├── install_hooks.py
│ ├── memory-mode-controller.js
│ ├── MIGRATION.md
│ ├── README-AUTO-CAPTURE.md
│ ├── README-NATURAL-TRIGGERS.md
│ ├── README-PERMISSION-REQUEST.md
│ ├── README-phase2.md
│ ├── README.md
│ ├── simple-test.js
│ ├── statusline.sh
│ ├── test-adaptive-weights.js
│ ├── test-dual-protocol-hook.js
│ ├── test-mcp-hook.js
│ ├── test-natural-triggers.js
│ ├── test-recency-scoring.js
│ ├── tests
│ │ ├── integration-test.js
│ │ ├── phase2-integration-test.js
│ │ ├── test-code-execution.js
│ │ ├── test-cross-session.json
│ │ ├── test-permission-request.js
│ │ ├── test-session-tracking.json
│ │ └── test-threading.json
│ ├── utilities
│ │ ├── adaptive-pattern-detector.js
│ │ ├── auto-capture-patterns.js
│ │ ├── context-formatter.js
│ │ ├── context-shift-detector.js
│ │ ├── conversation-analyzer.js
│ │ ├── dynamic-context-updater.js
│ │ ├── git-analyzer.js
│ │ ├── mcp-client.js
│ │ ├── memory-client.js
│ │ ├── memory-scorer.js
│ │ ├── performance-manager.js
│ │ ├── project-detector.js
│ │ ├── session-cache.json
│ │ ├── session-tracker.js
│ │ ├── tiered-conversation-monitor.js
│ │ ├── user-override-detector.js
│ │ └── version-checker.js
│ └── WINDOWS-SESSIONSTART-BUG.md
├── CLAUDE.md
├── CODE_OF_CONDUCT.md
├── COMMIT_MESSAGE.md
├── CONTRIBUTING.md
├── Development-Sprint-November-2025.md
├── docs
│ ├── amp-cli-bridge.md
│ ├── api
│ │ ├── code-execution-interface.md
│ │ ├── memory-metadata-api.md
│ │ ├── PHASE1_IMPLEMENTATION_SUMMARY.md
│ │ ├── PHASE2_IMPLEMENTATION_SUMMARY.md
│ │ ├── PHASE2_REPORT.md
│ │ └── tag-standardization.md
│ ├── architecture
│ │ ├── graph-database-design.md
│ │ ├── search-enhancement-spec.md
│ │ └── search-examples.md
│ ├── architecture.md
│ ├── archive
│ │ └── obsolete-workflows
│ │ ├── load_memory_context.md
│ │ └── README.md
│ ├── assets
│ │ └── images
│ │ ├── dashboard-v3.3.0-preview.png
│ │ ├── memory-awareness-hooks-example.png
│ │ ├── project-infographic.svg
│ │ └── README.md
│ ├── CLAUDE_CODE_QUICK_REFERENCE.md
│ ├── cloudflare-setup.md
│ ├── demo-recording-script.md
│ ├── deployment
│ │ ├── docker.md
│ │ ├── dual-service.md
│ │ ├── production-guide.md
│ │ └── systemd-service.md
│ ├── development
│ │ ├── ai-agent-instructions.md
│ │ ├── code-quality
│ │ │ ├── phase-2a-completion.md
│ │ │ ├── phase-2a-handle-get-prompt.md
│ │ │ ├── phase-2a-index.md
│ │ │ ├── phase-2a-install-package.md
│ │ │ └── phase-2b-session-summary.md
│ │ ├── code-quality-workflow.md
│ │ ├── dashboard-workflow.md
│ │ ├── issue-management.md
│ │ ├── pr-280-post-mortem.md
│ │ ├── pr-review-guide.md
│ │ ├── refactoring-notes.md
│ │ ├── release-checklist.md
│ │ └── todo-tracker.md
│ ├── docker-optimized-build.md
│ ├── document-ingestion.md
│ ├── DOCUMENTATION_AUDIT.md
│ ├── enhancement-roadmap-issue-14.md
│ ├── examples
│ │ ├── analysis-scripts.js
│ │ ├── maintenance-session-example.md
│ │ ├── memory-distribution-chart.jsx
│ │ ├── quality-system-configs.md
│ │ └── tag-schema.json
│ ├── features
│ │ └── association-quality-boost.md
│ ├── first-time-setup.md
│ ├── glama-deployment.md
│ ├── guides
│ │ ├── advanced-command-examples.md
│ │ ├── chromadb-migration.md
│ │ ├── commands-vs-mcp-server.md
│ │ ├── mcp-enhancements.md
│ │ ├── mdns-service-discovery.md
│ │ ├── memory-consolidation-guide.md
│ │ ├── memory-quality-guide.md
│ │ ├── migration.md
│ │ ├── scripts.md
│ │ └── STORAGE_BACKENDS.md
│ ├── HOOK_IMPROVEMENTS.md
│ ├── hooks
│ │ └── phase2-code-execution-migration.md
│ ├── http-server-management.md
│ ├── ide-compatability.md
│ ├── IMAGE_RETENTION_POLICY.md
│ ├── images
│ │ ├── dashboard-placeholder.md
│ │ └── update-restart-demo.png
│ ├── implementation
│ │ ├── health_checks.md
│ │ └── performance.md
│ ├── IMPLEMENTATION_PLAN_HTTP_SSE.md
│ ├── integration
│ │ ├── homebrew.md
│ │ └── multi-client.md
│ ├── integrations
│ │ ├── gemini.md
│ │ ├── groq-bridge.md
│ │ ├── groq-integration-summary.md
│ │ └── groq-model-comparison.md
│ ├── integrations.md
│ ├── legacy
│ │ └── dual-protocol-hooks.md
│ ├── LIGHTWEIGHT_ONNX_SETUP.md
│ ├── LM_STUDIO_COMPATIBILITY.md
│ ├── maintenance
│ │ └── memory-maintenance.md
│ ├── mastery
│ │ ├── api-reference.md
│ │ ├── architecture-overview.md
│ │ ├── configuration-guide.md
│ │ ├── local-setup-and-run.md
│ │ ├── testing-guide.md
│ │ └── troubleshooting.md
│ ├── migration
│ │ ├── code-execution-api-quick-start.md
│ │ └── graph-migration-guide.md
│ ├── natural-memory-triggers
│ │ ├── cli-reference.md
│ │ ├── installation-guide.md
│ │ └── performance-optimization.md
│ ├── oauth-setup.md
│ ├── pr-graphql-integration.md
│ ├── quality-system-ui-implementation.md
│ ├── quick-setup-cloudflare-dual-environment.md
│ ├── README.md
│ ├── refactoring
│ │ └── phase-3-3-analysis.md
│ ├── releases
│ │ └── v8.72.0-testing.md
│ ├── remote-configuration-wiki-section.md
│ ├── research
│ │ ├── code-execution-interface-implementation.md
│ │ └── code-execution-interface-summary.md
│ ├── ROADMAP.md
│ ├── sqlite-vec-backend.md
│ ├── statistics
│ │ ├── charts
│ │ │ ├── activity_patterns.png
│ │ │ ├── contributors.png
│ │ │ ├── growth_trajectory.png
│ │ │ ├── monthly_activity.png
│ │ │ └── october_sprint.png
│ │ ├── data
│ │ │ ├── activity_by_day.csv
│ │ │ ├── activity_by_hour.csv
│ │ │ ├── contributors.csv
│ │ │ └── monthly_activity.csv
│ │ ├── generate_charts.py
│ │ └── REPOSITORY_STATISTICS.md
│ ├── technical
│ │ ├── development.md
│ │ ├── memory-migration.md
│ │ ├── migration-log.md
│ │ ├── sqlite-vec-embedding-fixes.md
│ │ └── tag-storage.md
│ ├── testing
│ │ └── regression-tests.md
│ ├── testing-cloudflare-backend.md
│ ├── troubleshooting
│ │ ├── cloudflare-api-token-setup.md
│ │ ├── cloudflare-authentication.md
│ │ ├── database-transfer-migration.md
│ │ ├── general.md
│ │ ├── hooks-quick-reference.md
│ │ ├── memory-management.md
│ │ ├── pr162-schema-caching-issue.md
│ │ ├── session-end-hooks.md
│ │ └── sync-issues.md
│ ├── tutorials
│ │ ├── advanced-techniques.md
│ │ ├── data-analysis.md
│ │ └── demo-session-walkthrough.md
│ ├── wiki-documentation-plan.md
│ └── wiki-Graph-Database-Architecture.md
├── examples
│ ├── claude_desktop_config_template.json
│ ├── claude_desktop_config_windows.json
│ ├── claude-desktop-http-config.json
│ ├── config
│ │ └── claude_desktop_config.json
│ ├── http-mcp-bridge.js
│ ├── memory_export_template.json
│ ├── README.md
│ ├── setup
│ │ └── setup_multi_client_complete.py
│ └── start_https_example.sh
├── IMPLEMENTATION_SUMMARY.md
├── install_service.py
├── install.py
├── LICENSE
├── NOTICE
├── PR_DESCRIPTION.md
├── pyproject-lite.toml
├── pyproject.toml
├── pytest.ini
├── README.md
├── release-notes-v8.61.0.md
├── run_server.py
├── scripts
│ ├── .claude
│ │ └── settings.local.json
│ ├── archive
│ │ └── check_missing_timestamps.py
│ ├── backup
│ │ ├── backup_memories.py
│ │ ├── backup_sqlite_vec.sh
│ │ ├── export_distributable_memories.sh
│ │ └── restore_memories.py
│ ├── benchmarks
│ │ ├── benchmark_code_execution_api.py
│ │ ├── benchmark_hybrid_sync.py
│ │ └── benchmark_server_caching.py
│ ├── ci
│ │ ├── check_dockerfile_args.sh
│ │ └── validate_imports.sh
│ ├── database
│ │ ├── analyze_sqlite_vec_db.py
│ │ ├── check_sqlite_vec_status.py
│ │ ├── db_health_check.py
│ │ └── simple_timestamp_check.py
│ ├── development
│ │ ├── debug_server_initialization.py
│ │ ├── find_orphaned_files.py
│ │ ├── fix_mdns.sh
│ │ ├── fix_sitecustomize.py
│ │ ├── remote_ingest.sh
│ │ ├── setup-git-merge-drivers.sh
│ │ ├── uv-lock-merge.sh
│ │ └── verify_hybrid_sync.py
│ ├── hooks
│ │ └── pre-commit
│ ├── installation
│ │ ├── install_linux_service.py
│ │ ├── install_macos_service.py
│ │ ├── install_uv.py
│ │ ├── install_windows_service.py
│ │ ├── install.py
│ │ ├── setup_backup_cron.sh
│ │ ├── setup_claude_mcp.sh
│ │ └── setup_cloudflare_resources.py
│ ├── linux
│ │ ├── service_status.sh
│ │ ├── start_service.sh
│ │ ├── stop_service.sh
│ │ ├── uninstall_service.sh
│ │ └── view_logs.sh
│ ├── maintenance
│ │ ├── add_project_tags.py
│ │ ├── apply_quality_boost_retroactively.py
│ │ ├── assign_memory_types.py
│ │ ├── auto_retag_memory_merge.py
│ │ ├── auto_retag_memory.py
│ │ ├── backfill_graph_table.py
│ │ ├── check_memory_types.py
│ │ ├── cleanup_association_memories_hybrid.py
│ │ ├── cleanup_association_memories.py
│ │ ├── cleanup_corrupted_encoding.py
│ │ ├── cleanup_low_quality.py
│ │ ├── cleanup_memories.py
│ │ ├── cleanup_organize.py
│ │ ├── consolidate_memory_types.py
│ │ ├── consolidation_mappings.json
│ │ ├── delete_orphaned_vectors_fixed.py
│ │ ├── delete_test_memories.py
│ │ ├── fast_cleanup_duplicates_with_tracking.sh
│ │ ├── find_all_duplicates.py
│ │ ├── find_cloudflare_duplicates.py
│ │ ├── find_duplicates.py
│ │ ├── memory-types.md
│ │ ├── README.md
│ │ ├── recover_timestamps_from_cloudflare.py
│ │ ├── regenerate_embeddings.py
│ │ ├── repair_malformed_tags.py
│ │ ├── repair_memories.py
│ │ ├── repair_sqlite_vec_embeddings.py
│ │ ├── repair_zero_embeddings.py
│ │ ├── restore_from_json_export.py
│ │ ├── retag_valuable_memories.py
│ │ ├── scan_todos.sh
│ │ ├── soft_delete_test_memories.py
│ │ └── sync_status.py
│ ├── migration
│ │ ├── cleanup_mcp_timestamps.py
│ │ ├── legacy
│ │ │ └── migrate_chroma_to_sqlite.py
│ │ ├── mcp-migration.py
│ │ ├── migrate_sqlite_vec_embeddings.py
│ │ ├── migrate_storage.py
│ │ ├── migrate_tags.py
│ │ ├── migrate_timestamps.py
│ │ ├── migrate_to_cloudflare.py
│ │ ├── migrate_to_sqlite_vec.py
│ │ ├── migrate_v5_enhanced.py
│ │ ├── TIMESTAMP_CLEANUP_README.md
│ │ └── verify_mcp_timestamps.py
│ ├── pr
│ │ ├── amp_collect_results.sh
│ │ ├── amp_detect_breaking_changes.sh
│ │ ├── amp_generate_tests.sh
│ │ ├── amp_pr_review.sh
│ │ ├── amp_quality_gate.sh
│ │ ├── amp_suggest_fixes.sh
│ │ ├── auto_review.sh
│ │ ├── detect_breaking_changes.sh
│ │ ├── generate_tests.sh
│ │ ├── lib
│ │ │ └── graphql_helpers.sh
│ │ ├── pre_pr_check.sh
│ │ ├── quality_gate.sh
│ │ ├── resolve_threads.sh
│ │ ├── run_pyscn_analysis.sh
│ │ ├── run_quality_checks_on_files.sh
│ │ ├── run_quality_checks.sh
│ │ ├── thread_status.sh
│ │ └── watch_reviews.sh
│ ├── quality
│ │ ├── bulk_evaluate_onnx.py
│ │ ├── check_test_scores.py
│ │ ├── debug_deberta_scoring.py
│ │ ├── export_deberta_onnx.py
│ │ ├── fix_dead_code_install.sh
│ │ ├── migrate_to_deberta.py
│ │ ├── phase1_dead_code_analysis.md
│ │ ├── phase2_complexity_analysis.md
│ │ ├── README_PHASE1.md
│ │ ├── README_PHASE2.md
│ │ ├── rescore_deberta.py
│ │ ├── rescore_fallback.py
│ │ ├── reset_onnx_scores.py
│ │ ├── track_pyscn_metrics.sh
│ │ └── weekly_quality_review.sh
│ ├── README.md
│ ├── run
│ │ ├── memory_wrapper_cleanup.ps1
│ │ ├── memory_wrapper_cleanup.py
│ │ ├── memory_wrapper_cleanup.sh
│ │ ├── README_CLEANUP_WRAPPER.md
│ │ ├── run_mcp_memory.sh
│ │ ├── run-with-uv.sh
│ │ └── start_sqlite_vec.sh
│ ├── run_memory_server.py
│ ├── server
│ │ ├── check_http_server.py
│ │ ├── check_server_health.py
│ │ ├── memory_offline.py
│ │ ├── preload_models.py
│ │ ├── run_http_server.py
│ │ ├── run_memory_server.py
│ │ ├── start_http_server.bat
│ │ └── start_http_server.sh
│ ├── service
│ │ ├── deploy_dual_services.sh
│ │ ├── http_server_manager.sh
│ │ ├── install_http_service.sh
│ │ ├── mcp-memory-http.service
│ │ ├── mcp-memory.service
│ │ ├── memory_service_manager.sh
│ │ ├── service_control.sh
│ │ ├── service_utils.py
│ │ ├── update_service.sh
│ │ └── windows
│ │ ├── add_watchdog_trigger.ps1
│ │ ├── install_scheduled_task.ps1
│ │ ├── manage_service.ps1
│ │ ├── run_http_server_background.ps1
│ │ ├── uninstall_scheduled_task.ps1
│ │ └── update_and_restart.ps1
│ ├── setup-lightweight.sh
│ ├── sync
│ │ ├── check_drift.py
│ │ ├── claude_sync_commands.py
│ │ ├── export_memories.py
│ │ ├── import_memories.py
│ │ ├── litestream
│ │ │ ├── apply_local_changes.sh
│ │ │ ├── enhanced_memory_store.sh
│ │ │ ├── init_staging_db.sh
│ │ │ ├── io.litestream.replication.plist
│ │ │ ├── manual_sync.sh
│ │ │ ├── memory_sync.sh
│ │ │ ├── pull_remote_changes.sh
│ │ │ ├── push_to_remote.sh
│ │ │ ├── README.md
│ │ │ ├── resolve_conflicts.sh
│ │ │ ├── setup_local_litestream.sh
│ │ │ ├── setup_remote_litestream.sh
│ │ │ ├── staging_db_init.sql
│ │ │ ├── stash_local_changes.sh
│ │ │ ├── sync_from_remote_noconfig.sh
│ │ │ └── sync_from_remote.sh
│ │ ├── README.md
│ │ ├── safe_cloudflare_update.sh
│ │ ├── sync_memory_backends.py
│ │ └── sync_now.py
│ ├── testing
│ │ ├── run_complete_test.py
│ │ ├── run_memory_test.sh
│ │ ├── simple_test.py
│ │ ├── test_cleanup_logic.py
│ │ ├── test_cloudflare_backend.py
│ │ ├── test_docker_functionality.py
│ │ ├── test_installation.py
│ │ ├── test_mdns.py
│ │ ├── test_memory_api.py
│ │ ├── test_memory_simple.py
│ │ ├── test_migration.py
│ │ ├── test_search_api.py
│ │ ├── test_sqlite_vec_embeddings.py
│ │ ├── test_sse_events.py
│ │ ├── test-connection.py
│ │ └── test-hook.js
│ ├── update_and_restart.sh
│ ├── utils
│ │ ├── claude_commands_utils.py
│ │ ├── detect_platform.py
│ │ ├── generate_personalized_claude_md.sh
│ │ ├── groq
│ │ ├── groq_agent_bridge.py
│ │ ├── list-collections.py
│ │ ├── memory_wrapper_uv.py
│ │ ├── query_memories.py
│ │ ├── README_detect_platform.md
│ │ ├── smithery_wrapper.py
│ │ ├── test_groq_bridge.sh
│ │ └── uv_wrapper.py
│ └── validation
│ ├── check_dev_setup.py
│ ├── check_documentation_links.py
│ ├── check_handler_coverage.py
│ ├── diagnose_backend_config.py
│ ├── validate_configuration_complete.py
│ ├── validate_graph_tools.py
│ ├── validate_memories.py
│ ├── validate_migration.py
│ ├── validate_timestamp_integrity.py
│ ├── verify_environment.py
│ ├── verify_pytorch_windows.py
│ └── verify_torch.py
├── SECURITY.md
├── selective_timestamp_recovery.py
├── SPONSORS.md
├── src
│ └── mcp_memory_service
│ ├── __init__.py
│ ├── _version.py
│ ├── api
│ │ ├── __init__.py
│ │ ├── client.py
│ │ ├── operations.py
│ │ ├── sync_wrapper.py
│ │ └── types.py
│ ├── backup
│ │ ├── __init__.py
│ │ └── scheduler.py
│ ├── cli
│ │ ├── __init__.py
│ │ ├── ingestion.py
│ │ ├── main.py
│ │ └── utils.py
│ ├── config.py
│ ├── consolidation
│ │ ├── __init__.py
│ │ ├── associations.py
│ │ ├── base.py
│ │ ├── clustering.py
│ │ ├── compression.py
│ │ ├── consolidator.py
│ │ ├── decay.py
│ │ ├── forgetting.py
│ │ ├── health.py
│ │ └── scheduler.py
│ ├── dependency_check.py
│ ├── discovery
│ │ ├── __init__.py
│ │ ├── client.py
│ │ └── mdns_service.py
│ ├── embeddings
│ │ ├── __init__.py
│ │ └── onnx_embeddings.py
│ ├── ingestion
│ │ ├── __init__.py
│ │ ├── base.py
│ │ ├── chunker.py
│ │ ├── csv_loader.py
│ │ ├── json_loader.py
│ │ ├── pdf_loader.py
│ │ ├── registry.py
│ │ ├── semtools_loader.py
│ │ └── text_loader.py
│ ├── lm_studio_compat.py
│ ├── mcp_server.py
│ ├── models
│ │ ├── __init__.py
│ │ └── memory.py
│ ├── quality
│ │ ├── __init__.py
│ │ ├── ai_evaluator.py
│ │ ├── async_scorer.py
│ │ ├── config.py
│ │ ├── implicit_signals.py
│ │ ├── metadata_codec.py
│ │ ├── onnx_ranker.py
│ │ └── scorer.py
│ ├── server
│ │ ├── __init__.py
│ │ ├── __main__.py
│ │ ├── cache_manager.py
│ │ ├── client_detection.py
│ │ ├── environment.py
│ │ ├── handlers
│ │ │ ├── __init__.py
│ │ │ ├── consolidation.py
│ │ │ ├── documents.py
│ │ │ ├── graph.py
│ │ │ ├── memory.py
│ │ │ ├── quality.py
│ │ │ └── utility.py
│ │ └── logging_config.py
│ ├── server_impl.py
│ ├── services
│ │ ├── __init__.py
│ │ └── memory_service.py
│ ├── storage
│ │ ├── __init__.py
│ │ ├── base.py
│ │ ├── cloudflare.py
│ │ ├── factory.py
│ │ ├── graph.py
│ │ ├── http_client.py
│ │ ├── hybrid.py
│ │ ├── migrations
│ │ │ └── 008_add_graph_table.sql
│ │ └── sqlite_vec.py
│ ├── sync
│ │ ├── __init__.py
│ │ ├── exporter.py
│ │ ├── importer.py
│ │ └── litestream_config.py
│ ├── utils
│ │ ├── __init__.py
│ │ ├── cache_manager.py
│ │ ├── content_splitter.py
│ │ ├── db_utils.py
│ │ ├── debug.py
│ │ ├── directory_ingestion.py
│ │ ├── document_processing.py
│ │ ├── gpu_detection.py
│ │ ├── hashing.py
│ │ ├── health_check.py
│ │ ├── http_server_manager.py
│ │ ├── port_detection.py
│ │ ├── quality_analytics.py
│ │ ├── startup_orchestrator.py
│ │ ├── system_detection.py
│ │ └── time_parser.py
│ └── web
│ ├── __init__.py
│ ├── api
│ │ ├── __init__.py
│ │ ├── analytics.py
│ │ ├── backup.py
│ │ ├── consolidation.py
│ │ ├── documents.py
│ │ ├── events.py
│ │ ├── health.py
│ │ ├── manage.py
│ │ ├── mcp.py
│ │ ├── memories.py
│ │ ├── quality.py
│ │ ├── search.py
│ │ └── sync.py
│ ├── app.py
│ ├── dependencies.py
│ ├── oauth
│ │ ├── __init__.py
│ │ ├── authorization.py
│ │ ├── discovery.py
│ │ ├── middleware.py
│ │ ├── models.py
│ │ ├── registration.py
│ │ └── storage.py
│ ├── sse.py
│ └── static
│ ├── app.js
│ ├── i18n
│ │ ├── de.json
│ │ ├── en.json
│ │ ├── es.json
│ │ ├── fr.json
│ │ ├── ja.json
│ │ ├── ko.json
│ │ └── zh.json
│ ├── index.html
│ ├── README.md
│ ├── sse_test.html
│ └── style.css
├── start_http_debug.bat
├── start_http_server.sh
├── test_document.txt
├── test_version_checker.js
├── TESTING_NOTES.md
├── tests
│ ├── __init__.py
│ ├── api
│ │ ├── __init__.py
│ │ ├── test_compact_types.py
│ │ └── test_operations.py
│ ├── bridge
│ │ ├── mock_responses.js
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ └── test_http_mcp_bridge.js
│ ├── conftest.py
│ ├── consolidation
│ │ ├── __init__.py
│ │ ├── conftest.py
│ │ ├── test_associations.py
│ │ ├── test_clustering.py
│ │ ├── test_compression.py
│ │ ├── test_consolidator.py
│ │ ├── test_decay.py
│ │ ├── test_forgetting.py
│ │ └── test_graph_modes.py
│ ├── contracts
│ │ └── api-specification.yml
│ ├── integration
│ │ ├── conftest.py
│ │ ├── HANDLER_COVERAGE_REPORT.md
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── test_all_memory_handlers.py
│ │ ├── test_api_key_fallback.py
│ │ ├── test_api_memories_chronological.py
│ │ ├── test_api_tag_time_search.py
│ │ ├── test_api_with_memory_service.py
│ │ ├── test_bridge_integration.js
│ │ ├── test_cli_interfaces.py
│ │ ├── test_cloudflare_connection.py
│ │ ├── test_concurrent_clients.py
│ │ ├── test_data_serialization_consistency.py
│ │ ├── test_http_server_startup.py
│ │ ├── test_mcp_memory.py
│ │ ├── test_mdns_integration.py
│ │ ├── test_oauth_basic_auth.py
│ │ ├── test_oauth_flow.py
│ │ ├── test_server_handlers.py
│ │ └── test_store_memory.py
│ ├── performance
│ │ ├── test_background_sync.py
│ │ └── test_hybrid_live.py
│ ├── README.md
│ ├── smithery
│ │ └── test_smithery.py
│ ├── sqlite
│ │ └── simple_sqlite_vec_test.py
│ ├── storage
│ │ ├── conftest.py
│ │ └── test_graph_storage.py
│ ├── test_client.py
│ ├── test_content_splitting.py
│ ├── test_database.py
│ ├── test_deberta_quality.py
│ ├── test_fallback_quality.py
│ ├── test_graph_traversal.py
│ ├── test_hybrid_cloudflare_limits.py
│ ├── test_hybrid_storage.py
│ ├── test_lightweight_onnx.py
│ ├── test_memory_ops.py
│ ├── test_memory_wrapper_cleanup.py
│ ├── test_quality_integration.py
│ ├── test_quality_system.py
│ ├── test_semantic_search.py
│ ├── test_sqlite_vec_storage.py
│ ├── test_time_parser.py
│ ├── test_timestamp_preservation.py
│ ├── timestamp
│ │ ├── test_hook_vs_manual_storage.py
│ │ ├── test_issue99_final_validation.py
│ │ ├── test_search_retrieval_inconsistency.py
│ │ ├── test_timestamp_issue.py
│ │ └── test_timestamp_simple.py
│ └── unit
│ ├── conftest.py
│ ├── test_cloudflare_storage.py
│ ├── test_csv_loader.py
│ ├── test_fastapi_dependencies.py
│ ├── test_import.py
│ ├── test_imports.py
│ ├── test_json_loader.py
│ ├── test_mdns_simple.py
│ ├── test_mdns.py
│ ├── test_memory_service.py
│ ├── test_memory.py
│ ├── test_semtools_loader.py
│ ├── test_storage_interface_compatibility.py
│ ├── test_tag_time_filtering.py
│ └── test_uv_no_pip_installer_fallback.py
├── tools
│ ├── docker
│ │ ├── DEPRECATED.md
│ │ ├── docker-compose.http.yml
│ │ ├── docker-compose.pythonpath.yml
│ │ ├── docker-compose.standalone.yml
│ │ ├── docker-compose.uv.yml
│ │ ├── docker-compose.yml
│ │ ├── docker-entrypoint-persistent.sh
│ │ ├── docker-entrypoint-unified.sh
│ │ ├── docker-entrypoint.sh
│ │ ├── Dockerfile
│ │ ├── Dockerfile.glama
│ │ ├── Dockerfile.slim
│ │ ├── README.md
│ │ └── test-docker-modes.sh
│ └── README.md
├── uv.lock
└── verify_compression.sh
```
# Files
--------------------------------------------------------------------------------
/install.py:
--------------------------------------------------------------------------------
```python
1 | #!/usr/bin/env python3
2 | # Copyright 2024 Heinrich Krupp
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | """
17 | Installation script for MCP Memory Service with cross-platform compatibility.
18 | This script guides users through the installation process with the appropriate
19 | dependencies for their platform.
20 | """
21 | import os
22 | import sys
23 | import platform
24 | import subprocess
25 | import argparse
26 | import shutil
27 | from pathlib import Path
28 |
29 | # Import shared GPU detection utilities
30 | try:
31 | from src.mcp_memory_service.utils.gpu_detection import detect_gpu as shared_detect_gpu
32 | except ImportError:
33 | # Fallback for development/testing scenarios
34 | sys.path.insert(0, str(Path(__file__).parent))
35 | from src.mcp_memory_service.utils.gpu_detection import detect_gpu as shared_detect_gpu
36 |
37 | # Fix Windows console encoding issues
38 | if platform.system() == "Windows":
39 | # Ensure stdout uses UTF-8 on Windows to prevent character encoding issues in logs
40 | if hasattr(sys.stdout, 'reconfigure'):
41 | try:
42 | sys.stdout.reconfigure(encoding='utf-8')
43 | sys.stderr.reconfigure(encoding='utf-8')
44 | except AttributeError:
45 | pass
46 |
47 | # Enhanced logging system for installer
48 | import logging
49 | from datetime import datetime
50 |
51 | class DualOutput:
52 | """Class to handle both console and file output simultaneously."""
53 | def __init__(self, log_file_path):
54 | self.console = sys.stdout
55 | self.log_file = None
56 | self.log_file_path = log_file_path
57 | self._setup_log_file()
58 |
59 | def _setup_log_file(self):
60 | """Set up the log file with proper encoding."""
61 | try:
62 | # Create log file with UTF-8 encoding
63 | self.log_file = open(self.log_file_path, 'w', encoding='utf-8')
64 | # Write header
65 | timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
66 |
67 | # Fix Windows version display in log header
68 | platform_info = f"{platform.system()} {platform.release()}"
69 | if platform.system() == "Windows":
70 | try:
71 | import winreg
72 | key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r"SOFTWARE\Microsoft\Windows NT\CurrentVersion")
73 | build_number = winreg.QueryValueEx(key, "CurrentBuildNumber")[0]
74 | winreg.CloseKey(key)
75 |
76 | # Windows 11 has build number >= 22000
77 | if int(build_number) >= 22000:
78 | platform_info = f"Windows 11"
79 | else:
80 | platform_info = f"Windows {platform.release()}"
81 | except (ImportError, OSError, ValueError):
82 | pass # Use default
83 |
84 | header = f"""
85 | ================================================================================
86 | MCP Memory Service Installation Log
87 | Started: {timestamp}
88 | Platform: {platform_info} ({platform.machine()})
89 | Python: {sys.version}
90 | ================================================================================
91 |
92 | """
93 | self.log_file.write(header)
94 | self.log_file.flush()
95 | except Exception as e:
96 | print(f"Warning: Could not create log file {self.log_file_path}: {e}")
97 | self.log_file = None
98 |
99 | def write(self, text):
100 | """Write to both console and log file."""
101 | # Write to console
102 | self.console.write(text)
103 | self.console.flush()
104 |
105 | # Write to log file if available
106 | if self.log_file:
107 | try:
108 | self.log_file.write(text)
109 | self.log_file.flush()
110 | except Exception:
111 | pass # Silently ignore log file write errors
112 |
113 | def flush(self):
114 | """Flush both outputs."""
115 | self.console.flush()
116 | if self.log_file:
117 | try:
118 | self.log_file.flush()
119 | except Exception:
120 | pass
121 |
122 | def close(self):
123 | """Close the log file."""
124 | if self.log_file:
125 | try:
126 | timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
127 | footer = f"""
128 | ================================================================================
129 | Installation completed: {timestamp}
130 | ================================================================================
131 | """
132 | self.log_file.write(footer)
133 | self.log_file.close()
134 | except Exception:
135 | pass
136 |
137 | # Global dual output instance
138 | _dual_output = None
139 |
140 | def setup_installer_logging():
141 | """Set up the installer logging system."""
142 | global _dual_output
143 |
144 | # Create log file path
145 | log_file = Path.cwd() / "installation.log"
146 |
147 | # Remove old log file if it exists
148 | if log_file.exists():
149 | try:
150 | log_file.unlink()
151 | except Exception:
152 | pass
153 |
154 | # Set up dual output
155 | _dual_output = DualOutput(str(log_file))
156 |
157 | # Redirect stdout to dual output
158 | sys.stdout = _dual_output
159 |
160 | print(f"Installation log will be saved to: {log_file}")
161 |
162 | return str(log_file)
163 |
164 | def cleanup_installer_logging():
165 | """Clean up the installer logging system."""
166 | global _dual_output
167 |
168 | if _dual_output:
169 | # Restore original stdout
170 | sys.stdout = _dual_output.console
171 | _dual_output.close()
172 | _dual_output = None
173 |
174 | # Import Claude commands utilities
175 | try:
176 | from scripts.claude_commands_utils import install_claude_commands, check_claude_code_cli
177 | except ImportError:
178 | # Handle case where script is run from different directory
179 | script_dir = Path(__file__).parent
180 | sys.path.insert(0, str(script_dir))
181 | try:
182 | from scripts.claude_commands_utils import install_claude_commands, check_claude_code_cli
183 | except ImportError:
184 | install_claude_commands = None
185 | check_claude_code_cli = None
186 |
187 | # Global variable to store the uv executable path
188 | UV_EXECUTABLE_PATH = None
189 |
190 | def _pip_available() -> bool:
191 | """Return True if `python -m pip` is available in the current environment."""
192 | try:
193 | subprocess.check_call(
194 | [sys.executable, "-m", "pip", "--version"],
195 | stdout=subprocess.DEVNULL,
196 | stderr=subprocess.DEVNULL,
197 | )
198 | return True
199 | except (subprocess.SubprocessError, OSError):
200 | return False
201 |
202 | def _uv_executable() -> str | None:
203 | """Return uv executable path if available (prefer detected path)."""
204 | return UV_EXECUTABLE_PATH or shutil.which("uv")
205 |
206 | def _install_python_packages(
207 | packages: list[str],
208 | *,
209 | extra_args: list[str] | None = None,
210 | silent: bool = False,
211 | env: dict[str, str] | None = None,
212 | ) -> None:
213 | """Install packages using pip if present, otherwise fall back to `uv pip`.
214 |
215 | Raises:
216 | subprocess.SubprocessError: If the installation command fails
217 | OSError: If the installation command cannot be executed
218 | RuntimeError: If neither pip nor uv are available
219 | """
220 | if _pip_available():
221 | cmd: list[str] = [sys.executable, "-m", "pip", "install"]
222 | else:
223 | uv_path = _uv_executable()
224 | if not uv_path:
225 | raise RuntimeError("Neither pip nor uv could be found. Cannot install packages.")
226 | cmd = [uv_path, "pip", "install", "--python", sys.executable]
227 |
228 | if extra_args:
229 | cmd.extend(extra_args)
230 | cmd.extend(packages)
231 |
232 | kwargs: dict[str, object] = {}
233 | if env is not None:
234 | kwargs["env"] = env
235 | if silent:
236 | kwargs["stdout"] = subprocess.DEVNULL
237 | kwargs["stderr"] = subprocess.DEVNULL
238 |
239 | subprocess.check_call(cmd, **kwargs)
240 |
241 | def print_header(text):
242 | """Print a formatted header."""
243 | print("\n" + "=" * 80)
244 | print(f" {text}")
245 | print("=" * 80)
246 |
247 | def print_step(step, text):
248 | """Print a formatted step."""
249 | print(f"\n[{step}] {text}")
250 |
251 | def print_info(text):
252 | """Print formatted info text."""
253 | print(f" -> {text}")
254 |
255 | def print_error(text):
256 | """Print formatted error text."""
257 | print(f" [ERROR] {text}")
258 |
259 | def print_success(text):
260 | """Print formatted success text."""
261 | print(f" [OK] {text}")
262 |
263 | def print_warning(text):
264 | """Print formatted warning text."""
265 | print(f" [WARNING] {text}")
266 |
267 | def prompt_user_input(prompt_text, default_value=""):
268 | """
269 | Prompt user for input with formatted banner.
270 |
271 | Args:
272 | prompt_text: The input prompt to display
273 | default_value: Optional default value if user presses Enter
274 |
275 | Returns:
276 | User's input (or default if empty)
277 | """
278 | print("\n" + "=" * 60)
279 | print("⚠️ USER INPUT REQUIRED")
280 | print("=" * 60)
281 | response = input(prompt_text).strip()
282 | print("=" * 60 + "\n")
283 | return response if response else default_value
284 |
285 | def build_mcp_server_config(storage_backend="sqlite_vec", repo_path=None):
286 | """
287 | Build MCP server configuration dict for multi-client access.
288 |
289 | Args:
290 | storage_backend: Storage backend to use (sqlite_vec or chromadb)
291 | repo_path: Repository path (defaults to current directory)
292 |
293 | Returns:
294 | Dict containing MCP server configuration with command, args, and env
295 | """
296 | if repo_path is None:
297 | repo_path = str(Path.cwd())
298 |
299 | # Build environment configuration based on storage backend
300 | env_config = {
301 | "MCP_MEMORY_STORAGE_BACKEND": storage_backend,
302 | "LOG_LEVEL": "INFO"
303 | }
304 |
305 | # Add backend-specific configuration
306 | if storage_backend == "sqlite_vec":
307 | env_config["MCP_MEMORY_SQLITE_PRAGMAS"] = "busy_timeout=15000,cache_size=20000"
308 |
309 | return {
310 | "command": UV_EXECUTABLE_PATH or "uv",
311 | "args": ["--directory", repo_path, "run", "memory"],
312 | "env": env_config
313 | }
314 |
315 | # Cache for system detection to avoid duplicate calls
316 | _system_info_cache = None
317 |
318 | def detect_system():
319 | """Detect the system architecture and platform."""
320 | global _system_info_cache
321 | if _system_info_cache is not None:
322 | return _system_info_cache
323 |
324 | system = platform.system().lower()
325 | machine = platform.machine().lower()
326 | python_version = f"{sys.version_info.major}.{sys.version_info.minor}"
327 |
328 | is_windows = system == "windows"
329 | is_macos = system == "darwin"
330 | is_linux = system == "linux"
331 | is_arm = machine in ("arm64", "aarch64")
332 | is_x86 = machine in ("x86_64", "amd64", "x64")
333 |
334 | # Fix Windows version detection - Windows 11 reports as Windows 10
335 | if is_windows:
336 | try:
337 | import winreg
338 | key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r"SOFTWARE\Microsoft\Windows NT\CurrentVersion")
339 | build_number = winreg.QueryValueEx(key, "CurrentBuildNumber")[0]
340 | winreg.CloseKey(key)
341 |
342 | # Windows 11 has build number >= 22000
343 | if int(build_number) >= 22000:
344 | windows_version = "11"
345 | else:
346 | windows_version = platform.release()
347 | except (ImportError, OSError, ValueError):
348 | windows_version = platform.release()
349 |
350 | print_info(f"System: {platform.system()} {windows_version}")
351 | else:
352 | print_info(f"System: {platform.system()} {platform.release()}")
353 |
354 | print_info(f"Architecture: {machine}")
355 | print_info(f"Python: {python_version}")
356 |
357 | # Check for virtual environment
358 | in_venv = sys.prefix != sys.base_prefix
359 | if not in_venv:
360 | print_warning("Not running in a virtual environment. It's recommended to install in a virtual environment.")
361 | else:
362 | print_info(f"Virtual environment: {sys.prefix}")
363 |
364 | # Check for Homebrew PyTorch installation
365 | has_homebrew_pytorch = False
366 | homebrew_pytorch_version = None
367 | if is_macos:
368 | try:
369 | # Check if pytorch is installed via brew
370 | print_info("Checking for Homebrew PyTorch installation...")
371 | result = subprocess.run(
372 | ['brew', 'list', 'pytorch', '--version'],
373 | capture_output=True,
374 | text=True,
375 | timeout=30 # Increased timeout to prevent hanging
376 | )
377 | if result.returncode == 0:
378 | has_homebrew_pytorch = True
379 | # Extract version from output
380 | version_line = result.stdout.strip()
381 | homebrew_pytorch_version = version_line.split()[1] if len(version_line.split()) > 1 else "Unknown"
382 | print_info(f"Detected Homebrew PyTorch installation: {homebrew_pytorch_version}")
383 | except subprocess.TimeoutExpired:
384 | print_info("Homebrew PyTorch detection timed out - skipping")
385 | has_homebrew_pytorch = False
386 | except (subprocess.SubprocessError, FileNotFoundError):
387 | pass
388 |
389 | _system_info_cache = {
390 | "system": system,
391 | "machine": machine,
392 | "python_version": python_version,
393 | "is_windows": is_windows,
394 | "is_macos": is_macos,
395 | "is_linux": is_linux,
396 | "is_arm": is_arm,
397 | "is_x86": is_x86,
398 | "in_venv": in_venv,
399 | "has_homebrew_pytorch": has_homebrew_pytorch,
400 | "homebrew_pytorch_version": homebrew_pytorch_version
401 | }
402 | return _system_info_cache
403 |
404 | def check_sqlite_extension_support():
405 | """Check if Python's sqlite3 supports loading extensions."""
406 | import sqlite3
407 |
408 | test_conn = None
409 | try:
410 | test_conn = sqlite3.connect(":memory:")
411 | if not hasattr(test_conn, 'enable_load_extension'):
412 | return False, "Python sqlite3 module not compiled with extension support"
413 |
414 | # Test if we can actually enable extension loading
415 | test_conn.enable_load_extension(True)
416 | test_conn.enable_load_extension(False)
417 | return True, "Extension loading supported"
418 |
419 | except AttributeError as e:
420 | return False, f"enable_load_extension not available: {e}"
421 | except Exception as e:
422 | return False, f"Extension support check failed: {e}"
423 | finally:
424 | if test_conn:
425 | test_conn.close()
426 |
427 | def detect_gpu():
428 | """Detect GPU and acceleration capabilities.
429 |
430 | Wrapper function that uses the shared GPU detection module.
431 | """
432 | system_info = detect_system()
433 |
434 | # Use shared GPU detection module
435 | gpu_info = shared_detect_gpu(system_info)
436 |
437 | # Print GPU information (maintain installer output format)
438 | if gpu_info.get("has_cuda"):
439 | cuda_version = gpu_info.get("cuda_version")
440 | print_info(f"CUDA detected: {cuda_version or 'Unknown version'}")
441 | if gpu_info.get("has_rocm"):
442 | rocm_version = gpu_info.get("rocm_version")
443 | print_info(f"ROCm detected: {rocm_version or 'Unknown version'}")
444 | if gpu_info.get("has_mps"):
445 | print_info("Apple Metal Performance Shaders (MPS) detected")
446 | if gpu_info.get("has_directml"):
447 | directml_version = gpu_info.get("directml_version")
448 | if directml_version:
449 | print_info(f"DirectML detected: {directml_version}")
450 | else:
451 | print_info("DirectML detected")
452 |
453 | if not (gpu_info.get("has_cuda") or gpu_info.get("has_rocm") or
454 | gpu_info.get("has_mps") or gpu_info.get("has_directml")):
455 | print_info("No GPU acceleration detected, will use CPU-only mode")
456 |
457 | return gpu_info
458 |
459 |
460 | def check_dependencies():
461 | """Check for required dependencies.
462 |
463 | Note on package managers:
464 | - Traditional virtual environments (venv, virtualenv) include pip by default
465 | - Alternative package managers like uv may not include pip or may manage packages differently
466 | - We attempt multiple detection methods for pip and only fail if:
467 | a) We're not in a virtual environment, or
468 | b) We can't detect pip AND can't install dependencies
469 |
470 | We proceed with installation even if pip isn't detected when in a virtual environment,
471 | assuming an alternative package manager (like uv) is handling dependencies.
472 |
473 | Returns:
474 | bool: True if all dependencies are met, False otherwise.
475 | """
476 | print_step("2", "Checking dependencies")
477 |
478 | # Check for pip
479 | pip_installed = False
480 |
481 | # Try subprocess check first
482 | try:
483 | subprocess.check_call([sys.executable, '-m', 'pip', '--version'],
484 | stdout=subprocess.DEVNULL,
485 | stderr=subprocess.DEVNULL)
486 | pip_installed = True
487 | print_info("pip is installed")
488 | except subprocess.SubprocessError:
489 | # Fallback to import check
490 | try:
491 | import pip
492 | pip_installed = True
493 | print_info(f"pip is installed: {pip.__version__}")
494 | except ImportError:
495 | # Check if we're in a virtual environment
496 | in_venv = sys.prefix != sys.base_prefix
497 | if in_venv:
498 | print_warning("pip could not be detected, but you're in a virtual environment. "
499 | "If you're using uv or another alternative package manager, this is normal. "
500 | "Continuing installation (will use uv where needed)...")
501 | pip_installed = True # Proceed anyway
502 | else:
503 | print_error("pip is not installed. Please install pip first.")
504 | return False
505 |
506 | # Check for setuptools
507 | try:
508 | import setuptools
509 | print_info(f"setuptools is installed: {setuptools.__version__}")
510 | except ImportError:
511 | print_warning("setuptools is not installed. Will attempt to install it.")
512 | # If pip is available, use it to install setuptools
513 | if pip_installed:
514 | try:
515 | _install_python_packages(["setuptools"], silent=True)
516 | print_success("setuptools installed successfully")
517 | except (subprocess.SubprocessError, OSError, RuntimeError):
518 | # Check if in virtual environment
519 | in_venv = sys.prefix != sys.base_prefix
520 | if in_venv:
521 | print_warning("Failed to install setuptools automatically. If you're using an alternative package manager "
522 | "like uv, please install setuptools manually using that tool (e.g., 'uv pip install setuptools').")
523 | else:
524 | print_error("Failed to install setuptools. Please install it manually.")
525 | return False
526 | else:
527 | # Should be unreachable since pip_installed would only be False if we returned earlier
528 | print_error("Cannot install setuptools without pip. Please install setuptools manually.")
529 | return False
530 |
531 | # Check for wheel
532 | try:
533 | import wheel
534 | print_info(f"wheel is installed: {wheel.__version__}")
535 | except ImportError:
536 | print_warning("wheel is not installed. Will attempt to install it.")
537 | # If pip is available, use it to install wheel
538 | if pip_installed:
539 | try:
540 | _install_python_packages(["wheel"], silent=True)
541 | print_success("wheel installed successfully")
542 | except (subprocess.SubprocessError, OSError, RuntimeError):
543 | # Check if in virtual environment
544 | in_venv = sys.prefix != sys.base_prefix
545 | if in_venv:
546 | print_warning("Failed to install wheel automatically. If you're using an alternative package manager "
547 | "like uv, please install wheel manually using that tool (e.g., 'uv pip install wheel').")
548 | else:
549 | print_error("Failed to install wheel. Please install it manually.")
550 | return False
551 | else:
552 | # Should be unreachable since pip_installed would only be False if we returned earlier
553 | print_error("Cannot install wheel without pip. Please install wheel manually.")
554 | return False
555 |
556 | return True
557 |
558 | def install_pytorch_platform_specific(system_info, gpu_info, args=None):
559 | """Install PyTorch with platform-specific configurations."""
560 | # Check if PyTorch installation should be skipped
561 | if args and args.skip_pytorch:
562 | print_info("Skipping PyTorch installation as requested")
563 | return True
564 |
565 | if system_info["is_windows"]:
566 | return install_pytorch_windows(gpu_info)
567 | elif system_info["is_macos"] and system_info["is_x86"]:
568 | return install_pytorch_macos_intel()
569 | elif system_info["is_macos"] and system_info["is_arm"]:
570 | return install_pytorch_macos_arm64()
571 | else:
572 | # For other platforms, let the regular installer handle it
573 | return True
574 |
575 | def install_pytorch_macos_intel():
576 | """Install PyTorch specifically for macOS with Intel CPUs."""
577 | print_step("3a", "Installing PyTorch for macOS Intel CPU")
578 |
579 | # Use the versions known to work well on macOS Intel and with Python 3.13+
580 | try:
581 | # For Python 3.13+, we need newer PyTorch versions
582 | python_version = sys.version_info
583 |
584 | if python_version >= (3, 13):
585 | # For Python 3.13+, try to install latest compatible version
586 | print_info(f"Installing PyTorch for macOS Intel (Python {python_version.major}.{python_version.minor})...")
587 | print_info("Attempting to install latest PyTorch compatible with Python 3.13...")
588 |
589 | try:
590 | # Try to install without version specifiers to get latest compatible version
591 | cmd = [
592 | sys.executable, '-m', 'pip', 'install',
593 | "torch", "torchvision", "torchaudio"
594 | ]
595 | print_info(f"Running: {' '.join(cmd)}")
596 | subprocess.check_call(cmd)
597 | st_version = "3.0.0" # Newer sentence-transformers for newer PyTorch
598 | except subprocess.SubprocessError as e:
599 | print_warning(f"Failed to install latest PyTorch: {e}")
600 | # Fallback to a specific version
601 | torch_version = "2.1.0"
602 | torch_vision_version = "0.16.0"
603 | torch_audio_version = "2.1.0"
604 | st_version = "3.0.0"
605 |
606 | print_info(f"Trying fallback to PyTorch {torch_version}...")
607 |
608 | cmd = [
609 | sys.executable, '-m', 'pip', 'install',
610 | f"torch=={torch_version}",
611 | f"torchvision=={torch_vision_version}",
612 | f"torchaudio=={torch_audio_version}"
613 | ]
614 | print_info(f"Running: {' '.join(cmd)}")
615 | subprocess.check_call(cmd)
616 | else:
617 | # Use traditional versions for older Python
618 | torch_version = "1.13.1"
619 | torch_vision_version = "0.14.1"
620 | torch_audio_version = "0.13.1"
621 | st_version = "2.2.2"
622 |
623 | print_info(f"Installing PyTorch {torch_version} for macOS Intel (Python {python_version.major}.{python_version.minor})...")
624 |
625 | # Install PyTorch first with compatible version
626 | cmd = [
627 | sys.executable, '-m', 'pip', 'install',
628 | f"torch=={torch_version}",
629 | f"torchvision=={torch_vision_version}",
630 | f"torchaudio=={torch_audio_version}"
631 | ]
632 |
633 | print_info(f"Running: {' '.join(cmd)}")
634 | subprocess.check_call(cmd)
635 |
636 | # Install a compatible version of sentence-transformers
637 | print_info(f"Installing sentence-transformers {st_version}...")
638 |
639 | cmd = [
640 | sys.executable, '-m', 'pip', 'install',
641 | f"sentence-transformers=={st_version}"
642 | ]
643 |
644 | print_info(f"Running: {' '.join(cmd)}")
645 | subprocess.check_call(cmd)
646 |
647 | print_success(f"PyTorch {torch_version} and sentence-transformers {st_version} installed successfully for macOS Intel")
648 | return True
649 | except subprocess.SubprocessError as e:
650 | print_error(f"Failed to install PyTorch for macOS Intel: {e}")
651 |
652 | # Provide fallback instructions
653 | if python_version >= (3, 13):
654 | print_warning("You may need to manually install compatible versions for Python 3.13+ on Intel macOS:")
655 | print_info("pip install torch==2.3.0 torchvision==0.18.0 torchaudio==2.3.0")
656 | print_info("pip install sentence-transformers==3.0.0")
657 | else:
658 | print_warning("You may need to manually install compatible versions for Intel macOS:")
659 | print_info("pip install torch==1.13.1 torchvision==0.14.1 torchaudio==0.13.1")
660 | print_info("pip install sentence-transformers==2.2.2")
661 |
662 | return False
663 |
664 | def install_pytorch_macos_arm64():
665 | """Install PyTorch specifically for macOS with ARM64 (Apple Silicon)."""
666 | print_step("3a", "Installing PyTorch for macOS ARM64 (Apple Silicon)")
667 |
668 | try:
669 | # For Apple Silicon, we can use the latest PyTorch with MPS support
670 | print_info("Installing PyTorch with Metal Performance Shaders (MPS) support...")
671 |
672 | # Install PyTorch with MPS support - let pip choose the best compatible version
673 | cmd = [
674 | sys.executable, '-m', 'pip', 'install',
675 | 'torch>=2.0.0',
676 | 'torchvision',
677 | 'torchaudio'
678 | ]
679 |
680 | print_info(f"Running: {' '.join(cmd)}")
681 | subprocess.check_call(cmd)
682 |
683 | # Install sentence-transformers
684 | print_info("Installing sentence-transformers (for embedding generation)...")
685 | print_info("Note: Models will be downloaded on first use (~25MB)")
686 | cmd = [
687 | sys.executable, '-m', 'pip', 'install',
688 | 'sentence-transformers>=2.2.2'
689 | ]
690 |
691 | print_info(f"Running: {' '.join(cmd)}")
692 | subprocess.check_call(cmd)
693 |
694 | print_success("PyTorch and sentence-transformers installed successfully for macOS ARM64")
695 | print_info("MPS (Metal Performance Shaders) acceleration is available for GPU compute")
696 |
697 | return True
698 | except subprocess.SubprocessError as e:
699 | print_error(f"Failed to install PyTorch for macOS ARM64: {e}")
700 |
701 | # Provide fallback instructions
702 | print_warning("You may need to manually install PyTorch for Apple Silicon:")
703 | print_info("pip install torch torchvision torchaudio")
704 | print_info("pip install sentence-transformers")
705 | print_info("")
706 | print_info("If you encounter issues, try:")
707 | print_info("pip install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/cpu")
708 |
709 | return False
710 |
711 | def install_pytorch_windows(gpu_info):
712 | """Install PyTorch on Windows using the appropriate index URL."""
713 | print_step("3a", "Installing PyTorch for Windows")
714 |
715 | # Check if PyTorch is already installed and compatible
716 | pytorch_installed = False
717 | torch_version_installed = None
718 | directml_compatible = False
719 |
720 | try:
721 | import torch
722 | torch_version_installed = torch.__version__
723 | pytorch_installed = True
724 | print_info(f"PyTorch {torch_version_installed} is already installed")
725 |
726 | # Check if version is compatible with DirectML (2.4.x works, 2.5.x doesn't)
727 | version_parts = torch_version_installed.split('.')
728 | major, minor = int(version_parts[0]), int(version_parts[1])
729 |
730 | if gpu_info["has_directml"]:
731 | if major == 2 and minor == 4:
732 | directml_compatible = True
733 | print_success(f"PyTorch {torch_version_installed} is compatible with DirectML")
734 |
735 | # Check if torch-directml is also installed
736 | try:
737 | import torch_directml
738 | directml_version = getattr(torch_directml, '__version__', 'Unknown version')
739 | print_success(f"torch-directml {directml_version} is already installed")
740 | return True # Everything is compatible, no need to reinstall
741 | except ImportError:
742 | print_info("torch-directml not found, will install it")
743 | # Install torch-directml only
744 | try:
745 | subprocess.check_call([
746 | sys.executable, '-m', 'pip', 'install', 'torch-directml==0.2.5.dev240914'
747 | ])
748 | print_success("torch-directml installed successfully")
749 | return True
750 | except subprocess.SubprocessError:
751 | print_warning("Failed to install torch-directml - DirectML support will be limited")
752 | return True # Still return True since PyTorch works
753 |
754 | elif major == 2 and minor >= 5:
755 | print_warning(f"PyTorch {torch_version_installed} is not compatible with torch-directml")
756 | print_info("torch-directml requires PyTorch 2.4.x, but 2.5.x is installed")
757 | print_info("Keeping existing PyTorch installation - DirectML support will be limited")
758 | return True # Don't break existing installation
759 | else:
760 | print_info(f"PyTorch {torch_version_installed} compatibility with DirectML is unknown")
761 | else:
762 | # No DirectML needed, check if current version is reasonable
763 | if major == 2 and minor >= 4:
764 | print_success(f"PyTorch {torch_version_installed} is acceptable for CPU usage")
765 | return True # Keep existing installation
766 |
767 | except ImportError:
768 | print_info("PyTorch not found, will install compatible version")
769 |
770 | # If we get here, we need to install PyTorch
771 | # Determine the appropriate PyTorch index URL based on GPU
772 | if gpu_info["has_cuda"]:
773 | # Get CUDA version and determine appropriate index URL
774 | cuda_version = gpu_info.get("cuda_version", "")
775 |
776 | # Extract major version from CUDA version string
777 | cuda_major = None
778 | if cuda_version:
779 | # Try to extract the major version (e.g., "11.8" -> "11")
780 | try:
781 | cuda_major = cuda_version.split('.')[0]
782 | except (IndexError, AttributeError):
783 | pass
784 |
785 | # Default to cu118 if we couldn't determine the version or it's not a common one
786 | if cuda_major == "12":
787 | cuda_suffix = "cu121" # CUDA 12.x
788 | print_info(f"Detected CUDA {cuda_version}, using cu121 channel")
789 | elif cuda_major == "11":
790 | cuda_suffix = "cu118" # CUDA 11.x
791 | print_info(f"Detected CUDA {cuda_version}, using cu118 channel")
792 | elif cuda_major == "10":
793 | cuda_suffix = "cu102" # CUDA 10.x
794 | print_info(f"Detected CUDA {cuda_version}, using cu102 channel")
795 | else:
796 | # Default to cu118 as a safe choice for newer NVIDIA GPUs
797 | cuda_suffix = "cu118"
798 | print_info(f"Using default cu118 channel for CUDA {cuda_version}")
799 |
800 | index_url = f"https://download.pytorch.org/whl/{cuda_suffix}"
801 | else:
802 | # CPU-only version
803 | index_url = "https://download.pytorch.org/whl/cpu"
804 | print_info("Using CPU-only PyTorch for Windows")
805 |
806 | # Install PyTorch with the appropriate index URL
807 | try:
808 | # Use versions compatible with DirectML if needed
809 | if gpu_info["has_directml"]:
810 | # Use PyTorch 2.4.x which is compatible with torch-directml
811 | torch_version = "2.4.1"
812 | torchvision_version = "0.19.1" # Compatible with torch 2.4.1
813 | torchaudio_version = "2.4.1"
814 | print_info("Using PyTorch 2.4.1 for DirectML compatibility")
815 | else:
816 | # Use latest version for non-DirectML systems
817 | torch_version = "2.5.1"
818 | torchvision_version = "0.20.1" # Compatible with torch 2.5.1
819 | torchaudio_version = "2.5.1"
820 | print_info("Using PyTorch 2.5.1 for optimal performance")
821 |
822 | cmd = [
823 | sys.executable, '-m', 'pip', 'install',
824 | f"torch=={torch_version}",
825 | f"torchvision=={torchvision_version}",
826 | f"torchaudio=={torchaudio_version}",
827 | f"--index-url={index_url}"
828 | ]
829 |
830 | print_info(f"Running: {' '.join(cmd)}")
831 | subprocess.check_call(cmd)
832 |
833 | # Check if DirectML is needed
834 | if gpu_info["has_directml"]:
835 | print_info("Installing torch-directml for DirectML support")
836 | try:
837 | # Try the latest dev version since stable versions aren't available
838 | subprocess.check_call([
839 | sys.executable, '-m', 'pip', 'install', 'torch-directml==0.2.5.dev240914'
840 | ])
841 | except subprocess.SubprocessError:
842 | print_warning("Failed to install torch-directml - DirectML support will be limited")
843 | print_info("You can install manually later with: pip install torch-directml==0.2.5.dev240914")
844 |
845 | print_success("PyTorch installed successfully for Windows")
846 | return True
847 | except subprocess.SubprocessError as e:
848 | print_error(f"Failed to install PyTorch for Windows: {e}")
849 | print_warning("You may need to manually install PyTorch using instructions from https://pytorch.org/get-started/locally/")
850 | return False
851 |
852 | def detect_storage_backend_compatibility(system_info, gpu_info):
853 | """Detect which storage backends are compatible with the current environment."""
854 | print_step("3a", "Analyzing storage backend compatibility")
855 |
856 | compatibility = {
857 | "chromadb": {"supported": True, "issues": [], "recommendation": "legacy"},
858 | "sqlite_vec": {"supported": True, "issues": [], "recommendation": "default"}
859 | }
860 |
861 | # Check ChromaDB compatibility issues
862 | chromadb_issues = []
863 |
864 | # macOS Intel compatibility issues
865 | if system_info["is_macos"] and system_info["is_x86"]:
866 | chromadb_issues.append("ChromaDB has known installation issues on older macOS Intel systems")
867 | chromadb_issues.append("May require specific dependency versions")
868 | compatibility["chromadb"]["recommendation"] = "problematic"
869 | compatibility["sqlite_vec"]["recommendation"] = "recommended"
870 |
871 | # Memory constraints
872 | total_memory_gb = 0
873 | try:
874 | import psutil
875 | total_memory_gb = psutil.virtual_memory().total / (1024**3)
876 | except ImportError:
877 | # Fallback memory detection
878 | try:
879 | with open('/proc/meminfo', 'r') as f:
880 | for line in f:
881 | if line.startswith('MemTotal:'):
882 | total_memory_gb = int(line.split()[1]) / (1024**2)
883 | break
884 | except (FileNotFoundError, IOError):
885 | pass
886 |
887 | if total_memory_gb > 0 and total_memory_gb < 4:
888 | chromadb_issues.append(f"System has {total_memory_gb:.1f}GB RAM - ChromaDB may consume significant memory")
889 | compatibility["sqlite_vec"]["recommendation"] = "recommended"
890 |
891 | # Older Python versions
892 | python_version = f"{sys.version_info.major}.{sys.version_info.minor}"
893 | if sys.version_info < (3, 9):
894 | chromadb_issues.append(f"Python {python_version} may have ChromaDB compatibility issues")
895 |
896 | # ARM architecture considerations
897 | if system_info["is_arm"]:
898 | print_info("ARM architecture detected - both backends should work well")
899 |
900 | compatibility["chromadb"]["issues"] = chromadb_issues
901 |
902 | # Print compatibility analysis
903 | print_info("Storage Backend Compatibility Analysis:")
904 |
905 | for backend, info in compatibility.items():
906 | status = "[OK]" if info["supported"] else "[X]"
907 | rec_text = {
908 | "recommended": "[*] RECOMMENDED",
909 | "default": "[+] Standard",
910 | "problematic": "[!] May have issues",
911 | "lightweight": "[-] Lightweight"
912 | }.get(info["recommendation"], "")
913 |
914 | print_info(f" {status} {backend.upper()}: {rec_text}")
915 |
916 | if info["issues"]:
917 | for issue in info["issues"]:
918 | print_info(f" • {issue}")
919 |
920 | return compatibility
921 |
922 | def choose_storage_backend(system_info, gpu_info, args):
923 | """Choose storage backend based on environment and user preferences."""
924 | compatibility = detect_storage_backend_compatibility(system_info, gpu_info)
925 |
926 | # Check if user specified a backend via environment
927 | env_backend = os.environ.get('MCP_MEMORY_STORAGE_BACKEND')
928 | if env_backend:
929 | print_info(f"Using storage backend from environment: {env_backend}")
930 | return env_backend
931 |
932 | # Check for command line argument (we'll add this)
933 | if hasattr(args, 'storage_backend') and args.storage_backend:
934 | print_info(f"Using storage backend from command line: {args.storage_backend}")
935 | return args.storage_backend
936 |
937 | # Auto-select based on compatibility
938 | recommended_backend = None
939 | for backend, info in compatibility.items():
940 | if info["recommendation"] == "recommended":
941 | recommended_backend = backend
942 | break
943 |
944 | if not recommended_backend:
945 | recommended_backend = "sqlite_vec" # Default fallback
946 |
947 | # Interactive selection if no auto-recommendation is clear
948 | if compatibility["chromadb"]["recommendation"] == "problematic":
949 | print_step("3b", "Storage Backend Selection")
950 | print_info("Based on your system, ChromaDB may have installation issues.")
951 | print_info("SQLite-vec is recommended as a lightweight, compatible alternative.")
952 | print_info("")
953 | print_info("Available options:")
954 | print_info(" 1. SQLite-vec (Recommended) - Lightweight, fast, minimal dependencies")
955 | print_info(" 2. ChromaDB (Standard) - Full-featured but may have issues on your system")
956 | print_info(" 3. Auto-detect - Try ChromaDB first, fallback to SQLite-vec if it fails")
957 | print_info("")
958 |
959 | while True:
960 | try:
961 | if args.non_interactive:
962 | print_info("Non-interactive mode: using default storage backend (SQLite-vec)")
963 | choice = "1"
964 | else:
965 | choice = prompt_user_input("Choose storage backend [1-3] (default: 1, press Enter for default): ", "1")
966 |
967 | if choice == "1":
968 | return "sqlite_vec"
969 | elif choice == "2":
970 | return "chromadb"
971 | elif choice == "3":
972 | return "auto_detect"
973 | else:
974 | print_error("Please enter 1, 2, or 3")
975 | except (EOFError, KeyboardInterrupt):
976 | print_info("\nUsing recommended backend: sqlite_vec")
977 | return "sqlite_vec"
978 |
979 | return recommended_backend
980 |
981 | def install_storage_backend(backend, system_info):
982 | """Install the chosen storage backend."""
983 | print_step("3c", f"Installing {backend} storage backend")
984 |
985 | if backend == "sqlite_vec":
986 | # Check extension support before attempting installation
987 | extension_supported, extension_message = check_sqlite_extension_support()
988 | if not extension_supported:
989 | print_warning(f"SQLite extension support not available: {extension_message}")
990 |
991 | # Provide platform-specific guidance
992 | if platform.system().lower() == "darwin": # macOS
993 | print_info("This is common on macOS with system Python.")
994 | print_info("SOLUTIONS:")
995 | print_info(" • Install Python via Homebrew: brew install python")
996 | print_info(" • Use pyenv with extensions: PYTHON_CONFIGURE_OPTS='--enable-loadable-sqlite-extensions' pyenv install 3.12.0")
997 | print_info(" • Switch to ChromaDB backend: --storage-backend chromadb")
998 |
999 | # Ask user what they want to do
1000 | if not system_info.get('non_interactive'):
1001 | print("\n" + "=" * 60)
1002 | print("⚠️ USER INPUT REQUIRED")
1003 | print("=" * 60)
1004 | print("sqlite-vec requires SQLite extension support, which is not available.")
1005 | response = input("Switch to ChromaDB backend instead? (y/N): ").strip().lower()
1006 | print("=" * 60 + "\n")
1007 | if response in ['y', 'yes']:
1008 | print_info("Switching to ChromaDB backend...")
1009 | return install_storage_backend("chromadb", system_info)
1010 | else:
1011 | print_info("Continuing with sqlite-vec installation (may fail at runtime)...")
1012 | else:
1013 | print_info("Non-interactive mode: attempting sqlite-vec installation anyway")
1014 | else:
1015 | print_info("Consider switching to ChromaDB backend for better compatibility")
1016 |
1017 | # Special handling for Python 3.13
1018 | if sys.version_info >= (3, 13):
1019 | print_info("Detected Python 3.13+ - using special installation method for sqlite-vec")
1020 | return install_sqlite_vec_python313(system_info)
1021 |
1022 | # Standard installation for older Python versions
1023 | try:
1024 | print_info("Installing SQLite-vec...")
1025 | _install_python_packages(["sqlite-vec"])
1026 | print_success("SQLite-vec installed successfully")
1027 | return True
1028 | except (subprocess.SubprocessError, OSError, RuntimeError) as e:
1029 | print_error(f"Failed to install SQLite-vec: {e}")
1030 | return False
1031 |
1032 | elif backend == "chromadb":
1033 | print_error("ChromaDB backend has been removed in v8.0.0")
1034 | print_info("Please use one of the supported backends:")
1035 | print_info(" - 'hybrid': Local speed + cloud persistence (recommended)")
1036 | print_info(" - 'sqlite_vec': Fast local storage")
1037 | print_info(" - 'cloudflare': Cloud storage only")
1038 | print_info("\nTo migrate from ChromaDB, run: python scripts/migration/migrate_to_sqlite_vec.py")
1039 | return False
1040 |
1041 | elif backend == "auto_detect":
1042 | print_info("Attempting auto-detection...")
1043 |
1044 | # Try ChromaDB first
1045 | print_info("Trying ChromaDB installation...")
1046 | if install_storage_backend("chromadb", system_info):
1047 | print_success("ChromaDB installed successfully")
1048 | return "chromadb"
1049 |
1050 | print_warning("ChromaDB installation failed, falling back to SQLite-vec...")
1051 | if install_storage_backend("sqlite_vec", system_info):
1052 | print_success("SQLite-vec installed successfully as fallback")
1053 | return "sqlite_vec"
1054 |
1055 | print_error("Both storage backends failed to install")
1056 | return False
1057 |
1058 | return False
1059 |
1060 | def initialize_sqlite_vec_database(storage_path):
1061 | """Initialize SQLite-vec database during installation."""
1062 | try:
1063 | print_info("Initializing SQLite-vec database...")
1064 |
1065 | # Add src to path for imports
1066 | src_path = str(Path(__file__).parent / "src")
1067 | if src_path not in sys.path:
1068 | sys.path.insert(0, src_path)
1069 |
1070 | # Import required modules
1071 | from mcp_memory_service.storage.sqlite_vec import SqliteVecMemoryStorage
1072 | from mcp_memory_service.models.memory import Memory
1073 | from mcp_memory_service.utils.hashing import generate_content_hash
1074 | import asyncio
1075 |
1076 | async def init_db():
1077 | # Create storage instance
1078 | storage = SqliteVecMemoryStorage(str(storage_path))
1079 |
1080 | # Initialize the database
1081 | await storage.initialize()
1082 |
1083 | # Create a test memory to verify the database works
1084 | test_content = "Database initialization successful"
1085 | test_memory = Memory(
1086 | content=test_content,
1087 | content_hash=generate_content_hash(test_content),
1088 | tags=["init", "system"],
1089 | memory_type="system"
1090 | )
1091 |
1092 | # Store test memory
1093 | success, message = await storage.store(test_memory)
1094 | return success, message
1095 |
1096 | # Run initialization
1097 | success, message = asyncio.run(init_db())
1098 |
1099 | if success:
1100 | print_success(f"SQLite-vec database initialized: {storage_path}")
1101 | return True
1102 | else:
1103 | print_warning(f"Database initialization partially failed: {message}")
1104 | return True # Database exists even if test failed
1105 |
1106 | except ImportError as e:
1107 | print_warning(f"Could not initialize database (dependencies missing): {e}")
1108 | print_info("Database will be initialized on first use")
1109 | return True # Not a critical failure
1110 | except Exception as e:
1111 | print_warning(f"Database initialization failed: {e}")
1112 | print_info("Database will be initialized on first use")
1113 | return True # Not a critical failure
1114 |
1115 | def install_sqlite_vec_python313(system_info):
1116 | """Special installation method for sqlite-vec on Python 3.13+."""
1117 | print_info("Python 3.13+ detected - sqlite-vec may not have pre-built wheels yet")
1118 |
1119 | # Check if uv is available
1120 | uv_path = shutil.which("uv")
1121 | use_uv = uv_path is not None
1122 |
1123 | # Try multiple installation strategies
1124 | strategies = []
1125 |
1126 | if use_uv:
1127 | # Strategy 1: Try with uv pip
1128 | strategies.append({
1129 | 'name': 'uv pip install',
1130 | 'cmd': [uv_path, 'pip', 'install', '--python', sys.executable, 'sqlite-vec'],
1131 | 'description': 'Installing with uv package manager'
1132 | })
1133 |
1134 | # Strategy 2: Try with uv pip and no-binary flag
1135 | strategies.append({
1136 | 'name': 'uv pip install (source build)',
1137 | 'cmd': [uv_path, 'pip', 'install', '--python', sys.executable, '--no-binary', ':all:', 'sqlite-vec'],
1138 | 'description': 'Building from source with uv'
1139 | })
1140 |
1141 | # Strategy 3: Standard pip install
1142 | strategies.append({
1143 | 'name': 'pip install',
1144 | 'cmd': [sys.executable, '-m', 'pip', 'install', 'sqlite-vec'],
1145 | 'description': 'Installing with pip'
1146 | })
1147 |
1148 | # Strategy 4: pip with no-binary flag to force compilation
1149 | strategies.append({
1150 | 'name': 'pip install (source build)',
1151 | 'cmd': [sys.executable, '-m', 'pip', 'install', '--no-binary', ':all:', 'sqlite-vec'],
1152 | 'description': 'Building from source with pip'
1153 | })
1154 |
1155 | # Strategy 5: Install from GitHub directly
1156 | strategies.append({
1157 | 'name': 'GitHub install',
1158 | 'cmd': [sys.executable, '-m', 'pip', 'install', 'git+https://github.com/asg017/sqlite-vec.git#subdirectory=python'],
1159 | 'description': 'Installing directly from GitHub'
1160 | })
1161 |
1162 | # Try each strategy
1163 | for i, strategy in enumerate(strategies, 1):
1164 | try:
1165 | print_info(f"Attempt {i}/{len(strategies)}: {strategy['description']}...")
1166 | subprocess.check_call(strategy['cmd'], stderr=subprocess.PIPE)
1167 | print_success(f"SQLite-vec installed successfully using {strategy['name']}")
1168 | return True
1169 | except subprocess.CalledProcessError as e:
1170 | print_warning(f"{strategy['name']} failed: {e}")
1171 | if i < len(strategies):
1172 | print_info("Trying next installation method...")
1173 | continue
1174 | except Exception as e:
1175 | print_warning(f"{strategy['name']} failed with unexpected error: {e}")
1176 | continue
1177 |
1178 | # All strategies failed - provide manual instructions
1179 | print_error("Failed to install sqlite-vec with all automatic methods")
1180 | print_info("")
1181 | print_info("MANUAL INSTALLATION OPTIONS:")
1182 | print_info("")
1183 | print_info("Option 1: Use Python 3.12 (recommended)")
1184 | print_info(" 1. Install Python 3.12: brew install [email protected]")
1185 | print_info(" 2. Create venv: python3.12 -m venv .venv")
1186 | print_info(" 3. Activate: source .venv/bin/activate")
1187 | print_info(" 4. Re-run: python install.py")
1188 | print_info("")
1189 | print_info("Option 2: Install pysqlite3-binary (alternative)")
1190 | print_info(" pip install pysqlite3-binary")
1191 | print_info("")
1192 | print_info("Option 3: Wait for sqlite-vec Python 3.13 support")
1193 | print_info(" Check: https://github.com/asg017/sqlite-vec/issues")
1194 | print_info("")
1195 | print_info("Option 4: Use hybrid or cloudflare backend")
1196 | print_info(" python install.py --storage-backend hybrid")
1197 | print_info("")
1198 |
1199 | # Note: ChromaDB option removed in v8.0.0
1200 |
1201 | return False
1202 |
1203 | def install_uv():
1204 | """Install uv package manager if not already installed."""
1205 | uv_path = shutil.which("uv")
1206 | if uv_path:
1207 | print_info(f"uv is already installed at: {uv_path}")
1208 | return uv_path
1209 |
1210 | print_info("Installing uv package manager...")
1211 |
1212 | try:
1213 | # Determine the installation directory
1214 | if platform.system() == 'Windows':
1215 | # On Windows, install to user's AppData/Local
1216 | install_dir = os.path.join(os.environ.get('LOCALAPPDATA', ''), 'Programs', 'uv')
1217 | else:
1218 | # On Unix-like systems, install to ~/.local/bin
1219 | install_dir = os.path.expanduser("~/.local/bin")
1220 |
1221 | # Create installation directory if it doesn't exist
1222 | os.makedirs(install_dir, exist_ok=True)
1223 |
1224 | # Download and install uv
1225 | if platform.system() == 'Windows':
1226 | # Windows installation
1227 | install_script = "powershell -c \"irm https://astral.sh/uv/install.ps1 | iex\""
1228 | subprocess.check_call(install_script, shell=True)
1229 | else:
1230 | # Unix-like installation
1231 | install_script = "curl -LsSf https://astral.sh/uv/install.sh | sh"
1232 | subprocess.check_call(install_script, shell=True)
1233 |
1234 | # Check if uv was installed successfully
1235 | uv_path = shutil.which("uv")
1236 | if not uv_path:
1237 | # Try common installation paths
1238 | if platform.system() == 'Windows':
1239 | possible_paths = [
1240 | os.path.join(install_dir, 'uv.exe'),
1241 | os.path.join(os.environ.get('USERPROFILE', ''), '.cargo', 'bin', 'uv.exe')
1242 | ]
1243 | else:
1244 | possible_paths = [
1245 | os.path.join(install_dir, 'uv'),
1246 | os.path.expanduser("~/.cargo/bin/uv")
1247 | ]
1248 |
1249 | for path in possible_paths:
1250 | if os.path.exists(path):
1251 | uv_path = path
1252 | break
1253 |
1254 | if uv_path:
1255 | print_success(f"uv installed successfully at: {uv_path}")
1256 | return uv_path
1257 | else:
1258 | print_error("uv installation completed but executable not found in PATH")
1259 | print_info("You may need to add the installation directory to your PATH")
1260 | return None
1261 |
1262 | except subprocess.CalledProcessError as e:
1263 | print_error(f"Failed to install uv: {e}")
1264 | return None
1265 | except Exception as e:
1266 | print_error(f"Unexpected error installing uv: {e}")
1267 | return None
1268 |
1269 | def _setup_installer_command():
1270 | """Set up the installer command prefix (pip or uv).
1271 |
1272 | Returns:
1273 | tuple: (installer_cmd: list, uv_path: str or None)
1274 | """
1275 | # Detect if pip is available
1276 | pip_available = False
1277 | try:
1278 | subprocess.check_call([sys.executable, '-m', 'pip', '--version'],
1279 | stdout=subprocess.DEVNULL,
1280 | stderr=subprocess.DEVNULL)
1281 | pip_available = True
1282 | except subprocess.SubprocessError:
1283 | pip_available = False
1284 |
1285 | # Try to install uv if pip is not available
1286 | if not pip_available:
1287 | print_info("pip not found, attempting to install uv package manager...")
1288 | uv_path = install_uv()
1289 | else:
1290 | # Check if uv is already available
1291 | uv_path = shutil.which("uv")
1292 | if uv_path:
1293 | print_info(f"uv package manager found at: {uv_path}")
1294 | else:
1295 | print_info("uv package manager not found (will use pip for installation)")
1296 |
1297 | # Store the uv path globally for config generation
1298 | global UV_EXECUTABLE_PATH
1299 | UV_EXECUTABLE_PATH = uv_path
1300 |
1301 | # Decide installer command prefix
1302 | if pip_available:
1303 | installer_cmd = [sys.executable, '-m', 'pip']
1304 | elif uv_path:
1305 | installer_cmd = [uv_path, 'pip']
1306 | print_info(f"Using uv for installation: {uv_path}")
1307 | else:
1308 | print_error("Neither pip nor uv could be found or installed. Cannot install packages.")
1309 | return None, None
1310 |
1311 | return installer_cmd, uv_path
1312 |
1313 |
1314 | def _configure_storage_and_gpu(args):
1315 | """Configure storage backend and GPU environment variables.
1316 |
1317 | Args:
1318 | args: Parsed command line arguments
1319 |
1320 | Returns:
1321 | tuple: (env: dict, system_info: dict, gpu_info: dict, chosen_backend: str)
1322 | """
1323 | env = os.environ.copy()
1324 |
1325 | # Get system and GPU info
1326 | system_info = detect_system()
1327 | gpu_info = detect_gpu()
1328 |
1329 | # Choose and install storage backend
1330 | chosen_backend = choose_storage_backend(system_info, gpu_info, args)
1331 | if chosen_backend == "auto_detect":
1332 | # Handle auto-detection case
1333 | actual_backend = install_storage_backend(chosen_backend, system_info)
1334 | if not actual_backend:
1335 | print_error("Failed to install any storage backend")
1336 | return None, None, None, None
1337 | chosen_backend = actual_backend
1338 | else:
1339 | # Install the chosen backend
1340 | if not install_storage_backend(chosen_backend, system_info):
1341 | print_error(f"Failed to install {chosen_backend} storage backend")
1342 | return None, None, None, None
1343 |
1344 | # Set environment variable for chosen backend
1345 | if chosen_backend == "sqlite_vec":
1346 | env['MCP_MEMORY_STORAGE_BACKEND'] = 'sqlite_vec'
1347 | os.environ['MCP_MEMORY_STORAGE_BACKEND'] = 'sqlite_vec'
1348 | print_info("Configured to use SQLite-vec storage backend")
1349 | else:
1350 | env['MCP_MEMORY_STORAGE_BACKEND'] = 'chromadb'
1351 | os.environ['MCP_MEMORY_STORAGE_BACKEND'] = 'chromadb'
1352 | print_info("Configured to use ChromaDB storage backend")
1353 |
1354 | # Set environment variables based on detected GPU
1355 | if gpu_info.get("has_cuda"):
1356 | print_info("Configuring for CUDA installation")
1357 | elif gpu_info.get("has_rocm"):
1358 | print_info("Configuring for ROCm installation")
1359 | env['MCP_MEMORY_USE_ROCM'] = '1'
1360 | elif gpu_info.get("has_mps"):
1361 | print_info("Configuring for Apple Silicon MPS installation")
1362 | env['PYTORCH_ENABLE_MPS_FALLBACK'] = '1'
1363 | elif gpu_info.get("has_directml"):
1364 | print_info("Configuring for DirectML installation")
1365 | env['MCP_MEMORY_USE_DIRECTML'] = '1'
1366 | else:
1367 | print_info("Configuring for CPU-only installation")
1368 | env['MCP_MEMORY_USE_ONNX'] = '1'
1369 |
1370 | return env, system_info, gpu_info, chosen_backend
1371 |
1372 |
1373 | def _handle_pytorch_setup(args, system_info, gpu_info, env):
1374 | """Handle PyTorch installation logic.
1375 |
1376 | Args:
1377 | args: Parsed command line arguments
1378 | system_info: System information dict
1379 | gpu_info: GPU information dict
1380 | env: Environment variables dict
1381 |
1382 | Returns:
1383 | tuple: (using_homebrew_pytorch: bool, pytorch_installed: bool, env: dict)
1384 | """
1385 | # Check for Homebrew PyTorch installation
1386 | using_homebrew_pytorch = False
1387 | if system_info.get("has_homebrew_pytorch"):
1388 | print_info(f"Using existing Homebrew PyTorch installation (version: {system_info.get('homebrew_pytorch_version')})")
1389 | using_homebrew_pytorch = True
1390 | # Set the environment variable to use ONNX for embeddings
1391 | env['MCP_MEMORY_USE_ONNX'] = '1'
1392 | pytorch_installed = True
1393 | else:
1394 | # Handle platform-specific PyTorch installation
1395 | pytorch_installed = install_pytorch_platform_specific(system_info, gpu_info, args)
1396 | if not pytorch_installed:
1397 | print_warning("Platform-specific PyTorch installation failed, but will continue with package installation")
1398 |
1399 | return using_homebrew_pytorch, pytorch_installed, env
1400 |
1401 |
1402 | def _should_use_onnx_installation(system_info, args, using_homebrew_pytorch):
1403 | """Determine if ONNX installation path should be used.
1404 |
1405 | Args:
1406 | system_info: System information dict
1407 | args: Parsed command line arguments
1408 | using_homebrew_pytorch: Whether Homebrew PyTorch is being used
1409 |
1410 | Returns:
1411 | bool: True if ONNX installation path should be used
1412 | """
1413 | return (system_info["is_macos"] and system_info["is_x86"] and
1414 | (sys.version_info >= (3, 13) or using_homebrew_pytorch or args.skip_pytorch))
1415 |
1416 |
1417 | def _install_with_onnx(installer_cmd, install_mode, chosen_backend, env, using_homebrew_pytorch):
1418 | """Install using SQLite-vec + ONNX configuration.
1419 |
1420 | Args:
1421 | installer_cmd: Command prefix for installer
1422 | install_mode: Installation mode flags (-e for dev mode)
1423 | chosen_backend: Storage backend name
1424 | env: Environment variables
1425 | using_homebrew_pytorch: Whether using Homebrew PyTorch
1426 |
1427 | Returns:
1428 | bool: True if installation succeeded, False otherwise
1429 | """
1430 | try:
1431 | # Print installation context
1432 | if using_homebrew_pytorch:
1433 | print_info("Using Homebrew PyTorch - installing with SQLite-vec + ONNX configuration")
1434 | elif install_mode:
1435 | print_info("Skipping PyTorch installation - using SQLite-vec + ONNX configuration")
1436 | else:
1437 | print_info("Using Python 3.13+ on macOS Intel - using SQLite-vec + ONNX configuration")
1438 |
1439 | # Install without ML dependencies
1440 | cmd = installer_cmd + ['install']
1441 | if len(installer_cmd) >= 2 and Path(installer_cmd[0]).stem == "uv" and installer_cmd[1] == "pip":
1442 | cmd += ['--python', sys.executable]
1443 | cmd += ['--no-deps'] + install_mode + ['.']
1444 | print_info(f"Running: {' '.join(cmd)}")
1445 | subprocess.check_call(cmd, env=env)
1446 |
1447 | # Install core dependencies except torch/sentence-transformers
1448 | print_info("Installing core dependencies (without ML libraries for compatibility)...")
1449 | print_info("Note: First run will download embedding models automatically (~25MB)")
1450 |
1451 | # Build dependency list
1452 | dependencies = [
1453 | "mcp>=1.0.0,<2.0.0",
1454 | "onnxruntime>=1.14.1", # ONNX runtime for embeddings
1455 | "tokenizers>=0.20.0", # Required for ONNX tokenization
1456 | "httpx>=0.24.0", # For downloading ONNX models
1457 | "aiohttp>=3.8.0" # Required for MCP server functionality
1458 | ]
1459 |
1460 | # Add backend-specific dependencies
1461 | if chosen_backend == "sqlite_vec":
1462 | dependencies.append("sqlite-vec>=0.1.0")
1463 | else:
1464 | dependencies.append("chromadb==0.5.23")
1465 | dependencies.append("tokenizers==0.20.3")
1466 |
1467 | # Install dependencies
1468 | _install_python_packages(dependencies)
1469 |
1470 | # Configure ONNX runtime
1471 | print_info("Configuring to use ONNX runtime for inference without PyTorch...")
1472 | env['MCP_MEMORY_USE_ONNX'] = '1'
1473 | os.environ['MCP_MEMORY_USE_ONNX'] = '1'
1474 |
1475 | # Switch to SQLite-vec if needed
1476 | if chosen_backend != "sqlite_vec":
1477 | print_info("Switching to SQLite-vec backend for better compatibility")
1478 | env['MCP_MEMORY_STORAGE_BACKEND'] = 'sqlite_vec'
1479 | os.environ['MCP_MEMORY_STORAGE_BACKEND'] = 'sqlite_vec'
1480 |
1481 | print_success("MCP Memory Service installed successfully (SQLite-vec + ONNX)")
1482 |
1483 | if using_homebrew_pytorch:
1484 | print_info("Using Homebrew PyTorch installation for embedding generation")
1485 | print_info("Environment configured to use SQLite-vec backend and ONNX runtime")
1486 | else:
1487 | print_warning("ML libraries (PyTorch/sentence-transformers) were not installed due to compatibility issues")
1488 | print_info("The service will use ONNX runtime for inference instead")
1489 |
1490 | return True
1491 | except (subprocess.SubprocessError, OSError, RuntimeError) as e:
1492 | print_error(f"Failed to install with ONNX approach: {e}")
1493 | return False
1494 |
1495 |
1496 | def _install_standard(installer_cmd, install_mode, env):
1497 | """Perform standard pip/uv installation.
1498 |
1499 | Args:
1500 | installer_cmd: Command prefix for installer
1501 | install_mode: Installation mode flags (-e for dev mode)
1502 | env: Environment variables
1503 |
1504 | Returns:
1505 | bool: True if installation succeeded, False otherwise
1506 | """
1507 | try:
1508 | cmd = installer_cmd + ['install']
1509 | if len(installer_cmd) >= 2 and Path(installer_cmd[0]).stem == "uv" and installer_cmd[1] == "pip":
1510 | cmd += ['--python', sys.executable]
1511 | cmd += install_mode + ['.']
1512 | print_info(f"Running: {' '.join(cmd)}")
1513 | subprocess.check_call(cmd, env=env)
1514 | print_success("MCP Memory Service installed successfully")
1515 | return True
1516 | except subprocess.SubprocessError as e:
1517 | print_error(f"Failed to install MCP Memory Service: {e}")
1518 | return False
1519 |
1520 |
1521 | def _handle_installation_failure(system_info):
1522 | """Provide helpful guidance when installation fails.
1523 |
1524 | Args:
1525 | system_info: System information dict
1526 | """
1527 | # Special handling for macOS with compatibility issues
1528 | if system_info["is_macos"] and system_info["is_x86"]:
1529 | print_warning("Installation on macOS Intel is challenging")
1530 | print_info("Try manually installing with:")
1531 | print_info("1. pip install --no-deps .")
1532 | print_info("2. pip install sqlite-vec>=0.1.0 mcp>=1.0.0,<2.0.0 onnxruntime>=1.14.1 aiohttp>=3.8.0")
1533 | print_info("3. export MCP_MEMORY_USE_ONNX=1")
1534 | print_info("4. export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec")
1535 |
1536 | if system_info.get("has_homebrew_pytorch"):
1537 | print_info("Homebrew PyTorch was detected but installation still failed.")
1538 | print_info("Try running: python install.py --storage-backend sqlite_vec --skip-pytorch")
1539 |
1540 |
1541 | def install_package(args):
1542 | """Install the package with the appropriate dependencies, supporting pip or uv."""
1543 | print_step("3", "Installing MCP Memory Service")
1544 |
1545 | # Determine installation mode
1546 | install_mode = []
1547 | if args.dev:
1548 | install_mode = ['-e']
1549 | print_info("Installing in development mode")
1550 |
1551 | # Setup installer command (pip or uv)
1552 | installer_cmd, uv_path = _setup_installer_command()
1553 | if installer_cmd is None:
1554 | return False
1555 |
1556 | # Configure storage backend and GPU
1557 | env, system_info, gpu_info, chosen_backend = _configure_storage_and_gpu(args)
1558 | if env is None:
1559 | return False
1560 |
1561 | # Handle PyTorch setup
1562 | using_homebrew_pytorch, pytorch_installed, env = _handle_pytorch_setup(
1563 | args, system_info, gpu_info, env
1564 | )
1565 |
1566 | # Determine installation path
1567 | if _should_use_onnx_installation(system_info, args, using_homebrew_pytorch):
1568 | # Use ONNX-based installation for macOS Intel with Python 3.13+ or Homebrew PyTorch
1569 | success = _install_with_onnx(installer_cmd, install_mode, chosen_backend, env, using_homebrew_pytorch)
1570 | if success:
1571 | return True
1572 | # Fall through to standard installation if ONNX approach fails
1573 |
1574 | # Standard installation path
1575 | success = _install_standard(installer_cmd, install_mode, env)
1576 | if not success:
1577 | _handle_installation_failure(system_info)
1578 |
1579 | return success
1580 |
1581 | def configure_paths(args):
1582 | """Configure paths for the MCP Memory Service."""
1583 | print_step("4", "Configuring paths")
1584 |
1585 | # Get system info
1586 | system_info = detect_system()
1587 |
1588 | # Determine home directory
1589 | home_dir = Path.home()
1590 |
1591 | # Determine base directory based on platform
1592 | if platform.system() == 'Darwin': # macOS
1593 | base_dir = home_dir / 'Library' / 'Application Support' / 'mcp-memory'
1594 | elif platform.system() == 'Windows': # Windows
1595 | base_dir = Path(os.environ.get('LOCALAPPDATA', '')) / 'mcp-memory'
1596 | else: # Linux and others
1597 | base_dir = home_dir / '.local' / 'share' / 'mcp-memory'
1598 |
1599 | # Create directories based on storage backend
1600 | storage_backend = args.storage_backend or os.environ.get('MCP_MEMORY_STORAGE_BACKEND', 'sqlite_vec')
1601 |
1602 | if storage_backend == 'sqlite_vec':
1603 | # For sqlite-vec, we need a database file path
1604 | storage_path = args.chroma_path or (base_dir / 'sqlite_vec.db')
1605 | storage_dir = storage_path.parent if storage_path.name.endswith('.db') else storage_path
1606 | backups_path = args.backups_path or (base_dir / 'backups')
1607 |
1608 | try:
1609 | os.makedirs(storage_dir, exist_ok=True)
1610 | os.makedirs(backups_path, exist_ok=True)
1611 | print_info(f"SQLite-vec database: {storage_path}")
1612 | print_info(f"Backups path: {backups_path}")
1613 |
1614 | # Test if directory is writable
1615 | test_file = os.path.join(storage_dir, '.write_test')
1616 | with open(test_file, 'w') as f:
1617 | f.write('test')
1618 | os.remove(test_file)
1619 |
1620 | # Initialize the SQLite-vec database
1621 | if not initialize_sqlite_vec_database(storage_path):
1622 | print_warning("SQLite-vec database initialization failed, but continuing...")
1623 |
1624 | except Exception as e:
1625 | print_error(f"Failed to configure SQLite-vec paths: {e}")
1626 | return False
1627 | else:
1628 | # ChromaDB configuration
1629 | chroma_path = args.chroma_path or (base_dir / 'chroma_db')
1630 | backups_path = args.backups_path or (base_dir / 'backups')
1631 | storage_path = chroma_path
1632 |
1633 | try:
1634 | os.makedirs(chroma_path, exist_ok=True)
1635 | os.makedirs(backups_path, exist_ok=True)
1636 | print_info(f"ChromaDB path: {chroma_path}")
1637 | print_info(f"Backups path: {backups_path}")
1638 |
1639 | # Test if directories are writable
1640 | test_file = os.path.join(chroma_path, '.write_test')
1641 | with open(test_file, 'w') as f:
1642 | f.write('test')
1643 | os.remove(test_file)
1644 | except Exception as e:
1645 | print_error(f"Failed to configure ChromaDB paths: {e}")
1646 | return False
1647 |
1648 | # Test backups directory for both backends
1649 | try:
1650 | test_file = os.path.join(backups_path, '.write_test')
1651 | with open(test_file, 'w') as f:
1652 | f.write('test')
1653 | os.remove(test_file)
1654 | print_success("Storage directories created and are writable")
1655 | except Exception as e:
1656 | print_error(f"Failed to test backups directory: {e}")
1657 | return False
1658 |
1659 | # Configure Claude Desktop if available
1660 | claude_config_paths = [
1661 | home_dir / 'Library' / 'Application Support' / 'Claude' / 'claude_desktop_config.json',
1662 | home_dir / '.config' / 'Claude' / 'claude_desktop_config.json',
1663 | Path('claude_config') / 'claude_desktop_config.json'
1664 | ]
1665 |
1666 | for config_path in claude_config_paths:
1667 | if config_path.exists():
1668 | print_info(f"Found Claude Desktop config at {config_path}")
1669 | try:
1670 | import json
1671 | with open(config_path, 'r') as f:
1672 | config = json.load(f)
1673 |
1674 | # Update or add MCP Memory configuration
1675 | if 'mcpServers' not in config:
1676 | config['mcpServers'] = {}
1677 |
1678 | # Create environment configuration based on storage backend
1679 | env_config = {
1680 | "MCP_MEMORY_BACKUPS_PATH": str(backups_path),
1681 | "MCP_MEMORY_STORAGE_BACKEND": storage_backend
1682 | }
1683 |
1684 | if storage_backend == 'sqlite_vec':
1685 | env_config["MCP_MEMORY_SQLITE_PATH"] = str(storage_path)
1686 | else:
1687 | env_config["MCP_MEMORY_CHROMA_PATH"] = str(storage_path)
1688 |
1689 | # Create or update the memory server configuration
1690 | if system_info["is_windows"]:
1691 | # Use the memory_wrapper.py script for Windows
1692 | script_path = os.path.abspath("memory_wrapper.py")
1693 | config['mcpServers']['memory'] = {
1694 | "command": "python",
1695 | "args": [script_path],
1696 | "env": env_config
1697 | }
1698 | print_info("Configured Claude Desktop to use memory_wrapper.py for Windows")
1699 | else:
1700 | # Use the standard configuration for other platforms
1701 | config['mcpServers']['memory'] = {
1702 | "command": UV_EXECUTABLE_PATH or "uv",
1703 | "args": [
1704 | "--directory",
1705 | os.path.abspath("."),
1706 | "run",
1707 | "memory"
1708 | ],
1709 | "env": env_config
1710 | }
1711 |
1712 | with open(config_path, 'w') as f:
1713 | json.dump(config, f, indent=2)
1714 |
1715 | print_success("Updated Claude Desktop configuration")
1716 | except Exception as e:
1717 | print_warning(f"Failed to update Claude Desktop configuration: {e}")
1718 | break
1719 |
1720 | return True
1721 |
1722 | def verify_installation():
1723 | """Verify the installation."""
1724 | print_step("5", "Verifying installation")
1725 |
1726 | # Get system info
1727 | system_info = detect_system()
1728 |
1729 | # Check if the package is installed
1730 | try:
1731 | import mcp_memory_service
1732 | print_success(f"MCP Memory Service is installed: {mcp_memory_service.__file__}")
1733 | except ImportError:
1734 | print_error("MCP Memory Service is not installed correctly")
1735 | return False
1736 |
1737 | # Check if the entry point is available
1738 | memory_script = shutil.which('memory')
1739 | if memory_script:
1740 | print_success(f"Memory command is available: {memory_script}")
1741 | else:
1742 | print_warning("Memory command is not available in PATH")
1743 |
1744 | # Check storage backend installation
1745 | storage_backend = os.environ.get('MCP_MEMORY_STORAGE_BACKEND', 'sqlite_vec')
1746 |
1747 | if storage_backend == 'sqlite_vec':
1748 | try:
1749 | import sqlite_vec
1750 | print_success(f"SQLite-vec is installed: {sqlite_vec.__version__}")
1751 | except ImportError:
1752 | print_error("SQLite-vec is not installed correctly")
1753 | return False
1754 | elif storage_backend == 'chromadb':
1755 | try:
1756 | import chromadb
1757 | print_success(f"ChromaDB is installed: {chromadb.__version__}")
1758 | except ImportError:
1759 | print_error("ChromaDB is not installed correctly")
1760 | return False
1761 |
1762 | # Check for ONNX runtime
1763 | try:
1764 | import onnxruntime
1765 | print_success(f"ONNX Runtime is installed: {onnxruntime.__version__}")
1766 | use_onnx = os.environ.get('MCP_MEMORY_USE_ONNX', '').lower() in ('1', 'true', 'yes')
1767 | if use_onnx:
1768 | print_info("Environment configured to use ONNX runtime for embeddings")
1769 | # Check for tokenizers (required for ONNX)
1770 | try:
1771 | import tokenizers
1772 | print_success(f"Tokenizers is installed: {tokenizers.__version__}")
1773 | except ImportError:
1774 | print_warning("Tokenizers not installed but required for ONNX embeddings")
1775 | print_info("Install with: pip install tokenizers>=0.20.0")
1776 | except ImportError:
1777 | print_warning("ONNX Runtime is not installed. This is recommended for PyTorch-free operation.")
1778 | print_info("Install with: pip install onnxruntime>=1.14.1 tokenizers>=0.20.0")
1779 |
1780 | # Check for Homebrew PyTorch
1781 | homebrew_pytorch = False
1782 | if system_info.get("has_homebrew_pytorch"):
1783 | homebrew_pytorch = True
1784 | print_success(f"Homebrew PyTorch detected: {system_info.get('homebrew_pytorch_version')}")
1785 | print_info("Using system-installed PyTorch instead of pip version")
1786 |
1787 | # Check ML dependencies as optional
1788 | pytorch_installed = False
1789 | try:
1790 | import torch
1791 | pytorch_installed = True
1792 | print_info(f"PyTorch is installed: {torch.__version__}")
1793 |
1794 | # Check for CUDA
1795 | if torch.cuda.is_available():
1796 | print_success(f"CUDA is available: {torch.version.cuda}")
1797 | print_info(f"GPU: {torch.cuda.get_device_name(0)}")
1798 | # Check for MPS (Apple Silicon)
1799 | elif hasattr(torch.backends, 'mps') and torch.backends.mps.is_available():
1800 | print_success("MPS (Metal Performance Shaders) is available")
1801 | # Check for DirectML
1802 | else:
1803 | try:
1804 | import torch_directml
1805 | version = getattr(torch_directml, '__version__', 'Unknown version')
1806 | print_success(f"DirectML is available: {version}")
1807 | except ImportError:
1808 | print_info("Using CPU-only PyTorch")
1809 |
1810 | # For macOS Intel, verify compatibility with sentence-transformers
1811 | if system_info["is_macos"] and system_info["is_x86"]:
1812 | torch_version = torch.__version__.split('.')
1813 | major, minor = int(torch_version[0]), int(torch_version[1])
1814 |
1815 | print_info(f"Verifying torch compatibility on macOS Intel (v{major}.{minor})")
1816 | if major < 1 or (major == 1 and minor < 6):
1817 | print_warning(f"PyTorch version {torch.__version__} may be too old for sentence-transformers")
1818 | elif major > 2 or (major == 2 and minor > 1):
1819 | print_warning(f"PyTorch version {torch.__version__} may be too new for sentence-transformers 2.2.2")
1820 | print_info("If you encounter issues, try downgrading to torch 2.0.1")
1821 |
1822 | except ImportError:
1823 | print_warning("PyTorch is not installed via pip. This is okay for basic operation with SQLite-vec backend.")
1824 | if homebrew_pytorch:
1825 | print_info("Using Homebrew PyTorch installation instead of pip version")
1826 | else:
1827 | print_info("For full functionality including embedding generation, install with: pip install 'mcp-memory-service[ml]'")
1828 | pytorch_installed = False
1829 |
1830 | # Check if sentence-transformers is installed correctly (only if PyTorch is installed)
1831 | if pytorch_installed or homebrew_pytorch:
1832 | try:
1833 | import sentence_transformers
1834 | print_success(f"sentence-transformers is installed: {sentence_transformers.__version__}")
1835 |
1836 | if pytorch_installed:
1837 | # Verify compatibility between torch and sentence-transformers
1838 | st_version = sentence_transformers.__version__.split('.')
1839 | torch_version = torch.__version__.split('.')
1840 |
1841 | st_major, st_minor = int(st_version[0]), int(st_version[1])
1842 | torch_major, torch_minor = int(torch_version[0]), int(torch_version[1])
1843 |
1844 | # Specific compatibility check for macOS Intel
1845 | if system_info["is_macos"] and system_info["is_x86"]:
1846 | if st_major >= 3 and (torch_major < 1 or (torch_major == 1 and torch_minor < 11)):
1847 | print_warning(f"sentence-transformers {sentence_transformers.__version__} requires torch>=1.11.0")
1848 | print_info("This may cause runtime issues - consider downgrading sentence-transformers to 2.2.2")
1849 |
1850 | # Verify by trying to load a model (minimal test)
1851 | try:
1852 | print_info("Testing sentence-transformers model loading...")
1853 | test_model = sentence_transformers.SentenceTransformer('paraphrase-MiniLM-L3-v2')
1854 | print_success("Successfully loaded test model")
1855 | except Exception as e:
1856 | print_warning(f"Model loading test failed: {e}")
1857 | print_warning("There may be compatibility issues between PyTorch and sentence-transformers")
1858 |
1859 | except ImportError:
1860 | print_warning("sentence-transformers is not installed. This is okay for basic operation with SQLite-vec backend.")
1861 | print_info("For full functionality including embedding generation, install with: pip install 'mcp-memory-service[ml]'")
1862 |
1863 | # Check for SQLite-vec + ONNX configuration
1864 | if storage_backend == 'sqlite_vec' and os.environ.get('MCP_MEMORY_USE_ONNX', '').lower() in ('1', 'true', 'yes'):
1865 | print_success("SQLite-vec + ONNX configuration is set up correctly")
1866 | print_info("This configuration can run without PyTorch dependency")
1867 |
1868 | try:
1869 | # Import the key components to verify installation
1870 | from mcp_memory_service.storage.sqlite_vec import SqliteVecMemoryStorage
1871 | from mcp_memory_service.models.memory import Memory
1872 | print_success("SQLite-vec + ONNX components loaded successfully")
1873 |
1874 | # Check paths
1875 | sqlite_path = os.environ.get('MCP_MEMORY_SQLITE_PATH', '')
1876 | if sqlite_path:
1877 | print_info(f"SQLite-vec database path: {sqlite_path}")
1878 | else:
1879 | print_warning("MCP_MEMORY_SQLITE_PATH is not set")
1880 |
1881 | backups_path = os.environ.get('MCP_MEMORY_BACKUPS_PATH', '')
1882 | if backups_path:
1883 | print_info(f"Backups path: {backups_path}")
1884 | else:
1885 | print_warning("MCP_MEMORY_BACKUPS_PATH is not set")
1886 |
1887 | except ImportError as e:
1888 | print_error(f"Failed to import SQLite-vec components: {e}")
1889 | return False
1890 |
1891 | # Check if MCP Memory Service package is installed correctly
1892 | try:
1893 | import mcp_memory_service
1894 | print_success(f"MCP Memory Service is installed correctly")
1895 | return True
1896 | except ImportError:
1897 | print_error("MCP Memory Service is not installed correctly")
1898 | return False
1899 |
1900 | def is_legacy_hardware(system_info):
1901 | """Detect legacy hardware that needs special handling."""
1902 | if system_info["is_macos"] and system_info["is_x86"]:
1903 | # Check if it's likely an older Intel Mac
1904 | # This is a heuristic based on common patterns
1905 | try:
1906 | # Try to get more detailed system info
1907 | print_info("Detecting hardware configuration (this may take a moment)...")
1908 | result = subprocess.run(
1909 | ['system_profiler', 'SPHardwareDataType'],
1910 | capture_output=True, text=True, timeout=30
1911 | )
1912 | if result.returncode == 0:
1913 | output = result.stdout.lower()
1914 | # Look for indicators of older hardware
1915 | if any(year in output for year in ['2013', '2014', '2015', '2016', '2017']):
1916 | return True
1917 | except (subprocess.SubprocessError, subprocess.TimeoutExpired):
1918 | pass
1919 |
1920 | return False
1921 |
1922 | def detect_memory_gb():
1923 | """Detect available system memory in GB."""
1924 | try:
1925 | import psutil
1926 | return psutil.virtual_memory().total / (1024**3)
1927 | except ImportError:
1928 | # Fallback detection methods
1929 | try:
1930 | if platform.system() == "Darwin": # macOS
1931 | result = subprocess.run(
1932 | ['sysctl', '-n', 'hw.memsize'],
1933 | capture_output=True, text=True
1934 | )
1935 | if result.returncode == 0:
1936 | return int(result.stdout.strip()) / (1024**3)
1937 | elif platform.system() == "Linux":
1938 | with open('/proc/meminfo', 'r') as f:
1939 | for line in f:
1940 | if line.startswith('MemTotal:'):
1941 | return int(line.split()[1]) / (1024**2)
1942 | except (subprocess.SubprocessError, FileNotFoundError, IOError):
1943 | pass
1944 |
1945 | return 0 # Unknown
1946 |
1947 | def recommend_backend_intelligent(system_info, gpu_info, memory_gb, args):
1948 | """Intelligent backend recommendation based on hardware analysis."""
1949 | # User explicitly chose backend
1950 | if hasattr(args, 'storage_backend') and args.storage_backend:
1951 | return args.storage_backend
1952 |
1953 | # Legacy hardware mode
1954 | if args.legacy_hardware or is_legacy_hardware(system_info):
1955 | print_info("[DETECT] Legacy hardware detected - optimizing for compatibility")
1956 | return "sqlite_vec"
1957 |
1958 | # Server mode
1959 | if args.server_mode:
1960 | print_info("[SERVER] Server mode - selecting lightweight backend")
1961 | return "sqlite_vec"
1962 |
1963 | # Low memory systems
1964 | if memory_gb > 0 and memory_gb < 4:
1965 | print_info(f"[MEMORY] Limited memory detected ({memory_gb:.1f}GB) - using efficient backend")
1966 | return "sqlite_vec"
1967 |
1968 | # macOS Intel with known ChromaDB issues
1969 | if system_info["is_macos"] and system_info["is_x86"]:
1970 | compatibility = detect_storage_backend_compatibility(system_info, gpu_info)
1971 | if compatibility["chromadb"]["recommendation"] == "problematic":
1972 | print_info("[WARNING] macOS Intel compatibility issues detected - using SQLite-vec")
1973 | # Set environment variables for consistent backend selection
1974 | os.environ['MCP_MEMORY_STORAGE_BACKEND'] = 'sqlite_vec'
1975 | # For Intel Macs, also enable ONNX runtime for better compatibility
1976 | if system_info.get("has_homebrew_pytorch") or sys.version_info >= (3, 13):
1977 | print_info("[CONFIG] Enabling ONNX runtime for better compatibility")
1978 | os.environ['MCP_MEMORY_USE_ONNX'] = '1'
1979 | return "sqlite_vec"
1980 |
1981 | # Hardware with GPU acceleration - SQLite-vec still recommended for simplicity
1982 | if gpu_info.get("has_cuda") or gpu_info.get("has_mps") or gpu_info.get("has_directml"):
1983 | gpu_type = "CUDA" if gpu_info.get("has_cuda") else "MPS" if gpu_info.get("has_mps") else "DirectML"
1984 | print_info(f"[GPU] {gpu_type} acceleration detected - SQLite-vec recommended for simplicity and speed")
1985 | return "sqlite_vec"
1986 |
1987 | # High memory systems without GPU - explain the choice
1988 | if memory_gb >= 16:
1989 | print_info("[CHOICE] High-memory system without GPU detected")
1990 | print_info(" -> SQLite-vec: Faster startup, simpler setup, no network dependencies")
1991 | print_info(" -> ChromaDB: Legacy option, being deprecated in v6.0.0")
1992 | print_info(" -> Defaulting to SQLite-vec (recommended for all users)")
1993 | return "sqlite_vec"
1994 |
1995 | # Default recommendation for most users
1996 | print_info("[DEFAULT] Recommending SQLite-vec for optimal user experience")
1997 | return "sqlite_vec"
1998 |
1999 | def show_detailed_help():
2000 | """Show detailed hardware-specific installation help."""
2001 | print_header("MCP Memory Service - Hardware-Specific Installation Guide")
2002 |
2003 | # Detect current system
2004 | system_info = detect_system()
2005 | gpu_info = detect_gpu()
2006 | memory_gb = detect_memory_gb()
2007 |
2008 | # Check SQLite extension support
2009 | extension_supported, extension_message = check_sqlite_extension_support()
2010 |
2011 | print_info("Your System Configuration:")
2012 | print_info(f" Platform: {platform.system()} {platform.release()}")
2013 | print_info(f" Architecture: {platform.machine()}")
2014 | print_info(f" Python: {sys.version_info.major}.{sys.version_info.minor}")
2015 | if memory_gb > 0:
2016 | print_info(f" Memory: {memory_gb:.1f}GB")
2017 | print_info(f" SQLite Extensions: {'✅ Supported' if extension_supported else '❌ Not Supported'}")
2018 |
2019 | # Warn about potential sqlite-vec issues
2020 | if not extension_supported and platform.system().lower() == "darwin":
2021 | print_warning("SQLite extension support not available - this may cause issues with sqlite-vec backend")
2022 | print_info("Consider using Homebrew Python: brew install python")
2023 |
2024 | # Hardware-specific recommendations
2025 | print_step("Recommendations", "Based on your hardware")
2026 |
2027 | if is_legacy_hardware(system_info):
2028 | print_success("Legacy Hardware Path (2013-2017 Intel Mac)")
2029 | print_info(" Recommended: python install.py --legacy-hardware")
2030 | print_info(" This will:")
2031 | print_info(" • Use SQLite-vec backend (avoids ChromaDB compatibility issues)")
2032 | print_info(" • Configure ONNX runtime for CPU-only inference")
2033 | print_info(" • Use Homebrew PyTorch for better compatibility")
2034 | print_info(" • Optimize resource usage for older hardware")
2035 | elif system_info["is_macos"] and system_info["is_arm"]:
2036 | print_success("Apple Silicon Mac - Modern Hardware Path")
2037 | print_info(" Recommended: python install.py")
2038 | print_info(" This will:")
2039 | print_info(" • Use SQLite-vec backend (fast and efficient)")
2040 | print_info(" • Enable MPS acceleration")
2041 | print_info(" • Zero network dependencies")
2042 | elif system_info["is_windows"] and gpu_info.get("has_cuda"):
2043 | print_success("Windows with CUDA GPU - High Performance Path")
2044 | print_info(" Recommended: python install.py")
2045 | print_info(" This will:")
2046 | print_info(" • Use SQLite-vec backend (fast and efficient)")
2047 | print_info(" • Enable CUDA acceleration")
2048 | print_info(" • Zero network dependencies")
2049 | elif memory_gb > 0 and memory_gb < 4:
2050 | print_success("Low-Memory System")
2051 | print_info(" Recommended: python install.py --storage-backend sqlite_vec")
2052 | print_info(" This will:")
2053 | print_info(" • Use lightweight SQLite-vec backend")
2054 | print_info(" • Minimize memory usage")
2055 | print_info(" • Enable ONNX runtime for efficiency")
2056 | elif memory_gb >= 16 and not (gpu_info.get("has_cuda") or gpu_info.get("has_mps") or gpu_info.get("has_directml")):
2057 | print_success("High-Memory System (No GPU) - Choose Your Path")
2058 | print_info(" Option 1 (Recommended): python install.py")
2059 | print_info(" • SQLite-vec: Fast startup, simple setup, same features")
2060 | print_info(" Option 2: python install.py --storage-backend chromadb")
2061 | print_info(" • ChromaDB: Better for 10K+ memories, production deployments")
2062 | print_info(" Most users benefit from SQLite-vec's simplicity")
2063 | elif gpu_info.get("has_cuda") or gpu_info.get("has_mps") or gpu_info.get("has_directml"):
2064 | gpu_type = "CUDA" if gpu_info.get("has_cuda") else "MPS" if gpu_info.get("has_mps") else "DirectML"
2065 | print_success(f"GPU-Accelerated System ({gpu_type}) - High Performance Path")
2066 | print_info(" Recommended: python install.py")
2067 | print_info(" This will:")
2068 | print_info(f" • Use SQLite-vec backend (fast and efficient)")
2069 | print_info(f" • Enable {gpu_type} hardware acceleration")
2070 | print_info(" • Zero network dependencies")
2071 | else:
2072 | print_success("Standard Installation")
2073 | print_info(" Recommended: python install.py")
2074 | print_info(" This will:")
2075 | print_info(" • Use SQLite-vec backend (optimal for most users)")
2076 | print_info(" • Fast startup and simple setup")
2077 | print_info(" • Full semantic search capabilities")
2078 |
2079 | print_step("Available Options", "Command-line flags you can use")
2080 | print_info(" --legacy-hardware : Optimize for 2013-2017 Intel Macs")
2081 | print_info(" --server-mode : Headless server installation")
2082 | print_info(" --storage-backend X : Force backend (chromadb/sqlite_vec)")
2083 | print_info(" --enable-http-api : Enable HTTP/SSE web interface")
2084 | print_info(" --use-homebrew-pytorch: Use existing Homebrew PyTorch")
2085 |
2086 | print_step("Documentation", "Hardware-specific guides")
2087 | print_info(" Legacy Mac Guide: docs/platforms/macos-intel-legacy.md")
2088 | print_info(" Backend Comparison: docs/guides/STORAGE_BACKENDS.md")
2089 | print_info(" Master Guide: docs/guides/INSTALLATION_MASTER.md")
2090 |
2091 | def generate_personalized_docs():
2092 | """Generate personalized setup documentation."""
2093 | print_header("Generating Personalized Setup Guide")
2094 |
2095 | # Detect system
2096 | system_info = detect_system()
2097 | gpu_info = detect_gpu()
2098 | memory_gb = detect_memory_gb()
2099 |
2100 | # Create personalized guide
2101 | guide_content = f"""# Your Personalized MCP Memory Service Setup Guide
2102 |
2103 | Generated on: {platform.node()} at {__import__('datetime').datetime.now().strftime('%Y-%m-%d %H:%M:%S')}
2104 |
2105 | ## Your System Configuration
2106 |
2107 | - **Platform**: {platform.system()} {platform.release()}
2108 | - **Architecture**: {platform.machine()}
2109 | - **Python Version**: {sys.version_info.major}.{sys.version_info.minor}
2110 | - **Memory**: {memory_gb:.1f}GB (detected)
2111 | - **GPU**: {'Yes (' + ('CUDA' if gpu_info.get('has_cuda') else 'MPS' if gpu_info.get('has_mps') else 'DirectML' if gpu_info.get('has_directml') else 'Unknown') + ')' if gpu_info.get('has_cuda') or gpu_info.get('has_mps') or gpu_info.get('has_directml') else 'No'}
2112 |
2113 | ## Recommended Installation Command
2114 |
2115 | ```bash
2116 | """
2117 |
2118 | # Generate recommendation
2119 | class Args:
2120 | storage_backend = None
2121 | legacy_hardware = False
2122 | server_mode = False
2123 |
2124 | args = Args()
2125 | recommended_backend = recommend_backend_intelligent(system_info, gpu_info, memory_gb, args)
2126 |
2127 | if is_legacy_hardware(system_info):
2128 | guide_content += "python install.py --legacy-hardware\n"
2129 | elif memory_gb < 4:
2130 | guide_content += "python install.py --storage-backend sqlite_vec\n"
2131 | else:
2132 | guide_content += "python install.py\n"
2133 |
2134 | guide_content += f"""```
2135 |
2136 | ## Why This Configuration?
2137 |
2138 | Based on your {platform.system()} system with {memory_gb:.1f}GB RAM:
2139 | """
2140 |
2141 | if is_legacy_hardware(system_info):
2142 | guide_content += """
2143 | - [OK] **Hardware Compatibility**: SQLite-vec avoids ChromaDB installation issues on older Intel Macs
2144 | - [OK] **Homebrew PyTorch**: Better compatibility with older systems and reduced dependencies
2145 | - [OK] **ONNX Runtime**: CPU-optimized inference for systems without GPU acceleration
2146 | - [OK] **Memory Efficient**: Optimized resource usage for systems with limited RAM
2147 | - [OK] **Full Feature Set**: Complete semantic search, tagging, and time-based recall capabilities
2148 | """
2149 | elif recommended_backend == "sqlite_vec":
2150 | if memory_gb >= 16 and not (gpu_info.get("has_cuda") or gpu_info.get("has_mps") or gpu_info.get("has_directml")):
2151 | guide_content += """
2152 | - [OK] **Smart Choice**: SQLite-vec recommended for high-memory systems without GPU
2153 | - [OK] **No GPU Needed**: ChromaDB's advantages require GPU acceleration you don't have
2154 | - [OK] **Faster Startup**: Database ready in 2-3 seconds vs ChromaDB's 15-30 seconds
2155 | - [OK] **Simpler Setup**: Single-file database, no complex dependencies
2156 | - [OK] **Full Feature Set**: Complete semantic search, tagging, and time-based recall capabilities
2157 | - [INFO] **Alternative**: Use `--storage-backend chromadb` if you plan 10K+ memories
2158 | """
2159 | else:
2160 | guide_content += """
2161 | - [OK] **SQLite-vec Backend**: Lightweight with complete vector search capabilities
2162 | - [OK] **Low Memory Usage**: Optimized for systems with limited RAM
2163 | - [OK] **Quick Startup**: Database ready in seconds
2164 | - [OK] **Full Feature Set**: Semantic search, tagging, time-based recall
2165 | """
2166 | else:
2167 | guide_content += """
2168 | - [OK] **ChromaDB Backend**: Production-grade with advanced HNSW indexing and rich ecosystem
2169 | - [OK] **Hardware Acceleration**: Takes advantage of your GPU/MPS acceleration
2170 | - [OK] **Scalable Performance**: Optimized for large datasets (10K+ memories) and complex metadata queries
2171 | - [OK] **Full Feature Set**: Complete semantic search, tagging, and time-based recall capabilities
2172 | """
2173 |
2174 | guide_content += f"""
2175 | ## Next Steps
2176 |
2177 | 1. **Run the installation**:
2178 | ```bash
2179 | cd mcp-memory-service
2180 | {guide_content.split('```bash')[1].split('```')[0].strip()}
2181 | ```
2182 |
2183 | 2. **Test the installation**:
2184 | ```bash
2185 | python scripts/test_memory_simple.py
2186 | ```
2187 |
2188 | 3. **Configure Claude Desktop**:
2189 | The installer will generate the optimal configuration for your system.
2190 |
2191 | ## Troubleshooting
2192 |
2193 | If you encounter issues, see the platform-specific guide:
2194 | - **Legacy Mac Issues**: docs/platforms/macos-intel-legacy.md
2195 | - **General Issues**: docs/guides/troubleshooting.md
2196 | - **Backend Selection**: docs/guides/STORAGE_BACKENDS.md
2197 |
2198 | ## Support
2199 |
2200 | Generated configuration ID: {hash(str(system_info) + str(gpu_info))}-{int(__import__('time').time())}
2201 | Include this ID when requesting support for faster assistance.
2202 | """
2203 |
2204 | # Write the guide
2205 | guide_path = "YOUR_PERSONALIZED_SETUP_GUIDE.md"
2206 | with open(guide_path, 'w') as f:
2207 | f.write(guide_content)
2208 |
2209 | print_success(f"Personalized setup guide created: {guide_path}")
2210 | print_info("This guide contains hardware-specific recommendations for your system")
2211 | print_info("Keep this file for future reference and troubleshooting")
2212 |
2213 | def configure_claude_code_integration(system_info):
2214 | """Configure Claude Code MCP integration with optimized settings."""
2215 | print_step("6", "Configuring Claude Code Integration")
2216 |
2217 | # Check if Claude Code is installed
2218 | try:
2219 | result = subprocess.run(['claude', '--version'], capture_output=True, text=True, timeout=5)
2220 | if result.returncode != 0:
2221 | print_warning("Claude Code CLI not found. Please install it first:")
2222 | print_info("curl -fsSL https://claude.ai/install.sh | sh")
2223 | return False
2224 | except (subprocess.SubprocessError, FileNotFoundError, subprocess.TimeoutExpired):
2225 | print_warning("Claude Code CLI not found. Please install it first:")
2226 | print_info("curl -fsSL https://claude.ai/install.sh | sh")
2227 | return False
2228 |
2229 | print_success("Claude Code CLI detected")
2230 |
2231 | # Load template and create personalized .mcp.json
2232 | template_path = Path('.mcp.json.template')
2233 | if not template_path.exists():
2234 | print_error("Template file .mcp.json.template not found")
2235 | return False
2236 |
2237 | try:
2238 | import json
2239 | with open(template_path, 'r') as f:
2240 | template_content = f.read()
2241 |
2242 | # Replace placeholders with actual values
2243 | user_home = str(Path.home())
2244 | personalized_content = template_content.replace('{{USER_HOME}}', user_home)
2245 |
2246 | # Create .mcp.json
2247 | mcp_config_path = Path('.mcp.json')
2248 | with open(mcp_config_path, 'w') as f:
2249 | f.write(personalized_content)
2250 |
2251 | print_success(f"Created personalized .mcp.json configuration")
2252 | print_info(f"Configuration file: {mcp_config_path.absolute()}")
2253 |
2254 | # Add to .gitignore if it exists
2255 | gitignore_path = Path('.gitignore')
2256 | if gitignore_path.exists():
2257 | with open(gitignore_path, 'r') as f:
2258 | gitignore_content = f.read()
2259 |
2260 | if '.mcp.json' not in gitignore_content:
2261 | with open(gitignore_path, 'a') as f:
2262 | f.write('\n# MCP configuration (contains personal paths)\n.mcp.json\n')
2263 | print_success("Added .mcp.json to .gitignore")
2264 |
2265 | # Verify Claude Code can see the configuration
2266 | try:
2267 | result = subprocess.run(['claude', 'mcp', 'list'],
2268 | capture_output=True, text=True, timeout=10, cwd='.')
2269 | if 'memory-service' in result.stdout:
2270 | print_success("Claude Code MCP integration configured successfully!")
2271 | print_info("You can now use memory functions in Claude Code")
2272 | else:
2273 | print_warning("Configuration created but memory-service not detected")
2274 | print_info("You may need to restart Claude Code or check the configuration")
2275 | except (subprocess.SubprocessError, subprocess.TimeoutExpired):
2276 | print_warning("Could not verify Claude Code configuration")
2277 | print_info("Configuration file created - restart Claude Code to use memory functions")
2278 |
2279 | return True
2280 |
2281 | except Exception as e:
2282 | print_error(f"Failed to configure Claude Code integration: {e}")
2283 | return False
2284 |
2285 | def detect_mcp_clients():
2286 | """Detect installed MCP-compatible applications."""
2287 | clients = {}
2288 |
2289 | # Check for Claude Desktop
2290 | claude_config_paths = [
2291 | Path.home() / "AppData" / "Roaming" / "Claude" / "claude_desktop_config.json", # Windows
2292 | Path.home() / "Library" / "Application Support" / "Claude" / "claude_desktop_config.json", # macOS
2293 | Path.home() / ".config" / "Claude" / "claude_desktop_config.json" # Linux
2294 | ]
2295 | for path in claude_config_paths:
2296 | if path.exists():
2297 | clients['claude_desktop'] = path
2298 | break
2299 |
2300 | # Check for Claude Code CLI
2301 | try:
2302 | result = subprocess.run(['claude', '--version'], capture_output=True, text=True, timeout=5)
2303 | if result.returncode == 0:
2304 | clients['claude_code'] = True
2305 | except (subprocess.SubprocessError, FileNotFoundError, subprocess.TimeoutExpired):
2306 | pass
2307 |
2308 | # Check for VS Code with MCP extension
2309 | vscode_settings_paths = [
2310 | Path.home() / "AppData" / "Roaming" / "Code" / "User" / "settings.json", # Windows
2311 | Path.home() / "Library" / "Application Support" / "Code" / "User" / "settings.json", # macOS
2312 | Path.home() / ".config" / "Code" / "User" / "settings.json" # Linux
2313 | ]
2314 | for path in vscode_settings_paths:
2315 | if path.exists():
2316 | try:
2317 | import json
2318 | with open(path, 'r') as f:
2319 | settings = json.load(f)
2320 | # Check for MCP-related extensions or configurations
2321 | if any('mcp' in str(key).lower() or 'model-context-protocol' in str(key).lower()
2322 | for key in settings.keys()):
2323 | clients['vscode_mcp'] = path
2324 | break
2325 | except (json.JSONDecodeError, IOError):
2326 | pass
2327 |
2328 | # Check for Continue IDE
2329 | continue_paths = [
2330 | Path.home() / ".continue" / "config.json",
2331 | Path.home() / ".config" / "continue" / "config.json",
2332 | Path.home() / "AppData" / "Roaming" / "continue" / "config.json" # Windows
2333 | ]
2334 | for path in continue_paths:
2335 | if path.exists():
2336 | clients['continue'] = path
2337 | break
2338 |
2339 | # Check for generic MCP configurations
2340 | generic_mcp_paths = [
2341 | Path.home() / ".mcp.json",
2342 | Path.home() / ".config" / "mcp" / "config.json",
2343 | Path.cwd() / ".mcp.json"
2344 | ]
2345 | for path in generic_mcp_paths:
2346 | if path.exists():
2347 | clients['generic_mcp'] = path
2348 | break
2349 |
2350 | # Check for Cursor IDE (similar to VS Code)
2351 | cursor_settings_paths = [
2352 | Path.home() / "AppData" / "Roaming" / "Cursor" / "User" / "settings.json", # Windows
2353 | Path.home() / "Library" / "Application Support" / "Cursor" / "User" / "settings.json", # macOS
2354 | Path.home() / ".config" / "Cursor" / "User" / "settings.json" # Linux
2355 | ]
2356 | for path in cursor_settings_paths:
2357 | if path.exists():
2358 | try:
2359 | import json
2360 | with open(path, 'r') as f:
2361 | settings = json.load(f)
2362 | # Check for MCP-related configurations
2363 | if any('mcp' in str(key).lower() or 'model-context-protocol' in str(key).lower()
2364 | for key in settings.keys()):
2365 | clients['cursor'] = path
2366 | break
2367 | except (json.JSONDecodeError, IOError):
2368 | pass
2369 |
2370 | return clients
2371 |
2372 | def print_detected_clients(clients):
2373 | """Print information about detected MCP clients."""
2374 | if clients:
2375 | print_info("Detected MCP Clients:")
2376 | for client_type, config_path in clients.items():
2377 | client_names = {
2378 | 'claude_desktop': 'Claude Desktop',
2379 | 'claude_code': 'Claude Code CLI',
2380 | 'vscode_mcp': 'VS Code with MCP',
2381 | 'continue': 'Continue IDE',
2382 | 'cursor': 'Cursor IDE',
2383 | 'generic_mcp': 'Generic MCP Client'
2384 | }
2385 | client_name = client_names.get(client_type, client_type.title())
2386 | config_info = config_path if isinstance(config_path, (str, Path)) else "CLI detected"
2387 | print_info(f" [*] {client_name}: {config_info}")
2388 | else:
2389 | print_info("No MCP clients detected - configuration will work with any future MCP client")
2390 |
2391 | def should_offer_multi_client_setup(args, final_backend):
2392 | """Determine if multi-client setup should be offered."""
2393 | # Only offer if using SQLite-vec backend (requirement for multi-client)
2394 | if final_backend != "sqlite_vec":
2395 | return False
2396 |
2397 | # Don't offer in pure server mode
2398 | if args.server_mode:
2399 | return False
2400 |
2401 | # Skip if user explicitly requested to skip
2402 | if args.skip_multi_client_prompt:
2403 | return False
2404 |
2405 | # Always beneficial for development environments - any future MCP client can benefit
2406 | return True
2407 |
2408 | def configure_detected_clients(clients, system_info, storage_backend="sqlite_vec"):
2409 | """Configure each detected client for multi-client access."""
2410 | success_count = 0
2411 |
2412 | for client_type, config_path in clients.items():
2413 | try:
2414 | if client_type == 'claude_desktop':
2415 | if configure_claude_desktop_multi_client(config_path, system_info, storage_backend):
2416 | success_count += 1
2417 | elif client_type == 'vscode_mcp' or client_type == 'cursor':
2418 | if configure_vscode_like_multi_client(config_path, client_type, storage_backend):
2419 | success_count += 1
2420 | elif client_type == 'continue':
2421 | if configure_continue_multi_client(config_path, storage_backend):
2422 | success_count += 1
2423 | elif client_type == 'generic_mcp':
2424 | if configure_generic_mcp_multi_client(config_path, storage_backend):
2425 | success_count += 1
2426 | elif client_type == 'claude_code':
2427 | # Claude Code uses project-level .mcp.json, handle separately
2428 | print_info(f" -> Claude Code: Configure via project .mcp.json (see instructions below)")
2429 | success_count += 1
2430 | except Exception as e:
2431 | print_warning(f" -> Failed to configure {client_type}: {e}")
2432 |
2433 | return success_count
2434 |
2435 | def configure_claude_desktop_multi_client(config_path, system_info, storage_backend="sqlite_vec"):
2436 | """Configure Claude Desktop for multi-client access."""
2437 | try:
2438 | import json
2439 |
2440 | # Read existing configuration
2441 | with open(config_path, 'r') as f:
2442 | config = json.load(f)
2443 |
2444 | # Ensure mcpServers section exists
2445 | if 'mcpServers' not in config:
2446 | config['mcpServers'] = {}
2447 |
2448 | # Update memory server configuration with multi-client settings
2449 | repo_path = str(Path.cwd()).replace('\\', '\\\\') # Escape backslashes for JSON
2450 | config['mcpServers']['memory'] = build_mcp_server_config(storage_backend, repo_path)
2451 |
2452 | # Write updated configuration
2453 | with open(config_path, 'w') as f:
2454 | json.dump(config, f, indent=2)
2455 |
2456 | print_info(f" [OK] Claude Desktop: Updated configuration for multi-client access")
2457 | return True
2458 |
2459 | except Exception as e:
2460 | print_warning(f" -> Claude Desktop configuration failed: {e}")
2461 | return False
2462 |
2463 | def configure_vscode_like_multi_client(config_path, client_type, storage_backend="sqlite_vec"):
2464 | """Configure VS Code or Cursor for multi-client access."""
2465 | try:
2466 | import json
2467 |
2468 | # For VS Code/Cursor, we provide instructions rather than modifying settings directly
2469 | # since MCP configuration varies by extension
2470 |
2471 | client_name = "VS Code" if client_type == 'vscode_mcp' else "Cursor"
2472 | print_info(f" -> {client_name}: MCP extension detected")
2473 | print_info(f" Add memory server to your MCP extension with these settings:")
2474 | print_info(f" - Backend: {storage_backend}")
2475 | if storage_backend == "sqlite_vec":
2476 | print_info(f" - Database: shared SQLite-vec database")
2477 | else:
2478 | print_info(f" - Database: shared ChromaDB database")
2479 | print_info(f" - See generic configuration below for details")
2480 | return True
2481 |
2482 | except Exception as e:
2483 | print_warning(f" -> {client_type} configuration failed: {e}")
2484 | return False
2485 |
2486 | def configure_continue_multi_client(config_path, storage_backend="sqlite_vec"):
2487 | """Configure Continue IDE for multi-client access."""
2488 | try:
2489 | import json
2490 |
2491 | # Read existing Continue configuration
2492 | with open(config_path, 'r') as f:
2493 | config = json.load(f)
2494 |
2495 | # Add or update MCP server configuration for Continue
2496 | if 'mcpServers' not in config:
2497 | config['mcpServers'] = {}
2498 |
2499 | config['mcpServers']['memory'] = build_mcp_server_config(storage_backend)
2500 |
2501 | # Write updated configuration
2502 | with open(config_path, 'w') as f:
2503 | json.dump(config, f, indent=2)
2504 |
2505 | print_info(f" [OK] Continue IDE: Updated configuration for multi-client access")
2506 | return True
2507 |
2508 | except Exception as e:
2509 | print_warning(f" -> Continue IDE configuration failed: {e}")
2510 | return False
2511 |
2512 | def configure_generic_mcp_multi_client(config_path, storage_backend="sqlite_vec"):
2513 | """Configure generic MCP client for multi-client access."""
2514 | try:
2515 | import json
2516 |
2517 | # Read existing configuration
2518 | with open(config_path, 'r') as f:
2519 | config = json.load(f)
2520 |
2521 | # Add memory server if not present
2522 | if 'mcpServers' not in config:
2523 | config['mcpServers'] = {}
2524 |
2525 | config['mcpServers']['memory'] = build_mcp_server_config(storage_backend)
2526 |
2527 | # Write updated configuration
2528 | with open(config_path, 'w') as f:
2529 | json.dump(config, f, indent=2)
2530 |
2531 | print_info(f" [OK] Generic MCP Client: Updated configuration")
2532 | return True
2533 |
2534 | except Exception as e:
2535 | print_warning(f" -> Generic MCP client configuration failed: {e}")
2536 | return False
2537 |
2538 | async def test_wal_mode_coordination():
2539 | """Test WAL mode storage coordination for multi-client access."""
2540 | try:
2541 | # Add src to path for import
2542 | sys.path.insert(0, str(Path(__file__).parent / "src"))
2543 |
2544 | from mcp_memory_service.storage.sqlite_vec import SqliteVecMemoryStorage
2545 | from mcp_memory_service.models.memory import Memory
2546 | from mcp_memory_service.utils.hashing import generate_content_hash
2547 |
2548 | import tempfile
2549 | import asyncio
2550 |
2551 | # Create a temporary database for testing
2552 | with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as tmp:
2553 | test_db_path = tmp.name
2554 |
2555 | try:
2556 | # Test direct SQLite-vec storage with WAL mode
2557 | print_info(" -> Testing WAL mode coordination...")
2558 | storage = SqliteVecMemoryStorage(test_db_path)
2559 | await storage.initialize()
2560 |
2561 | # Test storing a memory
2562 | content = "Multi-client setup test - WAL mode verification"
2563 | test_memory = Memory(
2564 | content=content,
2565 | content_hash=generate_content_hash(content),
2566 | tags=["setup", "wal-test", "multi-client"],
2567 | memory_type="test"
2568 | )
2569 |
2570 | # Store memory
2571 | success, message = await storage.store(test_memory)
2572 | if not success:
2573 | print_warning(f" -> Memory storage failed: {message}")
2574 | return False
2575 |
2576 | # Test concurrent access simulation
2577 | storage2 = SqliteVecMemoryStorage(test_db_path)
2578 | await storage2.initialize()
2579 |
2580 | # Both should be able to read
2581 | results1 = await storage.search_by_tag(["setup"])
2582 | results2 = await storage2.search_by_tag(["setup"])
2583 |
2584 | if len(results1) != len(results2) or len(results1) == 0:
2585 | print_warning(" -> Concurrent read access test failed")
2586 | return False
2587 |
2588 | # Test concurrent write
2589 | content2 = "Second client test memory"
2590 | memory2 = Memory(
2591 | content=content2,
2592 | content_hash=generate_content_hash(content2),
2593 | tags=["setup", "client2"],
2594 | memory_type="test"
2595 | )
2596 |
2597 | success2, _ = await storage2.store(memory2)
2598 | if not success2:
2599 | print_warning(" -> Concurrent write access test failed")
2600 | return False
2601 |
2602 | # Verify both clients can see both memories
2603 | all_results = await storage.search_by_tag(["setup"])
2604 | if len(all_results) < 2:
2605 | print_warning(" -> Multi-client data sharing test failed")
2606 | return False
2607 |
2608 | storage.close()
2609 | storage2.close()
2610 |
2611 | print_info(" [OK] WAL mode coordination test passed")
2612 | return True
2613 |
2614 | finally:
2615 | # Cleanup test files
2616 | try:
2617 | os.unlink(test_db_path)
2618 | for ext in ["-wal", "-shm"]:
2619 | try:
2620 | os.unlink(test_db_path + ext)
2621 | except:
2622 | pass
2623 | except:
2624 | pass
2625 |
2626 | except Exception as e:
2627 | print_warning(f" -> WAL mode test failed: {e}")
2628 | return False
2629 |
2630 | def setup_shared_environment():
2631 | """Set up shared environment variables for multi-client access."""
2632 | try:
2633 | print_info(" -> Configuring shared environment variables...")
2634 |
2635 | # Set environment variables in current process (for testing)
2636 | os.environ['MCP_MEMORY_STORAGE_BACKEND'] = 'sqlite_vec'
2637 | os.environ['MCP_MEMORY_SQLITE_PRAGMAS'] = 'busy_timeout=15000,cache_size=20000'
2638 | os.environ['LOG_LEVEL'] = 'INFO'
2639 |
2640 | print_info(" [OK] Environment variables configured")
2641 |
2642 | # Provide instructions for permanent setup
2643 | system_info = detect_system()
2644 | if system_info["is_windows"]:
2645 | print_info(" -> For permanent setup, run these PowerShell commands as Administrator:")
2646 | print_info(" [System.Environment]::SetEnvironmentVariable('MCP_MEMORY_STORAGE_BACKEND', 'sqlite_vec', [System.EnvironmentVariableTarget]::User)")
2647 | print_info(" [System.Environment]::SetEnvironmentVariable('MCP_MEMORY_SQLITE_PRAGMAS', 'busy_timeout=15000,cache_size=20000', [System.EnvironmentVariableTarget]::User)")
2648 | print_info(" [System.Environment]::SetEnvironmentVariable('LOG_LEVEL', 'INFO', [System.EnvironmentVariableTarget]::User)")
2649 | else:
2650 | print_info(" -> For permanent setup, add to your shell profile:")
2651 | print_info(" export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec")
2652 | print_info(" export MCP_MEMORY_SQLITE_PRAGMAS='busy_timeout=15000,cache_size=20000'")
2653 | print_info(" export LOG_LEVEL=INFO")
2654 |
2655 | return True
2656 |
2657 | except Exception as e:
2658 | print_warning(f" -> Environment setup failed: {e}")
2659 | return False
2660 |
2661 | def provide_generic_configuration(storage_backend="sqlite_vec"):
2662 | """Provide configuration instructions for any MCP client."""
2663 | print_info("")
2664 | print_info("Universal MCP Client Configuration:")
2665 | print_info("=" * 50)
2666 | print_info("For any MCP-compatible client, use these settings:")
2667 | print_info("")
2668 | print_info("MCP Server Configuration:")
2669 |
2670 | repo_path = str(Path.cwd())
2671 |
2672 | # Windows-style path
2673 | uv_cmd = UV_EXECUTABLE_PATH or "uv"
2674 | if platform.system() == 'Windows':
2675 | print_info(f" Command: {uv_cmd} --directory \"{repo_path}\" run memory")
2676 | print_info(f" Alternative: python -m mcp_memory_service.server")
2677 | else:
2678 | print_info(f" Command: {uv_cmd} --directory {repo_path} run memory")
2679 | print_info(f" Alternative: python -m mcp_memory_service.server")
2680 |
2681 | print_info("")
2682 | print_info("Environment Variables:")
2683 | print_info(f" MCP_MEMORY_STORAGE_BACKEND={storage_backend}")
2684 | if storage_backend == "sqlite_vec":
2685 | print_info(" MCP_MEMORY_SQLITE_PRAGMAS=busy_timeout=15000,cache_size=20000")
2686 | print_info(" LOG_LEVEL=INFO")
2687 |
2688 | print_info("")
2689 | print_info("Shared Database Location:")
2690 | if storage_backend == "sqlite_vec":
2691 | if platform.system() == 'Windows':
2692 | print_info(" %LOCALAPPDATA%\\mcp-memory\\sqlite_vec.db")
2693 | elif platform.system() == 'Darwin':
2694 | print_info(" ~/Library/Application Support/mcp-memory/sqlite_vec.db")
2695 | else:
2696 | print_info(" ~/.local/share/mcp-memory/sqlite_vec.db")
2697 | else: # chromadb
2698 | if platform.system() == 'Windows':
2699 | print_info(" %LOCALAPPDATA%\\mcp-memory\\chroma_db")
2700 | elif platform.system() == 'Darwin':
2701 | print_info(" ~/Library/Application Support/mcp-memory/chroma_db")
2702 | else:
2703 | print_info(" ~/.local/share/mcp-memory/chroma_db")
2704 |
2705 | print_info("")
2706 | print_info("Claude Code Project Configuration (.mcp.json):")
2707 | print_info(" {")
2708 | print_info(" \"mcpServers\": {")
2709 | print_info(" \"memory\": {")
2710 | print_info(f" \"command\": \"{UV_EXECUTABLE_PATH or 'uv'}\",")
2711 | print_info(f" \"args\": [\"--directory\", \"{repo_path}\", \"run\", \"memory\"],")
2712 | print_info(" \"env\": {")
2713 | print_info(f" \"MCP_MEMORY_STORAGE_BACKEND\": \"{storage_backend}\",")
2714 | if storage_backend == "sqlite_vec":
2715 | print_info(" \"MCP_MEMORY_SQLITE_PRAGMAS\": \"busy_timeout=15000,cache_size=20000\",")
2716 | print_info(" \"LOG_LEVEL\": \"INFO\"")
2717 | print_info(" }")
2718 | print_info(" }")
2719 | print_info(" }")
2720 | print_info(" }")
2721 |
2722 | def setup_universal_multi_client_access(system_info, args, storage_backend="sqlite_vec"):
2723 | """Configure multi-client access for any MCP-compatible clients."""
2724 | print_step("7", "Configuring Universal Multi-Client Access")
2725 |
2726 | print_info("Setting up multi-client coordination for all MCP applications...")
2727 | print_info("Benefits:")
2728 | print_info(" • Share memories between Claude Desktop, VS Code, Continue, and other MCP clients")
2729 | print_info(" • Seamless context sharing across development environments")
2730 | print_info(" • Single source of truth for all your project memories")
2731 | print_info("")
2732 |
2733 | # Test WAL mode coordination only for sqlite_vec
2734 | if storage_backend == "sqlite_vec":
2735 | try:
2736 | import asyncio
2737 | wal_success = asyncio.run(test_wal_mode_coordination())
2738 | if not wal_success:
2739 | print_error("WAL mode coordination test failed")
2740 | return False
2741 | except Exception as e:
2742 | print_error(f"Failed to test WAL mode coordination: {e}")
2743 | return False
2744 |
2745 | # Detect available MCP clients
2746 | detected_clients = detect_mcp_clients()
2747 | print_detected_clients(detected_clients)
2748 | print_info("")
2749 |
2750 | # Configure each detected client
2751 | print_info("Configuring detected clients...")
2752 | success_count = configure_detected_clients(detected_clients, system_info, storage_backend)
2753 |
2754 | # Set up shared environment variables
2755 | setup_shared_environment()
2756 |
2757 | # Provide generic configuration for manual setup
2758 | provide_generic_configuration(storage_backend)
2759 |
2760 | print_info("")
2761 | print_success(f"Multi-client setup complete! {success_count} clients configured automatically.")
2762 | print_info("")
2763 | print_info("Next Steps:")
2764 | print_info(" 1. Restart your applications (Claude Desktop, VS Code, etc.)")
2765 | print_info(" 2. All clients will share the same memory database")
2766 | print_info(" 3. Test: Store memory in one app, access from another")
2767 | print_info(" 4. For Claude Code: Create .mcp.json in your project directory")
2768 |
2769 | return True
2770 |
2771 | def _parse_arguments():
2772 | """Parse command-line arguments."""
2773 | parser = argparse.ArgumentParser(description="Install MCP Memory Service")
2774 | parser.add_argument('--dev', action='store_true', help='Install in development mode')
2775 | parser.add_argument('--chroma-path', type=str, help='Path to ChromaDB storage')
2776 | parser.add_argument('--backups-path', type=str, help='Path to backups storage')
2777 | parser.add_argument('--force-compatible-deps', action='store_true',
2778 | help='Force compatible versions of PyTorch (2.0.1) and sentence-transformers (2.2.2)')
2779 | parser.add_argument('--fallback-deps', action='store_true',
2780 | help='Use fallback versions of PyTorch (1.13.1) and sentence-transformers (2.2.2)')
2781 | parser.add_argument('--storage-backend', choices=['cloudflare', 'sqlite_vec', 'hybrid', 'auto_detect'],
2782 | help='Choose storage backend: cloudflare (production cloud), sqlite_vec (local development), hybrid (production + local sync), or auto_detect')
2783 | parser.add_argument('--skip-pytorch', action='store_true',
2784 | help='Skip PyTorch installation and use ONNX runtime with SQLite-vec backend instead')
2785 | parser.add_argument('--use-homebrew-pytorch', action='store_true',
2786 | help='Use existing Homebrew PyTorch installation instead of pip version')
2787 | parser.add_argument('--force-pytorch', action='store_true',
2788 | help='Force PyTorch installation even when using SQLite-vec (overrides auto-skip)')
2789 |
2790 | # New intelligent installer options
2791 | parser.add_argument('--legacy-hardware', action='store_true',
2792 | help='Optimize installation for legacy hardware (2013-2017 Intel Macs)')
2793 | parser.add_argument('--server-mode', action='store_true',
2794 | help='Install for server/headless deployment (minimal UI dependencies)')
2795 | parser.add_argument('--enable-http-api', action='store_true',
2796 | help='Enable HTTP/SSE API functionality')
2797 | parser.add_argument('--migrate-from-chromadb', action='store_true',
2798 | help='Migrate existing ChromaDB installation to selected backend')
2799 | parser.add_argument('--configure-claude-code', action='store_true',
2800 | help='Automatically configure Claude Code MCP integration with optimized settings')
2801 | parser.add_argument('--help-detailed', action='store_true',
2802 | help='Show detailed hardware-specific installation recommendations')
2803 | parser.add_argument('--generate-docs', action='store_true',
2804 | help='Generate personalized setup documentation for your hardware')
2805 | parser.add_argument('--setup-multi-client', action='store_true',
2806 | help='Configure multi-client access for any MCP-compatible applications (Claude, VS Code, Continue, etc.)')
2807 | parser.add_argument('--skip-multi-client-prompt', action='store_true',
2808 | help='Skip the interactive prompt for multi-client setup')
2809 | parser.add_argument('--install-claude-commands', action='store_true',
2810 | help='Install Claude Code commands for memory operations')
2811 | parser.add_argument('--skip-claude-commands-prompt', action='store_true',
2812 | help='Skip the interactive prompt for Claude Code commands')
2813 | parser.add_argument('--non-interactive', action='store_true',
2814 | help='Run in non-interactive mode using default values for all prompts')
2815 |
2816 | return parser.parse_args()
2817 |
2818 | def _handle_special_modes(args):
2819 | """Handle special help and documentation modes that exit early."""
2820 | if args.help_detailed:
2821 | show_detailed_help()
2822 | sys.exit(0)
2823 |
2824 | if args.generate_docs:
2825 | generate_personalized_docs()
2826 | sys.exit(0)
2827 |
2828 | def _detect_system_and_environment(args):
2829 | """Detect system configuration and return system info dict."""
2830 | print_header("MCP Memory Service Installation")
2831 |
2832 | print_step("1", "Detecting system")
2833 | system_info = detect_system()
2834 | gpu_info = detect_gpu()
2835 | memory_gb = detect_memory_gb()
2836 |
2837 | if memory_gb > 0:
2838 | print_info(f"System memory: {memory_gb:.1f}GB")
2839 |
2840 | return system_info, gpu_info, memory_gb
2841 |
2842 | def _recommend_backend(args, system_info, gpu_info, memory_gb):
2843 | """Recommend and set storage backend based on system configuration."""
2844 | if not args.storage_backend:
2845 | recommended_backend = recommend_backend_intelligent(system_info, gpu_info, memory_gb, args)
2846 | args.storage_backend = recommended_backend
2847 | print_info(f"Recommended backend: {recommended_backend}")
2848 |
2849 | def _configure_legacy_hardware(args, system_info):
2850 | """Configure installation for legacy hardware."""
2851 | if args.legacy_hardware or is_legacy_hardware(system_info):
2852 | print_step("1a", "Legacy Hardware Optimization")
2853 | args.storage_backend = "sqlite_vec"
2854 | args.use_homebrew_pytorch = True
2855 | print_success("Configuring for legacy hardware compatibility")
2856 | print_info("• SQLite-vec backend selected")
2857 | print_info("• Homebrew PyTorch integration enabled")
2858 | print_info("• ONNX runtime will be configured")
2859 |
2860 | def _configure_server_mode(args):
2861 | """Configure installation for server mode."""
2862 | if args.server_mode:
2863 | print_step("1b", "Server Mode Configuration")
2864 | args.storage_backend = "sqlite_vec"
2865 | print_success("Configuring for server deployment")
2866 | print_info("• Lightweight SQLite-vec backend")
2867 | print_info("• Minimal UI dependencies")
2868 |
2869 | def _configure_http_api(args):
2870 | """Configure HTTP/SSE API settings."""
2871 | if args.enable_http_api:
2872 | print_step("1c", "HTTP/SSE API Configuration")
2873 | if args.storage_backend == "chromadb":
2874 | print_warning("HTTP/SSE API works best with SQLite-vec backend")
2875 | if args.non_interactive:
2876 | print_info("Non-interactive mode: switching to SQLite-vec for HTTP API compatibility")
2877 | args.storage_backend = "sqlite_vec"
2878 | else:
2879 | response = prompt_user_input("Switch to SQLite-vec for optimal HTTP API experience? (y/N, press Enter for N): ", "")
2880 | if response.lower().startswith('y'):
2881 | args.storage_backend = "sqlite_vec"
2882 |
2883 | def _setup_chromadb_migration(args):
2884 | """Set up ChromaDB migration if requested."""
2885 | if not args.migrate_from_chromadb:
2886 | return
2887 |
2888 | print_step("1d", "Migration Setup")
2889 | print_info("Preparing to migrate from existing ChromaDB installation")
2890 |
2891 | chromadb_paths = [
2892 | os.path.expanduser("~/.mcp_memory_chroma"),
2893 | os.path.expanduser("~/Library/Application Support/mcp-memory/chroma_db"),
2894 | os.path.expanduser("~/.local/share/mcp-memory/chroma_db")
2895 | ]
2896 |
2897 | chromadb_found = None
2898 | for path in chromadb_paths:
2899 | if os.path.exists(path):
2900 | chromadb_found = path
2901 | break
2902 |
2903 | if chromadb_found:
2904 | print_success(f"Found ChromaDB data at: {chromadb_found}")
2905 | args.storage_backend = "sqlite_vec"
2906 | args.chromadb_found = chromadb_found
2907 | print_info("Migration will run after installation completes")
2908 | else:
2909 | print_warning("No ChromaDB data found at standard locations")
2910 | if args.non_interactive:
2911 | print_info("Non-interactive mode: skipping ChromaDB migration")
2912 | args.migrate_from_chromadb = False
2913 | else:
2914 | manual_path = prompt_user_input("Enter ChromaDB path manually (or press Enter to skip): ", "")
2915 | if manual_path and os.path.exists(manual_path):
2916 | args.storage_backend = "sqlite_vec"
2917 | args.chromadb_found = manual_path
2918 | else:
2919 | print_info("Skipping migration - no valid ChromaDB path provided")
2920 | args.migrate_from_chromadb = False
2921 |
2922 | def _install_compatible_dependencies(args, system_info):
2923 | """Install compatible PyTorch/transformers versions for macOS Intel."""
2924 | if not args.force_compatible_deps:
2925 | return
2926 |
2927 | if not (system_info["is_macos"] and system_info["is_x86"]):
2928 | print_warning("--force-compatible-deps is only applicable for macOS with Intel CPUs")
2929 | return
2930 |
2931 | print_info("Installing compatible dependencies as requested...")
2932 | python_version = sys.version_info
2933 |
2934 | if python_version >= (3, 13):
2935 | torch_version, torch_vision_version = "2.3.0", "0.18.0"
2936 | torch_audio_version, st_version = "2.3.0", "3.0.0"
2937 | else:
2938 | torch_version, torch_vision_version = "2.0.1", "2.0.1"
2939 | torch_audio_version, st_version = "2.0.1", "2.2.2"
2940 |
2941 | try:
2942 | subprocess.check_call([
2943 | sys.executable, '-m', 'pip', 'install',
2944 | f"torch=={torch_version}", f"torchvision=={torch_vision_version}",
2945 | f"torchaudio=={torch_audio_version}", f"sentence-transformers=={st_version}"
2946 | ])
2947 | print_success("Compatible dependencies installed successfully")
2948 | except subprocess.SubprocessError as e:
2949 | print_error(f"Failed to install compatible dependencies: {e}")
2950 |
2951 | def _install_fallback_dependencies(args):
2952 | """Install fallback PyTorch/transformers versions for troubleshooting."""
2953 | if not args.fallback_deps:
2954 | return
2955 |
2956 | print_info("Installing fallback dependencies as requested...")
2957 | python_version = sys.version_info
2958 |
2959 | if python_version >= (3, 13):
2960 | torch_version, torch_vision_version = "2.3.0", "0.18.0"
2961 | torch_audio_version, st_version = "2.3.0", "3.0.0"
2962 | else:
2963 | torch_version, torch_vision_version = "1.13.1", "0.14.1"
2964 | torch_audio_version, st_version = "0.13.1", "2.2.2"
2965 |
2966 | try:
2967 | subprocess.check_call([
2968 | sys.executable, '-m', 'pip', 'install',
2969 | f"torch=={torch_version}", f"torchvision=={torch_vision_version}",
2970 | f"torchaudio=={torch_audio_version}", f"sentence-transformers=={st_version}"
2971 | ])
2972 | print_success("Fallback dependencies installed successfully")
2973 | except subprocess.SubprocessError as e:
2974 | print_error(f"Failed to install fallback dependencies: {e}")
2975 |
2976 | def _optimize_pytorch_for_backend(args):
2977 | """Auto-skip PyTorch for sqlite_vec backend."""
2978 | if (args.storage_backend == "sqlite_vec" and
2979 | not args.skip_pytorch and
2980 | not args.force_pytorch):
2981 | print_step("1d", "Optimizing for SQLite-vec setup")
2982 | args.skip_pytorch = True
2983 | print_success("Auto-skipping PyTorch installation for SQLite-vec backend")
2984 | print_info("• SQLite-vec uses SQLite for vector storage (lighter than ChromaDB)")
2985 | print_info("• Note: Embedding models still require PyTorch/SentenceTransformers")
2986 | print_info("• Add --force-pytorch if you want PyTorch installed here")
2987 | print_warning("• You'll need PyTorch available for embedding functionality")
2988 |
2989 | def _setup_logging_and_detect_system(args):
2990 | """Initialize logging and detect system configuration."""
2991 | try:
2992 | log_file_path = setup_installer_logging()
2993 | except Exception as e:
2994 | print(f"Warning: Could not set up logging: {e}")
2995 | log_file_path = None
2996 |
2997 | system_info, gpu_info, memory_gb = _detect_system_and_environment(args)
2998 | _recommend_backend(args, system_info, gpu_info, memory_gb)
2999 | _configure_legacy_hardware(args, system_info)
3000 | _configure_server_mode(args)
3001 | _configure_http_api(args)
3002 | _setup_chromadb_migration(args)
3003 | _install_compatible_dependencies(args, system_info)
3004 | _install_fallback_dependencies(args)
3005 | _optimize_pytorch_for_backend(args)
3006 |
3007 | return log_file_path, system_info
3008 |
3009 | def _execute_core_installation(args, system_info):
3010 | """Execute the core installation steps (dependencies, package, paths, verification)."""
3011 | if not check_dependencies():
3012 | sys.exit(1)
3013 |
3014 | if not install_package(args):
3015 | if system_info["is_macos"] and system_info["is_x86"]:
3016 | print_warning("Installation failed on macOS Intel.")
3017 | print_info("Try running the script with '--force-compatible-deps' to force compatible versions:")
3018 | print_info("python install.py --force-compatible-deps")
3019 | sys.exit(1)
3020 |
3021 | if not configure_paths(args):
3022 | print_warning("Path configuration failed, but installation may still work")
3023 |
3024 | _verify_installation_with_suggestions(system_info)
3025 |
3026 | def _verify_installation_with_suggestions(system_info):
3027 | """Verify installation and provide platform-specific troubleshooting suggestions."""
3028 | if not verify_installation():
3029 | print_warning("Installation verification failed, but installation may still work")
3030 | if system_info["is_macos"] and system_info["is_x86"]:
3031 | python_version = sys.version_info
3032 | print_info("For macOS Intel compatibility issues, try these steps:")
3033 | print_info("1. First uninstall current packages: pip uninstall -y torch torchvision torchaudio sentence-transformers")
3034 | print_info("2. Then reinstall with compatible versions: python install.py --force-compatible-deps")
3035 |
3036 | if python_version >= (3, 13):
3037 | print_info("For Python 3.13+, you may need to manually install the following:")
3038 | print_info("pip install torch==2.3.0 torchvision==0.18.0 torchaudio==2.3.0")
3039 | print_info("pip install sentence-transformers==3.0.0")
3040 |
3041 | def _execute_chromadb_migration(args):
3042 | """Execute ChromaDB migration if requested."""
3043 | if not (args.migrate_from_chromadb and hasattr(args, 'chromadb_found') and args.chromadb_found):
3044 | return
3045 |
3046 | print_step("6", "Migrating from ChromaDB")
3047 | try:
3048 | migration_script = "scripts/migrate_chroma_to_sqlite.py"
3049 | if os.path.exists(migration_script):
3050 | print_info("Running migration script...")
3051 | subprocess.check_call([sys.executable, migration_script, "--auto-confirm"])
3052 | print_success("Migration completed successfully!")
3053 | else:
3054 | print_warning("Migration script not found - manual migration required")
3055 | print_info("Run: python scripts/migrate_chroma_to_sqlite.py")
3056 | except subprocess.SubprocessError as e:
3057 | print_error(f"Migration failed: {e}")
3058 | print_info("You can run migration manually later with:")
3059 | print_info("python scripts/migrate_chroma_to_sqlite.py")
3060 |
3061 | def _configure_claude_code_if_requested(args, system_info):
3062 | """Configure Claude Code integration if requested."""
3063 | if args.configure_claude_code:
3064 | if not configure_claude_code_integration(system_info):
3065 | print_warning("Claude Code integration configuration failed")
3066 | print_info("You can configure it manually later using the documentation")
3067 |
3068 | def _handle_claude_code_commands(args):
3069 | """Handle Claude Code commands installation."""
3070 | should_install_commands = args.install_claude_commands
3071 |
3072 | if not should_install_commands and not args.skip_claude_commands_prompt:
3073 | if install_claude_commands is not None and check_claude_code_cli is not None:
3074 | claude_available, _ = check_claude_code_cli()
3075 | if claude_available:
3076 | should_install_commands = _prompt_for_claude_commands(args)
3077 |
3078 | if should_install_commands and install_claude_commands is not None:
3079 | _install_claude_commands_internal()
3080 |
3081 | def _prompt_for_claude_commands(args):
3082 | """Prompt user for Claude Code commands installation."""
3083 | print_step("7", "Optional Claude Code Commands")
3084 | print_info("Claude Code CLI detected! You can install memory operation commands.")
3085 | print_info("Commands would include: /memory-store, /memory-recall, /memory-search, /memory-health")
3086 |
3087 | if args.non_interactive:
3088 | print_info("Non-interactive mode: skipping Claude Code commands installation")
3089 | return False
3090 |
3091 | print("\n" + "=" * 60)
3092 | print("⚠️ USER INPUT REQUIRED")
3093 | print("=" * 60)
3094 | response = input("Install Claude Code memory commands? (y/N, press Enter for N): ")
3095 | print("=" * 60 + "\n")
3096 | return response.lower().startswith('y')
3097 |
3098 | def _install_claude_commands_internal():
3099 | """Install Claude Code commands."""
3100 | print_step("7", "Installing Claude Code Commands")
3101 | try:
3102 | if install_claude_commands(verbose=True):
3103 | print_success("Claude Code commands installed successfully!")
3104 | else:
3105 | print_warning("Claude Code commands installation had issues")
3106 | print_info("You can install them manually later with:")
3107 | print_info("python scripts/claude_commands_utils.py")
3108 | except Exception as e:
3109 | print_error(f"Failed to install Claude Code commands: {str(e)}")
3110 | print_info("You can install them manually later with:")
3111 | print_info("python scripts/claude_commands_utils.py")
3112 |
3113 | def _print_final_setup_notices():
3114 | """Print first-time setup expectations."""
3115 | print_header("Installation Complete")
3116 | print_info("")
3117 | print_info("⚠️ FIRST-TIME SETUP EXPECTATIONS:")
3118 | print_info("On first run, you may see these NORMAL warnings:")
3119 | print_info(" • 'No snapshots directory' - Model will download automatically (~25MB)")
3120 | print_info(" • 'TRANSFORMERS_CACHE deprecated' - Informational, doesn't affect operation")
3121 | print_info(" • Model download progress - One-time download (1-2 minutes)")
3122 | print_info("")
3123 | print_info("These warnings disappear after the first successful run.")
3124 | print_info("See docs/first-time-setup.md for details.")
3125 | print_info("")
3126 |
3127 | def _determine_final_backend(system_info):
3128 | """Determine final storage backend based on system configuration."""
3129 | if system_info["is_macos"] and system_info["is_x86"] and system_info.get("has_homebrew_pytorch"):
3130 | os.environ['MCP_MEMORY_STORAGE_BACKEND'] = 'sqlite_vec'
3131 | return 'sqlite_vec'
3132 | return os.environ.get('MCP_MEMORY_STORAGE_BACKEND', 'chromadb')
3133 |
3134 | def _setup_multi_client_access(args, system_info, final_backend):
3135 | """Set up multi-client access if requested or offered."""
3136 | if args.setup_multi_client:
3137 | _execute_explicit_multi_client_setup(system_info, args, final_backend)
3138 | elif should_offer_multi_client_setup(args, final_backend):
3139 | _handle_interactive_multi_client_setup(args, system_info, final_backend)
3140 |
3141 | def _execute_explicit_multi_client_setup(system_info, args, final_backend):
3142 | """Execute multi-client setup when explicitly requested."""
3143 | try:
3144 | setup_universal_multi_client_access(system_info, args, final_backend)
3145 | except Exception as e:
3146 | print_error(f"Multi-client setup failed: {e}")
3147 | print_info("You can set up multi-client access manually using:")
3148 | print_info("python setup_multi_client_complete.py")
3149 |
3150 | def _handle_interactive_multi_client_setup(args, system_info, final_backend):
3151 | """Handle interactive multi-client setup prompt."""
3152 | print_info("")
3153 | print_info("Multi-Client Access Available!")
3154 | print_info("")
3155 | print_info("The MCP Memory Service can be configured for multi-client access, allowing")
3156 | print_info("multiple applications and IDEs to share the same memory database.")
3157 | print_info("")
3158 | print_info("Benefits:")
3159 | print_info(" • Share memories between Claude Desktop, VS Code, Continue, and other MCP clients")
3160 | print_info(" • Seamless context sharing across development environments")
3161 | print_info(" • Single source of truth for all your project memories")
3162 | print_info("")
3163 |
3164 | try:
3165 | if args.non_interactive:
3166 | print_info("Non-interactive mode: skipping multi-client configuration")
3167 | response = 'n'
3168 | else:
3169 | print("\n" + "=" * 60)
3170 | print("⚠️ USER INPUT REQUIRED")
3171 | print("=" * 60)
3172 | response = input("Would you like to configure multi-client access? (y/N, press Enter for N): ").strip().lower()
3173 | print("=" * 60 + "\n")
3174 |
3175 | if response in ['y', 'yes']:
3176 | print_info("")
3177 | _execute_explicit_multi_client_setup(system_info, args, final_backend)
3178 | else:
3179 | print_info("Skipping multi-client setup. You can configure it later using:")
3180 | print_info("python setup_multi_client_complete.py")
3181 | except (EOFError, KeyboardInterrupt):
3182 | print_info("\nSkipping multi-client setup. You can configure it later using:")
3183 | print_info("python setup_multi_client_complete.py")
3184 |
3185 | print_info("")
3186 |
3187 | def _print_backend_configuration(final_backend, system_info):
3188 | """Print final backend configuration and recommendations."""
3189 | use_onnx = os.environ.get('MCP_MEMORY_USE_ONNX', '').lower() in ('1', 'true', 'yes')
3190 |
3191 | print_info("You can now run the MCP Memory Service using the 'memory' command")
3192 | print_info(f"Storage Backend: {final_backend.upper()}")
3193 |
3194 | if final_backend == 'sqlite_vec':
3195 | print_success("Using SQLite-vec - lightweight, fast, minimal dependencies")
3196 | print_info(" • No complex dependencies or build issues")
3197 | print_info(" • Excellent performance for typical use cases")
3198 | else:
3199 | print_success("Using ChromaDB - full-featured vector database")
3200 | print_info(" • Advanced features and extensive ecosystem")
3201 |
3202 | if use_onnx:
3203 | print_info("[OK] Using ONNX Runtime for inference")
3204 | print_info(" • Compatible with Homebrew PyTorch")
3205 | print_info(" • Reduced dependencies for better compatibility")
3206 |
3207 | print_info("For more information, see:")
3208 | print_info(" • Installation Guide: docs/guides/INSTALLATION_MASTER.md")
3209 | print_info(" • Backend Comparison: docs/guides/STORAGE_BACKENDS.md")
3210 | if system_info["is_macos"] and system_info["is_x86"] and is_legacy_hardware(system_info):
3211 | print_info(" • Legacy Mac Guide: docs/platforms/macos-intel-legacy.md")
3212 | print_info(" • Main README: README.md")
3213 |
3214 | def _print_macos_intel_notes(system_info):
3215 | """Print macOS Intel-specific notes and troubleshooting tips."""
3216 | if not (system_info["is_macos"] and system_info["is_x86"]):
3217 | return
3218 |
3219 | print_info("\nMacOS Intel Notes:")
3220 |
3221 | if system_info.get("has_homebrew_pytorch"):
3222 | print_info("- Using Homebrew PyTorch installation: " + system_info.get("homebrew_pytorch_version", "Unknown"))
3223 | print_info("- The MCP Memory Service is configured to use SQLite-vec + ONNX runtime")
3224 | print_info("- To start the memory service, use:")
3225 | print_info(" export MCP_MEMORY_USE_ONNX=1")
3226 | print_info(" export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec")
3227 | print_info(" memory")
3228 | else:
3229 | print_info("- If you encounter issues, try the --force-compatible-deps option")
3230 | python_version = sys.version_info
3231 | if python_version >= (3, 13):
3232 | print_info("- For optimal performance on Intel Macs with Python 3.13+, torch==2.3.0 and sentence-transformers==3.0.0 are recommended")
3233 | print_info("- You can manually install these versions with:")
3234 | print_info(" pip install torch==2.3.0 torchvision==0.18.0 torchaudio==2.3.0 sentence-transformers==3.0.0")
3235 | else:
3236 | print_info("- For optimal performance on Intel Macs, torch==2.0.1 and sentence-transformers==2.2.2 are recommended")
3237 | print_info("- You can manually install these versions with:")
3238 | print_info(" pip install torch==2.0.1 torchvision==2.0.1 torchaudio==2.0.1 sentence-transformers==2.2.2")
3239 |
3240 | print_info("\nTroubleshooting Tips:")
3241 | print_info("- If you have a Homebrew PyTorch installation, use: --use-homebrew-pytorch")
3242 | print_info("- To completely skip PyTorch installation, use: --skip-pytorch")
3243 | print_info("- To force the SQLite-vec backend, use: --storage-backend sqlite_vec")
3244 | print_info("- For a quick test, try running: python test_memory.py")
3245 |
3246 | def _cleanup_and_exit(log_file_path):
3247 | """Clean up logging system and exit."""
3248 | try:
3249 | cleanup_installer_logging()
3250 | if log_file_path:
3251 | print(f"\nInstallation log saved to: {log_file_path}")
3252 | except Exception:
3253 | pass # Silently ignore cleanup errors
3254 |
3255 | def main():
3256 | """Main installation function."""
3257 | args = _parse_arguments()
3258 | _handle_special_modes(args)
3259 |
3260 | log_file_path, system_info = _setup_logging_and_detect_system(args)
3261 | _execute_core_installation(args, system_info)
3262 | _execute_chromadb_migration(args)
3263 | _configure_claude_code_if_requested(args, system_info)
3264 | _handle_claude_code_commands(args)
3265 |
3266 | _print_final_setup_notices()
3267 | final_backend = _determine_final_backend(system_info)
3268 | _setup_multi_client_access(args, system_info, final_backend)
3269 | _print_backend_configuration(final_backend, system_info)
3270 | _print_macos_intel_notes(system_info)
3271 | _cleanup_and_exit(log_file_path)
3272 |
3273 | if __name__ == "__main__":
3274 | try:
3275 | main()
3276 | except KeyboardInterrupt:
3277 | print("\nInstallation interrupted by user")
3278 | cleanup_installer_logging()
3279 | sys.exit(1)
3280 | except Exception as e:
3281 | print(f"\nInstallation failed with error: {e}")
3282 | cleanup_installer_logging()
3283 | sys.exit(1)
3284 |
```