#
tokens: 49911/50000 47/625 files (page 1/35)
lines: off (toggle) GitHub
raw markdown copy
This is page 1 of 35. Use http://codebase.md/doobidoo/mcp-memory-service?lines=false&page={x} to view the full context.

# Directory Structure

```
├── .claude
│   ├── agents
│   │   ├── amp-bridge.md
│   │   ├── amp-pr-automator.md
│   │   ├── code-quality-guard.md
│   │   ├── gemini-pr-automator.md
│   │   └── github-release-manager.md
│   ├── settings.local.json.backup
│   └── settings.local.json.local
├── .commit-message
├── .dockerignore
├── .env.example
├── .env.sqlite.backup
├── .envnn#
├── .gitattributes
├── .github
│   ├── FUNDING.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   ├── feature_request.yml
│   │   └── performance_issue.yml
│   ├── pull_request_template.md
│   └── workflows
│       ├── bridge-tests.yml
│       ├── CACHE_FIX.md
│       ├── claude-code-review.yml
│       ├── claude.yml
│       ├── cleanup-images.yml.disabled
│       ├── dev-setup-validation.yml
│       ├── docker-publish.yml
│       ├── LATEST_FIXES.md
│       ├── main-optimized.yml.disabled
│       ├── main.yml
│       ├── publish-and-test.yml
│       ├── README_OPTIMIZATION.md
│       ├── release-tag.yml.disabled
│       ├── release.yml
│       ├── roadmap-review-reminder.yml
│       ├── SECRET_CONDITIONAL_FIX.md
│       └── WORKFLOW_FIXES.md
├── .gitignore
├── .mcp.json.backup
├── .mcp.json.template
├── .pyscn
│   ├── .gitignore
│   └── reports
│       └── analyze_20251123_214224.html
├── AGENTS.md
├── archive
│   ├── deployment
│   │   ├── deploy_fastmcp_fixed.sh
│   │   ├── deploy_http_with_mcp.sh
│   │   └── deploy_mcp_v4.sh
│   ├── deployment-configs
│   │   ├── empty_config.yml
│   │   └── smithery.yaml
│   ├── development
│   │   └── test_fastmcp.py
│   ├── docs-removed-2025-08-23
│   │   ├── authentication.md
│   │   ├── claude_integration.md
│   │   ├── claude-code-compatibility.md
│   │   ├── claude-code-integration.md
│   │   ├── claude-code-quickstart.md
│   │   ├── claude-desktop-setup.md
│   │   ├── complete-setup-guide.md
│   │   ├── database-synchronization.md
│   │   ├── development
│   │   │   ├── autonomous-memory-consolidation.md
│   │   │   ├── CLEANUP_PLAN.md
│   │   │   ├── CLEANUP_README.md
│   │   │   ├── CLEANUP_SUMMARY.md
│   │   │   ├── dream-inspired-memory-consolidation.md
│   │   │   ├── hybrid-slm-memory-consolidation.md
│   │   │   ├── mcp-milestone.md
│   │   │   ├── multi-client-architecture.md
│   │   │   ├── test-results.md
│   │   │   └── TIMESTAMP_FIX_SUMMARY.md
│   │   ├── distributed-sync.md
│   │   ├── invocation_guide.md
│   │   ├── macos-intel.md
│   │   ├── master-guide.md
│   │   ├── mcp-client-configuration.md
│   │   ├── multi-client-server.md
│   │   ├── service-installation.md
│   │   ├── sessions
│   │   │   └── MCP_ENHANCEMENT_SESSION_MEMORY_v4.1.0.md
│   │   ├── UBUNTU_SETUP.md
│   │   ├── ubuntu.md
│   │   ├── windows-setup.md
│   │   └── windows.md
│   ├── docs-root-cleanup-2025-08-23
│   │   ├── AWESOME_LIST_SUBMISSION.md
│   │   ├── CLOUDFLARE_IMPLEMENTATION.md
│   │   ├── DOCUMENTATION_ANALYSIS.md
│   │   ├── DOCUMENTATION_CLEANUP_PLAN.md
│   │   ├── DOCUMENTATION_CONSOLIDATION_COMPLETE.md
│   │   ├── LITESTREAM_SETUP_GUIDE.md
│   │   ├── lm_studio_system_prompt.md
│   │   ├── PYTORCH_DOWNLOAD_FIX.md
│   │   └── README-ORIGINAL-BACKUP.md
│   ├── investigations
│   │   └── MACOS_HOOKS_INVESTIGATION.md
│   ├── litestream-configs-v6.3.0
│   │   ├── install_service.sh
│   │   ├── litestream_master_config_fixed.yml
│   │   ├── litestream_master_config.yml
│   │   ├── litestream_replica_config_fixed.yml
│   │   ├── litestream_replica_config.yml
│   │   ├── litestream_replica_simple.yml
│   │   ├── litestream-http.service
│   │   ├── litestream.service
│   │   └── requirements-cloudflare.txt
│   ├── release-notes
│   │   └── release-notes-v7.1.4.md
│   └── setup-development
│       ├── README.md
│       ├── setup_consolidation_mdns.sh
│       ├── STARTUP_SETUP_GUIDE.md
│       └── test_service.sh
├── CHANGELOG-HISTORIC.md
├── CHANGELOG.md
├── claude_commands
│   ├── memory-context.md
│   ├── memory-health.md
│   ├── memory-ingest-dir.md
│   ├── memory-ingest.md
│   ├── memory-recall.md
│   ├── memory-search.md
│   ├── memory-store.md
│   ├── README.md
│   └── session-start.md
├── claude-hooks
│   ├── config.json
│   ├── config.template.json
│   ├── CONFIGURATION.md
│   ├── core
│   │   ├── memory-retrieval.js
│   │   ├── mid-conversation.js
│   │   ├── session-end.js
│   │   ├── session-start.js
│   │   └── topic-change.js
│   ├── debug-pattern-test.js
│   ├── install_claude_hooks_windows.ps1
│   ├── install_hooks.py
│   ├── memory-mode-controller.js
│   ├── MIGRATION.md
│   ├── README-NATURAL-TRIGGERS.md
│   ├── README-phase2.md
│   ├── README.md
│   ├── simple-test.js
│   ├── statusline.sh
│   ├── test-adaptive-weights.js
│   ├── test-dual-protocol-hook.js
│   ├── test-mcp-hook.js
│   ├── test-natural-triggers.js
│   ├── test-recency-scoring.js
│   ├── tests
│   │   ├── integration-test.js
│   │   ├── phase2-integration-test.js
│   │   ├── test-code-execution.js
│   │   ├── test-cross-session.json
│   │   ├── test-session-tracking.json
│   │   └── test-threading.json
│   ├── utilities
│   │   ├── adaptive-pattern-detector.js
│   │   ├── context-formatter.js
│   │   ├── context-shift-detector.js
│   │   ├── conversation-analyzer.js
│   │   ├── dynamic-context-updater.js
│   │   ├── git-analyzer.js
│   │   ├── mcp-client.js
│   │   ├── memory-client.js
│   │   ├── memory-scorer.js
│   │   ├── performance-manager.js
│   │   ├── project-detector.js
│   │   ├── session-tracker.js
│   │   ├── tiered-conversation-monitor.js
│   │   └── version-checker.js
│   └── WINDOWS-SESSIONSTART-BUG.md
├── CLAUDE.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Development-Sprint-November-2025.md
├── docs
│   ├── amp-cli-bridge.md
│   ├── api
│   │   ├── code-execution-interface.md
│   │   ├── memory-metadata-api.md
│   │   ├── PHASE1_IMPLEMENTATION_SUMMARY.md
│   │   ├── PHASE2_IMPLEMENTATION_SUMMARY.md
│   │   ├── PHASE2_REPORT.md
│   │   └── tag-standardization.md
│   ├── architecture
│   │   ├── search-enhancement-spec.md
│   │   └── search-examples.md
│   ├── architecture.md
│   ├── archive
│   │   └── obsolete-workflows
│   │       ├── load_memory_context.md
│   │       └── README.md
│   ├── assets
│   │   └── images
│   │       ├── dashboard-v3.3.0-preview.png
│   │       ├── memory-awareness-hooks-example.png
│   │       ├── project-infographic.svg
│   │       └── README.md
│   ├── CLAUDE_CODE_QUICK_REFERENCE.md
│   ├── cloudflare-setup.md
│   ├── deployment
│   │   ├── docker.md
│   │   ├── dual-service.md
│   │   ├── production-guide.md
│   │   └── systemd-service.md
│   ├── development
│   │   ├── ai-agent-instructions.md
│   │   ├── code-quality
│   │   │   ├── phase-2a-completion.md
│   │   │   ├── phase-2a-handle-get-prompt.md
│   │   │   ├── phase-2a-index.md
│   │   │   ├── phase-2a-install-package.md
│   │   │   └── phase-2b-session-summary.md
│   │   ├── code-quality-workflow.md
│   │   ├── dashboard-workflow.md
│   │   ├── issue-management.md
│   │   ├── pr-review-guide.md
│   │   ├── refactoring-notes.md
│   │   ├── release-checklist.md
│   │   └── todo-tracker.md
│   ├── docker-optimized-build.md
│   ├── document-ingestion.md
│   ├── DOCUMENTATION_AUDIT.md
│   ├── enhancement-roadmap-issue-14.md
│   ├── examples
│   │   ├── analysis-scripts.js
│   │   ├── maintenance-session-example.md
│   │   ├── memory-distribution-chart.jsx
│   │   └── tag-schema.json
│   ├── first-time-setup.md
│   ├── glama-deployment.md
│   ├── guides
│   │   ├── advanced-command-examples.md
│   │   ├── chromadb-migration.md
│   │   ├── commands-vs-mcp-server.md
│   │   ├── mcp-enhancements.md
│   │   ├── mdns-service-discovery.md
│   │   ├── memory-consolidation-guide.md
│   │   ├── migration.md
│   │   ├── scripts.md
│   │   └── STORAGE_BACKENDS.md
│   ├── HOOK_IMPROVEMENTS.md
│   ├── hooks
│   │   └── phase2-code-execution-migration.md
│   ├── http-server-management.md
│   ├── ide-compatability.md
│   ├── IMAGE_RETENTION_POLICY.md
│   ├── images
│   │   └── dashboard-placeholder.md
│   ├── implementation
│   │   ├── health_checks.md
│   │   └── performance.md
│   ├── IMPLEMENTATION_PLAN_HTTP_SSE.md
│   ├── integration
│   │   ├── homebrew.md
│   │   └── multi-client.md
│   ├── integrations
│   │   ├── gemini.md
│   │   ├── groq-bridge.md
│   │   ├── groq-integration-summary.md
│   │   └── groq-model-comparison.md
│   ├── integrations.md
│   ├── legacy
│   │   └── dual-protocol-hooks.md
│   ├── LM_STUDIO_COMPATIBILITY.md
│   ├── maintenance
│   │   └── memory-maintenance.md
│   ├── mastery
│   │   ├── api-reference.md
│   │   ├── architecture-overview.md
│   │   ├── configuration-guide.md
│   │   ├── local-setup-and-run.md
│   │   ├── testing-guide.md
│   │   └── troubleshooting.md
│   ├── migration
│   │   └── code-execution-api-quick-start.md
│   ├── natural-memory-triggers
│   │   ├── cli-reference.md
│   │   ├── installation-guide.md
│   │   └── performance-optimization.md
│   ├── oauth-setup.md
│   ├── pr-graphql-integration.md
│   ├── quick-setup-cloudflare-dual-environment.md
│   ├── README.md
│   ├── remote-configuration-wiki-section.md
│   ├── research
│   │   ├── code-execution-interface-implementation.md
│   │   └── code-execution-interface-summary.md
│   ├── ROADMAP.md
│   ├── sqlite-vec-backend.md
│   ├── statistics
│   │   ├── charts
│   │   │   ├── activity_patterns.png
│   │   │   ├── contributors.png
│   │   │   ├── growth_trajectory.png
│   │   │   ├── monthly_activity.png
│   │   │   └── october_sprint.png
│   │   ├── data
│   │   │   ├── activity_by_day.csv
│   │   │   ├── activity_by_hour.csv
│   │   │   ├── contributors.csv
│   │   │   └── monthly_activity.csv
│   │   ├── generate_charts.py
│   │   └── REPOSITORY_STATISTICS.md
│   ├── technical
│   │   ├── development.md
│   │   ├── memory-migration.md
│   │   ├── migration-log.md
│   │   ├── sqlite-vec-embedding-fixes.md
│   │   └── tag-storage.md
│   ├── testing
│   │   └── regression-tests.md
│   ├── testing-cloudflare-backend.md
│   ├── troubleshooting
│   │   ├── cloudflare-api-token-setup.md
│   │   ├── cloudflare-authentication.md
│   │   ├── general.md
│   │   ├── hooks-quick-reference.md
│   │   ├── pr162-schema-caching-issue.md
│   │   ├── session-end-hooks.md
│   │   └── sync-issues.md
│   └── tutorials
│       ├── advanced-techniques.md
│       ├── data-analysis.md
│       └── demo-session-walkthrough.md
├── examples
│   ├── claude_desktop_config_template.json
│   ├── claude_desktop_config_windows.json
│   ├── claude-desktop-http-config.json
│   ├── config
│   │   └── claude_desktop_config.json
│   ├── http-mcp-bridge.js
│   ├── memory_export_template.json
│   ├── README.md
│   ├── setup
│   │   └── setup_multi_client_complete.py
│   └── start_https_example.sh
├── install_service.py
├── install.py
├── LICENSE
├── NOTICE
├── pyproject.toml
├── pytest.ini
├── README.md
├── run_server.py
├── scripts
│   ├── .claude
│   │   └── settings.local.json
│   ├── archive
│   │   └── check_missing_timestamps.py
│   ├── backup
│   │   ├── backup_memories.py
│   │   ├── backup_sqlite_vec.sh
│   │   ├── export_distributable_memories.sh
│   │   └── restore_memories.py
│   ├── benchmarks
│   │   ├── benchmark_code_execution_api.py
│   │   ├── benchmark_hybrid_sync.py
│   │   └── benchmark_server_caching.py
│   ├── database
│   │   ├── analyze_sqlite_vec_db.py
│   │   ├── check_sqlite_vec_status.py
│   │   ├── db_health_check.py
│   │   └── simple_timestamp_check.py
│   ├── development
│   │   ├── debug_server_initialization.py
│   │   ├── find_orphaned_files.py
│   │   ├── fix_mdns.sh
│   │   ├── fix_sitecustomize.py
│   │   ├── remote_ingest.sh
│   │   ├── setup-git-merge-drivers.sh
│   │   ├── uv-lock-merge.sh
│   │   └── verify_hybrid_sync.py
│   ├── hooks
│   │   └── pre-commit
│   ├── installation
│   │   ├── install_linux_service.py
│   │   ├── install_macos_service.py
│   │   ├── install_uv.py
│   │   ├── install_windows_service.py
│   │   ├── install.py
│   │   ├── setup_backup_cron.sh
│   │   ├── setup_claude_mcp.sh
│   │   └── setup_cloudflare_resources.py
│   ├── linux
│   │   ├── service_status.sh
│   │   ├── start_service.sh
│   │   ├── stop_service.sh
│   │   ├── uninstall_service.sh
│   │   └── view_logs.sh
│   ├── maintenance
│   │   ├── assign_memory_types.py
│   │   ├── check_memory_types.py
│   │   ├── cleanup_corrupted_encoding.py
│   │   ├── cleanup_memories.py
│   │   ├── cleanup_organize.py
│   │   ├── consolidate_memory_types.py
│   │   ├── consolidation_mappings.json
│   │   ├── delete_orphaned_vectors_fixed.py
│   │   ├── fast_cleanup_duplicates_with_tracking.sh
│   │   ├── find_all_duplicates.py
│   │   ├── find_cloudflare_duplicates.py
│   │   ├── find_duplicates.py
│   │   ├── memory-types.md
│   │   ├── README.md
│   │   ├── recover_timestamps_from_cloudflare.py
│   │   ├── regenerate_embeddings.py
│   │   ├── repair_malformed_tags.py
│   │   ├── repair_memories.py
│   │   ├── repair_sqlite_vec_embeddings.py
│   │   ├── repair_zero_embeddings.py
│   │   ├── restore_from_json_export.py
│   │   └── scan_todos.sh
│   ├── migration
│   │   ├── cleanup_mcp_timestamps.py
│   │   ├── legacy
│   │   │   └── migrate_chroma_to_sqlite.py
│   │   ├── mcp-migration.py
│   │   ├── migrate_sqlite_vec_embeddings.py
│   │   ├── migrate_storage.py
│   │   ├── migrate_tags.py
│   │   ├── migrate_timestamps.py
│   │   ├── migrate_to_cloudflare.py
│   │   ├── migrate_to_sqlite_vec.py
│   │   ├── migrate_v5_enhanced.py
│   │   ├── TIMESTAMP_CLEANUP_README.md
│   │   └── verify_mcp_timestamps.py
│   ├── pr
│   │   ├── amp_collect_results.sh
│   │   ├── amp_detect_breaking_changes.sh
│   │   ├── amp_generate_tests.sh
│   │   ├── amp_pr_review.sh
│   │   ├── amp_quality_gate.sh
│   │   ├── amp_suggest_fixes.sh
│   │   ├── auto_review.sh
│   │   ├── detect_breaking_changes.sh
│   │   ├── generate_tests.sh
│   │   ├── lib
│   │   │   └── graphql_helpers.sh
│   │   ├── quality_gate.sh
│   │   ├── resolve_threads.sh
│   │   ├── run_pyscn_analysis.sh
│   │   ├── run_quality_checks.sh
│   │   ├── thread_status.sh
│   │   └── watch_reviews.sh
│   ├── quality
│   │   ├── fix_dead_code_install.sh
│   │   ├── phase1_dead_code_analysis.md
│   │   ├── phase2_complexity_analysis.md
│   │   ├── README_PHASE1.md
│   │   ├── README_PHASE2.md
│   │   ├── track_pyscn_metrics.sh
│   │   └── weekly_quality_review.sh
│   ├── README.md
│   ├── run
│   │   ├── run_mcp_memory.sh
│   │   ├── run-with-uv.sh
│   │   └── start_sqlite_vec.sh
│   ├── run_memory_server.py
│   ├── server
│   │   ├── check_http_server.py
│   │   ├── check_server_health.py
│   │   ├── memory_offline.py
│   │   ├── preload_models.py
│   │   ├── run_http_server.py
│   │   ├── run_memory_server.py
│   │   ├── start_http_server.bat
│   │   └── start_http_server.sh
│   ├── service
│   │   ├── deploy_dual_services.sh
│   │   ├── install_http_service.sh
│   │   ├── mcp-memory-http.service
│   │   ├── mcp-memory.service
│   │   ├── memory_service_manager.sh
│   │   ├── service_control.sh
│   │   ├── service_utils.py
│   │   └── update_service.sh
│   ├── sync
│   │   ├── check_drift.py
│   │   ├── claude_sync_commands.py
│   │   ├── export_memories.py
│   │   ├── import_memories.py
│   │   ├── litestream
│   │   │   ├── apply_local_changes.sh
│   │   │   ├── enhanced_memory_store.sh
│   │   │   ├── init_staging_db.sh
│   │   │   ├── io.litestream.replication.plist
│   │   │   ├── manual_sync.sh
│   │   │   ├── memory_sync.sh
│   │   │   ├── pull_remote_changes.sh
│   │   │   ├── push_to_remote.sh
│   │   │   ├── README.md
│   │   │   ├── resolve_conflicts.sh
│   │   │   ├── setup_local_litestream.sh
│   │   │   ├── setup_remote_litestream.sh
│   │   │   ├── staging_db_init.sql
│   │   │   ├── stash_local_changes.sh
│   │   │   ├── sync_from_remote_noconfig.sh
│   │   │   └── sync_from_remote.sh
│   │   ├── README.md
│   │   ├── safe_cloudflare_update.sh
│   │   ├── sync_memory_backends.py
│   │   └── sync_now.py
│   ├── testing
│   │   ├── run_complete_test.py
│   │   ├── run_memory_test.sh
│   │   ├── simple_test.py
│   │   ├── test_cleanup_logic.py
│   │   ├── test_cloudflare_backend.py
│   │   ├── test_docker_functionality.py
│   │   ├── test_installation.py
│   │   ├── test_mdns.py
│   │   ├── test_memory_api.py
│   │   ├── test_memory_simple.py
│   │   ├── test_migration.py
│   │   ├── test_search_api.py
│   │   ├── test_sqlite_vec_embeddings.py
│   │   ├── test_sse_events.py
│   │   ├── test-connection.py
│   │   └── test-hook.js
│   ├── utils
│   │   ├── claude_commands_utils.py
│   │   ├── generate_personalized_claude_md.sh
│   │   ├── groq
│   │   ├── groq_agent_bridge.py
│   │   ├── list-collections.py
│   │   ├── memory_wrapper_uv.py
│   │   ├── query_memories.py
│   │   ├── smithery_wrapper.py
│   │   ├── test_groq_bridge.sh
│   │   └── uv_wrapper.py
│   └── validation
│       ├── check_dev_setup.py
│       ├── check_documentation_links.py
│       ├── diagnose_backend_config.py
│       ├── validate_configuration_complete.py
│       ├── validate_memories.py
│       ├── validate_migration.py
│       ├── validate_timestamp_integrity.py
│       ├── verify_environment.py
│       ├── verify_pytorch_windows.py
│       └── verify_torch.py
├── SECURITY.md
├── selective_timestamp_recovery.py
├── SPONSORS.md
├── src
│   └── mcp_memory_service
│       ├── __init__.py
│       ├── api
│       │   ├── __init__.py
│       │   ├── client.py
│       │   ├── operations.py
│       │   ├── sync_wrapper.py
│       │   └── types.py
│       ├── backup
│       │   ├── __init__.py
│       │   └── scheduler.py
│       ├── cli
│       │   ├── __init__.py
│       │   ├── ingestion.py
│       │   ├── main.py
│       │   └── utils.py
│       ├── config.py
│       ├── consolidation
│       │   ├── __init__.py
│       │   ├── associations.py
│       │   ├── base.py
│       │   ├── clustering.py
│       │   ├── compression.py
│       │   ├── consolidator.py
│       │   ├── decay.py
│       │   ├── forgetting.py
│       │   ├── health.py
│       │   └── scheduler.py
│       ├── dependency_check.py
│       ├── discovery
│       │   ├── __init__.py
│       │   ├── client.py
│       │   └── mdns_service.py
│       ├── embeddings
│       │   ├── __init__.py
│       │   └── onnx_embeddings.py
│       ├── ingestion
│       │   ├── __init__.py
│       │   ├── base.py
│       │   ├── chunker.py
│       │   ├── csv_loader.py
│       │   ├── json_loader.py
│       │   ├── pdf_loader.py
│       │   ├── registry.py
│       │   ├── semtools_loader.py
│       │   └── text_loader.py
│       ├── lm_studio_compat.py
│       ├── mcp_server.py
│       ├── models
│       │   ├── __init__.py
│       │   └── memory.py
│       ├── server.py
│       ├── services
│       │   ├── __init__.py
│       │   └── memory_service.py
│       ├── storage
│       │   ├── __init__.py
│       │   ├── base.py
│       │   ├── cloudflare.py
│       │   ├── factory.py
│       │   ├── http_client.py
│       │   ├── hybrid.py
│       │   └── sqlite_vec.py
│       ├── sync
│       │   ├── __init__.py
│       │   ├── exporter.py
│       │   ├── importer.py
│       │   └── litestream_config.py
│       ├── utils
│       │   ├── __init__.py
│       │   ├── cache_manager.py
│       │   ├── content_splitter.py
│       │   ├── db_utils.py
│       │   ├── debug.py
│       │   ├── document_processing.py
│       │   ├── gpu_detection.py
│       │   ├── hashing.py
│       │   ├── http_server_manager.py
│       │   ├── port_detection.py
│       │   ├── system_detection.py
│       │   └── time_parser.py
│       └── web
│           ├── __init__.py
│           ├── api
│           │   ├── __init__.py
│           │   ├── analytics.py
│           │   ├── backup.py
│           │   ├── consolidation.py
│           │   ├── documents.py
│           │   ├── events.py
│           │   ├── health.py
│           │   ├── manage.py
│           │   ├── mcp.py
│           │   ├── memories.py
│           │   ├── search.py
│           │   └── sync.py
│           ├── app.py
│           ├── dependencies.py
│           ├── oauth
│           │   ├── __init__.py
│           │   ├── authorization.py
│           │   ├── discovery.py
│           │   ├── middleware.py
│           │   ├── models.py
│           │   ├── registration.py
│           │   └── storage.py
│           ├── sse.py
│           └── static
│               ├── app.js
│               ├── index.html
│               ├── README.md
│               ├── sse_test.html
│               └── style.css
├── start_http_debug.bat
├── start_http_server.sh
├── test_document.txt
├── test_version_checker.js
├── tests
│   ├── __init__.py
│   ├── api
│   │   ├── __init__.py
│   │   ├── test_compact_types.py
│   │   └── test_operations.py
│   ├── bridge
│   │   ├── mock_responses.js
│   │   ├── package-lock.json
│   │   ├── package.json
│   │   └── test_http_mcp_bridge.js
│   ├── conftest.py
│   ├── consolidation
│   │   ├── __init__.py
│   │   ├── conftest.py
│   │   ├── test_associations.py
│   │   ├── test_clustering.py
│   │   ├── test_compression.py
│   │   ├── test_consolidator.py
│   │   ├── test_decay.py
│   │   └── test_forgetting.py
│   ├── contracts
│   │   └── api-specification.yml
│   ├── integration
│   │   ├── package-lock.json
│   │   ├── package.json
│   │   ├── test_api_key_fallback.py
│   │   ├── test_api_memories_chronological.py
│   │   ├── test_api_tag_time_search.py
│   │   ├── test_api_with_memory_service.py
│   │   ├── test_bridge_integration.js
│   │   ├── test_cli_interfaces.py
│   │   ├── test_cloudflare_connection.py
│   │   ├── test_concurrent_clients.py
│   │   ├── test_data_serialization_consistency.py
│   │   ├── test_http_server_startup.py
│   │   ├── test_mcp_memory.py
│   │   ├── test_mdns_integration.py
│   │   ├── test_oauth_basic_auth.py
│   │   ├── test_oauth_flow.py
│   │   ├── test_server_handlers.py
│   │   └── test_store_memory.py
│   ├── performance
│   │   ├── test_background_sync.py
│   │   └── test_hybrid_live.py
│   ├── README.md
│   ├── smithery
│   │   └── test_smithery.py
│   ├── sqlite
│   │   └── simple_sqlite_vec_test.py
│   ├── test_client.py
│   ├── test_content_splitting.py
│   ├── test_database.py
│   ├── test_hybrid_cloudflare_limits.py
│   ├── test_hybrid_storage.py
│   ├── test_memory_ops.py
│   ├── test_semantic_search.py
│   ├── test_sqlite_vec_storage.py
│   ├── test_time_parser.py
│   ├── test_timestamp_preservation.py
│   ├── timestamp
│   │   ├── test_hook_vs_manual_storage.py
│   │   ├── test_issue99_final_validation.py
│   │   ├── test_search_retrieval_inconsistency.py
│   │   ├── test_timestamp_issue.py
│   │   └── test_timestamp_simple.py
│   └── unit
│       ├── conftest.py
│       ├── test_cloudflare_storage.py
│       ├── test_csv_loader.py
│       ├── test_fastapi_dependencies.py
│       ├── test_import.py
│       ├── test_json_loader.py
│       ├── test_mdns_simple.py
│       ├── test_mdns.py
│       ├── test_memory_service.py
│       ├── test_memory.py
│       ├── test_semtools_loader.py
│       ├── test_storage_interface_compatibility.py
│       └── test_tag_time_filtering.py
├── tools
│   ├── docker
│   │   ├── DEPRECATED.md
│   │   ├── docker-compose.http.yml
│   │   ├── docker-compose.pythonpath.yml
│   │   ├── docker-compose.standalone.yml
│   │   ├── docker-compose.uv.yml
│   │   ├── docker-compose.yml
│   │   ├── docker-entrypoint-persistent.sh
│   │   ├── docker-entrypoint-unified.sh
│   │   ├── docker-entrypoint.sh
│   │   ├── Dockerfile
│   │   ├── Dockerfile.glama
│   │   ├── Dockerfile.slim
│   │   ├── README.md
│   │   └── test-docker-modes.sh
│   └── README.md
└── uv.lock
```

# Files

--------------------------------------------------------------------------------
/.envnn#:
--------------------------------------------------------------------------------

```

```

--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------

```
# Auto-resolve lock file conflicts and regenerate lock file
uv.lock merge=uv-lock-merge
```

--------------------------------------------------------------------------------
/.env.sqlite.backup:
--------------------------------------------------------------------------------

```
# SQLite-vec Configuration for MCP Memory Service (Backup)
MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
MCP_MEMORY_SQLITE_PATH=/home/hkr/.local/share/mcp-memory/primary_sqlite_vec.db

```

--------------------------------------------------------------------------------
/.pyscn/.gitignore:
--------------------------------------------------------------------------------

```
# pyscn generated files

# Analysis reports (HTML, JSON)
reports/

# Historical metrics tracking
history/

# Temporary analysis files
*.tmp
*.log

# Keep directory structure
!.gitignore

```

--------------------------------------------------------------------------------
/.mcp.json.template:
--------------------------------------------------------------------------------

```
{
  "mcpServers": {
    "memory-service": {
      "type": "stdio",
      "command": "python",
      "args": ["scripts/run_memory_server.py"],
      "env": {
        "MCP_MEMORY_CHROMA_PATH": "{{USER_HOME}}/.mcp_memory_chroma",
        "LOG_LEVEL": "INFO",
        "MCP_TIMEOUT": "30000"
      }
    }
  }
}
```

--------------------------------------------------------------------------------
/.mcp.json.backup:
--------------------------------------------------------------------------------

```
{
  "mcpServers": {
    "memory-service": {
      "type": "stdio",
      "command": "python",
      "args": [
        "scripts/run_memory_server.py"
      ],
      "env": {
        "MCP_MEMORY_CHROMA_PATH": "/home/hkr/.mcp_memory_chroma",
        "LOG_LEVEL": "INFO",
        "MCP_TIMEOUT": "30000"
      }
    },
    "memory": {
      "command": "uv",
      "args": [
        "--directory",
        "/home/hkr/repositories/mcp-memory-service",
        "run",
        "memory"
      ],
      "env": {
        "MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec",
        "MCP_MEMORY_SQLITE_PRAGMAS": "busy_timeout=15000,cache_size=20000",
        "LOG_LEVEL": "INFO"
      }
    }
  }
}
```

--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------

```
# Virtual Environments
.venv/
venv/
py310_venv/
venv_py310/
env/
.env
.Python

# Python Cache Files
__pycache__/
*.py[cod]
*$py.class
*.so
.pytest_cache/
.tox/
.coverage
coverage.*
.cache/
.hypothesis/

# Git Repository
.git/
.gitignore

# Build Artifacts
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
__pypackages__/

# Development Files
*.log
*.bak
*.tmp
*.old
*.swp
*.swo
test_output.txt
dxdiag_output.txt
dxdiag.txt
development_notes.md
migration_*.log
deploy_test*.sh

# IDE and Editor Files
.vscode/
.idea/
.codegpt/

# OS Specific Files
.DS_Store
.AppleDouble
.LSOverride
Thumbs.db
Desktop.ini

# Database and Storage Directories
chroma_db/
.mcp_memory_chroma/
.mcp_memory_sqlite/
*.sqlite
*.sqlite-*
*.sqlite3
backups/

# Configuration Files (may contain sensitive data)
claude_desktop_config_updated.json
claude_config/
.claude/
.mcp.json
CLAUDE_MEMORY.md
*_reference_memories_*.json

# Test Artifacts
tests/test_db/
/tmp/test_mcp*.py
test_mcp*.py
debug_mcp*.py
*_test.py.old
test_*.py.backup
*.cover
nosetests.xml
coverage.xml

# Temporary and Personal Files
YOUR_PERSONALIZED_SETUP_GUIDE.md
CLAUDE_PERSONALIZED.md
*_old
*_backup

# Aider and AI Tools
.aider.*

# Documentation Build Artifacts
docs/_build/
docs/site/

# Archive Directories
archive/
```

--------------------------------------------------------------------------------
/.commit-message:
--------------------------------------------------------------------------------

```
fix: address Gemini code review feedback (PR #209)

CRITICAL FIXES:
1. Fix async/await issue in operations.py health() function
   - Changed get_storage() to await get_storage_async()
   - Ensures proper async execution throughout call stack

2. Fix hardcoded macOS path in client.py
   - Replaced ~/Library/Application Support with cross-platform get_base_directory()
   - Works on macOS, Linux, and Windows

DOCUMENTATION IMPROVEMENTS:
3. Replace Unicode multiplication symbols (×) with ASCII (x)
   - Fixed 6 documentation files for better compatibility
   - Prevents encoding issues in terminals and editors

4. Replace absolute local paths with relative repo paths
   - Removed /Users/hkr/Documents/GitHub/mcp-memory-service/
   - Now uses relative paths (src/, docs/)

RESOURCE MANAGEMENT:
5. Add explicit close() and close_async() methods
   - Proper resource cleanup for storage backends
   - Sync and async versions for different contexts
   - Exported in public API

NEW FEATURES:
6. Write 5-minute migration guide
   - Located at docs/migration/code-execution-api-quick-start.md
   - Covers fresh install and upgrade paths
   - Includes troubleshooting and ROI calculator

7. Enable code execution by default in installer
   - Auto-detects Python path (python3 for Unix, python for Windows)
   - Adds codeExecution config to hooks
   - Updates both generate_basic_config() and generate_hooks_config_from_mcp()
   - Success messaging shows 75-90% token reduction benefits

TESTING:
- API tests pass (23/42 passing, 19 failures are pre-existing database duplicates)
- Manual validation confirms health() works correctly
- Cross-platform path handling verified

FILES CHANGED:
- src/mcp_memory_service/api/operations.py (async fix)
- src/mcp_memory_service/api/client.py (paths + close methods)
- src/mcp_memory_service/api/__init__.py (export close functions)
- claude-hooks/install_hooks.py (code execution config)
- claude-hooks/config.json (add codeExecution section)
- docs/migration/code-execution-api-quick-start.md (NEW)
- docs/api/* (Unicode + path fixes)
- docs/research/* (Unicode + path fixes)

Ready for PR #209 merge.

Generated with Claude Code

Co-Authored-By: Claude <[email protected]>

```

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------

```

# Aider files
.aider.*

# Private/sensitive documents
docs/saas-monetization-strategy.md

# Local memory exports (contain sensitive data)
local_export*.json

# Python
__pycache__/
*.py[cod]
*$py.class
*.so

# Python environment & build artifacts
.Python
env/
.venv/
venv/
py310_venv/
venv_py310/
.uv/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
!scripts/pr/lib/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# Python project tools
pip-wheel-metadata/
__pypackages__/

# Virtual Environment activation files (redundant due to .venv/ but explicit if needed)
.env
.env.local
.env.development
.env.test
.env.production
# But include template files
!.env.example

# IDEs & Editors
.idea/
.vscode/
*.swp
*.swo

# OS-specific files
.DS_Store
.AppleDouble
.LSOverride

# CodeGPT / Extensions
.codegpt/

# ChromaDB artifacts
chroma_db/
tests/test_db/chroma.sqlite3

# SQLite-vec artifacts
*.sqlite
*.sqlite-*
.mcp_memory_sqlite/
.mcp_memory_chroma/

# pyscn analysis artifacts (reports and history tracked in .pyscn/.gitignore)
.pyscn/reports/
.pyscn/history/

# Project-specific artifacts
backups/
test_output.txt
dxdiag_output.txt
dxdiag.txt
claude_desktop_config_updated.json
claude_config/claude_desktop_config.json

# Remove these if mistakenly included
=1.0.0,
=11.0.3
=*.*.*
**vX.Y.Z**
**vX.Y.Z**:
timestamp
updated_at

# Logs and debugging
*.log
*.bak
*.tmp
*.old

# Test and development artifacts
test_*.py.backup
*_test.py.old
development_notes.md

# Temporary MCP test files
/tmp/test_mcp*.py
test_mcp*.py
debug_mcp*.py

# Migration artifacts
migration_*.log
deploy_test*.sh

# Optional: VSCode debugging & Python caches
*.coverage
coverage.*
.cache/
.pytest_cache/
.tox/
nosetests.xml
coverage.xml
*.cover
.hypothesis/
claude_config/claude_desktop_config.json
.claude/mcp_config.json

# Personalized setup guides (generated locally)
YOUR_PERSONALIZED_SETUP_GUIDE.md
.mcp.json

# Local memory service configuration (contains private endpoints/keys)
CLAUDE_MEMORY.md
*_reference_memories_*.json

# Claude Code local settings (personal configurations)
.claude/settings.local.json*
scripts/.claude/settings.local.json*

# Amp CLI Bridge (entire directory - user-specific working space)
.claude/amp/

# Personal files
CLAUDE_PERSONALIZED.md
CLAUDE.local.md
SESSION_MEMORY_2025-08-11.md

# Context crystallizer artifacts
.context-crystallizer/

# Git tools
.git-rewrite/

# Node.js dependencies and artifacts
node_modules/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
package-lock.json
.npm
.mcp.json.disabled
data/

# GitHub wiki clone (managed separately)
.wiki/

# Keep directory structure but ignore content
!.claude/amp/prompts/pending/.gitkeep
!.claude/amp/responses/ready/.gitkeep
!.claude/amp/responses/consumed/.gitkeep
.local/

# Social media posts
social-media/

```

--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------

```
# Cloudflare Configuration for MCP Memory Service
# ================================================
# Copy this file to .env and replace with your actual credentials
#
# Setup Instructions:
# 1. Copy this file: cp .env.example .env
# 2. Create Cloudflare API Token at: https://dash.cloudflare.com/profile/api-tokens
# 3. Replace placeholder values below with your actual credentials
# 4. Never commit your .env file to git (it's already in .gitignore)

# =============================================================================
# REQUIRED: Cloudflare API Token
# =============================================================================
# Create at: https://dash.cloudflare.com/profile/api-tokens
# Required permissions:
#   - Account: Cloudflare Workers:Edit
#   - Zone Resources: Include All zones
#   - Account Resources: Include All accounts
#
# IMPORTANT: Test your token with the account-scoped endpoint:
#   curl "https://api.cloudflare.com/client/v4/accounts/{ACCOUNT_ID}/tokens/verify" \
#        -H "Authorization: Bearer {YOUR_TOKEN}"
#
# DO NOT use the generic endpoint (will fail for scoped tokens):
#   curl "https://api.cloudflare.com/client/v4/user/tokens/verify" ❌
CLOUDFLARE_API_TOKEN=your-cloudflare-api-token-here

# =============================================================================
# REQUIRED: Cloudflare Account ID
# =============================================================================
# Find in: Cloudflare Dashboard > Right sidebar under "Account ID"
# Example: be0e35a26715043ef8df90253268c33f
CLOUDFLARE_ACCOUNT_ID=your-account-id-here

# =============================================================================
# REQUIRED: D1 Database ID
# =============================================================================
# Create with: wrangler d1 create mcp-memory-database
# Or find existing: wrangler d1 list
# Example: f745e9b4-ba8e-4d47-b38f-12af91060d5a
CLOUDFLARE_D1_DATABASE_ID=your-d1-database-id-here

# =============================================================================
# REQUIRED: Vectorize Index Name
# =============================================================================
# Create with: wrangler vectorize create mcp-memory-index --dimensions=384
# Or find existing: wrangler vectorize list
# Example: mcp-memory-index
CLOUDFLARE_VECTORIZE_INDEX=your-vectorize-index-name

# =============================================================================
# OPTIONAL: R2 Bucket for Large Content Storage
# =============================================================================
# Create with: wrangler r2 bucket create mcp-memory-content
# Only needed if you plan to store large content (>1MB)
# CLOUDFLARE_R2_BUCKET=mcp-memory-content

# =============================================================================
# STORAGE BACKEND CONFIGURATION
# =============================================================================
# Options: sqlite_vec | cloudflare | hybrid
# - sqlite_vec: Fast local storage (development)
# - cloudflare: Cloud storage with Cloudflare (production)
# - hybrid: Best of both - local speed + cloud persistence (recommended)
MCP_MEMORY_STORAGE_BACKEND=cloudflare

# =============================================================================
# OPTIONAL: Advanced Configuration
# =============================================================================

# Cloudflare embedding model (default is recommended)
# CLOUDFLARE_EMBEDDING_MODEL=@cf/baai/bge-base-en-v1.5

# Large content threshold for R2 storage (bytes)
# CLOUDFLARE_LARGE_CONTENT_THRESHOLD=1048576

# HTTP Interface (Web Dashboard)
# MCP_HTTP_ENABLED=true
# MCP_HTTP_PORT=8888
# MCP_HTTPS_ENABLED=true
# MCP_HTTPS_PORT=8443

# OAuth 2.1 Authentication (for web interface)
# MCP_OAUTH_ENABLED=false

# Hybrid Backend Configuration (if using hybrid)
# MCP_HYBRID_SYNC_INTERVAL=300      # Sync every 5 minutes
# MCP_HYBRID_BATCH_SIZE=50          # Sync 50 operations at a time
# MCP_HYBRID_SYNC_ON_STARTUP=true   # Initial sync on startup

# =============================================================================
# TROUBLESHOOTING
# =============================================================================
# Common issues:
# 1. "Invalid API Token" - Check token permissions and expiry
# 2. "Database not found" - Verify D1 database ID is correct
# 3. "Vectorize index not found" - Check index name and dimensions (384)
# 4. "Account access denied" - Ensure API token has account permissions
#
# Documentation: https://github.com/doobidoo/mcp-memory-service/wiki
# Support: https://github.com/doobidoo/mcp-memory-service/issues
```

--------------------------------------------------------------------------------
/tests/README.md:
--------------------------------------------------------------------------------

```markdown
# MCP-MEMORY-SERVICE Tests

This directory contains tests for the MCP-MEMORY-SERVICE project.

## Directory Structure

- `integration/` - Integration tests between components
- `unit/` - Unit tests for individual components
- `performance/` - Performance benchmarks

## Running Tests

```bash
# Run all tests
pytest

# Run specific test category
pytest tests/unit/
pytest tests/integration/
pytest tests/performance/
```

```

--------------------------------------------------------------------------------
/archive/setup-development/README.md:
--------------------------------------------------------------------------------

```markdown
# Development Files Archive

This directory contains files used during the development and setup process:

## 📁 Archived Files

- **`setup_consolidation_mdns.sh`** - Original manual startup script (superseded by systemd service)
- **`test_service.sh`** - Debug script for troubleshooting service startup issues
- **`STARTUP_SETUP_GUIDE.md`** - Original startup guide (superseded by COMPLETE_SETUP_GUIDE.md)

## 🔄 Superseded By

These files were used during development but are now superseded by:

- **Production Service**: `mcp-memory.service` + `service_control.sh`
- **Complete Documentation**: `COMPLETE_SETUP_GUIDE.md`
- **Quick Start**: `README_PRODUCTION.md`

## 🗂️ Purpose

These files are kept for:
- Historical reference
- Debugging if needed
- Understanding the development process
- Potential future troubleshooting

**Note**: Use the production files in the root directory for normal operation.
```

--------------------------------------------------------------------------------
/tools/README.md:
--------------------------------------------------------------------------------

```markdown
# Development Tools and Utilities

This directory contains development tools, build utilities, and deployment configurations for MCP Memory Service.

## Directory Structure

### `/build/` - Build Tools
- `setup.py` - Python package build configuration
- Build scripts and packaging utilities

### `/deployments/` - Deployment Tools
- `cloudflare/` - Cloudflare Workers deployment configuration
- Cloud platform deployment scripts and configurations

### `/docker/` - Docker Tools
- Multiple Docker configurations and Dockerfiles
- Docker Compose files for different deployment scenarios
- Docker utility scripts and entrypoints

## Usage

### Build Tools
```bash
# Build Python package
cd tools/build
python setup.py sdist bdist_wheel
```

### Docker Deployment
```bash
# Use various Docker configurations
cd tools/docker
docker-compose -f docker-compose.yml up
docker-compose -f docker-compose.standalone.yml up
```

### Cloudflare Workers
```bash
# Deploy to Cloudflare Workers
cd tools/deployments/cloudflare
npm install
wrangler deploy
```

## Related Documentation

- [Docker Deployment Guide](../docs/deployment/docker.md) - Comprehensive Docker setup
- [Installation Guide](../docs/installation/master-guide.md) - General installation
- [Development Guide](../docs/technical/development.md) - Development setup
```

--------------------------------------------------------------------------------
/docs/assets/images/README.md:
--------------------------------------------------------------------------------

```markdown
# MCP Memory Service Screenshots

This directory contains screenshots and visual assets for the MCP Memory Service documentation.

## Dashboard Screenshots

### v3.3.0 Dashboard Features

The modern dashboard includes:

![Dashboard Layout](dashboard-v3.3.0-preview.png)

**Key Features:**
- **Modern Design**: Gradient backgrounds with professional card layout
- **Live Statistics**: Real-time server metrics including:
  - Total memories count
  - Embedding model information  
  - Server health status
  - Response time metrics
- **Interactive Endpoints**: Organized API documentation with hover effects
- **Tech Stack Badges**: Visual representation of FastAPI, SQLite-vec, PyTorch, etc.
- **Responsive Layout**: Works on desktop and mobile devices
- **Auto-Refresh**: Stats update every 30 seconds

## Access URLs

- **Dashboard**: http://localhost:8000
- **mDNS**: http://mcp-memory-service.local:8000  
- **API Docs**: http://localhost:8000/api/docs
- **ReDoc**: http://localhost:8000/api/redoc

## Screenshot Instructions

To capture the dashboard for documentation:

1. Ensure the HTTP server is running: `python scripts/run_http_server.py`
2. Wait for stats to load (shows actual memory count)
3. Take full-page screenshot at 1920x1080 resolution
4. Save as `dashboard-v3.3.0.png` in this directory
5. Update README.md references accordingly

## File Naming Convention

- `dashboard-v{version}.png` - Main dashboard screenshots
- `api-docs-v{version}.png` - API documentation screenshots  
- `mobile-v{version}.png` - Mobile responsive views
- `feature-{name}-v{version}.png` - Specific feature screenshots
```

--------------------------------------------------------------------------------
/tools/docker/README.md:
--------------------------------------------------------------------------------

```markdown
# Docker Setup for MCP Memory Service

## 🚀 Quick Start

Choose your mode:

### MCP Protocol Mode (for Claude Desktop, VS Code)
```bash
docker-compose up -d
```

### HTTP API Mode (for REST API, Web Dashboard)
```bash
docker-compose -f docker-compose.http.yml up -d
```

## 📝 What's New (v5.0.4)

Thanks to feedback from Joe Esposito, we've completely simplified the Docker setup:

### ✅ Fixed Issues
- **PYTHONPATH** now correctly set to `/app/src`
- **run_server.py** properly copied for HTTP mode
- **Embedding models** pre-downloaded during build (no runtime failures)

### 🎯 Simplified Structure
- **2 clear modes** instead of 4 confusing variants
- **Unified entrypoint** that auto-detects mode
- **Single Dockerfile** for all configurations

## 🔧 Configuration

### Environment Variables

| Variable | Description | Default |
|----------|-------------|---------|
| `MCP_MODE` | Operation mode: `mcp` or `http` | `mcp` |
| `MCP_API_KEY` | API key for HTTP mode | `your-secure-api-key-here` |
| `HTTP_PORT` | Host port for HTTP mode | `8000` |
| `LOG_LEVEL` | Logging level | `INFO` |

### Volume Mounts

All data is stored in a single `./data` directory:
- SQLite database: `./data/sqlite_vec.db`
- Backups: `./data/backups/`

## 🧪 Testing

Run the test script to verify both modes work:
```bash
./test-docker-modes.sh
```

## 📊 HTTP Mode Endpoints

When running in HTTP mode:
- **Dashboard**: http://localhost:8000/
- **API Docs**: http://localhost:8000/api/docs
- **Health Check**: http://localhost:8000/api/health

## 🔄 Migration from Old Setup

If you were using the old Docker files:

| Old File | New Alternative |
|----------|-----------------|
| `docker-compose.standalone.yml` | Use `docker-compose.http.yml` |
| `docker-compose.uv.yml` | UV is now built-in |
| `docker-compose.pythonpath.yml` | Fixed in main Dockerfile |

See [DEPRECATED.md](./DEPRECATED.md) for details.

## 🐛 Troubleshooting

### Container exits immediately
- For HTTP mode: Check logs with `docker-compose -f docker-compose.http.yml logs`
- Ensure `MCP_MODE=http` is set in environment

### Cannot connect to HTTP endpoints
- Verify container is running: `docker ps`
- Check port mapping: `docker port <container_name>`
- Test health: `curl http://localhost:8000/api/health`

### Embedding model errors
- Models are pre-downloaded during build
- If issues persist, rebuild: `docker-compose build --no-cache`

## 🙏 Credits

Special thanks to **Joe Esposito** for identifying and helping fix the Docker setup issues!
```

--------------------------------------------------------------------------------
/examples/README.md:
--------------------------------------------------------------------------------

```markdown
# MCP Memory Service Examples

This directory contains example configurations, scripts, and setup utilities for deploying MCP Memory Service in various scenarios.

## Directory Structure

### `/config/` - Configuration Examples
- Example Claude Desktop configurations
- Template configuration files for different deployment scenarios
- MCP server configuration samples

### `/setup/` - Setup Scripts and Utilities  
- Multi-client setup scripts
- Automated configuration tools
- Installation helpers

## Core Files

### `http-mcp-bridge.js`
A Node.js script that bridges the MCP JSON-RPC protocol to HTTP REST API calls. This allows MCP clients like Claude Desktop to connect to a remote HTTP server instead of running a local instance.

**Usage:**
1. Configure your server endpoint and API key as environment variables
2. Use this script as the MCP server command in your client configuration

### `claude-desktop-http-config.json`
Example Claude Desktop configuration for connecting to a remote MCP Memory Service HTTP server via the bridge script.

**Setup:**
1. Update the path to `http-mcp-bridge.js`
2. Set your server endpoint URL
3. Add your API key (if authentication is enabled)
4. Copy this configuration to your Claude Desktop config file

### `codex-mcp-config.json`
Example Codex configuration using `mcp-proxy` to bridge stdio to the service’s Streamable HTTP endpoint at `/mcp`.

**Setup:**
1. Install the proxy: `pipx install mcp-proxy` (or `uv tool install mcp-proxy`)
2. Set server API key on the server: `export MCP_API_KEY=...`
3. Copy this file and adjust `your-server` and API key
4. Place it in Codex’s MCP config location (see Codex docs)

Why proxy? Codex does not support HTTP transports natively and requires a stdio bridge.

## Quick Start

### 1. Server Setup
```bash
# On your server machine
cd mcp-memory-service
python install.py --server-mode --storage-backend sqlite_vec
export MCP_HTTP_HOST=0.0.0.0
export MCP_API_KEY="your-secure-key"
python scripts/run_http_server.py
```

### 2. Client Configuration
```bash
# Update the bridge script path and server details
cp examples/claude-desktop-http-config.json ~/.config/claude-desktop/
```

### 3. Test Connection
```bash
# Test the HTTP API directly
curl -H "Authorization: Bearer your-secure-key" \
  http://your-server:8000/api/health
```

## Advanced Usage

- See the [Multi-Client Setup Guide](../docs/integration/multi-client.md) for Codex, Cursor, Qwen, and Gemini recipes.
- For Cursor/Qwen/Gemini direct HTTP usage, prefer the Streamable HTTP endpoint: `http(s)://<host>:8000/mcp` with header `Authorization: Bearer <MCP_API_KEY>`.

```

--------------------------------------------------------------------------------
/docs/archive/obsolete-workflows/README.md:
--------------------------------------------------------------------------------

```markdown
# Obsolete Workflows Archive

This directory contains historical documentation of workflows that have been superseded by better, automated solutions.

## Contents

### `load_memory_context.md` (August 2025)

**Original Purpose**: Manual prompt with curl commands to load memory context at Claude Code session start.

**Why Obsolete**: Completely superseded by Natural Memory Triggers v7.1.3+ (September 2025).

#### Evolution Timeline

**Phase 1: Manual Loading (Aug 2025)** ❌ OBSOLETE
```bash
# Users had to manually run curl commands and paste output
curl -k -s -X POST https://server:8443/mcp \
  -H "Authorization: Bearer token" \
  -d '{"method": "tools/call", "params": {...}}'
```
**Problems**: Manual, error-prone, required copy-paste, network configuration complexity

**Phase 2: SessionStart Hooks (Aug-Sept 2025)** ✅ IMPROVED
- Automatic memory retrieval at session start
- Project detection and intelligent scoring
- Git-aware context integration

**Phase 3: Natural Memory Triggers (Sept 2025+)** ✅ PRODUCTION
- 85%+ trigger accuracy with semantic pattern detection
- Multi-tier performance optimization (50ms → 500ms tiers)
- CLI management system for real-time configuration
- Adaptive learning based on usage patterns

**Phase 4: Team Collaboration (v7.0.0+)** ✅ NETWORK DISTRIBUTION
- OAuth 2.1 Dynamic Client Registration
- Claude Code HTTP transport
- Zero-configuration team collaboration
- Better than manual network sharing

#### Current Solution

Instead of manual prompts, users now get:

```bash
# One-time installation
cd claude-hooks && python install_hooks.py --natural-triggers

# That's it! Automatic context injection from now on
```

**Benefits**:
- ✅ Zero manual steps per session
- ✅ 85%+ trigger accuracy
- ✅ Intelligent pattern detection
- ✅ Multi-tier performance
- ✅ Team collaboration via OAuth

#### Historical Value

This archive demonstrates:
1. **UX Evolution**: Manual → Semi-automatic → Fully automatic
2. **Problem Recognition**: Identifying pain points (manual commands)
3. **Iterative Improvement**: Each phase solved previous limitations
4. **User-Centric Design**: Continuously reducing friction

#### Migration

If you're still using manual prompts:

**Old Approach** (manual):
```bash
curl -k -s -X POST https://server:8443/mcp ... | jq -r '.result.content[0].text'
```

**New Approach** (automatic):
```bash
# Install once
python claude-hooks/install_hooks.py --natural-triggers

# Enjoy automatic context injection forever
```

See: [Natural Memory Triggers Guide](https://github.com/doobidoo/mcp-memory-service/wiki/Natural-Memory-Triggers-v7.1.0)

---

**Last Updated**: October 25, 2025
**Status**: Archived for historical reference only

```

--------------------------------------------------------------------------------
/src/mcp_memory_service/web/static/README.md:
--------------------------------------------------------------------------------

```markdown
# MCP Memory Service Interactive Dashboard

This directory contains the static assets for the interactive memory management dashboard.

## Files Overview

- **`index.html`** - Main dashboard interface with complete UI structure
- **`app.js`** - Frontend JavaScript application with API integration and real-time updates
- **`style.css`** - Comprehensive CSS with responsive design and component styling
- **`sse_test.html`** - Server-Sent Events testing interface (existing)

## Features Implemented

### 🎯 Core Dashboard
- **Welcome overview** with memory statistics and quick actions
- **Recent memories** display with click-to-view details
- **Quick stats** showing total memories, recent activity, and tag counts
- **Responsive design** that works on mobile, tablet, and desktop

### 🔍 Search & Browse
- **Semantic search** with real-time suggestions
- **Advanced filters** by tags, date range, and content type
- **Grid/list view** toggles for different viewing preferences
- **Search results** with relevance scoring and highlighting

### 📝 Memory Management
- **Add new memories** with modal interface and auto-tagging
- **View memory details** with full content, metadata, and tags
- **Edit/delete operations** with confirmation and undo capabilities
- **Bulk operations** for efficient memory organization

### ⚡ Real-time Features
- **Server-Sent Events** integration for live updates
- **Connection status** monitoring with reconnection logic
- **Toast notifications** for user feedback
- **Live statistics** updates without page refresh

### 🎨 User Experience
- **Progressive disclosure** - simple by default, powerful when needed
- **Keyboard shortcuts** - Ctrl+K for search, Ctrl+M for add memory
- **Loading states** and error handling throughout
- **Accessibility** features with proper ARIA labels and focus management

## Testing the Dashboard

### 1. Start the Server
```bash
# With HTTPS (recommended)
export MCP_HTTPS_ENABLED=true
export MCP_HTTPS_PORT=8443
python run_server.py
```

### 2. Access the Dashboard
- **HTTPS**: https://localhost:8443/
- **HTTP**: http://localhost:8000/

### 3. Test Core Functionality
1. **Search**: Use semantic queries and apply filters
2. **Add Memory**: Click "Add Memory" and create test content
3. **View Details**: Click memory cards to see full details
4. **Real-time Updates**: Open in multiple tabs to test SSE
5. **Keyboard Shortcuts**: Try Ctrl+K (search) and Ctrl+M (add)

## Technical Implementation

- **Vanilla JavaScript** - No build process required
- **CSS Grid + Flexbox** - Modern responsive layouts
- **Server-Sent Events** - Real-time backend communication
- **Progressive Enhancement** - Works without JavaScript for basics
- **API Integration** - Full REST API connectivity with error handling
```

--------------------------------------------------------------------------------
/scripts/sync/README.md:
--------------------------------------------------------------------------------

```markdown
# Database Synchronization Scripts

This directory contains scripts for synchronizing SQLite-vec databases across multiple machines using JSON export/import and Litestream replication.

## Overview

The synchronization system enables you to:
- Export memories from one machine to JSON format
- Import memories from multiple JSON files into a central database
- Set up real-time replication using Litestream
- Maintain consistent memory databases across multiple devices

## Scripts

### export_memories.py

Export memories from a local SQLite-vec database to JSON format.

**Usage:**
```bash
# Basic export
python export_memories.py

# Export from specific database
python export_memories.py --db-path /path/to/sqlite_vec.db

# Export to specific file
python export_memories.py --output my_export.json

# Export with embedding vectors (large file size)
python export_memories.py --include-embeddings

# Export only specific tags
python export_memories.py --filter-tags claude-code,architecture
```

**Features:**
- Preserves original timestamps and metadata
- Adds source machine tracking
- Supports tag filtering
- Optional embedding vector export
- Cross-platform compatibility

### import_memories.py

Import memories from JSON export files into a central database.

**Usage:**
```bash
# Import single file
python import_memories.py windows_export.json

# Import multiple files
python import_memories.py windows_export.json macbook_export.json

# Dry run analysis
python import_memories.py --dry-run exports/*.json

# Import to specific database
python import_memories.py --db-path /path/to/central.db exports/*.json
```

**Features:**
- Intelligent deduplication based on content hash
- Preserves original timestamps
- Adds source tracking tags
- Conflict detection and resolution
- Comprehensive import statistics

## Typical Workflow

### Phase 1: Initial Consolidation

1. **Export from each machine:**
   ```bash
   # On Windows PC
   python export_memories.py --output windows_memories.json
   
   # On MacBook
   python export_memories.py --output macbook_memories.json
   ```

2. **Transfer files to central server:**
   ```bash
   scp windows_memories.json central-server:/tmp/
   scp macbook_memories.json central-server:/tmp/
   ```

3. **Import on central server:**
   ```bash
   # Analyze first
   python import_memories.py --dry-run /tmp/*.json
   
   # Import for real
   python import_memories.py /tmp/windows_memories.json /tmp/macbook_memories.json
   ```

### Phase 2: Set up Litestream

After consolidating all memories into the central database, set up Litestream for ongoing synchronization.

## JSON Export Format

The export format preserves all memory data:

```json
{
  "export_metadata": {
    "source_machine": "machine-name",
    "export_timestamp": "2025-08-12T10:30:00Z",
    "total_memories": 450,
    "database_path": "/path/to/sqlite_vec.db",
    "platform": "Windows",
    "exporter_version": "5.0.0"
  },
  "memories": [
    {
      "content": "Memory content here",
      "content_hash": "sha256hash",
      "tags": ["tag1", "tag2"],
      "created_at": 1673545200.0,
      "updated_at": 1673545200.0,
      "memory_type": "note",
      "metadata": {},
      "export_source": "machine-name"
    }
  ]
}
```

## Deduplication Strategy

Memories are deduplicated based on content hash:
- Same content hash = duplicate (skipped)
- Different content hash = unique (imported)
- Original timestamps are preserved
- Source machine tags are added for tracking

## Error Handling

Both scripts include comprehensive error handling:
- JSON format validation
- Database connectivity checks
- File existence verification
- Transaction rollback on failures
- Detailed error logging

## Performance Considerations

- **Export**: ~1000 memories/second
- **Import**: ~500 memories/second with deduplication
- **File Size**: ~1KB per memory (without embeddings)
- **Memory Usage**: Processes files in a streaming fashion

## Troubleshooting

### Common Issues

1. **Database not found:**
   ```bash
   # Check default location
   python -c "from mcp_memory_service.config import get_storage_path; print(get_storage_path())"
   ```

2. **Permission errors:**
   ```bash
   # Ensure database directory is writable
   chmod 755 ~/.local/share/mcp-memory/
   ```

3. **JSON format errors:**
   ```bash
   # Validate JSON file
   python -m json.tool export.json > /dev/null
   ```

### Logging

Enable verbose logging for debugging:
```bash
python export_memories.py --verbose
python import_memories.py --verbose
```

## Next Steps

After using these scripts for initial consolidation:

1. Set up Litestream for real-time sync
2. Configure replica nodes
3. Implement monitoring and alerting
4. Schedule regular backups

See the main documentation for complete Litestream setup instructions.
```

--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------

```markdown
# MCP Memory Service Documentation

Welcome to the comprehensive documentation for MCP Memory Service - a Model Context Protocol server that provides semantic memory and persistent storage capabilities for Claude Desktop and other MCP clients.

## Quick Start

- **New Users**: Start with the [Installation Guide](installation/master-guide.md)
- **Multi-Client Setup**: See [Multi-Client Integration](integration/multi-client.md)
- **Docker Users**: Check out [Docker Deployment](deployment/docker.md)
- **Troubleshooting**: Visit [General Troubleshooting](troubleshooting/general.md)

## Documentation Structure

### 📦 Installation & Setup

- **[Master Installation Guide](installation/master-guide.md)** - Comprehensive installation instructions for all platforms
- **[Platform-Specific Guides](platforms/)** - Detailed setup for specific operating systems
  - [macOS Intel](platforms/macos-intel.md) - Intel Mac setup (including legacy 2013-2017 models)
  - [Windows](platforms/windows.md) - Windows installation with CUDA/DirectML support
  - [Ubuntu](platforms/ubuntu.md) - Ubuntu setup for desktop and server

### 🔗 Integration & Connectivity

- **[Multi-Client Setup](integration/multi-client.md)** - Share memory across multiple applications
- **[Homebrew Integration](integration/homebrew.md)** - Use system-installed PyTorch via Homebrew
- **[Claude Desktop Integration](guides/claude_integration.md)** - Connect with Claude Desktop
- **[IDE Compatibility](ide-compatability.md)** - VS Code, Continue, and other IDE integrations

### 🚀 Deployment

- **[Docker Deployment](deployment/docker.md)** - Containerized deployment with various configurations
- **[Server Deployment](deployment/multi-client-server.md)** - Production server setups
- **[Cloud Deployment](glama-deployment.md)** - Cloud platform deployment guides

### 📚 User Guides

- **[MCP Protocol Enhancements](guides/mcp-enhancements.md)** - Resources, Prompts, and Progress Tracking (v4.1.0)
- **[Storage Backends](guides/STORAGE_BACKENDS.md)** - ChromaDB vs SQLite-vec comparison and configuration
- **[Migration Guide](guides/migration.md)** - Migrate between storage backends and versions
- **[Scripts Reference](guides/scripts.md)** - Available utility scripts
- **[Invocation Guide](guides/invocation_guide.md)** - Different ways to run the service

### 🎯 Tutorials & Examples

- **[Data Analysis Examples](tutorials/data-analysis.md)** - Advanced data analysis with memory service
- **[Advanced Techniques](tutorials/advanced-techniques.md)** - Power user techniques and patterns
- **[Demo Session Walkthrough](tutorials/demo-session-walkthrough.md)** - Step-by-step usage examples

### 🔧 Maintenance & Administration

- **[Memory Maintenance](maintenance/memory-maintenance.md)** - Database cleanup, optimization, and backup
- **[Health Checks](implementation/health_checks.md)** - Monitoring and diagnostics
- **[Performance Tuning](implementation/performance.md)** - Optimization techniques

### 📖 API Reference

- **[Memory Metadata API](api/memory-metadata-api.md)** - Advanced metadata operations
- **[Tag Standardization](api/tag-standardization.md)** - Tag schema and conventions
- **[HTTP/SSE API](IMPLEMENTATION_PLAN_HTTP_SSE.md)** - Web API documentation for multi-client setups

### 🛠️ Development & Technical

- **[Development Guide](technical/development.md)** - Contributing and development setup
- **[Architecture Overview](development/multi-client-architecture.md)** - System architecture and design patterns
- **[Technical Implementation](technical/)** - Deep dive into technical details
  - [Memory Migration](technical/memory-migration.md)
  - [Tag Storage](technical/tag-storage.md)

### 🔍 Troubleshooting

- **[General Troubleshooting](troubleshooting/general.md)** - Common issues and solutions
- **[Docker Issues](deployment/docker.md#troubleshooting)** - Docker-specific troubleshooting
- **[Platform-Specific Issues](platforms/)** - Platform-specific troubleshooting sections

## Project Information

### About MCP Memory Service

MCP Memory Service enables persistent, semantic memory for AI applications through the Model Context Protocol. It provides:

- **Semantic Search**: Vector-based memory retrieval using sentence transformers
- **Multiple Storage Backends**: ChromaDB for full features, SQLite-vec for lightweight deployments
- **Multi-Client Support**: Shared memory across multiple applications
- **Cross-Platform**: Support for macOS, Windows, and Linux
- **Flexible Deployment**: Local installation, Docker containers, or cloud deployment

### Key Features

- ✅ **Semantic Memory Storage**: Store and retrieve memories using natural language
- ✅ **Multi-Client Access**: Share memories across Claude Desktop, VS Code, and other MCP clients
- ✅ **Flexible Storage**: Choose between ChromaDB (full-featured) or SQLite-vec (lightweight)
- ✅ **Cross-Platform**: Native support for macOS (Intel & Apple Silicon), Windows, and Linux
- ✅ **Docker Ready**: Complete containerization support with multiple deployment options
- ✅ **Hardware Optimized**: Automatic detection and optimization for available hardware (CUDA, MPS, DirectML)
- ✅ **Production Ready**: HTTP/SSE API, authentication, monitoring, and scaling features

### Recent Updates

- **v0.2.2+**: Enhanced multi-client support with automatic MCP application detection
- **SQLite-vec Backend**: Lightweight alternative to ChromaDB for resource-constrained systems
- **Homebrew Integration**: Native support for Homebrew-installed PyTorch on macOS
- **Docker Improvements**: Fixed boot loops, added multiple deployment configurations
- **HTTP/SSE API**: Real-time multi-client communication with Server-Sent Events

## Getting Help

### Quick Links

- **Installation Issues**: Check the [Installation Guide](installation/master-guide.md) and platform-specific guides
- **Configuration Problems**: See [Troubleshooting](troubleshooting/general.md)
- **Multi-Client Setup**: Follow the [Multi-Client Guide](integration/multi-client.md)
- **Performance Issues**: Review [Performance Tuning](implementation/performance.md)

### Support Resources

- **GitHub Issues**: Report bugs and request features
- **Documentation**: Comprehensive guides for all use cases
- **Community**: Share experiences and get help from other users

### Contributing

We welcome contributions! See the [Development Guide](technical/development.md) for information on:

- Setting up a development environment
- Running tests
- Submitting pull requests
- Code style and conventions

## Version History

- **Latest**: Enhanced documentation organization, consolidated guides, improved navigation
- **v0.2.2**: Multi-client improvements, SQLite-vec backend, Homebrew integration
- **v0.2.1**: Docker deployment fixes, HTTP/SSE API enhancements
- **v0.2.0**: Multi-client support, cross-platform compatibility improvements

---

## Navigation Tips

- **📁 Folders**: Click on folder names to explore sections
- **🔗 Links**: All internal links are relative and work offline
- **📱 Mobile**: Documentation is mobile-friendly for on-the-go reference
- **🔍 Search**: Use your browser's search (Ctrl/Cmd+F) to find specific topics

**Happy memory-ing! 🧠✨**
```

--------------------------------------------------------------------------------
/claude_commands/README.md:
--------------------------------------------------------------------------------

```markdown
# Claude Code Commands for MCP Memory Service

This directory contains conversational Claude Code commands that integrate memory functionality into your Claude Code workflow. These commands follow the CCPlugins pattern of markdown-based conversational instructions.

## Available Commands

### `/session-start` - Manual Session Initialization
Display session memory context manually by running the session-start hook. **Windows Workaround**: Required for Windows users due to SessionStart hook bug (#160). Works on all platforms.

**Usage:**
```bash
claude /session-start
```

**Why this exists:**
- Windows users cannot use automatic SessionStart hooks (causes Claude Code to hang)
- Provides same functionality as automatic session-start hook
- Safe manual alternative that works on all platforms (Windows, macOS, Linux)

**What it does:**
- Loads relevant project memories at session start
- Analyzes git history and recent changes
- Displays categorized memory context (recent work, current problems, additional context)

### `/memory-store` - Store Current Context
Store information in your MCP Memory Service with proper context and tagging. Automatically detects project context, applies relevant tags, and includes machine hostname for source tracking.

**Usage:**
```bash
claude /memory-store "Important architectural decision about database backend"
claude /memory-store --tags "decision,architecture" "We chose SQLite-vec for performance"
```

### `/memory-recall` - Time-based Memory Retrieval
Retrieve memories using natural language time expressions. Perfect for finding past conversations and decisions.

**Usage:**
```bash
claude /memory-recall "what did we decide about the database last week?"
claude /memory-recall "yesterday's architectural discussions"
```

### `/memory-search` - Tag and Content Search
Search through stored memories using tags, content keywords, and semantic similarity.

**Usage:**
```bash
claude /memory-search --tags "architecture,database"
claude /memory-search "SQLite performance optimization"
```

### `/memory-context` - Session Context Integration
Capture the current conversation and project context as a memory for future reference. Automatically includes machine source identification for multi-device workflows.

**Usage:**
```bash
claude /memory-context
claude /memory-context --summary "Architecture planning session"
```

### `/memory-health` - Service Health Check
Check the health and status of your MCP Memory Service, providing diagnostics and statistics.

**Usage:**
```bash
claude /memory-health
claude /memory-health --detailed
```

## Installation

### Automatic Installation (Recommended)

The commands can be installed automatically during the main MCP Memory Service installation:

```bash
# Install with commands (will prompt if Claude Code CLI is detected)
python install.py

# Force install commands
python install.py --install-claude-commands

# Skip command installation prompt
python install.py --skip-claude-commands-prompt
```

### Manual Installation

You can also install the commands manually:

```bash
# Install commands directly
python scripts/claude_commands_utils.py

# Test installation prerequisites
python scripts/claude_commands_utils.py --test

# Uninstall commands
python scripts/claude_commands_utils.py --uninstall
```

## Requirements

- **Claude Code CLI**: Must be installed and available in PATH
- **MCP Memory Service**: Should be installed and configured
- **File System Access**: Write access to `~/.claude/commands/` directory

## How It Works

1. **Command Files**: Each command is a markdown file with conversational instructions
2. **Claude Code Integration**: Commands are installed to `~/.claude/commands/`
3. **Service Connection**: Commands connect via mDNS discovery or direct HTTPS endpoints
4. **Context Awareness**: Commands understand your current project and session context
5. **API Integration**: Uses standard MCP Memory Service API endpoints for all operations

## Command Features

- **Machine Source Tracking**: Automatic hostname tagging for multi-device memory filtering
- **Conversational Interface**: Natural language interactions following CCPlugins pattern
- **Context Detection**: Automatic project and session context recognition
- **Smart Tagging**: Intelligent tag generation based on current work
- **Auto-Save**: Immediate storage without confirmation prompts
- **Flexible Configuration**: Supports both mDNS discovery and direct HTTPS endpoints
- **API Compatibility**: Works with standard MCP Memory Service API endpoints

## Example Workflow

```bash
# Start a development session (automatically tagged with machine hostname)
claude /memory-context --summary "Starting work on mDNS integration"

# Store important decisions (includes source:machine-name tag)
claude /memory-store --tags "mDNS,architecture" "Decided to use zeroconf library for service discovery"

# Continue development...

# Later, recall what was decided
claude /memory-recall "what did we decide about mDNS last week?"

# Search for related information
claude /memory-search --tags "mDNS,zeroconf"

# Search memories from specific machine
claude /memory-search --tags "source:laptop-work"

# Check service health
claude /memory-health
```

## Troubleshooting

### Commands Not Available
- Ensure Claude Code CLI is installed: `claude --version`
- Check if commands are installed: `ls ~/.claude/commands/memory-*.md`
- Reinstall commands: `python scripts/claude_commands_utils.py`

### Service Connection Issues
- **Service Health**: Check if service is accessible: `curl -k https://your-endpoint:8443/api/health`
- **Comprehensive Diagnostics**: Use `claude /memory-health` for detailed service status
- **Local Development**: Verify server can start: `python scripts/run_memory_server.py --help`
- **Endpoint Configuration**: Verify API endpoints match your service deployment

### Permission Issues
- Check directory permissions: `ls -la ~/.claude/commands/`
- Ensure write access to the commands directory
- Try running installation with appropriate permissions

## Configuration & API Compatibility

These commands integrate with your MCP Memory Service through configurable endpoints:

- **Flexible Connection**: Supports both mDNS auto-discovery and direct HTTPS endpoints
- **Configurable Endpoints**: Template uses `memory.local:8443` but can be customized to your setup
- **API Requirements**: Commands must use these standard API endpoints to function:
  - `POST /api/search` - Semantic similarity search
  - `POST /api/search/by-tag` - Tag-based search (AND/OR matching)
  - `POST /api/search/by-time` - Time-based natural language queries
  - `GET /api/memories` - List memories with pagination
  - `POST /api/memories` - Store new memories
  - `GET /api/health/detailed` - Service health diagnostics
- **Backend Compatibility**: Works with both ChromaDB and SQLite-vec storage backends
- **HTTPS Support**: Uses curl with `-k` flag for self-signed certificates

**Important**: Commands are configurable but must maintain compatibility with the documented API endpoints to function properly. Users can customize server locations but not the API contract.

## Development

The commands are implemented using:

- **Markdown Format**: Conversational instructions in markdown files
- **Python Utilities**: Installation and management scripts in `scripts/claude_commands_utils.py`
- **Integration Logic**: Seamless installation via main `install.py` script
- **Cross-Platform Support**: Works on Windows, macOS, and Linux

For more information about the MCP Memory Service, see the main project documentation.
```

--------------------------------------------------------------------------------
/claude-hooks/README.md:
--------------------------------------------------------------------------------

```markdown
# Claude Code Memory Awareness Hooks

Automatic memory awareness and intelligent context injection for Claude Code using the MCP Memory Service.

## Quick Start

```bash
cd claude-hooks

# Install Natural Memory Triggers v7.1.3 (recommended)
python install_hooks.py --natural-triggers

# OR install basic memory awareness hooks
python install_hooks.py --basic
```

> **Note**: The unified Python installer replaces all previous installers and provides cross-platform compatibility with enhanced features. See [MIGRATION.md](MIGRATION.md) for details.

This installs hooks that automatically:
- Load relevant project memories when Claude Code starts
- Inject meaningful contextual information (no more generic fluff!)
- Store session insights and decisions for future reference
- Provide on-demand memory retrieval when you need it

## Components

- **Core Hooks**: `session-start.js` (Hook v2.2), `session-end.js`, `memory-retrieval.js` - Smart memory management
- **Utilities**: Project detection, quality-aware scoring, intelligent formatting, context shift detection  
- **Tests**: Comprehensive integration test suite (14 tests)

## Features

### 🎯 **NEW in v8.5.7**: SessionStart Hook Visibility Features
Three complementary ways to view session memory context:

1. **Visible Summary Output** - Clean bordered console display at session start
   - Shows: project name, storage backend, memory count (with recent indicator), git context
   - Respects `cleanMode` configuration for minimal output

2. **Detailed Log File** - `~/.claude/last-session-context.txt`
   - Auto-generated on each session start
   - Contains: project details, storage backend, memory statistics, git analysis, top loaded memories

3. **Status Line Display** ⭐ - Always-visible status bar at bottom of terminal
   - Format: `🧠 8 (8 recent) memories | 📊 10 commits`
   - Displays static session memory context (set once at session start)
   - **Requires**: `jq` (JSON parser) and Claude Code statusLine configuration
   - **Platform**: Linux/macOS (Windows SessionStart hook broken - issue #160)
   - **Windows Workaround**: Use `/session-start` slash command for manual session initialization

### ✨ **Hook v2.2.0**: Enhanced Output Control
- **Clean Output Mode**: Configurable verbosity levels for minimal or detailed output
- **Smart Filtering**: Hide memory scoring details while keeping essential information
- **Professional UX**: Removed noisy wrapper tags and improved ANSI formatting
- **Granular Control**: Fine-tune what information is displayed during hook execution

### 🧠 **Previous Features (Project v6.7.0)**: Smart Memory Context  
- **Quality Content Extraction**: Extracts actual decisions/insights from session summaries instead of "implementation..." fluff
- **Duplicate Filtering**: Automatically removes repetitive session summaries
- **Smart Timing**: Only injects memories when contextually appropriate (no more mid-session disruptions)
- **On-Demand Retrieval**: Manual memory refresh with `memory-retrieval.js` hook

### 🧠 **Core Features**
- **Automatic Memory Injection**: Load relevant memories at session start with quality filtering
- **Project Awareness**: Detect current project context and frameworks  
- **Memory Consolidation**: Store session outcomes and insights
- **Intelligent Selection**: Quality-aware scoring that prioritizes meaningful content over just recency

## Installation

### Unified Installer (Recommended)
```bash
cd claude-hooks
python install_hooks.py  # Installs all features

# OR install specific features
python install_hooks.py --basic             # Basic memory hooks only
python install_hooks.py --natural-triggers  # Natural Memory Triggers only
```

**Prerequisites:**
- Python 3.10+
- Node.js (for hooks execution)
- **`jq`** (required for statusLine feature - displays memory context in Claude Code status bar)
  - macOS: `brew install jq`
  - Linux: `sudo apt install jq` (Ubuntu/Debian) or equivalent
  - Windows: `choco install jq` or download from https://jqlang.github.io/jq/

### Manual
```bash
cp -r claude-hooks/* ~/.claude/hooks/
chmod +x ~/.claude/hooks/statusline.sh  # Make statusline executable (Unix only)
# Edit ~/.claude/settings.json and ~/.claude/hooks/config.json
```

## Verification

After installation:
```bash
claude --debug hooks  # Should show "Found 1 hook matchers in settings"
cd ~/.claude/hooks && node tests/integration-test.js  # Run 14 integration tests
```

## Configuration

Edit `~/.claude/hooks/config.json`:
```json
{
  "memoryService": {
    "endpoint": "https://your-server:8443",
    "apiKey": "your-api-key",
    "maxMemoriesPerSession": 8,
    "injectAfterCompacting": false
  },
  "memoryScoring": {
    "weights": {
      "timeDecay": 0.25,
      "tagRelevance": 0.35,
      "contentRelevance": 0.15,
      "contentQuality": 0.25
    }
  }
}
```

### ⚙️ **Output Verbosity Control (Hook v2.2.0)**
```json
{
  "output": {
    "verbose": true,           // Show hook activity messages
    "showMemoryDetails": false, // Hide detailed memory scoring
    "showProjectDetails": true, // Show project detection info
    "showScoringDetails": false,// Hide scoring breakdowns
    "cleanMode": false         // Ultra-minimal output mode
  }
}
```

**Verbosity Levels**:
- **Normal** (`verbose: true`, others `false`): Shows essential information only
- **Detailed** (`showMemoryDetails: true`): Include memory scoring details  
- **Clean** (`cleanMode: true`): Minimal output, only success/error messages
- **Silent** (`verbose: false`): Hook works silently in background

### ⚙️ **Previous Configuration Options (Project v6.7.0)**
- `injectAfterCompacting`: Controls whether to inject memories after compacting events (default: `false`)
- `contentQuality`: New scoring weight for content quality assessment (filters generic summaries)
- Enhanced memory filtering automatically removes "implementation..." fluff

## Usage

Once installed, hooks work automatically:
- **Session start**: Load relevant project memories
- **Session end**: Store insights and decisions
- No manual intervention required

## Troubleshooting

### Quick Fixes
- **Hooks not detected**: `ls ~/.claude/settings.json` → Reinstall if missing
- **JSON parse errors**: Update to latest version (includes Python dict conversion)
- **Connection failed**: Check `curl -k https://your-endpoint:8443/api/health`
- **Wrong directory**: Move `~/.claude-code/hooks/*` to `~/.claude/hooks/`

### Debug Mode
```bash
claude --debug hooks  # Shows hook execution details
node ~/.claude/hooks/core/session-start.js  # Test individual hooks
```

### Windows-Specific Issues

#### Path Configuration
- **Directory Structure**: Hooks should be installed to `%USERPROFILE%\.claude\hooks\`
- **JSON Path Format**: Use forward slashes in settings.json: `"command": "node C:/Users/username/.claude/hooks/core/session-start.js"`
- **Avoid Backslashes**: Windows backslashes in JSON need escaping — a literal `\` is written `\\`, e.g. `"C:\\Users\\..."` (prefer forward slashes instead)

#### Settings Configuration Example
```json
{
  "hooks": [
    {
      "pattern": "session-start",
      "command": "node C:/Users/your-username/.claude/hooks/core/session-start.js"
    }
  ]
}
```

#### Common Fixes
- **Wrong Path Format**: If you see `session-start-wrapper.bat` errors, update your settings.json to use the Node.js script directly
- **Legacy Directory**: If using old `.claude-code` directory, move contents to `.claude` directory
- **Permission Issues**: Run installation scripts as Administrator if needed

## Changelog

### Hook v2.2.0 (2025-01-25) - Enhanced Output Control
**🎯 Focus**: Professional UX and configurable verbosity

**New Features**:
- **Output Verbosity Control**: Granular configuration for hook output levels
- **Clean Mode**: Ultra-minimal output option for distraction-free usage
- **Smart Filtering**: Hide memory scoring details while preserving essential information

**Improvements**:
- **Removed Noise**: Eliminated `<session-start-hook>` wrapper tags and verbose logging
- **Enhanced ANSI**: Improved color consistency and formatting throughout
- **Better Defaults**: Less verbose output by default while maintaining functionality

**Configuration**:
- Added `output` section with `verbose`, `showMemoryDetails`, `showProjectDetails`, `cleanMode` options
- Backwards compatible - existing configurations work without changes
- Self-documenting configuration with clear field names

### Hook v2.1.0 - Smart Memory Integration
- Advanced memory scoring and quality assessment
- Enhanced context injection with deduplication
- Improved project detection and context awareness

### Project v6.7.0 - Smart Memory Context
- Quality content extraction and duplicate filtering
- Smart timing and context shift detection
- On-demand memory retrieval capabilities

## Documentation

For comprehensive documentation including detailed troubleshooting, advanced configuration, and development guides, see:

**[📖 Memory Awareness Hooks - Detailed Guide](https://github.com/doobidoo/mcp-memory-service/wiki/Memory-Awareness-Hooks-Detailed-Guide)**

This guide covers:
- Advanced installation methods and configuration options
- Comprehensive troubleshooting with solutions
- Custom hook development and architecture diagrams
- Memory service integration and testing frameworks
```

--------------------------------------------------------------------------------
/scripts/maintenance/README.md:
--------------------------------------------------------------------------------

```markdown
# Maintenance Scripts

This directory contains maintenance and diagnostic scripts for the MCP Memory Service database.

## Quick Reference

| Script | Purpose | Performance | Use Case |
|--------|---------|-------------|----------|
| [`check_memory_types.py`](#check_memory_typespy-new) | Display type distribution | <1s | Quick health check, pre/post-consolidation validation |
| [`consolidate_memory_types.py`](#consolidate_memory_typespy-new) | Consolidate fragmented types | ~5s for 1000 updates | Type taxonomy cleanup, reduce fragmentation |
| [`regenerate_embeddings.py`](#regenerate_embeddingspy) | Regenerate all embeddings | ~5min for 2600 memories | After cosine migration or embedding corruption |
| [`fast_cleanup_duplicates.sh`](#fast_cleanup_duplicatessh) | Fast duplicate removal | <5s for 100+ duplicates | Bulk duplicate cleanup |
| [`find_all_duplicates.py`](#find_all_duplicatespy) | Detect near-duplicates | <2s for 2000 memories | Duplicate detection and analysis |
| [`find_duplicates.py`](#find_duplicatespy) | API-based duplicate finder | Slow (~90s/duplicate) | Detailed duplicate analysis via API |
| [`repair_sqlite_vec_embeddings.py`](#repair_sqlite_vec_embeddingspy) | Fix embedding corruption | Varies | Repair corrupted embeddings |
| [`repair_zero_embeddings.py`](#repair_zero_embeddingspy) | Fix zero-valued embeddings | Varies | Repair zero embeddings |
| [`cleanup_corrupted_encoding.py`](#cleanup_corrupted_encodingpy) | Fix encoding issues | Varies | Repair encoding corruption |

## Detailed Documentation

### `check_memory_types.py` 🆕

**Purpose**: Quick diagnostic tool to display memory type distribution in the database.

**When to Use**:
- Before running consolidation to see what needs cleanup
- After consolidation to verify results
- Regular health checks to monitor type fragmentation
- When investigating memory organization issues

**Usage**:
```bash
# Display type distribution (Windows)
python scripts/maintenance/check_memory_types.py

# On macOS/Linux, update the database path in the script first
```

**Output Example**:
```
Memory Type Distribution
============================================================
Total memories: 1,978
Unique types: 128

Memory Type                              Count      %
------------------------------------------------------------
note                                       609  30.8%
session                                     89   4.5%
fix                                         67   3.4%
milestone                                   60   3.0%
reference                                   45   2.3%
...
```

**Performance**: < 1 second for any database size (read-only SQL query)

**Features**:
- Shows top 30 types by frequency
- Displays total memory count and unique type count
- Identifies NULL/empty types as "(empty/NULL)"
- Percentage calculation for easy analysis
- Zero risk (read-only operation)

**Workflow Integration**:
1. Run `check_memory_types.py` to identify fragmentation
2. If types > 150, consider running consolidation
3. Run `consolidate_memory_types.py --dry-run` to preview
4. Execute `consolidate_memory_types.py` to clean up
5. Run `check_memory_types.py` again to verify improvement

### `consolidate_memory_types.py` 🆕

**Purpose**: Consolidates fragmented memory types into a standardized 24-type taxonomy.

**When to Use**:
- Type fragmentation (e.g., `bug-fix`, `bugfix`, `technical-fix` all coexisting)
- Many types with only 1-2 memories
- Inconsistent naming across similar concepts
- After importing memories from external sources
- Monthly maintenance to prevent type proliferation

**Usage**:
```bash
# Preview changes (safe, read-only)
python scripts/maintenance/consolidate_memory_types.py --dry-run

# Execute consolidation
python scripts/maintenance/consolidate_memory_types.py

# Use custom mapping configuration
python scripts/maintenance/consolidate_memory_types.py --config custom_mappings.json
```

**Performance**: ~5 seconds for 1,000 memory updates (Nov 2025 real-world test: 1,049 updates in 5s)

**Safety Features**:
- ✅ Automatic timestamped backup before execution
- ✅ Dry-run mode shows preview without changes
- ✅ Transaction safety (atomic with rollback on error)
- ✅ Database lock detection (prevents concurrent access)
- ✅ HTTP server warning (recommends stopping before execution)
- ✅ Disk space verification (needs 2x database size)
- ✅ Backup verification (size and existence checks)

**Standard 24-Type Taxonomy**:

**Content Types:** `note`, `reference`, `document`, `guide`
**Activity Types:** `session`, `implementation`, `analysis`, `troubleshooting`, `test`
**Artifact Types:** `fix`, `feature`, `release`, `deployment`
**Progress Types:** `milestone`, `status`
**Infrastructure Types:** `configuration`, `infrastructure`, `process`, `security`, `architecture`
**Other Types:** `documentation`, `solution`, `achievement`, `technical`

**Example Consolidations**:
- NULL/empty → `note`
- `bug-fix`, `bugfix`, `technical-fix` → `fix`
- `session-summary`, `session-checkpoint` → `session`
- `project-milestone`, `development-milestone` → `milestone`
- All `technical-*` → base type (remove prefix)
- All `project-*` → base type (remove prefix)

**Typical Results** (from production database, Nov 2025):
```
Before: 342 unique types, 609 NULL/empty, fragmented naming
After:  128 unique types (63% reduction), all valid types
Updated: 1,049 memories (59% of database)
Time: ~5 seconds
```

**Configuration**:

Edit `consolidation_mappings.json` to customize behavior:
```json
{
  "mappings": {
    "old-type-name": "new-type-name",
    "bug-fix": "fix",
    "technical-solution": "solution"
  }
}
```

**Prerequisites**:
```bash
# 1. Stop HTTP server
systemctl --user stop mcp-memory-http.service

# 2. Disconnect MCP clients (Claude Code: /mcp command)

# 3. Verify disk space (need 2x database size)
df -h ~/.local/share/mcp-memory/
```

**Recovery**:
```bash
# If something goes wrong, restore from automatic backup
cp ~/.local/share/mcp-memory/sqlite_vec.db.backup-TIMESTAMP ~/.local/share/mcp-memory/sqlite_vec.db

# Verify restoration
sqlite3 ~/.local/share/mcp-memory/sqlite_vec.db "SELECT COUNT(*), COUNT(DISTINCT memory_type) FROM memories;"
```

**Notes**:
- Creates timestamped backup automatically (e.g., `sqlite_vec.db.backup-20251101-202042`)
- No data loss - only type reassignment
- Safe to run multiple times (idempotent for same mappings)
- Comprehensive reporting shows before/after statistics
- See `consolidation_mappings.json` for full mapping list

**Maintenance Schedule**:
- Run `--dry-run` monthly to check fragmentation
- Execute when unique types exceed 150
- Review custom mappings quarterly

---

### `regenerate_embeddings.py`

**Purpose**: Regenerates embeddings for all memories in the database after schema migrations or corruption.

**When to Use**:
- After cosine distance migration
- When embeddings table is dropped but memories are preserved
- After embedding corruption detected

**Usage**:
```bash
/home/hkr/repositories/mcp-memory-service/venv/bin/python scripts/maintenance/regenerate_embeddings.py
```

**Performance**: ~5 minutes for 2600 memories with all-MiniLM-L6-v2 model

**Notes**:
- Uses configured storage backend (hybrid, cloudflare, or sqlite_vec)
- Creates embeddings using sentence-transformers model
- Shows progress every 100 memories
- Safe to run multiple times (idempotent)

---

### `fast_cleanup_duplicates.sh`

**Purpose**: Fast duplicate removal using direct SQL access instead of API calls.

**When to Use**:
- Bulk duplicate cleanup after detecting duplicates
- When API-based deletion is too slow (>1min per duplicate)
- Production cleanup without extended downtime

**Usage**:
```bash
bash scripts/maintenance/fast_cleanup_duplicates.sh
```

**Performance**: <5 seconds for 100+ duplicates

**How It Works**:
1. Stops HTTP server to avoid database locking
2. Uses direct SQL DELETE with timestamp normalization
3. Keeps newest copy of each duplicate group
4. Restarts HTTP server automatically

**Warnings**:
- ⚠️ Requires systemd HTTP server setup (`mcp-memory-http.service`)
- ⚠️ Brief service interruption during cleanup
- ⚠️ Direct database access bypasses Cloudflare sync (background sync handles it later)

---

### `find_all_duplicates.py`

**Purpose**: Fast duplicate detection using content normalization and hash comparison.

**When to Use**:
- Regular duplicate audits
- Before running cleanup operations
- Investigating duplicate memory issues

**Usage**:
```bash
/home/hkr/repositories/mcp-memory-service/venv/bin/python scripts/maintenance/find_all_duplicates.py
```

**Performance**: <2 seconds for 2000 memories

**Detection Method**:
- Normalizes content by removing timestamps (dates, ISO timestamps)
- Groups memories by MD5 hash of normalized content
- Reports duplicate groups with counts

**Output**:
```
Found 23 groups of duplicates
Total memories to delete: 115
Total memories after cleanup: 1601
```

---

### `find_duplicates.py`

**Purpose**: Comprehensive duplicate detection via HTTP API with detailed analysis.

**When to Use**:
- Need detailed duplicate analysis with full metadata
- API-based workflow required
- Integration with external tools

**Usage**:
```bash
/home/hkr/repositories/mcp-memory-service/venv/bin/python scripts/maintenance/find_duplicates.py
```

**Performance**: Slow (~90 seconds per duplicate deletion)

**Features**:
- Loads configuration from Claude hooks config
- Supports self-signed SSL certificates
- Pagination support for large datasets
- Detailed duplicate grouping and reporting

**Notes**:
- ~15 KB script with comprehensive error handling
- Useful for API integration scenarios
- Slower than `find_all_duplicates.py` due to network overhead

---

### `repair_sqlite_vec_embeddings.py`

**Purpose**: Repairs corrupted embeddings in the sqlite-vec virtual table.

**When to Use**:
- Embedding corruption detected
- vec0 extension errors
- Database integrity issues

**Usage**:
```bash
/home/hkr/repositories/mcp-memory-service/venv/bin/python scripts/maintenance/repair_sqlite_vec_embeddings.py
```

**Warnings**:
- ⚠️ Requires vec0 extension to be properly installed
- ⚠️ May drop and recreate embeddings table

---

### `repair_zero_embeddings.py`

**Purpose**: Detects and fixes memories with zero-valued embeddings.

**When to Use**:
- Search results showing 0% similarity scores
- After embedding regeneration failures
- Embedding quality issues

**Usage**:
```bash
/home/hkr/repositories/mcp-memory-service/venv/bin/python scripts/maintenance/repair_zero_embeddings.py
```

---

### `cleanup_corrupted_encoding.py`

**Purpose**: Fixes encoding corruption issues in memory content.

**When to Use**:
- UTF-8 encoding errors
- Display issues with special characters
- After data migration from different encoding

**Usage**:
```bash
/home/hkr/repositories/mcp-memory-service/venv/bin/python scripts/maintenance/cleanup_corrupted_encoding.py
```

---

## Best Practices

### Before Running Maintenance Scripts

1. **Backup your database**:
   ```bash
   cp ~/.local/share/mcp-memory/sqlite_vec.db ~/.local/share/mcp-memory/sqlite_vec.db.backup
   ```

2. **Check memory count**:
   ```bash
   sqlite3 ~/.local/share/mcp-memory/sqlite_vec.db "SELECT COUNT(*) FROM memories"
   ```

3. **Stop HTTP server if needed** (for direct database access):
   ```bash
   systemctl --user stop mcp-memory-http.service
   ```

### After Running Maintenance Scripts

1. **Verify results**:
   ```bash
   sqlite3 ~/.local/share/mcp-memory/sqlite_vec.db "SELECT COUNT(*) FROM memories"
   ```

2. **Check for duplicates**:
   ```bash
   python scripts/maintenance/find_all_duplicates.py
   ```

3. **Restart HTTP server**:
   ```bash
   systemctl --user start mcp-memory-http.service
   ```

4. **Test search functionality**:
   ```bash
   curl -s "http://127.0.0.1:8000/api/health"
   ```

### Performance Comparison

| Operation | API-based | Direct SQL | Speedup |
|-----------|-----------|------------|---------|
| Delete 1 duplicate | ~90 seconds | ~0.05 seconds | **1800x faster** |
| Delete 100 duplicates | ~2.5 hours | <5 seconds | **1800x faster** |
| Find duplicates | ~30 seconds | <2 seconds | **15x faster** |

**Recommendation**: Use direct SQL scripts (`fast_cleanup_duplicates.sh`, `find_all_duplicates.py`) for production maintenance. API-based scripts are useful for integration and detailed analysis.

## Troubleshooting

### "Database is locked"

**Cause**: HTTP server or MCP server has open connection

**Solution**:
```bash
# Stop HTTP server
systemctl --user stop mcp-memory-http.service

# Disconnect MCP server in Claude Code
# Type: /mcp

# Run maintenance script
bash scripts/maintenance/fast_cleanup_duplicates.sh

# Restart services
systemctl --user start mcp-memory-http.service
```

### "No such module: vec0"

**Cause**: Python sqlite3 module doesn't load vec0 extension automatically

**Solution**: Use scripts that work with the vec0-enabled environment:
- ✅ Use: `fast_cleanup_duplicates.sh` (bash wrapper with Python)
- ✅ Use: `/venv/bin/python` with proper storage backend
- ❌ Avoid: Direct `sqlite3` Python module for virtual table operations

### Slow API Performance

**Cause**: Hybrid backend syncs each operation to Cloudflare

**Solution**: Use direct SQL scripts for bulk operations:
```bash
bash scripts/maintenance/fast_cleanup_duplicates.sh  # NOT Python API scripts
```

## Related Documentation

- [Database Schema](../../docs/database-schema.md) - sqlite-vec table structure
- [Storage Backends](../../CLAUDE.md#storage-backends) - Hybrid, Cloudflare, SQLite-vec
- [Troubleshooting](../../docs/troubleshooting.md) - Common issues and solutions

## Contributing

When adding new maintenance scripts:

1. Add comprehensive docstring explaining purpose and usage
2. Include progress indicators for long-running operations
3. Add error handling and validation
4. Document in this README with performance characteristics
5. Test with both sqlite_vec and hybrid backends

```

--------------------------------------------------------------------------------
/scripts/README.md:
--------------------------------------------------------------------------------

```markdown
# MCP Memory Service Scripts

This directory contains organized utility scripts for maintaining, managing, and operating the MCP Memory Service. Scripts are categorized by function for easy navigation and maintenance.

## 📁 Directory Structure

```
scripts/
├── backup/          # Backup and restore operations
├── database/        # Database analysis and health monitoring
├── development/     # Development tools and debugging utilities
├── installation/    # Setup and installation scripts
├── linux/           # Linux service management shortcuts (v7.5.1+)
├── maintenance/     # Database cleanup and repair operations
├── migration/       # Data migration and schema updates
├── server/          # Server runtime and operational scripts
├── service/         # Service management and deployment
├── sync/            # Backend synchronization utilities
├── testing/         # Test scripts and validation
├── utils/           # General utility scripts and wrappers
├── validation/      # Configuration and system validation
├── run/             # Runtime execution scripts
├── archive/         # Deprecated scripts (kept for reference)
└── README.md        # This file
```

## 🚀 Quick Reference

### Essential Daily Operations
```bash
# Service Management
./service/memory_service_manager.sh status           # Check service status
./service/memory_service_manager.sh start-cloudflare # Start with Cloudflare backend

# Backend Synchronization
./sync/claude_sync_commands.py status               # Check sync status
./sync/claude_sync_commands.py backup               # Backup Cloudflare → SQLite
./sync/claude_sync_commands.py sync                 # Bidirectional sync
./sync/sync_now.py --verbose                        # Manual on-demand hybrid sync (v7.5.1+)

# Configuration Validation
./validation/validate_configuration_complete.py     # Comprehensive config validation
./validation/diagnose_backend_config.py             # Cloudflare backend diagnostics
./validation/verify_environment.py                  # Check environment setup

# Database Health
./database/simple_timestamp_check.py                # Quick health check
./database/db_health_check.py                       # Comprehensive health analysis
```

## 📂 Detailed Directory Guide

### 🔄 **sync/** - Backend Synchronization
Essential for managing dual-backend setups and data synchronization.

#### Cloudflare Hybrid Sync (Cloud Backend)
| Script | Purpose | Quick Usage |
|--------|---------|-------------|
| `sync_memory_backends.py` | Core bidirectional sync engine | `python sync/sync_memory_backends.py --status` |
| `claude_sync_commands.py` | User-friendly sync wrapper | `python sync/claude_sync_commands.py backup` |
| `sync_now.py` | Manual on-demand hybrid sync (v7.5.1+) | `python sync/sync_now.py --verbose` |
| `export_memories.py` | Export memories to JSON | `python sync/export_memories.py` |
| `import_memories.py` | Import memories from JSON | `python sync/import_memories.py data.json` |

**Key Features:**
- ✅ Bidirectional Cloudflare ↔ SQLite synchronization
- ✅ Intelligent deduplication using content hashing
- ✅ Dry-run mode for safe testing
- ✅ Comprehensive status reporting

#### Litestream Sync (Local Network HTTP API)
Located in `sync/litestream/` - Git-like staging workflow for syncing to central SQLite-vec HTTP API server.

| Script | Purpose | Quick Usage |
|--------|---------|-------------|
| `memory_sync.sh` | Main sync orchestrator (stash → pull → apply → push) | `./sync/litestream/memory_sync.sh sync` |
| `push_to_remote.sh` | Push staged changes to remote API | `./sync/litestream/push_to_remote.sh` |
| `pull_remote_changes.sh` | Pull latest from remote | `./sync/litestream/pull_remote_changes.sh` |
| `stash_local_changes.sh` | Stash local changes to staging | `./sync/litestream/stash_local_changes.sh` |
| `apply_local_changes.sh` | Apply staged changes locally | `./sync/litestream/apply_local_changes.sh` |
| `setup_local_litestream.sh` | Initialize local Litestream setup | `./sync/litestream/setup_local_litestream.sh` |
| `setup_remote_litestream.sh` | Setup remote API server | `./sync/litestream/setup_remote_litestream.sh` |

**Key Features:**
- ✅ Git-like staging database workflow
- ✅ Conflict detection and resolution
- ✅ Multi-device local network synchronization
- ✅ Sync to central HTTP API (e.g., `https://narrowbox.local:8443/api/memories`)
- ✅ macOS launchd service for automatic replication

**Note:** Litestream sync (local network) is separate from Cloudflare hybrid sync (cloud backend)

### 🛠️ **service/** - Service Management
Linux service management for production deployments.

| Script | Purpose | Quick Usage |
|--------|---------|-------------|
| `memory_service_manager.sh` | Complete service lifecycle management | `./service/memory_service_manager.sh start-cloudflare` |
| `service_control.sh` | Basic service control operations | `./service/service_control.sh restart` |
| `service_utils.py` | Service utility functions | Used by other service scripts |
| `deploy_dual_services.sh` | Deploy dual-backend architecture | `./service/deploy_dual_services.sh` |
| `update_service.sh` | Update running service | `./service/update_service.sh` |

**Key Features:**
- ✅ Dual-backend configuration management
- ✅ Environment file handling (.env, .env.sqlite)
- ✅ Service health monitoring
- ✅ Integrated sync operations

### ✅ **validation/** - Configuration & System Validation
Ensure proper setup and configuration.

| Script | Purpose | Quick Usage |
|--------|---------|-------------|
| `validate_configuration_complete.py` | Comprehensive config validation | `python validation/validate_configuration_complete.py` |
| `diagnose_backend_config.py` | Cloudflare backend diagnostics | `python validation/diagnose_backend_config.py` |
| `validate_memories.py` | Memory data validation | `python validation/validate_memories.py` |
| `validate_migration.py` | Migration validation | `python validation/validate_migration.py` |
| `verify_environment.py` | Environment setup checker | `python validation/verify_environment.py` |
| `verify_pytorch_windows.py` | PyTorch Windows validation | `python validation/verify_pytorch_windows.py` |
| `verify_torch.py` | PyTorch installation check | `python validation/verify_torch.py` |
| `check_documentation_links.py` | Documentation link validator | `python validation/check_documentation_links.py` |

**Key Features:**
- ✅ Claude Code global configuration validation
- ✅ Cloudflare credentials verification
- ✅ Environment conflict detection
- ✅ Comprehensive error reporting with solutions

### 🗄️ **database/** - Database Analysis & Health
Monitor and analyze database health and performance.

| Script | Purpose | Quick Usage |
|--------|---------|-------------|
| `simple_timestamp_check.py` | Quick timestamp health check | `python database/simple_timestamp_check.py` |
| `db_health_check.py` | Comprehensive health analysis | `python database/db_health_check.py` |
| `analyze_sqlite_vec_db.py` | Detailed SQLite-vec analysis | `python database/analyze_sqlite_vec_db.py` |
| `check_sqlite_vec_status.py` | SQLite-vec status checker | `python database/check_sqlite_vec_status.py` |

**Exit Codes (for CI/CD):**
- `0` - Excellent/Good health
- `1` - Warning status
- `2` - Critical issues
- `3` - Analysis failed

### 🧹 **maintenance/** - Database Cleanup & Repair
Scripts for maintaining database integrity and performance.

| Script | Purpose | Quick Usage |
|--------|---------|-------------|
| `find_duplicates.py` | Find and remove duplicate memories | `python maintenance/find_duplicates.py --execute` |
| `cleanup_corrupted_encoding.py` | Fix corrupted emoji encoding | `python maintenance/cleanup_corrupted_encoding.py --execute` |
| `repair_memories.py` | Repair corrupted memory entries | `python maintenance/repair_memories.py` |
| `cleanup_memories.py` | General memory cleanup | `python maintenance/cleanup_memories.py` |
| `repair_sqlite_vec_embeddings.py` | Fix embedding inconsistencies | `python maintenance/repair_sqlite_vec_embeddings.py` |
| `repair_zero_embeddings.py` | Fix zero/null embeddings | `python maintenance/repair_zero_embeddings.py` |

**Safety Features:**
- ✅ Dry-run mode available for all scripts
- ✅ Comprehensive backup recommendations
- ✅ Detailed reporting of changes

### 💾 **backup/** - Backup & Restore Operations
Data protection and recovery operations.

| Script | Purpose | Quick Usage |
|--------|---------|-------------|
| `backup_memories.py` | Create memory backups | `python backup/backup_memories.py` |
| `restore_memories.py` | Restore from backups | `python backup/restore_memories.py backup.json` |
| `backup_sqlite_vec.sh` | SQLite-vec database backup | `./backup/backup_sqlite_vec.sh` |
| `export_distributable_memories.sh` | Create distributable exports | `./backup/export_distributable_memories.sh` |

### 🔄 **migration/** - Data Migration & Schema Updates
Handle database migrations and data transformations.

| Script | Purpose | Quick Usage |
|--------|---------|-------------|
| `migrate_to_cloudflare.py` | Migrate to Cloudflare backend | `python migration/migrate_to_cloudflare.py` |
| `migrate_chroma_to_sqlite.py` | ChromaDB → SQLite migration | `python migration/migrate_chroma_to_sqlite.py` |
| `migrate_sqlite_vec_embeddings.py` | Update embedding format | `python migration/migrate_sqlite_vec_embeddings.py` |
| `migrate_timestamps.py` | Fix timestamp issues | `python migration/migrate_timestamps.py` |
| `cleanup_mcp_timestamps.py` | Clean timestamp proliferation | `python migration/cleanup_mcp_timestamps.py` |
| `verify_mcp_timestamps.py` | Verify timestamp consistency | `python migration/verify_mcp_timestamps.py` |

### 🏠 **installation/** - Setup & Installation
Platform-specific installation and setup scripts.

| Script | Purpose | Quick Usage |
|--------|---------|-------------|
| `install.py` | Platform-aware installer with backend selection | `python installation/install.py --storage-backend hybrid` |
| `install_linux_service.py` | Linux service installation | `python installation/install_linux_service.py` |
| `install_macos_service.py` | macOS service setup | `python installation/install_macos_service.py` |
| `install_windows_service.py` | Windows service installation | `python installation/install_windows_service.py` |
| `install_uv.py` | UV package manager installation | `python installation/install_uv.py` |
| `setup_cloudflare_resources.py` | Cloudflare resource setup | `python installation/setup_cloudflare_resources.py` |
| `setup_claude_mcp.sh` | Claude MCP configuration | `./installation/setup_claude_mcp.sh` |
| `setup_backup_cron.sh` | Automated backup scheduling | `./installation/setup_backup_cron.sh` |

### 🐧 **linux/** - Linux Service Management Shortcuts
Quick service management wrappers for Linux systemd deployments (v7.5.1+).

| Script | Purpose | Quick Usage |
|--------|---------|-------------|
| `service_status.sh` | Check systemd service status | `./linux/service_status.sh` |
| `start_service.sh` | Start mcp-memory service | `./linux/start_service.sh` |
| `stop_service.sh` | Stop mcp-memory service | `./linux/stop_service.sh` |
| `view_logs.sh` | View service logs | `./linux/view_logs.sh` |
| `uninstall_service.sh` | Remove systemd service | `./linux/uninstall_service.sh` |

**Key Features:**
- ✅ Simple wrappers for systemd service management
- ✅ User-level service control (~/.config/systemd/user/)
- ✅ Quick status and log viewing
- ✅ Clean uninstall capabilities

### 🖥️ **server/** - Server Runtime & Operations
Scripts for running and managing the memory server.

| Script | Purpose | Quick Usage |
|--------|---------|-------------|
| `run_memory_server.py` | Start memory server | `python server/run_memory_server.py` |
| `run_http_server.py` | Start HTTP API server | `python server/run_http_server.py` |
| `check_server_health.py` | Health check endpoint | `python server/check_server_health.py` |
| `memory_offline.py` | Offline memory operations | `python server/memory_offline.py` |
| `preload_models.py` | Pre-load ML models | `python server/preload_models.py` |

### 🧪 **testing/** - Test Scripts & Validation
Comprehensive testing and validation scripts.

| Script | Purpose | Quick Usage |
|--------|---------|-------------|
| `run_complete_test.py` | Complete system test | `python testing/run_complete_test.py` |
| `test_memory_api.py` | API functionality tests | `python testing/test_memory_api.py` |
| `test_cloudflare_backend.py` | Cloudflare backend tests | `python testing/test_cloudflare_backend.py` |
| `test_sqlite_vec_embeddings.py` | Embedding system tests | `python testing/test_sqlite_vec_embeddings.py` |
| `simple_test.py` | Basic functionality test | `python testing/simple_test.py` |

### 🔧 **utils/** - General Utilities
Helper scripts and utility functions.

| Script | Purpose | Quick Usage |
|--------|---------|-------------|
| `claude_commands_utils.py` | Claude command utilities | Used by Claude Code hooks |
| `query_memories.py` | Direct memory querying | `python utils/query_memories.py "search term"` |
| `memory_wrapper_uv.py` | UV package manager wrapper | Used by other scripts |
| `generate_personalized_claude_md.sh` | Generate custom CLAUDE.md | `./utils/generate_personalized_claude_md.sh` |

### 🛠️ **development/** - Development Tools
Tools for developers and debugging.

| Script | Purpose | Quick Usage |
|--------|---------|-------------|
| `setup-git-merge-drivers.sh` | Configure git merge drivers | `./development/setup-git-merge-drivers.sh` |
| `fix_mdns.sh` | Fix mDNS issues | `./development/fix_mdns.sh` |
| `uv-lock-merge.sh` | Handle UV lock file merges | `./development/uv-lock-merge.sh` |
| `find_orphaned_files.py` | Find orphaned database files | `python development/find_orphaned_files.py` |

## 🎯 Common Use Cases

### Initial Setup
```bash
# 1. Validate environment
python validation/verify_environment.py

# 2. Install appropriate service
python installation/install_linux_service.py

# 3. Validate configuration
python validation/validate_configuration_complete.py

# 4. Start service
./service/memory_service_manager.sh start-cloudflare
```

### Daily Operations
```bash
# Check overall health
./service/memory_service_manager.sh status
python database/simple_timestamp_check.py

# Sync backends
python sync/claude_sync_commands.py sync

# Backup
python sync/claude_sync_commands.py backup
```

### Troubleshooting
```bash
# Validate configuration
python validation/validate_configuration_complete.py

# Check database health
python database/db_health_check.py

# Fix common issues
python maintenance/find_duplicates.py --execute
python maintenance/cleanup_corrupted_encoding.py --execute
```

### Migration & Upgrades
```bash
# Before migration - backup
python backup/backup_memories.py

# Migrate to new backend
python migration/migrate_to_cloudflare.py

# Verify migration
python validation/validate_memories.py
```

## 🚨 Safety Guidelines

### Before Running Maintenance Scripts
1. **Always backup first**: `python backup/backup_memories.py`
2. **Use dry-run mode**: Most scripts support `--dry-run` or similar
3. **Test with small datasets** when possible
4. **Check database health**: `python database/simple_timestamp_check.py`

### Script Execution Order
1. **Validation** scripts first (check environment)
2. **Backup** before any data modifications
3. **Maintenance** operations (cleanup, repair)
4. **Verification** after changes
5. **Service restart** if needed

## 🔗 Integration with Documentation

This scripts directory integrates with:
- **CLAUDE.md**: Essential commands for Claude Code users
- **AGENTS.md**: Agent development and release process
- **Wiki**: Detailed setup and troubleshooting guides
- **GitHub Actions**: CI/CD pipeline integration

## 📝 Adding New Scripts

When adding new scripts:
1. **Choose appropriate category** based on primary function
2. **Follow naming conventions**: `snake_case.py` or `kebab-case.sh`
3. **Include proper documentation** in script headers
4. **Add safety mechanisms** for data-modifying operations
5. **Update this README** with script description
6. **Test with multiple backends** (SQLite-vec, Cloudflare)

## 🆘 Getting Help

- **Configuration issues**: Run `python validation/validate_configuration_complete.py`
- **Database problems**: Run `python database/db_health_check.py`
- **Documentation links**: Run `python validation/check_documentation_links.py`
- **General health**: Run `./service/memory_service_manager.sh status`

For complex issues, check the [project wiki](https://github.com/doobidoo/mcp-memory-service/wiki) or create an issue with the output from relevant diagnostic scripts.
```

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------

```markdown
# MCP Memory Service

[![License: Apache 2.0](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
[![PyPI version](https://img.shields.io/pypi/v/mcp-memory-service?color=blue&logo=pypi&logoColor=white)](https://pypi.org/project/mcp-memory-service/)
[![Python](https://img.shields.io/pypi/pyversions/mcp-memory-service?logo=python&logoColor=white)](https://pypi.org/project/mcp-memory-service/)
[![CI/CD](https://github.com/doobidoo/mcp-memory-service/actions/workflows/main.yml/badge.svg)](https://github.com/doobidoo/mcp-memory-service/actions/workflows/main.yml)
[![Downloads](https://img.shields.io/pypi/dm/mcp-memory-service)](https://pypi.org/project/mcp-memory-service/)
[![Last Commit](https://img.shields.io/github/last-commit/doobidoo/mcp-memory-service)](https://github.com/doobidoo/mcp-memory-service/commits/main)
[![GitHub stars](https://img.shields.io/github/stars/doobidoo/mcp-memory-service?style=social)](https://github.com/doobidoo/mcp-memory-service/stargazers)
[![Production Ready](https://img.shields.io/badge/Production-Ready-brightgreen?style=flat&logo=checkmark)](https://github.com/doobidoo/mcp-memory-service#-in-production)

[![Works with Claude](https://img.shields.io/badge/Works%20with-Claude-blue)](https://claude.ai)
[![Works with Cursor](https://img.shields.io/badge/Works%20with-Cursor-orange)](https://cursor.sh)
[![MCP Protocol](https://img.shields.io/badge/MCP-Compatible-4CAF50?style=flat)](https://modelcontextprotocol.io/)
[![Multi-Client](https://img.shields.io/badge/Multi--Client-13+%20Apps-FF6B35?style=flat)](https://github.com/doobidoo/mcp-memory-service/wiki)
[![Docker](https://img.shields.io/badge/Docker-Available-2496ED?logo=docker&logoColor=white)](https://github.com/doobidoo/mcp-memory-service/pkgs/container/mcp-memory-service)
[![Issues](https://img.shields.io/github/issues/doobidoo/mcp-memory-service)](https://github.com/doobidoo/mcp-memory-service/issues)
[![Ask DeepWiki](https://deepwiki.com/badge.svg)](https://deepwiki.com/doobidoo/mcp-memory-service)

**Production-ready MCP memory service** with **zero database locks**, **hybrid backend** (fast local + cloud sync), and **intelligent memory search** for **AI assistants**. Features **v8.9.0 auto-configuration** for multi-client access, **5ms local reads** with background Cloudflare sync, **Natural Memory Triggers** with 85%+ accuracy, and **OAuth 2.1 team collaboration**. Works with **Claude Desktop, VS Code, Cursor, Continue, and 13+ AI applications**.

<img width="240" alt="MCP Memory Service" src="https://github.com/user-attachments/assets/eab1f341-ca54-445c-905e-273cd9e89555" />

## 🚀 Quick Start (2 minutes)

### 🆕 Latest Release: **v8.42.0** (Nov 27, 2025)

**Memory Awareness Enhancements**

- 👁️ **Visible Memory Injection** - Users now see top 3 memories at session start with relevance scores, age, and tags
- 🎯 **Quality Session Summaries** - Raised quality thresholds (200 char min, 0.5 confidence) to prevent generic boilerplate
- 🤖 **LLM-Powered Summarization** - Optional Gemini CLI integration for intelligent session analysis
- 🧹 **Database Quality** - Cleaned 167 generic summaries (3352 → 3185 memories)

**Previous Releases**:
- **v8.41.2** - Hook Installer Utility File Deployment (ALL 14 utilities copied, future-proof glob pattern)
- **v8.41.1** - Context Formatter Memory Sorting (recency sorting within categories, newest first)
- **v8.41.0** - Session Start Hook Reliability Improvements (error suppression, clean output, memory filtering, classification fixes)
- **v8.40.0** - Session Start Version Display (automatic version comparison, PyPI status labels)
- **v8.39.1** - Dashboard Analytics Bug Fixes: Three critical fixes (top tags filtering, recent activity display, storage report fields)
- **v8.39.0** - Performance Optimization: Storage-layer date-range filtering (10x faster analytics, 97% data transfer reduction)
- **v8.38.1** - Critical Hotfix: HTTP MCP JSON-RPC 2.0 compliance fix (Claude Code/Desktop connection failures resolved)
- **v8.38.0** - Code Quality: Phase 2b COMPLETE (~176-186 lines duplicate code eliminated, 10 consolidations)
- **v8.37.0** - Code Quality: Phase 2a COMPLETE (5 duplicate high-complexity functions eliminated)
- **v8.36.1** - Critical Hotfix: HTTP server startup crash fix (forward reference error in analytics.py)
- **v8.36.0** - Code Quality: Phase 2 COMPLETE (100% of target achieved, -39 complexity points)
- **v8.35.0** - Code Quality: Phase 2 Batch 1 (install.py, cloudflare.py, -15 complexity points)
- **v8.34.0** - Code Quality: Phase 2 Complexity Reduction (analytics.py refactored, 11 → 6-7 complexity)
- **v8.33.0** - Critical Installation Bug Fix + Code Quality Improvements (dead code cleanup, automatic MCP setup)
- **v8.32.0** - Code Quality Excellence: pyscn Static Analysis Integration (multi-layer QA workflow)
- **v8.31.0** - Revolutionary Batch Update Performance (21,428x faster memory consolidation)
- **v8.30.0** - Analytics Intelligence: Adaptive Charts & Critical Data Fixes (accurate trend visualization)
- **v8.28.1** - Critical HTTP MCP Transport JSON-RPC 2.0 Compliance Fix (Claude Code compatibility)
- **v8.28.0** - Cloudflare AND/OR Tag Filtering (unified search API, 3-5x faster hybrid sync)
- **v8.27.1** - Critical Hotfix: Timestamp Regression (created_at preservation during metadata sync)
- **v8.26.0** - Revolutionary MCP Performance (534,628x faster tools, 90%+ cache hit rate)
- **v8.25.0** - Hybrid Backend Drift Detection (automatic metadata sync, bidirectional awareness)
- **v8.24.4** - Code Quality Improvements from Gemini Code Assist (regex sanitization, DOM caching)
- **v8.24.3** - Test Coverage & Release Agent Improvements (tag+time filtering tests, version history fix)
- **v8.24.2** - CI/CD Workflow Fixes (bash errexit handling, exit code capture)
- **v8.24.1** - Test Infrastructure Improvements (27 test failures resolved, 63% → 71% pass rate)
- **v8.24.0** - PyPI Publishing Enabled (automated package publishing via GitHub Actions)
- **v8.23.1** - Stale Virtual Environment Prevention System (6-layer developer protection)
- **v8.23.0** - Consolidation Scheduler via Code Execution API (88% token reduction)

**📖 Full Details**: [CHANGELOG.md](CHANGELOG.md) | [All Releases](https://github.com/doobidoo/mcp-memory-service/releases)

---

```bash
# One-command installation with auto-configuration
git clone https://github.com/doobidoo/mcp-memory-service.git
cd mcp-memory-service && python install.py

# Choose option 4 (Hybrid - RECOMMENDED) when prompted
# Installer automatically configures:
#   ✅ SQLite pragmas for concurrent access
#   ✅ Cloudflare credentials for cloud sync
#   ✅ Claude Desktop integration

# Done! Fast local + cloud sync with zero database locks
```

### PyPI Installation (Simplest)

**Install from PyPI:**
```bash
# Install latest version from PyPI
pip install mcp-memory-service

# Or with uv (faster)
uv pip install mcp-memory-service
```

**Then configure Claude Desktop** by adding to `~/Library/Application Support/Claude/claude_desktop_config.json` (macOS) or equivalent:
```json
{
  "mcpServers": {
    "memory": {
      "command": "memory",
      "args": ["server"],
      "env": {
        "MCP_MEMORY_STORAGE_BACKEND": "hybrid"
      }
    }
  }
}
```

For advanced configuration with the interactive installer, clone the repo and run `python scripts/installation/install.py`.

### Developer Setup (Contributing)

**For development and contributing**, use editable install to ensure source code changes take effect immediately:

```bash
# Clone repository
git clone https://github.com/doobidoo/mcp-memory-service.git
cd mcp-memory-service

# Create and activate virtual environment
python -m venv venv
source venv/bin/activate  # On Windows: venv\Scripts\activate

# CRITICAL: Editable install (code changes take effect immediately)
pip install -e .

# Verify editable mode (should show source directory, not site-packages)
pip show mcp-memory-service | grep Location
# Expected: Location: /path/to/mcp-memory-service/src

# Start development server
uv run memory server
```

**⚠️ Important**: Editable install (`-e` flag) ensures MCP servers load from source code, not stale `site-packages`. Without this, source changes won't be reflected until you reinstall the package.

**Version Mismatch Check:**
```bash
# Verify installed version matches source code
python scripts/validation/check_dev_setup.py
```

See [CLAUDE.md](CLAUDE.md#development-setup-critical) for complete development guidelines.

### Traditional Setup Options

**Universal Installer (Most Compatible):**
```bash
# Clone and install with automatic platform detection
git clone https://github.com/doobidoo/mcp-memory-service.git
cd mcp-memory-service

# Lightweight installation (SQLite-vec with ONNX embeddings - recommended)
python install.py

# Add full ML capabilities (torch + sentence-transformers for advanced features)
python install.py --with-ml

# Install with hybrid backend (SQLite-vec + Cloudflare sync)
python install.py --storage-backend hybrid
```

**📝 Installation Options Explained:**
- **Default (recommended)**: Lightweight SQLite-vec with ONNX embeddings - fast, works offline, <100MB dependencies
- **`--with-ml`**: Adds PyTorch + sentence-transformers for advanced ML features - heavier but more capable
- **`--storage-backend hybrid`**: Hybrid backend with SQLite-vec + Cloudflare sync - best for multi-device access

**Docker (Fastest):**
```bash
# For MCP protocol (Claude Desktop)
docker-compose up -d

# For HTTP API + OAuth (Team Collaboration)
docker-compose -f docker-compose.http.yml up -d
```

**Smithery (Claude Desktop):**
```bash
# Auto-install for Claude Desktop
npx -y @smithery/cli install @doobidoo/mcp-memory-service --client claude
```

## ⚠️ v6.17.0+ Script Migration Notice

**Updating from an older version?** Scripts have been reorganized for better maintainability:
- **Recommended**: Use `python -m mcp_memory_service.server` in your Claude Desktop config (no path dependencies!)
- **Alternative 1**: Use `uv run memory server` with UV tooling
- **Alternative 2**: Update path from `scripts/run_memory_server.py` to `scripts/server/run_memory_server.py`
- **Backward compatible**: Old path still works with a migration notice

## ⚠️ First-Time Setup Expectations

On your first run, you'll see some warnings that are **completely normal**:

- **"WARNING: Failed to load from cache: No snapshots directory"** - The service is checking for cached models (first-time setup)
- **"WARNING: Using TRANSFORMERS_CACHE is deprecated"** - Informational warning, doesn't affect functionality
- **Model download in progress** - The service automatically downloads a ~25MB embedding model (takes 1-2 minutes)

These warnings disappear after the first successful run. The service is working correctly! For details, see our [First-Time Setup Guide](docs/first-time-setup.md).

### 🐍 Python 3.13 Compatibility Note

**sqlite-vec** may not have pre-built wheels for Python 3.13 yet. If installation fails:
- The installer will automatically try multiple installation methods
- Consider using Python 3.12 for the smoothest experience: `brew install python@3.12`
- Alternative: Use Cloudflare backend with `--storage-backend cloudflare`
- See [Troubleshooting Guide](docs/troubleshooting/general.md#python-313-sqlite-vec-issues) for details

### 🍎 macOS SQLite Extension Support

**macOS users** may encounter `enable_load_extension` errors with sqlite-vec:
- **System Python** on macOS lacks SQLite extension support by default
- **Solution**: Use Homebrew Python: `brew install python && rehash`
- **Alternative**: Use pyenv: `PYTHON_CONFIGURE_OPTS='--enable-loadable-sqlite-extensions' pyenv install 3.12.0`
- **Fallback**: Use Cloudflare or Hybrid backend: `--storage-backend cloudflare` or `--storage-backend hybrid`
- See [Troubleshooting Guide](docs/troubleshooting/general.md#macos-sqlite-extension-issues) for details

## 🎯 Memory Awareness in Action

**Intelligent Context Injection** - See how the memory service automatically surfaces relevant information at session start:

<img src="docs/assets/images/memory-awareness-hooks-example.png" alt="Memory Awareness Hooks in Action" width="100%" />

**What you're seeing:**
- 🧠 **Automatic memory injection** - 8 relevant memories found from 2,526 total
- 📂 **Smart categorization** - Recent Work, Current Problems, Additional Context
- 📊 **Git-aware analysis** - Recent commits and keywords automatically extracted
- 🎯 **Relevance scoring** - Top memories scored at 100% (today), 89% (8d ago), 84% (today)
- ⚡ **Fast retrieval** - SQLite-vec backend with 5ms read performance
- 🔄 **Background sync** - Hybrid backend syncing to Cloudflare

**Result**: Claude starts every session with full project context - no manual prompting needed.

## 📚 Complete Documentation

**👉 Visit our comprehensive [Wiki](https://github.com/doobidoo/mcp-memory-service/wiki) for detailed guides:**

### 🧠 v7.1.3 Natural Memory Triggers (Latest)
- **[Natural Memory Triggers v7.1.3 Guide](https://github.com/doobidoo/mcp-memory-service/wiki/Natural-Memory-Triggers-v7.1.0)** - Intelligent automatic memory awareness
  - ✅ **85%+ trigger accuracy** with semantic pattern detection
  - ✅ **Multi-tier performance** (50ms instant → 150ms fast → 500ms intensive)
  - ✅ **CLI management system** for real-time configuration
  - ✅ **Git-aware context** integration for enhanced relevance
  - ✅ **Zero-restart installation** with dynamic hook loading

### 🆕 v7.0.0 OAuth & Team Collaboration
- **[🔐 OAuth 2.1 Setup Guide](https://github.com/doobidoo/mcp-memory-service/wiki/OAuth-2.1-Setup-Guide)** - **NEW!** Complete OAuth 2.1 Dynamic Client Registration guide
- **[🔗 Integration Guide](https://github.com/doobidoo/mcp-memory-service/wiki/03-Integration-Guide)** - Claude Desktop, **Claude Code HTTP transport**, VS Code, and more
- **[🛡️ Advanced Configuration](https://github.com/doobidoo/mcp-memory-service/wiki/04-Advanced-Configuration)** - **Updated!** OAuth security, enterprise features

### 🧬 v8.23.0+ Memory Consolidation
- **[📊 Memory Consolidation System Guide](https://github.com/doobidoo/mcp-memory-service/wiki/Memory-Consolidation-System-Guide)** - **NEW!** Automated memory maintenance with real-world performance metrics
  - ✅ **Dream-inspired consolidation** (decay scoring, association discovery, compression, archival)
  - ✅ **24/7 automatic scheduling** (daily/weekly/monthly via HTTP server)
  - ✅ **Token-efficient Code Execution API** (90% token reduction vs MCP tools)
  - ✅ **Real-world performance data** (4-6 min for 2,495 memories with hybrid backend)
  - ✅ **Three manual trigger methods** (HTTP API, MCP tools, Python API)

### 🚀 Setup & Installation
- **[📋 Installation Guide](https://github.com/doobidoo/mcp-memory-service/wiki/01-Installation-Guide)** - Complete installation for all platforms and use cases
- **[🖥️ Platform Setup Guide](https://github.com/doobidoo/mcp-memory-service/wiki/02-Platform-Setup-Guide)** - Windows, macOS, and Linux optimizations
- **[⚡ Performance Optimization](https://github.com/doobidoo/mcp-memory-service/wiki/05-Performance-Optimization)** - Speed up queries, optimize resources, scaling

### 🧠 Advanced Topics
- **[👨‍💻 Development Reference](https://github.com/doobidoo/mcp-memory-service/wiki/06-Development-Reference)** - Claude Code hooks, API reference, debugging
- **[🔧 Troubleshooting Guide](https://github.com/doobidoo/mcp-memory-service/wiki/07-TROUBLESHOOTING)** - **Updated!** OAuth troubleshooting + common issues
- **[❓ FAQ](https://github.com/doobidoo/mcp-memory-service/wiki/08-FAQ)** - Frequently asked questions
- **[📝 Examples](https://github.com/doobidoo/mcp-memory-service/wiki/09-Examples)** - Practical code examples and workflows

### 📂 Internal Documentation
- **[📊 Repository Statistics](docs/statistics/REPOSITORY_STATISTICS.md)** - 10 months of development metrics, activity patterns, and insights
- **[🏗️ Architecture Specs](docs/architecture/)** - Search enhancement specifications and design documents
- **[👩‍💻 Development Docs](docs/development/)** - AI agent instructions, release checklist, refactoring notes
- **[🚀 Deployment Guides](docs/deployment/)** - Docker, dual-service, and production deployment
- **[📚 Additional Guides](docs/guides/)** - Storage backends, migration, mDNS discovery

## ✨ Key Features

### 🏆 **Production-Ready Reliability** 🆕 v8.9.0
- **Hybrid Backend** - Fast 5ms local SQLite + background Cloudflare sync (RECOMMENDED default)
  - Zero user-facing latency for cloud operations
  - Automatic multi-device synchronization
  - Graceful offline operation
- **Zero Database Locks** - Concurrent HTTP + MCP server access works flawlessly
  - Auto-configured SQLite pragmas (`busy_timeout=15000,cache_size=20000`)
  - WAL mode with proper multi-client coordination
  - Tested: 5/5 concurrent writes succeeded with no errors
- **Auto-Configuration** - Installer handles everything
  - SQLite pragmas for concurrent access
  - Cloudflare credentials with connection testing
  - Claude Desktop integration with hybrid backend
  - Graceful fallback to sqlite_vec if cloud setup fails

### 📄 **Document Ingestion System** v8.6.0
- **Interactive Web UI** - Drag-and-drop document upload with real-time progress
- **Multiple Formats** - PDF, TXT, MD, JSON with intelligent chunking
- **Document Viewer** - Browse chunks, view metadata, search content
- **Smart Tagging** - Automatic tagging with length validation (max 100 chars)
- **Optional semtools** - Enhanced PDF/DOCX/PPTX parsing with LlamaParse
- **Security Hardened** - Path traversal protection, XSS prevention, input validation
- **7 New Endpoints** - Complete REST API for document management

### 🔐 **Enterprise Authentication & Team Collaboration**
- **OAuth 2.1 Dynamic Client Registration** - RFC 7591 & RFC 8414 compliant
- **Claude Code HTTP Transport** - Zero-configuration team collaboration
- **JWT Authentication** - Enterprise-grade security with scope validation
- **Auto-Discovery Endpoints** - Seamless client registration and authorization
- **Multi-Auth Support** - OAuth + API keys + optional anonymous access

### 🧠 **Intelligent Memory Management**
- **Semantic search** with vector embeddings
- **Natural language time queries** ("yesterday", "last week")
- **Tag-based organization** with smart categorization
- **Memory consolidation** with dream-inspired algorithms
- **Document-aware search** - Query across uploaded documents and manual memories

### 🔗 **Universal Compatibility**
- **Claude Desktop** - Native MCP integration
- **Claude Code** - **HTTP transport** + Memory-aware development with hooks
  - 🪟 **Windows Support**: `/session-start` command for manual session initialization (workaround for issue #160)
  - 🍎 **macOS/Linux**: Full automatic SessionStart hooks + slash command
- **VS Code, Cursor, Continue** - IDE extensions
- **13+ AI applications** - REST API compatibility

### 💾 **Flexible Storage**
- **Hybrid** 🌟 (RECOMMENDED) - Fast local SQLite + background Cloudflare sync (v8.9.0 default)
  - 5ms local reads with zero user-facing latency
  - Multi-device synchronization
  - Zero database locks with auto-configured pragmas
  - Automatic backups and cloud persistence
- **SQLite-vec** - Local-only storage (lightweight ONNX embeddings, 5ms reads)
  - Good for single-user offline use
  - No cloud dependencies
- **Cloudflare** - Cloud-only storage (global edge distribution with D1 + Vectorize)
  - Network-dependent performance

> **Note**: All heavy ML dependencies (PyTorch, sentence-transformers) are now optional to dramatically reduce build times and image sizes. SQLite-vec uses lightweight ONNX embeddings by default. Install with `--with-ml` for full ML capabilities.

### 🚀 **Production Ready**
- **Cross-platform** - Windows, macOS, Linux
- **Service installation** - Auto-start background operation
- **HTTPS/SSL** - Secure connections with OAuth 2.1
- **Docker support** - Easy deployment with team collaboration
- **Interactive Dashboard** - Web UI at http://127.0.0.1:8888/ for complete management

## 💡 Basic Usage

### 📄 **Document Ingestion** (v8.6.0+)
```bash
# Start server with web interface
uv run memory server --http

# Access interactive dashboard
open http://127.0.0.1:8888/

# Upload documents via CLI
curl -X POST http://127.0.0.1:8888/api/documents/upload \
  -F "file=@document.pdf" \
  -F "tags=documentation,reference"

# Search document content
curl -X POST http://127.0.0.1:8888/api/search \
  -H "Content-Type: application/json" \
  -d '{"query": "authentication flow", "limit": 10}'
```

### 🔗 **Team Collaboration with OAuth** (v7.0.0+)
```bash
# Start OAuth-enabled server for team collaboration
export MCP_OAUTH_ENABLED=true
uv run memory server --http

# Claude Code team members connect via HTTP transport
claude mcp add --transport http memory-service http://your-server:8000/mcp
# → Automatic OAuth discovery, registration, and authentication
```

### 🧠 **Memory Operations**
```bash
# Store a memory
uv run memory store "Fixed race condition in authentication by adding mutex locks"

# Search for relevant memories
uv run memory recall "authentication race condition"

# Search by tags
uv run memory search --tags python debugging

# Check system health (shows OAuth status)
uv run memory health
```

## 🔧 Configuration

### Claude Desktop Integration
**Recommended approach** - Add to your Claude Desktop config (`~/.claude/config.json`):

```json
{
  "mcpServers": {
    "memory": {
      "command": "python",
      "args": ["-m", "mcp_memory_service.server"],
      "env": {
        "MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec"
      }
    }
  }
}
```

**Alternative approaches:**
```json
// Option 1: UV tooling (if using UV)
{
  "mcpServers": {
    "memory": {
      "command": "uv",
      "args": ["--directory", "/path/to/mcp-memory-service", "run", "memory", "server"],
      "env": {
        "MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec"
      }
    }
  }
}

// Option 2: Direct script path (v6.17.0+)
{
  "mcpServers": {
    "memory": {
      "command": "python",
      "args": ["/path/to/mcp-memory-service/scripts/server/run_memory_server.py"],
      "env": {
        "MCP_MEMORY_STORAGE_BACKEND": "sqlite_vec"
      }
    }
  }
}
```

### Environment Variables

**Hybrid Backend (v8.9.0+ RECOMMENDED):**
```bash
# Hybrid backend with auto-configured pragmas
export MCP_MEMORY_STORAGE_BACKEND=hybrid
export MCP_MEMORY_SQLITE_PRAGMAS="busy_timeout=15000,cache_size=20000"

# Cloudflare credentials (required for hybrid)
export CLOUDFLARE_API_TOKEN="your-token"
export CLOUDFLARE_ACCOUNT_ID="your-account"
export CLOUDFLARE_D1_DATABASE_ID="your-db-id"
export CLOUDFLARE_VECTORIZE_INDEX="mcp-memory-index"

# Enable HTTP API
export MCP_HTTP_ENABLED=true
export MCP_HTTP_PORT=8000

# Security
export MCP_API_KEY="your-secure-key"
```

**SQLite-vec Only (Local):**
```bash
# Local-only storage
export MCP_MEMORY_STORAGE_BACKEND=sqlite_vec
export MCP_MEMORY_SQLITE_PRAGMAS="busy_timeout=15000,cache_size=20000"
```

## 🏗️ Architecture

```
┌─────────────────┐    ┌─────────────────┐    ┌─────────────────┐
│   AI Clients    │    │  MCP Memory     │    │ Storage Backend │
│                 │    │  Service v8.9   │    │                 │
│ • Claude Desktop│◄──►│ • MCP Protocol  │◄──►│ • Hybrid 🌟     │
│ • Claude Code   │    │ • HTTP Transport│    │   (5ms local +  │
│   (HTTP/OAuth)  │    │ • OAuth 2.1 Auth│    │    cloud sync)  │
│ • VS Code       │    │ • Memory Store  │    │ • SQLite-vec    │
│ • Cursor        │    │ • Semantic      │    │ • Cloudflare    │
│ • 13+ AI Apps   │    │   Search        │    │                 │
│ • Web Dashboard │    │ • Doc Ingestion │    │ Zero DB Locks ✅│
│   (Port 8888)   │    │ • Zero DB Locks │    │ Auto-Config ✅  │
└─────────────────┘    └─────────────────┘    └─────────────────┘
```

## 🛠️ Development

### Project Structure
```
mcp-memory-service/
├── src/mcp_memory_service/    # Core application
│   ├── models/                # Data models
│   ├── storage/               # Storage backends
│   ├── web/                   # HTTP API & dashboard
│   └── server.py              # MCP server
├── scripts/                   # Utilities & installation
├── tests/                     # Test suite
└── tools/docker/              # Docker configuration
```

### Contributing
1. Fork the repository
2. Create a feature branch
3. Make your changes with tests
4. Submit a pull request

See [CONTRIBUTING.md](CONTRIBUTING.md) for detailed guidelines.

## 🆘 Support

- **📖 Documentation**: [Wiki](https://github.com/doobidoo/mcp-memory-service/wiki) - Comprehensive guides
- **🐛 Bug Reports**: [GitHub Issues](https://github.com/doobidoo/mcp-memory-service/issues)
- **💬 Discussions**: [GitHub Discussions](https://github.com/doobidoo/mcp-memory-service/discussions)
- **🔧 Troubleshooting**: [Troubleshooting Guide](https://github.com/doobidoo/mcp-memory-service/wiki/07-TROUBLESHOOTING)
- **✅ Configuration Validator**: Run `python scripts/validation/validate_configuration_complete.py` to check your setup
- **🔄 Backend Sync Tools**: See [scripts/README.md](scripts/README.md#backend-synchronization) for Cloudflare↔SQLite sync

## 📊 In Production

**Real-world metrics from active deployments:**
- **1700+ memories** stored and actively used across teams
- **5ms local reads** with hybrid backend (v8.9.0)
- **Zero database locks** with concurrent HTTP + MCP access (v8.9.0)
  - Tested: 5/5 concurrent writes succeeded
  - Auto-configured pragmas prevent lock errors
- **<500ms response time** for semantic search (local & HTTP transport)
- **65% token reduction** in Claude Code sessions with OAuth collaboration
- **96.7% faster** context setup (15min → 30sec)
- **100% knowledge retention** across sessions and team members
- **Zero-configuration** setup success rate: **98.5%** (OAuth + hybrid backend)

## 🏆 Recognition

- [![Smithery](https://smithery.ai/badge/@doobidoo/mcp-memory-service)](https://smithery.ai/server/@doobidoo/mcp-memory-service) **Verified MCP Server**
- [![Glama AI](https://img.shields.io/badge/Featured-Glama%20AI-blue)](https://glama.ai/mcp/servers/bzvl3lz34o) **Featured AI Tool**
- **Production-tested** across 13+ AI applications
- **Community-driven** with real-world feedback and improvements

## 📄 License

Apache License 2.0 - see [LICENSE](LICENSE) for details.

---

**Ready to supercharge your AI workflow?** 🚀

👉 **[Start with our Installation Guide](https://github.com/doobidoo/mcp-memory-service/wiki/01-Installation-Guide)** or explore the **[Wiki](https://github.com/doobidoo/mcp-memory-service/wiki)** for comprehensive documentation.

*Transform your AI conversations into persistent, searchable knowledge that grows with you.*
```

--------------------------------------------------------------------------------
/AGENTS.md:
--------------------------------------------------------------------------------

```markdown
# MCP Memory Service - Agent Guidelines

## Available Agents

### amp-bridge
**Purpose**: Leverage Amp CLI capabilities (research, code analysis, web search) without consuming Claude Code credits.

**Usage**: `Use @agent-amp-bridge to research XYZ`

**How it works**:
1. Agent creates concise prompt in `.claude/amp/prompts/pending/{uuid}.json`
2. Shows you command: `amp @{prompt-file}`
3. You run command in your authenticated Amp session (free tier)
4. Amp writes response to `.claude/amp/responses/ready/{uuid}.json`
5. Agent detects, reads, and presents results

**Key principle**: Agent creates SHORT, focused prompts (2-4 sentences) to conserve Amp credits.

**Example**:
- ❌ Bad: "Research TypeScript 5.0 in detail covering: 1. Const params... 2. Decorators... 3. Export modifiers..."
- ✅ Good: "Research TypeScript 5.0's key new features with brief code examples."

## Build/Lint/Test Commands
- **Run all tests**: `pytest tests/`
- **Run single test**: `pytest tests/test_filename.py::test_function_name -v`
- **Run specific test class**: `pytest tests/test_filename.py::TestClass -v`
- **Run with markers**: `pytest -m "unit or integration"`
- **Server startup**: `uv run memory server`
- **Install dependencies**: `python scripts/installation/install.py`

## Architecture & Codebase Structure
- **Main package**: `src/mcp_memory_service/` - Core MCP server implementation
- **Storage backends**: `storage/` (SQLite-Vec, Cloudflare, Hybrid) implementing abstract `MemoryStorage` class
- **Web interface**: `web/` - FastAPI dashboard with real-time updates via SSE
- **MCP protocol**: `server.py` - Model Context Protocol implementation with async handlers
- **Memory consolidation**: `consolidation/` - Autonomous memory management and deduplication
- **Document ingestion**: `ingestion/` - PDF/DOCX/PPTX loaders with optional semtools integration
- **CLI tools**: `cli/` - Command-line interface for server management

## Code Style Guidelines
- **Imports**: Absolute imports preferred, conditional imports for optional dependencies
- **Types**: Python 3.10+ type hints throughout, TypedDict for MCP responses
- **Async/await**: All I/O operations use async/await pattern
- **Naming**: snake_case for functions/variables, PascalCase for classes, SCREAMING_SNAKE_CASE for constants
- **Error handling**: Try/except blocks with specific exceptions, logging for debugging
- **Memory types**: Use 24 core types from taxonomy (note, reference, session, implementation, etc.)
- **Documentation**: NumPy-style docstrings, CLAUDE.md for project conventions

## Development Rules (from CLAUDE.md)
- Follow MCP protocol specification for tool schemas and responses
- Implement storage backends extending abstract base class
- Use semantic commit messages with conventional commit format
- Test both OAuth enabled/disabled modes for web interface
- Validate search endpoints: semantic, tag-based, time-based queries

```

--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------

```markdown
# Contributor Covenant Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, caste, color, religion, or sexual
identity and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our
community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
* Focusing on what is best not just for us as individuals, but for the overall
  community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or advances of
  any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address,
  without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.

Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement via:

* **GitHub Issues**: For public concerns that warrant community discussion
* **GitHub Discussions**: For questions about community standards
* **Direct Contact**: For sensitive matters, contact the project maintainers directly

All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the
reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series of
actions.

**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or permanent
ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within the
community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.1, available at
[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].

Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder][Mozilla CoC].

For answers to common questions about this code of conduct, see the FAQ at
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
[https://www.contributor-covenant.org/translations][translations].

[homepage]: https://www.contributor-covenant.org
[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
[Mozilla CoC]: https://github.com/mozilla/diversity
[FAQ]: https://www.contributor-covenant.org/faq
[translations]: https://www.contributor-covenant.org/translations

```

--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------

```markdown
# Security Policy

## Supported Versions

We actively maintain and provide security updates for the following versions of MCP Memory Service:

| Version | Supported          | Notes |
| ------- | ------------------ | ----- |
| 8.x.x   | :white_check_mark: | Current stable release - full support |
| 7.x.x   | :white_check_mark: | Previous stable - security fixes only |
| < 7.0   | :x:                | No longer supported |

## Reporting a Vulnerability

We take the security of MCP Memory Service seriously. If you discover a security vulnerability, please report it responsibly.

### How to Report

**For sensitive security issues**, please use one of these private reporting methods:

1. **GitHub Security Advisory** (Preferred):
   - Navigate to the [Security Advisories](https://github.com/doobidoo/mcp-memory-service/security/advisories) page
   - Click "Report a vulnerability"
   - Provide detailed information about the vulnerability

2. **Direct Contact**:
   - Open a GitHub Discussion with `[SECURITY]` prefix for initial contact
   - We'll provide a secure communication channel for details

**For non-sensitive security concerns**, you may open a regular GitHub issue.

### What to Include

When reporting a vulnerability, please include:

1. **Description**: Clear description of the vulnerability
2. **Impact**: Potential security impact and affected versions
3. **Reproduction**: Step-by-step instructions to reproduce the issue
4. **Environment**:
   - Python version
   - Operating system
   - Storage backend (SQLite-vec, Cloudflare, Hybrid)
   - Installation method (pip, Docker, source)
5. **Proof of Concept**: Code or commands demonstrating the vulnerability (if applicable)
6. **Suggested Fix**: Any ideas for fixing the issue (optional)

### Response Timeline

We aim to respond to security reports according to the following timeline:

- **Acknowledgment**: Within 48 hours of report
- **Initial Assessment**: Within 5 business days
- **Status Updates**: Weekly until resolved
- **Fix Development**: 7-14 days for high-severity issues
- **Patch Release**: As soon as fix is validated and tested
- **Public Disclosure**: After patch is released (coordinated with reporter)

### Severity Classification

We use the following severity levels to prioritize security issues:

**Critical** 🔴
- Remote code execution
- Authentication bypass
- Data exfiltration from other users' memories
- Complete system compromise

**High** 🟠
- Privilege escalation
- SQL injection
- Cross-site scripting (XSS) in dashboard
- Denial of service affecting all users

**Medium** 🟡
- Information disclosure (limited scope)
- Cross-site request forgery (CSRF)
- Local file access vulnerabilities
- Resource exhaustion (single user)

**Low** 🟢
- Timing attacks
- Security configuration issues
- Low-impact information leaks

## Security Best Practices

### For Users

1. **Keep Updated**: Always use the latest stable version
2. **Secure Configuration**:
   - Use strong API keys (`openssl rand -base64 32`)
   - Enable HTTPS for HTTP server mode
   - Restrict network access to localhost unless needed
3. **Credential Management**:
   - Never commit `.env` files with credentials
   - Use environment variables for sensitive data
   - Rotate Cloudflare API tokens regularly
4. **Authentication**: Enable OAuth 2.1 for multi-user deployments
5. **Monitoring**: Review logs for suspicious activity
6. **Backups**: Regularly backup your memory database

### For Contributors

1. **Dependency Security**:
   - Review dependency updates for known vulnerabilities
   - Use `pip-audit` to scan for security issues
   - Keep dependencies up to date
2. **Input Validation**:
   - Sanitize all user input
   - Use parameterized queries (no string concatenation)
   - Validate file uploads and document ingestion
3. **Authentication & Authorization**:
   - Use secure session management
   - Implement proper access controls
   - Follow OAuth 2.1 security best practices
4. **Sensitive Data**:
   - Never log API keys, tokens, or passwords
   - Encrypt sensitive data at rest (user responsibility)
   - Use secure random number generation
5. **Code Review**: All PRs must pass security review before merge

## Known Security Considerations

### SQLite-vec Backend
- **Local File Access**: Database file should have appropriate permissions (600)
- **Concurrent Access**: Use proper locking to prevent corruption
- **Backup Encryption**: User responsibility to encrypt backups

### Cloudflare Backend
- **API Token Security**: Tokens have full account access - guard carefully
- **Rate Limiting**: Cloudflare enforces rate limits (10k requests/min)
- **Data Residency**: Data stored in Cloudflare's network per your account settings

### Hybrid Backend
- **Synchronization**: Ensure secure sync between local and cloud storage
- **Credential Exposure**: Both SQLite and Cloudflare credentials needed

### Web Dashboard
- **HTTPS Recommended**: Use HTTPS in production environments
- **XSS Protection**: All user input is escaped before rendering
- **CSRF Protection**: Implement for state-changing operations
- **Session Security**: Enable secure cookies in production

### MCP Protocol
- **Local Access Only**: MCP server typically runs locally via stdin/stdout
- **Process Isolation**: Each client gets isolated server process
- **No Network Exposure**: By default, MCP mode has no network attack surface

## Security Updates

Security patches are released as:
- **Patch versions** (8.x.Y) for low/medium severity
- **Minor versions** (8.X.0) for high severity requiring API changes
- **Out-of-band releases** for critical vulnerabilities

Security advisories are published at:
- [GitHub Security Advisories](https://github.com/doobidoo/mcp-memory-service/security/advisories)
- [CHANGELOG.md](CHANGELOG.md) with `[SECURITY]` tag
- Release notes for affected versions

## Disclosure Policy

We follow **coordinated disclosure**:

1. Vulnerability reported privately
2. We confirm and develop a fix
3. Security advisory drafted (private)
4. Patch released with security note
5. Public disclosure 7 days after patch release
6. Reporter credited (if desired)

We appreciate security researchers following responsible disclosure practices and will acknowledge contributors in our security advisories.

## Security Hall of Fame

We recognize security researchers who help make MCP Memory Service more secure:

<!-- Security contributors will be listed here -->
*No security vulnerabilities have been publicly disclosed to date.*

## Contact

For security concerns that don't fit the above categories:
- **General Security Questions**: [GitHub Discussions](https://github.com/doobidoo/mcp-memory-service/discussions)
- **Project Security**: See reporting instructions above

---

**Last Updated**: November 2025
**Policy Version**: 1.0

```

--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------

```markdown
# Contributing to MCP Memory Service

Thank you for your interest in contributing to MCP Memory Service! 🎉

This project provides semantic memory and persistent storage for AI assistants through the Model Context Protocol. We welcome contributions of all kinds - from bug fixes and features to documentation and testing.

## Table of Contents

- [Code of Conduct](#code-of-conduct)
- [Ways to Contribute](#ways-to-contribute)
- [Getting Started](#getting-started)
- [Development Process](#development-process)
- [Coding Standards](#coding-standards)
- [Testing Requirements](#testing-requirements)
- [Documentation](#documentation)
- [Submitting Changes](#submitting-changes)
- [Reporting Issues](#reporting-issues)
- [Community & Support](#community--support)
- [Recognition](#recognition)

## Code of Conduct

We are committed to providing a welcoming and inclusive environment for all contributors. Please:

- Be respectful and considerate in all interactions
- Welcome newcomers and help them get started
- Focus on constructive criticism and collaborative problem-solving
- Respect differing viewpoints and experiences
- Avoid harassment, discrimination, or inappropriate behavior

## Ways to Contribute

### 🐛 Bug Reports
Help us identify and fix issues by reporting bugs with detailed information.

### ✨ Feature Requests
Suggest new features or improvements to existing functionality.

### 📝 Documentation
Improve README, Wiki pages, code comments, or API documentation.

### 🧪 Testing
Write tests, improve test coverage, or help with manual testing.

### 💻 Code Contributions
Fix bugs, implement features, or improve performance.

### 🌍 Translations
Help make the project accessible to more users (future goal).

### 💬 Community Support
Answer questions in Issues, Discussions, or help other users.

## Getting Started

### Prerequisites

- Python 3.10 or higher
- Git
- Platform-specific requirements:
  - **macOS**: Homebrew Python recommended for SQLite extension support
  - **Windows**: Visual Studio Build Tools for some dependencies
  - **Linux**: Build essentials package

### Setting Up Your Development Environment

1. **Fork the repository** on GitHub

2. **Clone your fork**:
   ```bash
   git clone https://github.com/YOUR_USERNAME/mcp-memory-service.git
   cd mcp-memory-service
   ```

3. **Install dependencies**:
   ```bash
   python scripts/installation/install.py
   ```
   This will automatically detect your platform and install appropriate dependencies.

4. **Verify installation**:
   ```bash
   python scripts/verify_environment.py
   ```

5. **Run the service**:
   ```bash
   uv run memory server
   ```

6. **Test with MCP Inspector** (optional):
   ```bash
   npx @modelcontextprotocol/inspector uv run memory server
   ```

### Alternative: Docker Setup

For a containerized environment:
```bash
docker-compose up -d  # For MCP mode
docker-compose -f docker-compose.http.yml up -d  # For HTTP API mode
```

## Development Process

### 1. Create a Feature Branch

```bash
git checkout -b feature/your-feature-name
# or
git checkout -b fix/issue-description
```

Use descriptive branch names:
- `feature/` for new features
- `fix/` for bug fixes
- `docs/` for documentation
- `test/` for test improvements
- `refactor/` for code refactoring

### 2. Make Your Changes

- Write clean, readable code
- Follow the coding standards (see below)
- Add/update tests as needed
- Update documentation if applicable
- Keep commits focused and atomic

### 3. Test Your Changes

```bash
# Run all tests
pytest tests/

# Run specific test file
pytest tests/test_server.py

# Run with coverage
pytest --cov=mcp_memory_service tests/
```

### 4. Commit Your Changes

Use semantic commit messages:
```bash
git commit -m "feat: add memory export functionality"
git commit -m "fix: resolve timezone handling in memory search"
git commit -m "docs: update installation guide for Windows"
git commit -m "test: add coverage for storage backends"
```

Format: `<type>: <description>`

Types:
- `feat`: New feature
- `fix`: Bug fix
- `docs`: Documentation changes
- `test`: Test additions or changes
- `refactor`: Code refactoring
- `perf`: Performance improvements
- `chore`: Maintenance tasks

### 5. Push to Your Fork

```bash
git push origin your-branch-name
```

### 6. Create a Pull Request

Open a PR from your fork to the main repository with:
- Clear title describing the change
- Description of what and why
- Reference to any related issues
- Screenshots/examples if applicable

## Coding Standards

### Python Style Guide

- Follow PEP 8 with these modifications:
  - Line length: 88 characters (Black formatter default)
  - Use double quotes for strings
- Use type hints for all function signatures
- Write descriptive variable and function names
- Add docstrings to all public functions/classes (Google style)

### Code Organization

```python
# Import order
import standard_library
import third_party_libraries
from mcp_memory_service import local_modules

# Type hints
from typing import Optional, List, Dict, Any

# Async functions
async def process_memory(content: str) -> Dict[str, Any]:
    """Process and store memory content.

    Args:
        content: The memory content to process

    Returns:
        Dictionary containing memory metadata
    """
    # Implementation
```

### Error Handling

- Use specific exception types
- Provide helpful error messages
- Log errors appropriately
- Never silently fail

```python
try:
    result = await storage.store(memory)
except StorageError as e:
    logger.error(f"Failed to store memory: {e}")
    raise MemoryServiceError(f"Storage operation failed: {e}") from e
```

## Testing Requirements

### Writing Tests

- Place tests in `tests/` directory
- Name test files with `test_` prefix
- Use descriptive test names
- Include both positive and negative test cases
- Mock external dependencies

Example test:
```python
import pytest
from mcp_memory_service.storage import SqliteVecStorage

@pytest.mark.asyncio
async def test_store_memory_success():
    """Test successful memory storage."""
    storage = SqliteVecStorage(":memory:")
    result = await storage.store("test content", tags=["test"])
    assert result is not None
    assert "hash" in result
```

### Test Coverage

- Aim for >80% code coverage
- Focus on critical paths and edge cases
- Test error handling scenarios
- Include integration tests where appropriate

## Documentation

### Code Documentation

- Add docstrings to all public APIs
- Include type hints
- Provide usage examples in docstrings
- Keep comments concise and relevant

### Project Documentation

When adding features or making significant changes:

1. Update README.md if needed
2. Add/update Wiki pages for detailed guides
3. Update CHANGELOG.md following Keep a Changelog format
4. Update AGENTS.md or CLAUDE.md if development workflow changes

**Advanced Workflow Automation**:
- See [Context Provider Workflow Automation](https://github.com/doobidoo/mcp-memory-service/wiki/Context-Provider-Workflow-Automation) for automating development workflows with intelligent patterns

### API Documentation

- Document new MCP tools in `docs/api/tools.md`
- Include parameter descriptions and examples
- Note any breaking changes

## Submitting Changes

### Pull Request Guidelines

1. **PR Title**: Use semantic format (e.g., "feat: add batch memory operations")

2. **PR Description Template**:
   ```markdown
   ## Description
   Brief description of changes

   ## Motivation
   Why these changes are needed

   ## Changes
   - List of specific changes
   - Breaking changes (if any)

   ## Testing
   - How you tested the changes
   - Test coverage added

   ## Screenshots
   (if applicable)

   ## Related Issues
   Fixes #123
   ```

3. **PR Checklist**:
   - [ ] Tests pass locally
   - [ ] Code follows style guidelines
   - [ ] Documentation updated
   - [ ] CHANGELOG.md updated
   - [ ] No sensitive data exposed

### Review Process

- PRs require at least one review
- Address review feedback promptly
- Keep discussions focused and constructive
- Be patient - reviews may take a few days

## Reporting Issues

### Bug Reports

When reporting bugs, include:

1. **Environment**:
   - OS and version
   - Python version
   - MCP Memory Service version
   - Installation method (pip, Docker, source)

2. **Steps to Reproduce**:
   - Minimal code example
   - Exact commands run
   - Configuration used

3. **Expected vs Actual Behavior**:
   - What you expected to happen
   - What actually happened
   - Error messages/stack traces

4. **Additional Context**:
   - Screenshots if applicable
   - Relevant log output
   - Related issues

### Feature Requests

For feature requests, describe:

- The problem you're trying to solve
- Your proposed solution
- Alternative approaches considered
- Potential impact on existing functionality

## Community & Support

### Getting Help

- **Documentation**: Check the [Wiki](https://github.com/doobidoo/mcp-memory-service/wiki) first
- **Issues**: Search existing [issues](https://github.com/doobidoo/mcp-memory-service/issues) before creating new ones
- **Discussions**: Use [GitHub Discussions](https://github.com/doobidoo/mcp-memory-service/discussions) for questions
- **Response Time**: Maintainers typically respond within 2-3 days

### Communication Channels

- **GitHub Issues**: Bug reports and feature requests
- **GitHub Discussions**: General questions and community discussion
- **Pull Requests**: Code contributions and reviews

### For AI Agents

If you're an AI coding assistant, also check:
- [AGENTS.md](AGENTS.md) - Generic AI agent instructions
- [CLAUDE.md](CLAUDE.md) - Claude-specific guidelines
- [Context Provider Workflow Automation](https://github.com/doobidoo/mcp-memory-service/wiki/Context-Provider-Workflow-Automation) - Automate development workflows with intelligent patterns

## Recognition

We value all contributions! Contributors are:

- Listed in release notes for their contributions
- Mentioned in CHANGELOG.md entries
- Credited in commit messages when providing fixes/solutions
- Welcome to add themselves to a CONTRIBUTORS file (future)

### Types of Recognition

- 🐛 Bug reporters who provide detailed, reproducible issues
- 💻 Code contributors who submit PRs
- 📝 Documentation improvers
- 🧪 Test writers and reviewers
- 💬 Community helpers who support other users
- 🎨 UI/UX improvers (for dashboard contributions)

---

Thank you for contributing to MCP Memory Service! Your efforts help make AI assistants more capable and useful for everyone. 🚀

If you have questions not covered here, please open a [Discussion](https://github.com/doobidoo/mcp-memory-service/discussions) or check our [Wiki](https://github.com/doobidoo/mcp-memory-service/wiki).
```

--------------------------------------------------------------------------------
/CLAUDE.md:
--------------------------------------------------------------------------------

```markdown
# CLAUDE.md

This file provides guidance to Claude Code (claude.ai/code) when working with this MCP Memory Service repository.

> **📝 Personal Customizations**: You can create `CLAUDE.local.md` (gitignored) for personal notes, custom workflows, or environment-specific instructions. This file contains shared project conventions.

> **Note**: Comprehensive project context has been stored in memory with tags `claude-code-reference`. Use memory retrieval to access detailed information during development.

## Overview

MCP Memory Service is a Model Context Protocol server providing semantic memory and persistent storage for Claude Desktop with SQLite-vec, Cloudflare, and Hybrid storage backends.

> **🆕 v8.42.0**: **Memory Awareness Enhancements** - Added visible memory injection display at session start (top 3 memories with relevance scores), raised session-end quality thresholds to prevent generic boilerplate (200 char min, 0.5 confidence), added optional LLM-powered session summarizer, cleaned 167 generic summaries from database (3352 → 3185 memories). Users now see what memories are being injected into their sessions. See [CHANGELOG.md](CHANGELOG.md) for full version history.
>
> **Note**: When releasing new versions, update this line with current version + brief description. Use `.claude/agents/github-release-manager.md` agent for complete release workflow.

## Essential Commands

| Category | Command | Description |
|----------|---------|-------------|
| **Setup** | `python scripts/installation/install.py --storage-backend hybrid` | Install with hybrid backend (recommended) |
| | `uv run memory server` | Start server |
| | `pytest tests/` | Run tests |
| **Memory Ops** | `claude /memory-store "content"` | Store information |
| | `claude /memory-recall "query"` | Retrieve information |
| | `claude /memory-health` | Check service status |
| **Validation** | `python scripts/validation/validate_configuration_complete.py` | Comprehensive config validation |
| | `python scripts/validation/diagnose_backend_config.py` | Cloudflare diagnostics |
| **Maintenance** | `python scripts/maintenance/consolidate_memory_types.py --dry-run` | Preview type consolidation |
| | `python scripts/maintenance/find_all_duplicates.py` | Find duplicates |
| | `python scripts/sync/check_drift.py` | Check hybrid backend drift (v8.25.0+) |
| **Quality** | `bash scripts/pr/quality_gate.sh 123` | Run PR quality checks |
| | `bash scripts/pr/quality_gate.sh 123 --with-pyscn` | Comprehensive quality analysis (includes pyscn) |
| | `bash scripts/quality/track_pyscn_metrics.sh` | Track quality metrics over time |
| | `bash scripts/quality/weekly_quality_review.sh` | Generate weekly quality review |
| | `pyscn analyze .` | Run pyscn static analysis |
| **Consolidation** | `curl -X POST http://127.0.0.1:8000/api/consolidation/trigger -H "Content-Type: application/json" -d '{"time_horizon":"weekly"}'` | Trigger memory consolidation |
| | `curl http://127.0.0.1:8000/api/consolidation/status` | Check scheduler status |
| | `curl http://127.0.0.1:8000/api/consolidation/recommendations/weekly` | Get consolidation recommendations (horizon: daily/weekly/monthly) |
| **Backup** | `curl -X POST http://127.0.0.1:8000/api/backup/now` | Trigger manual backup (v8.29.0+) |
| | `curl http://127.0.0.1:8000/api/backup/status` | Check backup status and schedule |
| | `curl http://127.0.0.1:8000/api/backup/list` | List available backups |
| **Sync Controls** | `curl -X POST http://127.0.0.1:8000/api/sync/pause` | Pause hybrid backend sync (v8.29.0+) |
| | `curl -X POST http://127.0.0.1:8000/api/sync/resume` | Resume hybrid backend sync |
| **Service** | `systemctl --user status mcp-memory-http.service` | Check HTTP service status (Linux) |
| | `scripts/service/memory_service_manager.sh status` | Check service status |
| **Debug** | `curl http://127.0.0.1:8000/api/health` | Health check |
| | `npx @modelcontextprotocol/inspector uv run memory server` | MCP Inspector |

See [scripts/README.md](scripts/README.md) for complete command reference.

## Architecture

**Core Components:**
- **Server Layer**: MCP protocol with async handlers, global caches (`src/mcp_memory_service/server.py:1`)
- **Storage Backends**: SQLite-Vec (5ms reads), Cloudflare (edge), Hybrid (local + cloud sync)
- **Web Interface**: FastAPI dashboard at `http://127.0.0.1:8000/` with REST API
- **Document Ingestion**: PDF, DOCX, PPTX loaders (see [docs/document-ingestion.md](docs/document-ingestion.md))
- **Memory Hooks**: Natural Memory Triggers v7.1.3+ with 85%+ accuracy (see below)

**Key Patterns:**
- Async/await for I/O, type safety (Python 3.10+), platform hardware optimization (CUDA/MPS/DirectML/ROCm)

## Document Ingestion

Supports PDF, DOCX, PPTX, TXT/MD with optional [semtools](https://github.com/run-llama/semtools) for enhanced quality.

```bash
claude /memory-ingest document.pdf --tags documentation
claude /memory-ingest-dir ./docs --tags knowledge-base
```

See [docs/document-ingestion.md](docs/document-ingestion.md) for full configuration and usage.

## Interactive Dashboard

Web interface at `http://127.0.0.1:8000/` with CRUD operations, semantic/tag/time search, real-time updates (SSE), mobile responsive. Performance: 25ms page load, <100ms search.

**API Endpoints:** `/api/search`, `/api/search/by-tag`, `/api/search/by-time`, `/api/events`

## Memory Consolidation System 🆕

**Dream-inspired memory consolidation** with automatic scheduling and Code Execution API (v8.23.0+).

### Architecture

**Consolidation Scheduler Location**: HTTP Server (v8.23.0+)
- Runs 24/7 with HTTP server (independent of MCP server/Claude Desktop)
- Uses APScheduler for automatic scheduling
- Accessible via both HTTP API and MCP tools
- **Benefits**: Persistent, reliable, no dependency on Claude Desktop being open

**Code Execution API** (token-efficient operations):
```python
from mcp_memory_service.api import consolidate, scheduler_status

# Trigger consolidation (15 tokens vs 150 MCP tool - 90% reduction)
result = consolidate('weekly')

# Check scheduler (10 tokens vs 125 - 92% reduction)
status = scheduler_status()
```

### HTTP API Endpoints

| Endpoint | Method | Description | Response Time |
|----------|--------|-------------|---------------|
| `/api/consolidation/trigger` | POST | Trigger consolidation | ~10-30s |
| `/api/consolidation/status` | GET | Scheduler status | <5ms |
| `/api/consolidation/recommendations/{horizon}` | GET | Get recommendations | ~50ms |

**Example Usage:**
```bash
# Trigger weekly consolidation
curl -X POST http://127.0.0.1:8000/api/consolidation/trigger \
  -H "Content-Type: application/json" \
  -d '{"time_horizon": "weekly"}'

# Check scheduler status
curl http://127.0.0.1:8000/api/consolidation/status

# Get recommendations
curl http://127.0.0.1:8000/api/consolidation/recommendations/weekly
```

### Configuration

```bash
# Enable consolidation (default: true)
export MCP_CONSOLIDATION_ENABLED=true

# Scheduler configuration (in config.py)
CONSOLIDATION_SCHEDULE = {
    'daily': '02:00',              # Daily at 2 AM
    'weekly': 'SUN 03:00',         # Weekly on Sunday at 3 AM
    'monthly': '01 04:00',         # Monthly on 1st at 4 AM
    'quarterly': 'disabled',       # Disabled
    'yearly': 'disabled'           # Disabled
}
```

### Features

- **Exponential decay scoring** - Prioritize recent, frequently accessed memories
- **Creative association discovery** - Find semantic connections (0.3-0.7 similarity)
- **Semantic clustering** - Group related memories (DBSCAN algorithm)
- **Compression** - Summarize redundant information (preserves originals)
- **Controlled forgetting** - Archive low-relevance memories (90+ days inactive)

### Performance Expectations

**Real-world metrics** (based on v8.23.1 test with 2,495 memories):

| Backend | First Run | Subsequent Runs | Notes |
|---------|-----------|----------------|-------|
| **SQLite-Vec** | 5-25s | 5-25s | Fast, local-only |
| **Cloudflare** | 2-4min | 1-3min | Network-dependent, cloud-only |
| **Hybrid** | 4-6min | 2-4min | Slower but provides multi-device sync |

**Why Hybrid takes longer**: Local SQLite operations (~5ms) + Cloudflare cloud sync (~150ms per update). Trade-off: Processing time for data persistence across devices.

**Recommendation**: Hybrid backend is recommended for production despite longer consolidation time - multi-device sync capability is worth it.

**📖 See [Memory Consolidation Guide](docs/guides/memory-consolidation-guide.md)** for detailed operational guide, monitoring procedures, and troubleshooting. Wiki version will be available at: [Memory Consolidation System Guide](https://github.com/doobidoo/mcp-memory-service/wiki/Memory-Consolidation-System-Guide)

### Migration from MCP-only Mode (v8.22.x → v8.23.0+)

**No action required** - Consolidation automatically runs in HTTP server if enabled.

**For users without HTTP server:**
```bash
# Enable HTTP server in .env
export MCP_HTTP_ENABLED=true

# Restart service
systemctl --user restart mcp-memory-http.service
```

**MCP tools continue working** (backward compatible via internal API calls).

## Environment Variables

**Essential Configuration:**
```bash
# Storage Backend (Hybrid is RECOMMENDED for production)
export MCP_MEMORY_STORAGE_BACKEND=hybrid  # hybrid|cloudflare|sqlite_vec

# Cloudflare Configuration (REQUIRED for hybrid/cloudflare backends)
export CLOUDFLARE_API_TOKEN="your-token"      # Required for Cloudflare backend
export CLOUDFLARE_ACCOUNT_ID="your-account"   # Required for Cloudflare backend
export CLOUDFLARE_D1_DATABASE_ID="your-d1-id" # Required for Cloudflare backend
export CLOUDFLARE_VECTORIZE_INDEX="mcp-memory-index" # Required for Cloudflare backend

# Web Interface (Optional)
export MCP_HTTP_ENABLED=true                  # Enable HTTP server
export MCP_HTTPS_ENABLED=true                 # Enable HTTPS (production)
export MCP_API_KEY="$(openssl rand -base64 32)" # Generate secure API key
```

**Configuration Precedence:** Environment variables > .env file > Global Claude Config > defaults

**✅ Automatic Configuration Loading (v6.16.0+):** The service now automatically loads `.env` files and respects environment variable precedence. CLI defaults no longer override environment configuration.

**⚠️  Important:** When using hybrid or cloudflare backends, ensure Cloudflare credentials are properly configured. If health checks show "sqlite-vec" when you expect "cloudflare" or "hybrid", this indicates a configuration issue that needs to be resolved.

**Platform Support:** macOS (MPS/CPU), Windows (CUDA/DirectML/CPU), Linux (CUDA/ROCm/CPU)

## Claude Code Hooks Configuration 🆕

> **🚨 CRITICAL - Windows Users**: SessionStart hooks with `matchers: ["*"]` cause Claude Code to hang indefinitely on Windows. This is a confirmed bug (#160). **Workaround**: Disable SessionStart hooks or use UserPromptSubmit hooks instead. See [Windows SessionStart Hook Issue](#windows-sessionstart-hook-issue) below.

### Natural Memory Triggers v7.1.3 (Latest)

**Intelligent automatic memory retrieval** with advanced semantic analysis and multi-tier performance optimization:

```bash
# Installation (Zero-restart required)
cd claude-hooks && python install_hooks.py --natural-triggers

# CLI Management
node ~/.claude/hooks/memory-mode-controller.js status
node ~/.claude/hooks/memory-mode-controller.js profile balanced
node ~/.claude/hooks/memory-mode-controller.js sensitivity 0.6
```

**Key Features:**
- ✅ **85%+ trigger accuracy** for memory-seeking pattern detection
- ✅ **Multi-tier processing**: 50ms instant → 150ms fast → 500ms intensive
- ✅ **CLI management system** for real-time configuration without restart
- ✅ **Git-aware context** integration for enhanced memory relevance
- ✅ **Adaptive learning** based on user preferences and usage patterns

**Configuration (`~/.claude/hooks/config.json`):**
```json
{
  "naturalTriggers": {
    "enabled": true,
    "triggerThreshold": 0.6,
    "cooldownPeriod": 30000,
    "maxMemoriesPerTrigger": 5
  },
  "performance": {
    "defaultProfile": "balanced",
    "enableMonitoring": true,
    "autoAdjust": true
  }
}
```

**Performance Profiles:**
- `speed_focused`: <100ms, instant tier only - minimal memory awareness for speed
- `balanced`: <200ms, instant + fast tiers - optimal for general development (recommended)
- `memory_aware`: <500ms, all tiers - maximum context awareness for complex work
- `adaptive`: Dynamic adjustment based on usage patterns and user feedback

### Context-Provider Integration 🆕

**Rule-based context management** that complements Natural Memory Triggers with structured, project-specific patterns:

```bash
# Context-Provider Commands
mcp context list                                # List available contexts
mcp context status                             # Check session initialization status
mcp context optimize                           # Get optimization suggestions
```

#### **Available Contexts:**

**1. Python MCP Memory Service Context** (`python_mcp_memory`)
- Project-specific patterns for FastAPI, MCP protocol, and storage backends
- Auto-store: MCP protocol changes, backend configs, performance optimizations
- Auto-retrieve: Troubleshooting, setup queries, implementation examples
- Smart tagging: Auto-detects tools (fastapi, cloudflare, sqlite-vec, hybrid, etc.)

**2. Release Workflow Context** 🆕 (`mcp_memory_release_workflow`)
- **PR Review Cycle**: Iterative Gemini Code Assist workflow (Fix → Comment → /gemini review → Wait 1min → Repeat)
- **Version Management**: Four-file procedure (__init__.py → pyproject.toml → README.md → uv lock)
- **CHANGELOG Management**: Format guidelines, conflict resolution (combine PR entries)
- **Documentation Matrix**: When to use CHANGELOG vs Wiki vs CLAUDE.md vs code comments
- **Release Procedure**: Merge → Tag → Push → Verify workflows (Docker Publish, Publish and Test, HTTP-MCP Bridge)
- **Issue Management** 🆕: Auto-tracking, post-release workflow, smart closing comments
  - **Auto-Detection**: Tracks "fixes #", "closes #", "resolves #" patterns in PRs
  - **Post-Release Workflow**: Retrieves issues from release, suggests closures with context
  - **Smart Comments**: Auto-generates closing comments with PR links, CHANGELOG entries, wiki references
  - **Triage Intelligence**: Auto-categorizes issues (bug, feature, docs, performance) based on patterns

**Auto-Store Patterns:**
- **Technical**: `MCP protocol`, `tool handler`, `storage backend switch`, `25ms page load`, `embedding cache`
- **Configuration**: `cloudflare configuration`, `hybrid backend setup`, `oauth integration`
- **Release Workflow** 🆕: `merged PR`, `gemini review`, `created tag`, `CHANGELOG conflict`, `version bump`
- **Documentation** 🆕: `updated CHANGELOG`, `wiki page created`, `CLAUDE.md updated`
- **Issue Tracking** 🆕: `fixes #`, `closes #`, `resolves #`, `created issue`, `closed issue #`

**Auto-Retrieve Patterns:**
- **Troubleshooting**: `cloudflare backend error`, `MCP client connection`, `storage backend failed`
- **Setup**: `backend configuration`, `environment setup`, `claude desktop config`
- **Development**: `MCP handler example`, `API endpoint pattern`, `async error handling`
- **Release Workflow** 🆕: `how to release`, `PR workflow`, `gemini iteration`, `version bump procedure`, `where to document`
- **Issue Management** 🆕: `review open issues`, `what issues fixed`, `can we close`, `issue status`, `which issues resolved`

**Documentation Decision Matrix:**
| Change Type | CHANGELOG | CLAUDE.md | Wiki | Code Comments |
|-------------|-----------|-----------|------|---------------|
| Bug fix | ✅ Always | If affects workflow | If complex | ✅ Non-obvious |
| New feature | ✅ Always | If adds commands | ✅ Major features | ✅ API changes |
| Performance | ✅ Always | If measurable | If >20% improvement | Rationale |
| Config change | ✅ Always | ✅ User-facing | If requires migration | Validation logic |
| Troubleshooting | In notes | If common | ✅ Detailed guide | For maintainers |

**Integration Benefits:**
- **Structured Memory Management**: Rule-based triggers complement AI-based Natural Memory Triggers
- **Project-Specific Intelligence**: Captures MCP Memory Service-specific terminology and workflows
- **Enhanced Git Workflow**: Automatic semantic commit formatting and branch naming conventions
- **Release Automation** 🆕: Never miss version bumps, CHANGELOG updates, or workflow verification
- **Knowledge Retention** 🆕: Capture what works/doesn't work in PR review cycles
- **Intelligent Issue Management** 🆕: Auto-track issue-PR relationships, suggest closures after releases, generate smart closing comments
- **Post-Release Efficiency** 🆕: Automated checklist retrieves related issues, suggests verification steps, includes all context
- **Zero Performance Impact**: Lightweight rule processing with minimal overhead

**Legacy Hook Configuration**: See [docs/legacy/dual-protocol-hooks.md](docs/legacy/dual-protocol-hooks.md) for v7.0.0 dual protocol configuration (superseded by Natural Memory Triggers).

## Storage Backends

| Backend | Performance | Use Case | Installation |
|---------|-------------|----------|--------------|
| **Hybrid** ⚡ | **Fast (5ms read)** | **🌟 Production (Recommended)** | `install.py --storage-backend hybrid` |
| **Cloudflare** ☁️ | Network dependent | Cloud-only deployment | `install.py --storage-backend cloudflare` |
| **SQLite-Vec** 🪶 | Fast (5ms read) | Development, single-user local | `install.py --storage-backend sqlite_vec` |

### ⚠️ **Database Lock Prevention (v8.9.0+)**

**CRITICAL**: After adding `MCP_MEMORY_SQLITE_PRAGMAS` to `.env`, you **MUST restart all servers**:
- HTTP server: `kill <PID>` then restart with `uv run python scripts/server/run_http_server.py`
- MCP servers: Use `/mcp` in Claude Code to reconnect, or restart Claude Desktop
- Verify: Check logs for `Custom pragma from env: busy_timeout=15000`

SQLite pragmas are **per-connection**, not global. Long-running servers (days/weeks old) won't pick up new `.env` settings automatically.

**Symptoms of missing pragmas**:
- "database is locked" errors despite v8.9.0+ installation
- `PRAGMA busy_timeout` returns `0` instead of `15000`
- Concurrent HTTP + MCP access fails

### 🚀 **Hybrid Backend (v6.21.0+) - RECOMMENDED**

The **Hybrid backend** provides the best of both worlds - **SQLite-vec speed with Cloudflare persistence**:

```bash
# Enable hybrid backend
export MCP_MEMORY_STORAGE_BACKEND=hybrid

# Hybrid-specific configuration
export MCP_HYBRID_SYNC_INTERVAL=300    # Background sync every 5 minutes
export MCP_HYBRID_BATCH_SIZE=50        # Sync 50 operations at a time
export MCP_HYBRID_SYNC_ON_STARTUP=true # Initial sync on startup

# Drift detection configuration (v8.25.0+)
export MCP_HYBRID_SYNC_UPDATES=true              # Enable metadata sync (default: true)
export MCP_HYBRID_DRIFT_CHECK_INTERVAL=3600      # Seconds between drift checks (default: 1 hour)
export MCP_HYBRID_DRIFT_BATCH_SIZE=100           # Memories to check per scan (default: 100)

# Requires Cloudflare credentials (same as cloudflare backend)
export CLOUDFLARE_API_TOKEN="your-token"
export CLOUDFLARE_ACCOUNT_ID="your-account"
export CLOUDFLARE_D1_DATABASE_ID="your-d1-id"
export CLOUDFLARE_VECTORIZE_INDEX="mcp-memory-index"
```

**Key Benefits:**
- ✅ **5ms read/write performance** (SQLite-vec speed)
- ✅ **Zero user-facing latency** - Cloud sync happens in background
- ✅ **Multi-device synchronization** - Access memories everywhere
- ✅ **Graceful offline operation** - Works without internet, syncs when available
- ✅ **Automatic failover** - Falls back to SQLite-only if Cloudflare unavailable
- ✅ **Drift detection (v8.25.0+)** - Automatic metadata sync prevents data loss across backends

**Architecture:**
- **Primary Storage**: SQLite-vec (all user operations)
- **Secondary Storage**: Cloudflare (background sync)
- **Background Service**: Async queue with retry logic and health monitoring

**v6.16.0+ Installer Enhancements:**
- **Interactive backend selection** with usage-based recommendations
- **Automatic Cloudflare credential setup** and `.env` file generation
- **Connection testing** during installation to validate configuration
- **Graceful fallbacks** from cloud to local backends if setup fails

## Development Guidelines

### 🔧 **Development Setup (CRITICAL)**

**⚠️ ALWAYS use editable install for development** to avoid stale package issues:

```bash
# REQUIRED for development - loads code from source, not site-packages
pip install -e .

# Or with uv (preferred)
uv pip install -e .

# Verify installation mode (CRITICAL CHECK)
pip show mcp-memory-service | grep Location
# Should show: Location: /path/to/mcp-memory-service/src
# NOT: Location: /path/to/venv/lib/python3.x/site-packages
```

**Why This Matters:**
- MCP servers load from `site-packages`, not source files
- Without `-e`, source changes won't be reflected until reinstall
- System restart won't help - it relaunches with stale package
- **Common symptom**: Code shows v8.23.0 but server reports v8.5.3

**Development Workflow:**
1. Clone repo: `git clone https://github.com/doobidoo/mcp-memory-service.git`
2. Create venv: `python -m venv venv && source venv/bin/activate`
3. **Editable install**: `pip install -e .` ← CRITICAL STEP
4. Verify: `python -c "import mcp_memory_service; print(mcp_memory_service.__version__)"`
5. Start coding - changes take effect after server restart (no reinstall needed)

**Version Mismatch Detection:**
```bash
# Quick check script - detects stale venv vs source code
python scripts/validation/check_dev_setup.py

# Manual verification (both should match):
grep '__version__' src/mcp_memory_service/__init__.py
python -c "import mcp_memory_service; print(mcp_memory_service.__version__)"
```

**Fix Stale Installation:**
```bash
# If you see version mismatch or non-editable install:
pip uninstall mcp-memory-service
pip install -e .

# Restart MCP servers (in Claude Code):
# Run: /mcp
```

### 🧠 **Memory & Documentation**
- Use `claude /memory-store` to capture decisions during development
- Memory operations handle duplicates via content hashing
- Time parsing supports natural language ("yesterday", "last week")
- Use semantic commit messages for version management

#### **Memory Type Taxonomy**
Use the core types: `note`, `reference`, `document`, `guide`, `session`, `implementation`, `analysis`, `troubleshooting`, `test`, `fix`, `feature`, `release`, `deployment`, `milestone`, `status`, `configuration`, `infrastructure`, `process`, `security`, `architecture`, `documentation`, `solution`, `achievement`. Avoid creating variations. See [scripts/maintenance/memory-types.md](scripts/maintenance/memory-types.md) for the full taxonomy and consolidation guidelines.

### 🏗️ **Architecture & Testing**
- Storage backends must implement abstract base class
- All features require corresponding tests
- **Comprehensive UI Testing**: Validate performance benchmarks (page load <2s, operations <1s)
- **Security Validation**: Verify XSS protection, input validation, and OAuth integration
- **Mobile Testing**: Confirm responsive design at 768px and 1024px breakpoints

### 🚀 **Version Management**

**⚠️ CRITICAL**: **ALWAYS use the github-release-manager agent for ALL releases** (major, minor, patch, and hotfixes). Manual release workflows miss steps and are error-prone.

**Four-File Version Bump Procedure:**
1. Update `src/mcp_memory_service/__init__.py` (line 50: `__version__ = "X.Y.Z"`)
2. Update `pyproject.toml` (line 7: `version = "X.Y.Z"`)
3. Update `README.md` (line 19: Latest Release section)
4. Run `uv lock` to update dependency lock file
5. Commit all four files together

**Release Workflow:**
- **ALWAYS** use `.claude/agents/github-release-manager.md` agent for complete release procedure
- Agent ensures: README.md updates, GitHub Release creation, proper issue tracking
- Manual workflows miss documentation steps (see v8.20.1 lesson learned)
- Document milestones in CHANGELOG.md with performance metrics
- Create descriptive git tags: `git tag -a vX.Y.Z -m "description"`
- See [docs/development/release-checklist.md](docs/development/release-checklist.md) for full checklist

**Hotfix Workflow (Critical Bugs):**
- **Speed target**: 8-10 minutes from bug report to release (achievable with AI assistance)
- **Process**: Fix → Test → Four-file bump → Commit → github-release-manager agent
- **Issue management**: Post detailed root cause analysis, don't close until user confirms fix works
- **Example**: v8.20.1 (8 minutes: bug report → fix → release → user notification)

### 🤖 **Agent-First Development**

**Principle**: Use agents for workflows, not manual steps. Manual workflows are error-prone and miss documentation updates.

**Agent Usage Matrix:**
| Task | Agent | Why |
|------|-------|-----|
| **Any release** (major/minor/patch/hotfix) | github-release-manager | Ensures README.md, CHANGELOG.md, GitHub Release, issue tracking |
| **Batch code fixes** | amp-bridge | Fast parallel execution, syntax validation |
| **PR review automation** | gemini-pr-automator | Saves 10-30 min/PR, auto-resolves threads |
| **Code quality checks** | code-quality-guard | Pre-commit complexity/security scanning |

**Manual vs Agent Comparison:**
- ❌ Manual v8.20.1: Forgot README.md, incomplete GitHub Release
- ✅ With agent v8.20.1: All files updated, proper release created
- **Lesson**: Always use agents, even for "simple" hotfixes

### 🔧 **Configuration & Deployment**
- Run `python scripts/validation/validate_configuration_complete.py` when troubleshooting setup issues
- Use sync utilities for hybrid Cloudflare/SQLite deployments
- Test both OAuth enabled/disabled modes for web interface
- Validate search endpoints: semantic (`/api/search`), tag (`/api/search/by-tag`), time (`/api/search/by-time`)

## Code Quality Monitoring

### Multi-Layer Quality Strategy

The QA workflow uses three complementary layers for comprehensive code quality assurance:

**Layer 1: Pre-commit (Fast - <5s)**
- Groq/Gemini LLM complexity checks
- Security scanning (SQL injection, XSS, command injection)
- Dev environment validation
- **Blocking**: Complexity >8, any security issues

**Layer 2: PR Quality Gate (Moderate - 10-60s)**
- Standard checks: complexity, security, test coverage, breaking changes
- Comprehensive checks (`--with-pyscn`): + duplication, dead code, architecture
- **Blocking**: Security issues, health score <50

**Layer 3: Periodic Review (Weekly)**
- pyscn codebase-wide analysis
- Trend tracking and regression detection
- Refactoring sprint planning

### pyscn Integration

[pyscn](https://github.com/ludo-technologies/pyscn) provides comprehensive static analysis:

**Capabilities:**
- Cyclomatic complexity scoring
- Dead code detection
- Clone detection (duplication)
- Coupling metrics (CBO)
- Dependency graph analysis
- Architecture violation detection

**Usage:**

```bash
# PR creation (automated)
bash scripts/pr/quality_gate.sh 123 --with-pyscn

# Local pre-PR check
pyscn analyze .
open .pyscn/reports/analyze_*.html

# Track metrics over time
bash scripts/quality/track_pyscn_metrics.sh

# Weekly review
bash scripts/quality/weekly_quality_review.sh
```

### Health Score Thresholds

| Score | Status | Action Required |
|-------|--------|----------------|
| **<50** | 🔴 **Release Blocker** | Cannot merge - immediate refactoring required |
| **50-69** | 🟡 **Action Required** | Plan refactoring sprint within 2 weeks |
| **70-84** | ✅ **Good** | Monitor trends, continue development |
| **85+** | 🎯 **Excellent** | Maintain current standards |

### Quality Standards

**Release Blockers** (Health Score <50):
- ❌ Cannot merge to main
- ❌ Cannot create release
- 🔧 Required: Immediate refactoring

**Action Required** (Health Score 50-69):
- ⚠️ Plan refactoring sprint within 2 weeks
- 📊 Track on project board
- 🎯 Focus on top 5 complexity offenders

**Acceptable** (Health Score ≥70):
- ✅ Continue normal development
- 📈 Monitor trends monthly
- 🎯 Address new issues proactively

### Tool Complementarity

| Tool | Speed | Scope | Use Case | Blocking |
|------|-------|-------|----------|----------|
| **Groq/Gemini (pre-commit)** | <5s | Changed files | Every commit | Yes (complexity >8) |
| **quality_gate.sh** | 10-30s | PR files | PR creation | Yes (security) |
| **pyscn (PR)** | 30-60s | Full codebase | PR + periodic | Yes (health <50) |
| **code-quality-guard** | Manual | Targeted | Refactoring | No (advisory) |

**Integration Points:**
- Pre-commit: Fast LLM checks (Groq primary, Gemini fallback)
- PR Quality Gate: `--with-pyscn` flag for comprehensive analysis
- Periodic: Weekly pyscn analysis with trend tracking

See [`.claude/agents/code-quality-guard.md`](.claude/agents/code-quality-guard.md) for detailed workflows and [docs/development/code-quality-workflow.md](docs/development/code-quality-workflow.md) for complete documentation.

## Configuration Management

**Quick Validation:**
```bash
python scripts/validation/validate_configuration_complete.py  # Comprehensive validation
python scripts/validation/diagnose_backend_config.py          # Cloudflare diagnostics
```

**Configuration Hierarchy:**
- Global: `~/.claude.json` (authoritative)
- Project: `.env` file (Cloudflare credentials)
- **Avoid**: Local `.mcp.json` overrides

**Common Issues & Quick Fixes:**

| Issue | Quick Fix |
|-------|-----------|
| Wrong backend showing | `python scripts/validation/diagnose_backend_config.py` |
| Port mismatch (hooks timeout) | Verify same port in `~/.claude/hooks/config.json` and server (default: 8000) |
| Schema validation errors after PR merge | Run `/mcp` in Claude Code to reconnect with new schema |
| Accidental `data/memory.db` | Delete safely: `rm -rf data/` (gitignored) |

See [docs/troubleshooting/hooks-quick-reference.md](docs/troubleshooting/hooks-quick-reference.md) for comprehensive troubleshooting.

## Hook Troubleshooting

**SessionEnd Hooks:**
- Trigger on `/exit`, terminal close (NOT Ctrl+C)
- Require 100+ characters, confidence > 0.1
- Memory creation: topics, decisions, insights, code changes

**Windows SessionStart Issue (#160):**
- CRITICAL: SessionStart hooks hang Claude Code on Windows
- Workaround: Use `/session-start` slash command or UserPromptSubmit hooks

See [docs/troubleshooting/hooks-quick-reference.md](docs/troubleshooting/hooks-quick-reference.md) for full troubleshooting guide.

## Agent Integrations

Workflow automation agents using Gemini CLI, Groq API, and Amp CLI. All agents in `.claude/agents/` directory.

| Agent | Tool | Purpose | Priority | Usage |
|-------|------|---------|----------|-------|
| **github-release-manager** | GitHub CLI | Complete release workflow | Production | Proactive on feature completion |
| **amp-bridge** | Amp CLI | Research without Claude credits | Production | File-based prompts |
| **code-quality-guard** | Gemini CLI / Groq API | Fast code quality analysis | Active | Pre-commit, pre-PR |
| **gemini-pr-automator** | Gemini CLI | Automated PR review loops | Active | Post-PR creation |

**Groq Bridge** (RECOMMENDED): Ultra-fast inference for code-quality-guard agent (~10x faster than Gemini, 200-300ms vs 2-3s). Supports multiple models including Kimi K2 (256K context, excellent for agentic coding). **Pre-commit hooks now use Groq as primary LLM** with Gemini fallback, avoiding OAuth browser authentication interruptions. See `docs/integrations/groq-bridge.md` for setup.

### GitHub Release Manager

Proactive release workflow automation with issue tracking, version management, and documentation updates.

```bash
# Proactive usage - invokes automatically on feature completion
# Manual usage - invoke @agent when ready to release
@agent github-release-manager "Check if we need a release"
@agent github-release-manager "Create release for v8.20.0"
```

**Capabilities:**
- **Version Management**: Four-file procedure (__init__.py → pyproject.toml → README.md → uv lock)
- **CHANGELOG Management**: Format guidelines, conflict resolution (combine PR entries)
- **Documentation Matrix**: Automatic CHANGELOG, CLAUDE.md, README.md updates
- **Issue Tracking**: Auto-detects "fixes #", suggests closures with smart comments
- **Release Procedure**: Merge → Tag → Push → Verify workflows (Docker Publish, HTTP-MCP Bridge)

**Post-Release Workflow:** Retrieves issues from release, suggests closures with PR links and CHANGELOG entries.

See [.claude/agents/github-release-manager.md](.claude/agents/github-release-manager.md) for complete workflows.

### Code Quality Guard (Gemini CLI / Groq API)

Fast automated analysis for complexity scoring, security scanning, and refactoring suggestions.

```bash
# Complexity check (Gemini CLI - default)
gemini "Complexity 1-10 per function, list high (>7) first: $(cat file.py)"

# Complexity check (Groq API - 10x faster, default model)
./scripts/utils/groq "Complexity 1-10 per function, list high (>7) first: $(cat file.py)"

# Complexity check (Kimi K2 - best for complex code analysis)
./scripts/utils/groq "Complexity 1-10 per function, list high (>7) first: $(cat file.py)" --model moonshotai/kimi-k2-instruct

# Security scan
gemini "Security check (SQL injection, XSS, command injection): $(cat file.py)"

# TODO prioritization
bash scripts/maintenance/scan_todos.sh

# Pre-commit hook (auto-install)
ln -s ../../scripts/hooks/pre-commit .git/hooks/pre-commit

# Pre-commit hook setup (RECOMMENDED: Groq for fast, non-interactive checks)
export GROQ_API_KEY="your-groq-api-key"  # Primary (200-300ms, no OAuth)
# Falls back to Gemini CLI if Groq unavailable
# Skips checks gracefully if neither available
```

**Pre-commit Hook LLM Priority:**
1. **Groq API** (Primary) - Fast (200-300ms), simple API key auth, no browser interruption
2. **Gemini CLI** (Fallback) - Slower (2-3s), OAuth browser flow may interrupt commits
3. **Skip checks** - If neither available, commit proceeds without quality gates

See [.claude/agents/code-quality-guard.md](.claude/agents/code-quality-guard.md) for complete workflows and quality standards.

### Gemini PR Automator

Eliminates manual "Wait 1min → /gemini review" cycles with fully automated review iteration.

```bash
# Full automated review (5 iterations, safe fixes enabled)
bash scripts/pr/auto_review.sh <PR_NUMBER>

# Quality gate checks before review
bash scripts/pr/quality_gate.sh <PR_NUMBER>

# Generate tests for new code
bash scripts/pr/generate_tests.sh <PR_NUMBER>

# Breaking change detection
bash scripts/pr/detect_breaking_changes.sh main <BRANCH>
```

**Time Savings:** ~10-30 minutes per PR vs manual iteration. See [.claude/agents/gemini-pr-automator.md](.claude/agents/gemini-pr-automator.md) for workflows.

### Amp CLI Bridge

File-based workflow for external research without consuming Claude Code credits.

```bash
# Claude creates prompt → You run command → Amp writes response
amp @.claude/amp/prompts/pending/{uuid}.json
```

**Use cases:** Web research, codebase analysis, documentation generation. See [docs/amp-cli-bridge.md](docs/amp-cli-bridge.md) for architecture.

> **For detailed troubleshooting, architecture, and deployment guides:**
> - **Backend Configuration Issues**: See [Wiki Troubleshooting Guide](https://github.com/doobidoo/mcp-memory-service/wiki/07-TROUBLESHOOTING#backend-configuration-issues) for comprehensive solutions to missing memories, environment variable issues, Cloudflare auth, hooks timeouts, and more
> - **Historical Context**: Retrieve memories tagged with `claude-code-reference`
> - **Quick Diagnostic**: Run `python scripts/validation/diagnose_backend_config.py`

```

--------------------------------------------------------------------------------
/tests/consolidation/__init__.py:
--------------------------------------------------------------------------------

```python
# Consolidation tests module
```

--------------------------------------------------------------------------------
/archive/deployment-configs/empty_config.yml:
--------------------------------------------------------------------------------

```yaml
# Empty Litestream config
dbs: []
```

--------------------------------------------------------------------------------
/scripts/run/run-with-uv.sh:
--------------------------------------------------------------------------------

```bash
#!/bin/bash
# Launch the MCP Memory Service through the UV wrapper.
# `exec` replaces this shell with the Python process so signals and the
# exit status propagate directly to the caller instead of a lingering shell.
echo "Running MCP Memory Service with UV..."
exec python uv_wrapper.py "$@"

```

--------------------------------------------------------------------------------
/scripts/linux/service_status.sh:
--------------------------------------------------------------------------------

```bash
#!/bin/bash
# Show the current status of the user-level MCP Memory Service systemd unit.
echo "MCP Memory Service Status:"
# Print a divider line. The previous `echo "-" | tr '-' '='` only emitted a
# single "=" because tr translates characters, it does not repeat them.
echo "=========================="
systemctl --user status mcp-memory
```

--------------------------------------------------------------------------------
/scripts/linux/view_logs.sh:
--------------------------------------------------------------------------------

```bash
#!/bin/bash
# Follow the logs of the MCP Memory Service.
# The sibling start/stop/status scripts manage the unit with `systemctl --user`,
# so the logs live in the user journal: `--user` is required here, otherwise
# journalctl searches the system journal and finds nothing.
echo "Viewing MCP Memory Service logs (press Ctrl+C to exit)..."
journalctl --user -u mcp-memory -f

```

--------------------------------------------------------------------------------
/scripts/.claude/settings.local.json:
--------------------------------------------------------------------------------

```json
{
  "permissions": {
    "allow": [
      "mcp__code-context__index_codebase"
    ],
    "deny": [],
    "ask": []
  }
}
```

--------------------------------------------------------------------------------
/docs/statistics/data/activity_by_hour.csv:
--------------------------------------------------------------------------------

```
hour,commits
00,22
01,6
06,19
07,76
08,90
09,73
10,43
11,71
12,73
13,92
14,97
15,41
16,73
17,85
18,65
19,98
20,138
21,160
22,150
23,64

```

--------------------------------------------------------------------------------
/docs/statistics/data/contributors.csv:
--------------------------------------------------------------------------------

```
contributor,commits,percentage
Heinrich Krupp,1418,94.8%
zod,20,1.3%
Salih Ergüt,16,1.1%
3dyuval,10,0.7%
muxammadreza,8,0.5%
Henry Mao,6,0.4%

```

--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------

```python
"""
Test suite for MCP Memory Service.
This package contains all test modules for verifying the functionality
of the memory service components.
"""
```

--------------------------------------------------------------------------------
/docs/statistics/data/activity_by_day.csv:
--------------------------------------------------------------------------------

```
day_of_week,commits,percentage
Sunday,314,20.4%
Saturday,285,18.6%
Monday,271,17.6%
Friday,231,15.0%
Tuesday,177,11.5%
Thursday,131,8.5%
Wednesday,127,8.3%

```

--------------------------------------------------------------------------------
/docs/statistics/data/monthly_activity.csv:
--------------------------------------------------------------------------------

```
month,commits,releases
2024-12,55,1
2025-01,34,0
2025-02,2,0
2025-03,66,0
2025-04,102,0
2025-05,4,0
2025-06,36,0
2025-07,351,9
2025-08,330,64
2025-09,246,34
2025-10,310,65

```

--------------------------------------------------------------------------------
/scripts/linux/stop_service.sh:
--------------------------------------------------------------------------------

```bash
#!/bin/bash
# Stop the user-level MCP Memory Service systemd unit and report the result.
echo "Stopping MCP Memory Service..."
# Test the command's exit status directly rather than inspecting $? afterwards,
# which is fragile (any intervening command would clobber it).
if systemctl --user stop mcp-memory; then
    echo "✅ Service stopped successfully!"
else
    echo "❌ Failed to stop service"
fi

```

--------------------------------------------------------------------------------
/scripts/linux/start_service.sh:
--------------------------------------------------------------------------------

```bash
#!/bin/bash
# Start the user-level MCP Memory Service systemd unit and report the result.
echo "Starting MCP Memory Service..."
# Test the command's exit status directly rather than inspecting $? afterwards,
# which is fragile (any intervening command would clobber it).
if systemctl --user start mcp-memory; then
    echo "✅ Service started successfully!"
else
    echo "❌ Failed to start service"
fi

```

--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------

```
[pytest]
testpaths = tests
python_files = test_*.py
python_classes = Test*
python_functions = test_*
markers =
    unit: unit tests
    integration: integration tests
    performance: performance tests
    asyncio: mark test as async

```

--------------------------------------------------------------------------------
/archive/litestream-configs-v6.3.0/requirements-cloudflare.txt:
--------------------------------------------------------------------------------

```
# Additional dependencies for Cloudflare backend support
# These are installed automatically when using the cloudflare backend

# HTTP client for Cloudflare API calls
httpx>=0.24.0

# Optional: Cloudflare Python SDK (if available)
# cloudflare>=2.15.0
```

--------------------------------------------------------------------------------
/test_document.txt:
--------------------------------------------------------------------------------

```
This is a test document for MCP Memory Service document ingestion.

It contains some sample content to test the chunking and embedding functionality.

Features:
- Multiple paragraphs
- Some technical content
- Test data for verification

End of document.

```

--------------------------------------------------------------------------------
/archive/litestream-configs-v6.3.0/litestream_replica_simple.yml:
--------------------------------------------------------------------------------

```yaml
# Simple Litestream replica configuration 
# Note: Litestream replicas typically push TO destinations, not pull FROM them
# For pulling from HTTP, we'll use restore commands instead
dbs:
  - path: /Users/hkr/Library/Application Support/mcp-memory/sqlite_vec.db
```
Page 1/35FirstPrevNextLast