This is page 4 of 27. Use http://codebase.md/basicmachines-co/basic-memory?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .claude
│   ├── commands
│   │   ├── release
│   │   │   ├── beta.md
│   │   │   ├── changelog.md
│   │   │   ├── release-check.md
│   │   │   └── release.md
│   │   ├── spec.md
│   │   └── test-live.md
│   └── settings.json
├── .dockerignore
├── .env.example
├── .github
│   ├── dependabot.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   ├── config.yml
│   │   ├── documentation.md
│   │   └── feature_request.md
│   └── workflows
│       ├── claude-code-review.yml
│       ├── claude-issue-triage.yml
│       ├── claude.yml
│       ├── dev-release.yml
│       ├── docker.yml
│       ├── pr-title.yml
│       ├── release.yml
│       └── test.yml
├── .gitignore
├── .python-version
├── CHANGELOG.md
├── CITATION.cff
├── CLA.md
├── CLAUDE.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── docker-compose-postgres.yml
├── docker-compose.yml
├── Dockerfile
├── docs
│   ├── ai-assistant-guide-extended.md
│   ├── ARCHITECTURE.md
│   ├── character-handling.md
│   ├── cloud-cli.md
│   ├── Docker.md
│   └── testing-coverage.md
├── justfile
├── LICENSE
├── llms-install.md
├── pyproject.toml
├── README.md
├── SECURITY.md
├── smithery.yaml
├── specs
│   ├── SPEC-1 Specification-Driven Development Process.md
│   ├── SPEC-10 Unified Deployment Workflow and Event Tracking.md
│   ├── SPEC-11 Basic Memory API Performance Optimization.md
│   ├── SPEC-12 OpenTelemetry Observability.md
│   ├── SPEC-13 CLI Authentication with Subscription Validation.md
│   ├── SPEC-14 Cloud Git Versioning & GitHub Backup.md
│   ├── SPEC-14- Cloud Git Versioning & GitHub Backup.md
│   ├── SPEC-15 Configuration Persistence via Tigris for Cloud Tenants.md
│   ├── SPEC-16 MCP Cloud Service Consolidation.md
│   ├── SPEC-17 Semantic Search with ChromaDB.md
│   ├── SPEC-18 AI Memory Management Tool.md
│   ├── SPEC-19 Sync Performance and Memory Optimization.md
│   ├── SPEC-2 Slash Commands Reference.md
│   ├── SPEC-20 Simplified Project-Scoped Rclone Sync.md
│   ├── SPEC-3 Agent Definitions.md
│   ├── SPEC-4 Notes Web UI Component Architecture.md
│   ├── SPEC-5 CLI Cloud Upload via WebDAV.md
│   ├── SPEC-6 Explicit Project Parameter Architecture.md
│   ├── SPEC-7 POC to spike Tigris Turso for local access to cloud data.md
│   ├── SPEC-8 TigrisFS Integration.md
│   ├── SPEC-9 Multi-Project Bidirectional Sync Architecture.md
│   ├── SPEC-9 Signed Header Tenant Information.md
│   └── SPEC-9-1 Follow-Ups- Conflict, Sync, and Observability.md
├── src
│   └── basic_memory
│       ├── __init__.py
│       ├── alembic
│       │   ├── alembic.ini
│       │   ├── env.py
│       │   ├── migrations.py
│       │   ├── script.py.mako
│       │   └── versions
│       │       ├── 314f1ea54dc4_add_postgres_full_text_search_support_.py
│       │       ├── 3dae7c7b1564_initial_schema.py
│       │       ├── 502b60eaa905_remove_required_from_entity_permalink.py
│       │       ├── 5fe1ab1ccebe_add_projects_table.py
│       │       ├── 647e7a75e2cd_project_constraint_fix.py
│       │       ├── 6830751f5fb6_merge_multiple_heads.py
│       │       ├── 9d9c1cb7d8f5_add_mtime_and_size_columns_to_entity_.py
│       │       ├── a1b2c3d4e5f6_fix_project_foreign_keys.py
│       │       ├── a2b3c4d5e6f7_add_search_index_entity_cascade.py
│       │       ├── b3c3938bacdb_relation_to_name_unique_index.py
│       │       ├── cc7172b46608_update_search_index_schema.py
│       │       ├── e7e1f4367280_add_scan_watermark_tracking_to_project.py
│       │       ├── f8a9b2c3d4e5_add_pg_trgm_for_fuzzy_link_resolution.py
│       │       └── g9a0b3c4d5e6_add_external_id_to_project_and_entity.py
│       ├── api
│       │   ├── __init__.py
│       │   ├── app.py
│       │   ├── container.py
│       │   ├── routers
│       │   │   ├── __init__.py
│       │   │   ├── directory_router.py
│       │   │   ├── importer_router.py
│       │   │   ├── knowledge_router.py
│       │   │   ├── management_router.py
│       │   │   ├── memory_router.py
│       │   │   ├── project_router.py
│       │   │   ├── prompt_router.py
│       │   │   ├── resource_router.py
│       │   │   ├── search_router.py
│       │   │   └── utils.py
│       │   ├── template_loader.py
│       │   └── v2
│       │       ├── __init__.py
│       │       └── routers
│       │           ├── __init__.py
│       │           ├── directory_router.py
│       │           ├── importer_router.py
│       │           ├── knowledge_router.py
│       │           ├── memory_router.py
│       │           ├── project_router.py
│       │           ├── prompt_router.py
│       │           ├── resource_router.py
│       │           └── search_router.py
│       ├── cli
│       │   ├── __init__.py
│       │   ├── app.py
│       │   ├── auth.py
│       │   ├── commands
│       │   │   ├── __init__.py
│       │   │   ├── cloud
│       │   │   │   ├── __init__.py
│       │   │   │   ├── api_client.py
│       │   │   │   ├── bisync_commands.py
│       │   │   │   ├── cloud_utils.py
│       │   │   │   ├── core_commands.py
│       │   │   │   ├── rclone_commands.py
│       │   │   │   ├── rclone_config.py
│       │   │   │   ├── rclone_installer.py
│       │   │   │   ├── upload_command.py
│       │   │   │   └── upload.py
│       │   │   ├── command_utils.py
│       │   │   ├── db.py
│       │   │   ├── format.py
│       │   │   ├── import_chatgpt.py
│       │   │   ├── import_claude_conversations.py
│       │   │   ├── import_claude_projects.py
│       │   │   ├── import_memory_json.py
│       │   │   ├── mcp.py
│       │   │   ├── project.py
│       │   │   ├── status.py
│       │   │   ├── telemetry.py
│       │   │   └── tool.py
│       │   ├── container.py
│       │   └── main.py
│       ├── config.py
│       ├── db.py
│       ├── deps
│       │   ├── __init__.py
│       │   ├── config.py
│       │   ├── db.py
│       │   ├── importers.py
│       │   ├── projects.py
│       │   ├── repositories.py
│       │   └── services.py
│       ├── deps.py
│       ├── file_utils.py
│       ├── ignore_utils.py
│       ├── importers
│       │   ├── __init__.py
│       │   ├── base.py
│       │   ├── chatgpt_importer.py
│       │   ├── claude_conversations_importer.py
│       │   ├── claude_projects_importer.py
│       │   ├── memory_json_importer.py
│       │   └── utils.py
│       ├── markdown
│       │   ├── __init__.py
│       │   ├── entity_parser.py
│       │   ├── markdown_processor.py
│       │   ├── plugins.py
│       │   ├── schemas.py
│       │   └── utils.py
│       ├── mcp
│       │   ├── __init__.py
│       │   ├── async_client.py
│       │   ├── clients
│       │   │   ├── __init__.py
│       │   │   ├── directory.py
│       │   │   ├── knowledge.py
│       │   │   ├── memory.py
│       │   │   ├── project.py
│       │   │   ├── resource.py
│       │   │   └── search.py
│       │   ├── container.py
│       │   ├── project_context.py
│       │   ├── prompts
│       │   │   ├── __init__.py
│       │   │   ├── ai_assistant_guide.py
│       │   │   ├── continue_conversation.py
│       │   │   ├── recent_activity.py
│       │   │   ├── search.py
│       │   │   └── utils.py
│       │   ├── resources
│       │   │   ├── ai_assistant_guide.md
│       │   │   └── project_info.py
│       │   ├── server.py
│       │   └── tools
│       │       ├── __init__.py
│       │       ├── build_context.py
│       │       ├── canvas.py
│       │       ├── chatgpt_tools.py
│       │       ├── delete_note.py
│       │       ├── edit_note.py
│       │       ├── list_directory.py
│       │       ├── move_note.py
│       │       ├── project_management.py
│       │       ├── read_content.py
│       │       ├── read_note.py
│       │       ├── recent_activity.py
│       │       ├── search.py
│       │       ├── utils.py
│       │       ├── view_note.py
│       │       └── write_note.py
│       ├── models
│       │   ├── __init__.py
│       │   ├── base.py
│       │   ├── knowledge.py
│       │   ├── project.py
│       │   └── search.py
│       ├── project_resolver.py
│       ├── repository
│       │   ├── __init__.py
│       │   ├── entity_repository.py
│       │   ├── observation_repository.py
│       │   ├── postgres_search_repository.py
│       │   ├── project_info_repository.py
│       │   ├── project_repository.py
│       │   ├── relation_repository.py
│       │   ├── repository.py
│       │   ├── search_index_row.py
│       │   ├── search_repository_base.py
│       │   ├── search_repository.py
│       │   └── sqlite_search_repository.py
│       ├── runtime.py
│       ├── schemas
│       │   ├── __init__.py
│       │   ├── base.py
│       │   ├── cloud.py
│       │   ├── delete.py
│       │   ├── directory.py
│       │   ├── importer.py
│       │   ├── memory.py
│       │   ├── project_info.py
│       │   ├── prompt.py
│       │   ├── request.py
│       │   ├── response.py
│       │   ├── search.py
│       │   ├── sync_report.py
│       │   └── v2
│       │       ├── __init__.py
│       │       ├── entity.py
│       │       └── resource.py
│       ├── services
│       │   ├── __init__.py
│       │   ├── context_service.py
│       │   ├── directory_service.py
│       │   ├── entity_service.py
│       │   ├── exceptions.py
│       │   ├── file_service.py
│       │   ├── initialization.py
│       │   ├── link_resolver.py
│       │   ├── project_service.py
│       │   ├── search_service.py
│       │   └── service.py
│       ├── sync
│       │   ├── __init__.py
│       │   ├── background_sync.py
│       │   ├── coordinator.py
│       │   ├── sync_service.py
│       │   └── watch_service.py
│       ├── telemetry.py
│       ├── templates
│       │   └── prompts
│       │       ├── continue_conversation.hbs
│       │       └── search.hbs
│       └── utils.py
├── test-int
│   ├── BENCHMARKS.md
│   ├── cli
│   │   ├── test_project_commands_integration.py
│   │   └── test_version_integration.py
│   ├── conftest.py
│   ├── mcp
│   │   ├── test_build_context_underscore.py
│   │   ├── test_build_context_validation.py
│   │   ├── test_chatgpt_tools_integration.py
│   │   ├── test_default_project_mode_integration.py
│   │   ├── test_delete_note_integration.py
│   │   ├── test_edit_note_integration.py
│   │   ├── test_lifespan_shutdown_sync_task_cancellation_integration.py
│   │   ├── test_list_directory_integration.py
│   │   ├── test_move_note_integration.py
│   │   ├── test_project_management_integration.py
│   │   ├── test_project_state_sync_integration.py
│   │   ├── test_read_content_integration.py
│   │   ├── test_read_note_integration.py
│   │   ├── test_search_integration.py
│   │   ├── test_single_project_mcp_integration.py
│   │   └── test_write_note_integration.py
│   ├── test_db_wal_mode.py
│   └── test_disable_permalinks_integration.py
├── tests
│   ├── __init__.py
│   ├── api
│   │   ├── conftest.py
│   │   ├── test_api_container.py
│   │   ├── test_async_client.py
│   │   ├── test_continue_conversation_template.py
│   │   ├── test_directory_router.py
│   │   ├── test_importer_router.py
│   │   ├── test_knowledge_router.py
│   │   ├── test_management_router.py
│   │   ├── test_memory_router.py
│   │   ├── test_project_router_operations.py
│   │   ├── test_project_router.py
│   │   ├── test_prompt_router.py
│   │   ├── test_relation_background_resolution.py
│   │   ├── test_resource_router.py
│   │   ├── test_search_router.py
│   │   ├── test_search_template.py
│   │   ├── test_template_loader_helpers.py
│   │   ├── test_template_loader.py
│   │   └── v2
│   │       ├── __init__.py
│   │       ├── conftest.py
│   │       ├── test_directory_router.py
│   │       ├── test_importer_router.py
│   │       ├── test_knowledge_router.py
│   │       ├── test_memory_router.py
│   │       ├── test_project_router.py
│   │       ├── test_prompt_router.py
│   │       ├── test_resource_router.py
│   │       └── test_search_router.py
│   ├── cli
│   │   ├── cloud
│   │   │   ├── test_cloud_api_client_and_utils.py
│   │   │   ├── test_rclone_config_and_bmignore_filters.py
│   │   │   └── test_upload_path.py
│   │   ├── conftest.py
│   │   ├── test_auth_cli_auth.py
│   │   ├── test_cli_container.py
│   │   ├── test_cli_exit.py
│   │   ├── test_cli_tool_exit.py
│   │   ├── test_cli_tools.py
│   │   ├── test_cloud_authentication.py
│   │   ├── test_ignore_utils.py
│   │   ├── test_import_chatgpt.py
│   │   ├── test_import_claude_conversations.py
│   │   ├── test_import_claude_projects.py
│   │   ├── test_import_memory_json.py
│   │   ├── test_project_add_with_local_path.py
│   │   └── test_upload.py
│   ├── conftest.py
│   ├── db
│   │   └── test_issue_254_foreign_key_constraints.py
│   ├── importers
│   │   ├── test_conversation_indexing.py
│   │   ├── test_importer_base.py
│   │   └── test_importer_utils.py
│   ├── markdown
│   │   ├── __init__.py
│   │   ├── test_date_frontmatter_parsing.py
│   │   ├── test_entity_parser_error_handling.py
│   │   ├── test_entity_parser.py
│   │   ├── test_markdown_plugins.py
│   │   ├── test_markdown_processor.py
│   │   ├── test_observation_edge_cases.py
│   │   ├── test_parser_edge_cases.py
│   │   ├── test_relation_edge_cases.py
│   │   └── test_task_detection.py
│   ├── mcp
│   │   ├── clients
│   │   │   ├── __init__.py
│   │   │   └── test_clients.py
│   │   ├── conftest.py
│   │   ├── test_async_client_modes.py
│   │   ├── test_mcp_container.py
│   │   ├── test_obsidian_yaml_formatting.py
│   │   ├── test_permalink_collision_file_overwrite.py
│   │   ├── test_project_context.py
│   │   ├── test_prompts.py
│   │   ├── test_recent_activity_prompt_modes.py
│   │   ├── test_resources.py
│   │   ├── test_server_lifespan_branches.py
│   │   ├── test_tool_build_context.py
│   │   ├── test_tool_canvas.py
│   │   ├── test_tool_delete_note.py
│   │   ├── test_tool_edit_note.py
│   │   ├── test_tool_list_directory.py
│   │   ├── test_tool_move_note.py
│   │   ├── test_tool_project_management.py
│   │   ├── test_tool_read_content.py
│   │   ├── test_tool_read_note.py
│   │   ├── test_tool_recent_activity.py
│   │   ├── test_tool_resource.py
│   │   ├── test_tool_search.py
│   │   ├── test_tool_utils.py
│   │   ├── test_tool_view_note.py
│   │   ├── test_tool_write_note_kebab_filenames.py
│   │   ├── test_tool_write_note.py
│   │   └── tools
│   │       └── test_chatgpt_tools.py
│   ├── Non-MarkdownFileSupport.pdf
│   ├── README.md
│   ├── repository
│   │   ├── test_entity_repository_upsert.py
│   │   ├── test_entity_repository.py
│   │   ├── test_entity_upsert_issue_187.py
│   │   ├── test_observation_repository.py
│   │   ├── test_postgres_search_repository.py
│   │   ├── test_project_info_repository.py
│   │   ├── test_project_repository.py
│   │   ├── test_relation_repository.py
│   │   ├── test_repository.py
│   │   ├── test_search_repository_edit_bug_fix.py
│   │   └── test_search_repository.py
│   ├── schemas
│   │   ├── test_base_timeframe_minimum.py
│   │   ├── test_memory_serialization.py
│   │   ├── test_memory_url_validation.py
│   │   ├── test_memory_url.py
│   │   ├── test_relation_response_reference_resolution.py
│   │   ├── test_schemas.py
│   │   └── test_search.py
│   ├── Screenshot.png
│   ├── services
│   │   ├── test_context_service.py
│   │   ├── test_directory_service.py
│   │   ├── test_entity_service_disable_permalinks.py
│   │   ├── test_entity_service.py
│   │   ├── test_file_service.py
│   │   ├── test_initialization_cloud_mode_branches.py
│   │   ├── test_initialization.py
│   │   ├── test_link_resolver.py
│   │   ├── test_project_removal_bug.py
│   │   ├── test_project_service_operations.py
│   │   ├── test_project_service.py
│   │   └── test_search_service.py
│   ├── sync
│   │   ├── test_character_conflicts.py
│   │   ├── test_coordinator.py
│   │   ├── test_sync_service_incremental.py
│   │   ├── test_sync_service.py
│   │   ├── test_sync_wikilink_issue.py
│   │   ├── test_tmp_files.py
│   │   ├── test_watch_service_atomic_adds.py
│   │   ├── test_watch_service_edge_cases.py
│   │   ├── test_watch_service_reload.py
│   │   └── test_watch_service.py
│   ├── test_config.py
│   ├── test_deps.py
│   ├── test_production_cascade_delete.py
│   ├── test_project_resolver.py
│   ├── test_rclone_commands.py
│   ├── test_runtime.py
│   ├── test_telemetry.py
│   └── utils
│       ├── test_file_utils.py
│       ├── test_frontmatter_obsidian_compatible.py
│       ├── test_parse_tags.py
│       ├── test_permalink_formatting.py
│       ├── test_timezone_utils.py
│       ├── test_utf8_handling.py
│       └── test_validate_project_path.py
└── uv.lock
```

# Files

--------------------------------------------------------------------------------
/tests/mcp/test_tool_delete_note.py:
--------------------------------------------------------------------------------

```python
 1 | """Tests for delete_note MCP tool."""
 2 | 
 3 | from basic_memory.mcp.tools.delete_note import _format_delete_error_response
 4 | 
 5 | 
 6 | class TestDeleteNoteErrorFormatting:
 7 |     """Test the error formatting function for better user experience."""
 8 | 
 9 |     def test_format_delete_error_note_not_found(self, test_project):
10 |         """Test formatting for note not found errors."""
11 |         result = _format_delete_error_response(test_project.name, "entity not found", "test-note")
12 | 
13 |         assert "# Delete Failed - Note Not Found" in result
14 |         assert "The note 'test-note' could not be found" in result
15 |         assert 'search_notes("test-project", "test-note")' in result
16 |         assert "Already deleted" in result
17 |         assert "Wrong identifier" in result
18 | 
19 |     def test_format_delete_error_permission_denied(self, test_project):
20 |         """Test formatting for permission errors."""
21 |         result = _format_delete_error_response(test_project.name, "permission denied", "test-note")
22 | 
23 |         assert "# Delete Failed - Permission Error" in result
24 |         assert "You don't have permission to delete 'test-note'" in result
25 |         assert "Check permissions" in result
26 |         assert "File locks" in result
27 |         assert "list_memory_projects()" in result
28 | 
29 |     def test_format_delete_error_access_forbidden(self, test_project):
30 |         """Test formatting for access forbidden errors."""
31 |         result = _format_delete_error_response(test_project.name, "access forbidden", "test-note")
32 | 
33 |         assert "# Delete Failed - Permission Error" in result
34 |         assert "You don't have permission to delete 'test-note'" in result
35 | 
36 |     def test_format_delete_error_server_error(self, test_project):
37 |         """Test formatting for server errors."""
38 |         result = _format_delete_error_response(
39 |             test_project.name, "server error occurred", "test-note"
40 |         )
41 | 
42 |         assert "# Delete Failed - System Error" in result
43 |         assert "A system error occurred while deleting 'test-note'" in result
44 |         assert "Try again" in result
45 |         assert "Check file status" in result
46 | 
47 |     def test_format_delete_error_filesystem_error(self, test_project):
48 |         """Test formatting for filesystem errors."""
49 |         result = _format_delete_error_response(test_project.name, "filesystem error", "test-note")
50 | 
51 |         assert "# Delete Failed - System Error" in result
52 |         assert "A system error occurred while deleting 'test-note'" in result
53 | 
54 |     def test_format_delete_error_disk_error(self, test_project):
55 |         """Test formatting for disk errors."""
56 |         result = _format_delete_error_response(test_project.name, "disk full", "test-note")
57 | 
58 |         assert "# Delete Failed - System Error" in result
59 |         assert "A system error occurred while deleting 'test-note'" in result
60 | 
61 |     def test_format_delete_error_database_error(self, test_project):
62 |         """Test formatting for database errors."""
63 |         result = _format_delete_error_response(test_project.name, "database error", "test-note")
64 | 
65 |         assert "# Delete Failed - Database Error" in result
66 |         assert "A database error occurred while deleting 'test-note'" in result
67 |         assert "Sync conflict" in result
68 |         assert "Database lock" in result
69 | 
70 |     def test_format_delete_error_sync_error(self, test_project):
71 |         """Test formatting for sync errors."""
72 |         result = _format_delete_error_response(test_project.name, "sync failed", "test-note")
73 | 
74 |         assert "# Delete Failed - Database Error" in result
75 |         assert "A database error occurred while deleting 'test-note'" in result
76 | 
77 |     def test_format_delete_error_generic(self, test_project):
78 |         """Test formatting for generic errors."""
79 |         result = _format_delete_error_response(test_project.name, "unknown error", "test-note")
80 | 
81 |         assert "# Delete Failed" in result
82 |         assert "Error deleting note 'test-note': unknown error" in result
83 |         assert "General troubleshooting" in result
84 |         assert "Verify the note exists" in result
85 | 
86 |     def test_format_delete_error_with_complex_identifier(self, test_project):
87 |         """Test formatting with complex identifiers (permalinks)."""
88 |         result = _format_delete_error_response(
89 |             test_project.name, "entity not found", "folder/note-title"
90 |         )
91 | 
92 |         assert 'search_notes("test-project", "note-title")' in result
93 |         assert "Note Title" in result  # Title format
94 |         assert "folder/note-title" in result  # Permalink format
95 | 
96 | 
97 | # Integration tests removed to focus on error formatting coverage
98 | # The error formatting tests above provide the necessary coverage for MCP tool error messaging
99 | 
```
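
The assertions above pin down the behavior of `_format_delete_error_response` without showing it. A hypothetical sketch inferred from those assertions (not the actual implementation in `basic_memory/mcp/tools/delete_note.py`):

```python
# Hypothetical sketch only -- inferred from the test assertions above,
# not the actual implementation in basic_memory/mcp/tools/delete_note.py.
def _format_delete_error_response(project: str, error: str, identifier: str) -> str:
    error = error.lower()
    if "not found" in error:
        return (
            "# Delete Failed - Note Not Found\n\n"
            f"The note '{identifier}' could not be found.\n"
            f'Try searching first: search_notes("{project}", "{identifier}")\n'
            "Common causes: Already deleted, Wrong identifier.\n"
        )
    if "permission" in error or "forbidden" in error:
        return (
            "# Delete Failed - Permission Error\n\n"
            f"You don't have permission to delete '{identifier}'.\n"
        )
    if "server error" in error or "filesystem" in error or "disk" in error:
        return (
            "# Delete Failed - System Error\n\n"
            f"A system error occurred while deleting '{identifier}'.\n"
        )
    if "database" in error or "sync" in error:
        return (
            "# Delete Failed - Database Error\n\n"
            f"A database error occurred while deleting '{identifier}'.\n"
        )
    return f"# Delete Failed\n\nError deleting note '{identifier}': {error}\n"
```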

--------------------------------------------------------------------------------
/.claude/commands/release/changelog.md:
--------------------------------------------------------------------------------

```markdown
  1 | # /changelog - Generate or Update Changelog Entry
  2 | 
  3 | Analyze commits and generate formatted changelog entry for a version.
  4 | 
  5 | ## Usage
  6 | ```
  7 | /changelog <version> [type]
  8 | ```
  9 | 
 10 | **Parameters:**
 11 | - `version` (required): Version like `v0.14.0` or `v0.14.0b1`
 12 | - `type` (optional): `beta`, `rc`, or `stable` (default: `stable`)
 13 | 
 14 | ## Implementation
 15 | 
 16 | You are an expert technical writer for the Basic Memory project. When the user runs `/changelog`, execute the following steps:
 17 | 
 18 | ### Step 1: Version Analysis
 19 | 1. **Determine Commit Range**
 20 |    ```bash
 21 |    # Find last release tag
 22 |    git tag -l "v*" --sort=-version:refname | grep -v "b\|rc" | head -1
 23 |    
 24 |    # Get commits since last release
 25 |    git log --oneline ${last_tag}..HEAD
 26 |    ```
 27 | 
 28 | 2. **Parse Conventional Commits**
 29 |    - Extract feat: (features)
 30 |    - Extract fix: (bug fixes)  
 31 |    - Extract BREAKING CHANGE: (breaking changes)
 32 |    - Extract chore:, docs:, test: (other improvements)
 33 | 
 34 | ### Step 2: Categorize Changes
 35 | 1. **Features (feat:)**
 36 |    - New MCP tools
 37 |    - New CLI commands
 38 |    - New API endpoints
 39 |    - Major functionality additions
 40 | 
 41 | 2. **Bug Fixes (fix:)**
 42 |    - User-facing bug fixes
 43 |    - Critical issues resolved
 44 |    - Performance improvements
 45 |    - Security fixes
 46 | 
 47 | 3. **Technical Improvements**
 48 |    - Test coverage improvements
 49 |    - Code quality enhancements
 50 |    - Dependency updates
 51 |    - Documentation updates
 52 | 
 53 | 4. **Breaking Changes**
 54 |    - API changes
 55 |    - Configuration changes
 56 |    - Behavior changes
 57 |    - Migration requirements
 58 | 
 59 | ### Step 3: Generate Changelog Entry
 60 | Create formatted entry following existing CHANGELOG.md style:
 61 | 
 62 | Example:
 63 | ```markdown
 64 | ## <version> (<date>)
 65 | 
 66 | ### Features
 67 | 
 68 | - **Multi-Project Management System** - Switch between projects instantly during conversations
 69 |   ([`993e88a`](https://github.com/basicmachines-co/basic-memory/commit/993e88a)) 
 70 |   - Instant project switching with session context
 71 |   - Project-specific operations and isolation
 72 |   - Project discovery and management tools
 73 | 
 74 | - **Advanced Note Editing** - Incremental editing with append, prepend, find/replace, and section operations
 75 |   ([`6fc3904`](https://github.com/basicmachines-co/basic-memory/commit/6fc3904))
 76 |   - `edit_note` tool with multiple operation types
 77 |   - Smart frontmatter-aware editing
 78 |   - Validation and error handling
 79 | 
 80 | ### Bug Fixes
 81 | 
 82 | - **#118**: Fix YAML tag formatting to follow standard specification
 83 |   ([`2dc7e27`](https://github.com/basicmachines-co/basic-memory/commit/2dc7e27))
 84 | 
 85 | - **#110**: Make --project flag work consistently across CLI commands
 86 |   ([`02dd91a`](https://github.com/basicmachines-co/basic-memory/commit/02dd91a))
 87 | 
 88 | ### Technical Improvements
 89 | 
 90 | - **Comprehensive Testing** - 100% test coverage with integration testing
 91 |   ([`468a22f`](https://github.com/basicmachines-co/basic-memory/commit/468a22f))
 92 |   - MCP integration test suite
 93 |   - End-to-end testing framework
 94 |   - Performance and edge case validation
 95 | 
 96 | ### Breaking Changes
 97 | 
 98 | - **Database Migration**: Automatic migration from per-project to unified database. 
 99 |     Data will be re-indexed from the filesystem, resulting in no data loss.
100 | - **Configuration Changes**: Projects now synced between config.json and database
101 | - **Full Backward Compatibility**: All existing setups continue to work seamlessly
102 | ```
103 | 
104 | ### Step 4: Integration
105 | 1. **Update CHANGELOG.md**
106 |    - Insert new entry at top
107 |    - Maintain consistent formatting
108 |    - Include commit links and issue references
109 | 
110 | 2. **Validation**
111 |    - Check all major changes are captured
112 |    - Verify commit links work
113 |    - Ensure issue numbers are correct
114 | 
115 | ## Smart Analysis Features
116 | 
117 | ### Automatic Classification
118 | - Detect feature additions from file changes
119 | - Identify bug fixes from commit messages
120 | - Find breaking changes from code analysis
121 | - Extract issue numbers from commit messages
122 | 
123 | ### Content Enhancement
124 | - Add context for technical changes
125 | - Include migration guidance for breaking changes
126 | - Suggest installation/upgrade instructions
127 | - Link to relevant documentation
128 | 
129 | ## Output Format
130 | 
131 | ### For Beta Releases
132 | 
133 | Example: 
134 | ```markdown
135 | ## v0.13.0b4 (2025-06-03)
136 | 
137 | ### Beta Changes Since v0.13.0b3
138 | 
139 | - Fix FastMCP API compatibility issues
140 | - Update dependencies to latest versions  
141 | - Resolve setuptools import error
142 | 
143 | ### Installation
144 | ```bash
145 | uv tool install basic-memory --prerelease=allow
146 | ```
147 | 
148 | ### Known Issues
149 | - [List any known issues for beta testing]
150 | ```
151 | 
152 | ### For Stable Releases
153 | Full changelog with complete feature list, organized by impact and category.
154 | 
155 | ## Context
156 | - Follows existing CHANGELOG.md format and style
157 | - Uses conventional commit standards
158 | - Includes GitHub commit links for traceability
159 | - Focuses on user-facing changes and value
160 | - Maintains consistency with previous entries
```
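
Step 1's commit parsing is described but not shown. A minimal sketch of bucketing `git log --oneline` output by conventional-commit type (the regex, category names, and the `!` breaking-change shorthand are assumptions, not part of the command spec):

```python
import re
import subprocess
from collections import defaultdict

# Assumed prefix pattern: "<hash> <type>(<scope>)?!?: <subject>"
COMMIT_RE = re.compile(
    r"^(?P<hash>\w+) (?P<type>feat|fix|chore|docs|test)(\([^)]*\))?(?P<bang>!)?: (?P<subject>.+)$"
)

def categorize(last_tag: str) -> dict[str, list[str]]:
    """Group commit subjects since last_tag into changelog buckets."""
    log = subprocess.run(
        ["git", "log", "--oneline", f"{last_tag}..HEAD"],
        capture_output=True, text=True, check=True,
    ).stdout
    buckets: dict[str, list[str]] = defaultdict(list)
    for line in log.splitlines():
        m = COMMIT_RE.match(line)
        if m:
            key = "breaking" if m["bang"] else m["type"]
            buckets[key].append(m["subject"])
    return buckets
```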

--------------------------------------------------------------------------------
/src/basic_memory/alembic/versions/647e7a75e2cd_project_constraint_fix.py:
--------------------------------------------------------------------------------

```python
  1 | """project constraint fix
  2 | 
  3 | Revision ID: 647e7a75e2cd
  4 | Revises: 5fe1ab1ccebe
  5 | Create Date: 2025-06-03 12:48:30.162566
  6 | 
  7 | """
  8 | 
  9 | from typing import Sequence, Union
 10 | 
 11 | from alembic import op
 12 | import sqlalchemy as sa
 13 | 
 14 | 
 15 | # revision identifiers, used by Alembic.
 16 | revision: str = "647e7a75e2cd"
 17 | down_revision: Union[str, None] = "5fe1ab1ccebe"
 18 | branch_labels: Union[str, Sequence[str], None] = None
 19 | depends_on: Union[str, Sequence[str], None] = None
 20 | 
 21 | 
 22 | def upgrade() -> None:
 23 |     """Remove the problematic UNIQUE constraint on is_default column.
 24 | 
 25 |     The UNIQUE constraint prevents multiple projects from having is_default=FALSE,
 26 |     which breaks project creation when the service sets is_default=False.
 27 | 
 28 |     SQLite: Recreate the table without the constraint (no ALTER TABLE support)
 29 |     Postgres: Use ALTER TABLE to drop the constraint directly
 30 |     """
 31 |     connection = op.get_bind()
 32 |     is_sqlite = connection.dialect.name == "sqlite"
 33 | 
 34 |     if is_sqlite:
 35 |         # For SQLite, we need to recreate the table without the UNIQUE constraint
 36 |         # Create a new table without the UNIQUE constraint on is_default
 37 |         op.create_table(
 38 |             "project_new",
 39 |             sa.Column("id", sa.Integer(), nullable=False),
 40 |             sa.Column("name", sa.String(), nullable=False),
 41 |             sa.Column("description", sa.Text(), nullable=True),
 42 |             sa.Column("permalink", sa.String(), nullable=False),
 43 |             sa.Column("path", sa.String(), nullable=False),
 44 |             sa.Column("is_active", sa.Boolean(), nullable=False),
 45 |             sa.Column("is_default", sa.Boolean(), nullable=True),  # No UNIQUE constraint!
 46 |             sa.Column("created_at", sa.DateTime(), nullable=False),
 47 |             sa.Column("updated_at", sa.DateTime(), nullable=False),
 48 |             sa.PrimaryKeyConstraint("id"),
 49 |             sa.UniqueConstraint("name"),
 50 |             sa.UniqueConstraint("permalink"),
 51 |         )
 52 | 
 53 |         # Copy data from old table to new table
 54 |         op.execute("INSERT INTO project_new SELECT * FROM project")
 55 | 
 56 |         # Drop the old table
 57 |         op.drop_table("project")
 58 | 
 59 |         # Rename the new table
 60 |         op.rename_table("project_new", "project")
 61 | 
 62 |         # Recreate the indexes
 63 |         with op.batch_alter_table("project", schema=None) as batch_op:
 64 |             batch_op.create_index("ix_project_created_at", ["created_at"], unique=False)
 65 |             batch_op.create_index("ix_project_name", ["name"], unique=True)
 66 |             batch_op.create_index("ix_project_path", ["path"], unique=False)
 67 |             batch_op.create_index("ix_project_permalink", ["permalink"], unique=True)
 68 |             batch_op.create_index("ix_project_updated_at", ["updated_at"], unique=False)
 69 |     else:
 70 |         # For Postgres, we can simply drop the constraint
 71 |         with op.batch_alter_table("project", schema=None) as batch_op:
 72 |             batch_op.drop_constraint("project_is_default_key", type_="unique")
 73 | 
 74 | 
 75 | def downgrade() -> None:
 76 |     """Add back the UNIQUE constraint on is_default column.
 77 | 
 78 |     WARNING: This will break project creation again if multiple projects
 79 |     have is_default=FALSE.
 80 |     """
 81 |     # Recreate the table with the UNIQUE constraint
 82 |     op.create_table(
 83 |         "project_old",
 84 |         sa.Column("id", sa.Integer(), nullable=False),
 85 |         sa.Column("name", sa.String(), nullable=False),
 86 |         sa.Column("description", sa.Text(), nullable=True),
 87 |         sa.Column("permalink", sa.String(), nullable=False),
 88 |         sa.Column("path", sa.String(), nullable=False),
 89 |         sa.Column("is_active", sa.Boolean(), nullable=False),
 90 |         sa.Column("is_default", sa.Boolean(), nullable=True),
 91 |         sa.Column("created_at", sa.DateTime(), nullable=False),
 92 |         sa.Column("updated_at", sa.DateTime(), nullable=False),
 93 |         sa.PrimaryKeyConstraint("id"),
 94 |         sa.UniqueConstraint("is_default"),  # Add back the problematic constraint
 95 |         sa.UniqueConstraint("name"),
 96 |         sa.UniqueConstraint("permalink"),
 97 |     )
 98 | 
 99 |     # Copy data (this may fail if multiple FALSE values exist)
100 |     op.execute("INSERT INTO project_old SELECT * FROM project")
101 | 
102 |     # Drop the current table and rename
103 |     op.drop_table("project")
104 |     op.rename_table("project_old", "project")
105 | 
106 |     # Recreate indexes
107 |     with op.batch_alter_table("project", schema=None) as batch_op:
108 |         batch_op.create_index("ix_project_created_at", ["created_at"], unique=False)
109 |         batch_op.create_index("ix_project_name", ["name"], unique=True)
110 |         batch_op.create_index("ix_project_path", ["path"], unique=False)
111 |         batch_op.create_index("ix_project_permalink", ["permalink"], unique=True)
112 |         batch_op.create_index("ix_project_updated_at", ["updated_at"], unique=False)
113 | 
```
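
As background on why this migration exists: a UNIQUE constraint treats repeated FALSE values as duplicates, so only one non-default project could ever be created. A standalone sqlite3 demonstration (illustrative only, not part of the migration):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE project (id INTEGER PRIMARY KEY, is_default BOOLEAN UNIQUE)"
)
conn.execute("INSERT INTO project (is_default) VALUES (0)")
try:
    # A second non-default project collides with the first FALSE value.
    conn.execute("INSERT INTO project (is_default) VALUES (0)")
except sqlite3.IntegrityError as e:
    print(f"UNIQUE constraint blocks a second non-default project: {e}")
```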

--------------------------------------------------------------------------------
/tests/importers/test_conversation_indexing.py:
--------------------------------------------------------------------------------

```python
  1 | """Test that imported conversations are properly indexed with correct permalink and title.
  2 | 
  3 | This test verifies issue #452 - Imported conversations not indexed correctly.
  4 | """
  5 | 
  6 | import pytest
  7 | 
  8 | from basic_memory.config import ProjectConfig
  9 | from basic_memory.importers.claude_conversations_importer import ClaudeConversationsImporter
 10 | from basic_memory.markdown import EntityParser
 11 | from basic_memory.markdown.markdown_processor import MarkdownProcessor
 12 | from basic_memory.repository import EntityRepository
 13 | from basic_memory.services import EntityService
 14 | from basic_memory.services.file_service import FileService
 15 | from basic_memory.services.search_service import SearchService
 16 | from basic_memory.schemas.search import SearchQuery
 17 | from basic_memory.sync.sync_service import SyncService
 18 | 
 19 | 
 20 | @pytest.mark.asyncio
 21 | async def test_imported_conversations_have_correct_permalink_and_title(
 22 |     project_config: ProjectConfig,
 23 |     sync_service: SyncService,
 24 |     entity_service: EntityService,
 25 |     entity_repository: EntityRepository,
 26 |     search_service: SearchService,
 27 | ):
 28 |     """Test that imported conversations have correct permalink and title after sync.
 29 | 
 30 |     Issue #452: Imported conversations show permalink: null in search results
 31 |     and title shows as filename instead of frontmatter title.
 32 |     """
 33 |     base_path = project_config.home
 34 | 
 35 |     # Create parser, processor, and file_service for importer
 36 |     parser = EntityParser(base_path)
 37 |     processor = MarkdownProcessor(parser)
 38 |     file_service = FileService(base_path, processor)
 39 | 
 40 |     # Create importer
 41 |     importer = ClaudeConversationsImporter(base_path, processor, file_service)
 42 | 
 43 |     # Sample conversation data
 44 |     conversations = [
 45 |         {
 46 |             "uuid": "test-123",
 47 |             "name": "My Test Conversation Title",
 48 |             "created_at": "2025-01-15T10:00:00Z",
 49 |             "updated_at": "2025-01-15T11:00:00Z",
 50 |             "chat_messages": [
 51 |                 {
 52 |                     "uuid": "msg-1",
 53 |                     "sender": "human",
 54 |                     "created_at": "2025-01-15T10:00:00Z",
 55 |                     "text": "Hello world",
 56 |                     "content": [{"type": "text", "text": "Hello world"}],
 57 |                     "attachments": [],
 58 |                 },
 59 |                 {
 60 |                     "uuid": "msg-2",
 61 |                     "sender": "assistant",
 62 |                     "created_at": "2025-01-15T10:01:00Z",
 63 |                     "text": "Hello!",
 64 |                     "content": [{"type": "text", "text": "Hello!"}],
 65 |                     "attachments": [],
 66 |                 },
 67 |             ],
 68 |         }
 69 |     ]
 70 | 
 71 |     # Run import
 72 |     result = await importer.import_data(conversations, "conversations")
 73 |     assert result.success, f"Import failed: {result}"
 74 |     assert result.conversations == 1
 75 | 
 76 |     # Verify the file was created with correct content
 77 |     conv_path = base_path / "conversations" / "20250115-My_Test_Conversation_Title.md"
 78 |     assert conv_path.exists(), f"Expected file at {conv_path}"
 79 | 
 80 |     content = conv_path.read_text()
 81 |     assert "---" in content, "File should have frontmatter markers"
 82 |     assert "title: My Test Conversation Title" in content, "File should have title in frontmatter"
 83 |     assert "permalink: conversations/20250115-My_Test_Conversation_Title" in content, (
 84 |         "File should have permalink in frontmatter"
 85 |     )
 86 | 
 87 |     # Run sync to index the imported file
 88 |     await sync_service.sync(base_path, project_config.name)
 89 | 
 90 |     # Verify entity in database
 91 |     entities = await entity_repository.find_all()
 92 |     assert len(entities) == 1, f"Expected 1 entity, got {len(entities)}"
 93 | 
 94 |     entity = entities[0]
 95 | 
 96 |     # These are the key assertions for issue #452
 97 |     assert entity.title == "My Test Conversation Title", (
 98 |         f"Title should be from frontmatter, got: {entity.title}"
 99 |     )
100 |     assert entity.permalink == "conversations/20250115-My_Test_Conversation_Title", (
101 |         f"Permalink should be from frontmatter, got: {entity.permalink}"
102 |     )
103 | 
104 |     # Verify search index also has correct data
105 |     results = await search_service.search(SearchQuery(text="Test Conversation"))
106 |     assert len(results) >= 1, "Should find the conversation in search"
107 | 
108 |     # Find our entity in search results
109 |     search_result = next((r for r in results if r.entity_id == entity.id), None)
110 |     assert search_result is not None, "Entity should be in search results"
111 |     assert search_result.title == "My Test Conversation Title", (
112 |         f"Search title should be from frontmatter, got: {search_result.title}"
113 |     )
114 |     assert search_result.permalink == "conversations/20250115-My_Test_Conversation_Title", (
115 |         f"Search permalink should not be null, got: {search_result.permalink}"
116 |     )
117 | 
```
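
The expected file path encodes a naming convention the test asserts only indirectly. A sketch of that convention, assuming the importer formats `created_at` as `YYYYMMDD` and replaces spaces in the conversation name with underscores:

```python
from datetime import datetime

def conversation_filename(name: str, created_at: str) -> str:
    # Assumed convention, inferred from the test's expected path:
    # <YYYYMMDD>-<Name_With_Underscores>.md
    date = datetime.fromisoformat(created_at.replace("Z", "+00:00"))
    return f"{date:%Y%m%d}-{name.replace(' ', '_')}.md"

assert (
    conversation_filename("My Test Conversation Title", "2025-01-15T10:00:00Z")
    == "20250115-My_Test_Conversation_Title.md"
)
```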

--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------

```toml
  1 | [project]
  2 | name = "basic-memory"
  3 | dynamic = ["version"]
  4 | description = "Local-first knowledge management combining Zettelkasten with knowledge graphs"
  5 | readme = "README.md"
  6 | requires-python = ">=3.12"
  7 | license = { text = "AGPL-3.0-or-later" }
  8 | authors = [
  9 |     { name = "Basic Machines", email = "[email protected]" }
 10 | ]
 11 | dependencies = [
 12 |     "sqlalchemy>=2.0.0",
 13 |     "pyyaml>=6.0.1",
 14 |     "typer>=0.9.0",
 15 |     "aiosqlite>=0.20.0",
 16 |     "greenlet>=3.1.1",
 17 |     "pydantic[email,timezone]>=2.12.0",
 18 |     "mcp>=1.23.1",
 19 |     "pydantic-settings>=2.6.1",
 20 |     "loguru>=0.7.3",
 21 |     "pyright>=1.1.390",
 22 |     "markdown-it-py>=3.0.0",
 23 |     "python-frontmatter>=1.1.0",
 24 |     "rich>=13.9.4",
 25 |     "unidecode>=1.3.8",
 26 |     "dateparser>=1.2.0",
 27 |     "watchfiles>=1.0.4",
 28 |     "fastapi[standard]>=0.115.8",
 29 |     "alembic>=1.14.1",
 30 |     "pillow>=11.1.0",
 31 |     "pybars3>=0.9.7",
 32 |     "fastmcp==2.12.3", # Pinned - 2.14.x breaks MCP tools visibility (issue #463)
 33 |     "pyjwt>=2.10.1",
 34 |     "python-dotenv>=1.1.0",
 35 |     "pytest-aio>=1.9.0",
 36 |     "aiofiles>=24.1.0", # Optional observability (disabled by default via config)
 37 |     "asyncpg>=0.30.0",
 38 |     "nest-asyncio>=1.6.0", # For Alembic migrations with Postgres
 39 |     "pytest-asyncio>=1.2.0",
 40 |     "psycopg==3.3.1",
 41 |     "mdformat>=0.7.22",
 42 |     "mdformat-gfm>=0.3.7",
 43 |     "mdformat-frontmatter>=2.0.8",
 44 |     "openpanel>=0.0.1", # Anonymous usage telemetry (Homebrew-style opt-out)
 45 |     "sniffio>=1.3.1",
 46 |     "anyio>=4.10.0",
 47 |     "httpx>=0.28.0",
 48 | ]
 49 | 
 50 | 
 51 | [project.urls]
 52 | Homepage = "https://github.com/basicmachines-co/basic-memory"
 53 | Repository = "https://github.com/basicmachines-co/basic-memory"
 54 | Documentation = "https://github.com/basicmachines-co/basic-memory#readme"
 55 | 
 56 | [project.scripts]
 57 | basic-memory = "basic_memory.cli.main:app"
 58 | bm = "basic_memory.cli.main:app"
 59 | 
 60 | [build-system]
 61 | requires = ["hatchling", "uv-dynamic-versioning>=0.7.0"]
 62 | build-backend = "hatchling.build"
 63 | 
 64 | [tool.pytest.ini_options]
 65 | pythonpath = ["src", "tests"]
 66 | addopts = "--cov=basic_memory --cov-report term-missing"
 67 | testpaths = ["tests", "test-int"]
 68 | asyncio_mode = "strict"
 69 | asyncio_default_fixture_loop_scope = "function"
 70 | markers = [
 71 |     "benchmark: Performance benchmark tests (deselect with '-m \"not benchmark\"')",
 72 |     "slow: Slow-running tests (deselect with '-m \"not slow\"')",
 73 |     "postgres: Tests that run against Postgres backend (deselect with '-m \"not postgres\"')",
 74 |     "windows: Windows-specific tests (deselect with '-m \"not windows\"')",
 75 | ]
 76 | 
 77 | [tool.ruff]
 78 | line-length = 100
 79 | target-version = "py312"
 80 | 
 81 | [dependency-groups]
 82 | dev = [
 83 |     "gevent>=24.11.1",
 84 |     "icecream>=2.1.3",
 85 |     "pytest>=8.3.4",
 86 |     "pytest-cov>=4.1.0",
 87 |     "pytest-mock>=3.12.0",
 88 |     "pytest-asyncio>=0.24.0",
 89 |     "pytest-xdist>=3.0.0",
 90 |     "ruff>=0.1.6",
 91 |     "freezegun>=1.5.5",
 92 |     "testcontainers[postgres]>=4.0.0",
 93 |     "psycopg>=3.2.0",
 94 |     "pyright>=1.1.408",
 95 | ]
 96 | 
 97 | [tool.hatch.version]
 98 | source = "uv-dynamic-versioning"
 99 | 
100 | [tool.uv-dynamic-versioning]
101 | vcs = "git"
102 | style = "pep440"
103 | bump = true
104 | fallback-version = "0.0.0"
105 | 
106 | [tool.pyright]
107 | include = ["src/"]
108 | exclude = ["**/__pycache__"]
109 | ignore = ["test/"]
110 | defineConstant = { DEBUG = true }
111 | reportMissingImports = "error"
112 | reportMissingTypeStubs = false
113 | pythonVersion = "3.12"
114 | 
115 | 
116 | 
117 | [tool.coverage.run]
118 | concurrency = ["thread", "gevent"]
119 | parallel = true
120 | source = ["basic_memory"]
121 | 
122 | [tool.coverage.report]
123 | exclude_lines = [
124 |     "pragma: no cover",
125 |     "def __repr__",
126 |     "if self.debug:",
127 |     "if settings.DEBUG",
128 |     "raise AssertionError",
129 |     "raise NotImplementedError",
130 |     "if 0:",
131 |     "if __name__ == .__main__.:",
132 |     "class .*\\bProtocol\\):",
133 |     "@(abc\\.)?abstractmethod",
134 | ]
135 | 
136 | # Exclude specific modules that are difficult to test comprehensively
137 | omit = [
138 |     "*/external_auth_provider.py",  # External HTTP calls to OAuth providers
139 |     "*/supabase_auth_provider.py",  # External HTTP calls to Supabase APIs
140 |     "*/watch_service.py",           # File system watching - complex integration testing
141 |     "*/background_sync.py",         # Background processes
142 |     "*/cli/**",                    # CLI is an interactive wrapper; core logic is covered via API/MCP/service tests
143 |     "*/db.py",                     # Backend/runtime-dependent (sqlite/postgres/windows tuning); validated via integration tests
144 |     "*/services/initialization.py", # Startup orchestration + background tasks (watchers); exercised indirectly in entrypoints
145 |     "*/sync/sync_service.py",      # Heavy filesystem/db integration; covered by integration suite, not enforced in unit coverage
146 |     "*/telemetry.py",              # External analytics; tested lightly, excluded from strict coverage target
147 |     "*/services/migration_service.py", # Complex migration scenarios
148 | ]
149 | 
150 | [tool.logfire]
151 | ignore_no_config = true
152 | 
```
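
As a usage note on the markers above: they are intended to be deselected with `-m` expressions. An illustrative programmatic invocation (equivalent to the deselect hints in the marker descriptions):

```python
import pytest

# Equivalent to: pytest -m "not postgres and not slow and not benchmark"
# Skips the Postgres-backed, slow, and benchmark groups declared in
# [tool.pytest.ini_options] above.
raise SystemExit(pytest.main(["-m", "not postgres and not slow and not benchmark"]))
```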

--------------------------------------------------------------------------------
/src/basic_memory/mcp/tools/build_context.py:
--------------------------------------------------------------------------------

```python
  1 | """Build context tool for Basic Memory MCP server."""
  2 | 
  3 | from typing import Optional
  4 | 
  5 | from loguru import logger
  6 | from fastmcp import Context
  7 | 
  8 | from basic_memory.mcp.async_client import get_client
  9 | from basic_memory.mcp.project_context import get_active_project
 10 | from basic_memory.mcp.server import mcp
 11 | from basic_memory.telemetry import track_mcp_tool
 12 | from basic_memory.schemas.base import TimeFrame
 13 | from basic_memory.schemas.memory import (
 14 |     GraphContext,
 15 |     MemoryUrl,
 16 |     memory_url_path,
 17 | )
 18 | 
 19 | 
 20 | @mcp.tool(
 21 |     description="""Build context from a memory:// URI to continue conversations naturally.
 22 | 
 23 |     Use this to follow up on previous discussions or explore related topics.
 24 | 
 25 |     Memory URL Format:
 26 |     - Use paths like "folder/note" or "memory://folder/note"
 27 |     - Pattern matching: "folder/*" matches all notes in folder
 28 |     - Valid characters: letters, numbers, hyphens, underscores, forward slashes
 29 |     - Avoid: double slashes (//), angle brackets (<>), quotes, pipes (|)
 30 |     - Examples: "specs/search", "projects/basic-memory", "notes/*"
 31 | 
 32 |     Timeframes support natural language like:
 33 |     - "2 days ago", "last week", "today", "3 months ago"
 34 |     - Or standard formats like "7d", "24h"
 35 |     """,
 36 | )
 37 | async def build_context(
 38 |     url: MemoryUrl,
 39 |     project: Optional[str] = None,
 40 |     depth: str | int | None = 1,
 41 |     timeframe: Optional[TimeFrame] = "7d",
 42 |     page: int = 1,
 43 |     page_size: int = 10,
 44 |     max_related: int = 10,
 45 |     context: Context | None = None,
 46 | ) -> GraphContext:
 47 |     """Get context needed to continue a discussion within a specific project.
 48 | 
 49 |     This tool enables natural continuation of discussions by loading relevant context
 50 |     from memory:// URIs. It uses pattern matching to find relevant content and builds
 51 |     a rich context graph of related information.
 52 | 
 53 |     Project Resolution:
 54 |     Server resolves projects in this order: Single Project Mode → project parameter → default project.
 55 |     If project unknown, use list_memory_projects() or recent_activity() first.
 56 | 
 57 |     Args:
 58 |         project: Project name to build context from. Optional - server will resolve using hierarchy.
 59 |                 If unknown, use list_memory_projects() to discover available projects.
 60 |         url: memory:// URI pointing to discussion content (e.g. memory://specs/search)
 61 |         depth: How many relation hops to traverse (1-3 recommended for performance)
 62 |         timeframe: How far back to look. Supports natural language like "2 days ago", "last week"
 63 |         page: Page number of results to return (default: 1)
 64 |         page_size: Number of results to return per page (default: 10)
 65 |         max_related: Maximum number of related results to return (default: 10)
 66 |         context: Optional FastMCP context for performance caching.
 67 | 
 68 |     Returns:
 69 |         GraphContext containing:
 70 |             - primary_results: Content matching the memory:// URI
 71 |             - related_results: Connected content via relations
 72 |             - metadata: Context building details
 73 | 
 74 |     Examples:
 75 |         # Continue a specific discussion
 76 |         build_context("memory://specs/search", project="my-project")
 77 | 
 78 |         # Get deeper context about a component
 79 |         build_context("memory://components/memory-service", project="work-docs", depth=2)
 80 | 
 81 |         # Look at recent changes to a specification
 82 |         build_context("memory://specs/document-format", project="research", timeframe="today")
 83 | 
 84 |         # Research the history of a feature
 85 |         build_context("memory://features/knowledge-graph", project="dev-notes", timeframe="3 months ago")
 86 | 
 87 |     Raises:
 88 |         ToolError: If project doesn't exist or depth parameter is invalid
 89 |     """
 90 |     track_mcp_tool("build_context")
 91 |     logger.info(f"Building context from {url} in project {project}")
 92 | 
 93 |     # Convert string depth to integer if needed
 94 |     if isinstance(depth, str):
 95 |         try:
 96 |             depth = int(depth)
 97 |         except ValueError:
 98 |             from mcp.server.fastmcp.exceptions import ToolError
 99 | 
100 |             raise ToolError(f"Invalid depth parameter: '{depth}' is not a valid integer")
101 | 
102 |     # URL is already validated and normalized by MemoryUrl type annotation
103 | 
104 |     async with get_client() as client:
105 |         # Get the active project using the new stateless approach
106 |         active_project = await get_active_project(client, project, context)
107 | 
108 |         # Import here to avoid circular import
109 |         from basic_memory.mcp.clients import MemoryClient
110 | 
111 |         # Use typed MemoryClient for API calls
112 |         memory_client = MemoryClient(client, active_project.external_id)
113 |         return await memory_client.build_context(
114 |             memory_url_path(url),
115 |             depth=depth or 1,
116 |             timeframe=timeframe,
117 |             page=page,
118 |             page_size=page_size,
119 |             max_related=max_related,
120 |         )
121 | 
```
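
The docstring's Memory URL rules can be summarized as a small validator. This is an illustrative approximation only; the real validation lives in the `MemoryUrl` type in `basic_memory.schemas.memory`:

```python
import re

# Letters, numbers, hyphens, underscores, forward slashes, plus "*" for
# pattern matching ("folder/*"). Double slashes are rejected separately.
MEMORY_PATH_RE = re.compile(r"^[A-Za-z0-9/_*-]+$")

def normalize_memory_url(url: str) -> str:
    """Illustrative check only -- not the actual MemoryUrl implementation."""
    path = url.removeprefix("memory://")
    if "//" in path or not MEMORY_PATH_RE.match(path):
        raise ValueError(f"invalid memory:// path: {path!r}")
    return path

assert normalize_memory_url("memory://specs/search") == "specs/search"
assert normalize_memory_url("notes/*") == "notes/*"
```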

--------------------------------------------------------------------------------
/tests/cli/test_auth_cli_auth.py:
--------------------------------------------------------------------------------

```python
  1 | import json
  2 | import os
  3 | import stat
  4 | import time
  5 | from contextlib import asynccontextmanager
  6 | 
  7 | import httpx
  8 | import pytest
  9 | 
 10 | from basic_memory.cli.auth import CLIAuth
 11 | 
 12 | 
 13 | def _make_mock_transport(handler):
 14 |     return httpx.MockTransport(handler)
 15 | 
 16 | 
 17 | @pytest.mark.asyncio
 18 | async def test_cli_auth_request_device_authorization_uses_injected_http_client(
 19 |     tmp_path, monkeypatch
 20 | ):
 21 |     """Integration-style test: exercise the request flow with real httpx plumbing (MockTransport)."""
 22 |     monkeypatch.setenv("HOME", str(tmp_path))
 23 |     monkeypatch.setenv("BASIC_MEMORY_ENV", "test")
 24 | 
 25 |     async def handler(request: httpx.Request) -> httpx.Response:
 26 |         assert request.url.path.endswith("/oauth2/device_authorization")
 27 |         body = (await request.aread()).decode()
 28 |         # sanity: client_id should be in form data
 29 |         assert "client_id=test-client-id" in body
 30 |         return httpx.Response(
 31 |             200,
 32 |             json={
 33 |                 "device_code": "devcode",
 34 |                 "user_code": "usercode",
 35 |                 "verification_uri": "https://example.test/verify",
 36 |                 "interval": 1,
 37 |             },
 38 |         )
 39 | 
 40 |     transport = _make_mock_transport(handler)
 41 | 
 42 |     @asynccontextmanager
 43 |     async def client_factory():
 44 |         async with httpx.AsyncClient(transport=transport) as client:
 45 |             yield client
 46 | 
 47 |     auth = CLIAuth(
 48 |         client_id="test-client-id",
 49 |         authkit_domain="https://example.test",
 50 |         http_client_factory=client_factory,
 51 |     )
 52 | 
 53 |     result = await auth.request_device_authorization()
 54 |     assert result is not None
 55 |     assert result["device_code"] == "devcode"
 56 | 
 57 | 
 58 | def test_cli_auth_generate_pkce_pair_format(tmp_path, monkeypatch):
 59 |     monkeypatch.setenv("HOME", str(tmp_path))
 60 |     monkeypatch.setenv("BASIC_MEMORY_ENV", "test")
 61 | 
 62 |     auth = CLIAuth(client_id="cid", authkit_domain="https://example.test")
 63 |     verifier, challenge = auth.generate_pkce_pair()
 64 | 
 65 |     # PKCE verifier/challenge should be URL-safe base64 without padding.
 66 |     assert verifier
 67 |     assert challenge
 68 |     assert "=" not in verifier
 69 |     assert "=" not in challenge
 70 |     # code verifier length should be in recommended bounds (rough sanity).
 71 |     assert 43 <= len(verifier) <= 128
 72 | 
 73 | 
 74 | @pytest.mark.asyncio
 75 | async def test_cli_auth_save_load_and_get_valid_token_roundtrip(tmp_path, monkeypatch):
 76 |     monkeypatch.setenv("HOME", str(tmp_path))
 77 |     monkeypatch.setenv("BASIC_MEMORY_ENV", "test")
 78 | 
 79 |     auth = CLIAuth(client_id="cid", authkit_domain="https://example.test")
 80 | 
 81 |     tokens = {
 82 |         "access_token": "at",
 83 |         "refresh_token": "rt",
 84 |         "expires_in": 3600,
 85 |         "token_type": "Bearer",
 86 |     }
 87 |     auth.save_tokens(tokens)
 88 | 
 89 |     loaded = auth.load_tokens()
 90 |     assert loaded is not None
 91 |     assert loaded["access_token"] == "at"
 92 |     assert loaded["refresh_token"] == "rt"
 93 |     assert auth.is_token_valid(loaded) is True
 94 | 
 95 |     valid = await auth.get_valid_token()
 96 |     assert valid == "at"
 97 | 
 98 |     # Permissions should be 0600 on POSIX systems
 99 |     if os.name != "nt":
100 |         mode = auth.token_file.stat().st_mode
101 |         assert stat.S_IMODE(mode) == 0o600
102 | 
103 | 
104 | @pytest.mark.asyncio
105 | async def test_cli_auth_refresh_flow_uses_injected_http_client(tmp_path, monkeypatch):
106 |     monkeypatch.setenv("HOME", str(tmp_path))
107 |     monkeypatch.setenv("BASIC_MEMORY_ENV", "test")
108 | 
109 |     async def handler(request: httpx.Request) -> httpx.Response:
110 |         if request.url.path.endswith("/oauth2/token"):
111 |             body = (await request.aread()).decode()
112 |             assert "grant_type=refresh_token" in body
113 |             return httpx.Response(
114 |                 200,
115 |                 json={
116 |                     "access_token": "new-at",
117 |                     "refresh_token": "new-rt",
118 |                     "expires_in": 3600,
119 |                     "token_type": "Bearer",
120 |                 },
121 |             )
122 |         raise AssertionError(f"Unexpected request: {request.method} {request.url}")
123 | 
124 |     transport = _make_mock_transport(handler)
125 | 
126 |     @asynccontextmanager
127 |     async def client_factory():
128 |         async with httpx.AsyncClient(transport=transport) as client:
129 |             yield client
130 | 
131 |     auth = CLIAuth(
132 |         client_id="cid",
133 |         authkit_domain="https://example.test",
134 |         http_client_factory=client_factory,
135 |     )
136 | 
137 |     # Write an expired token file manually (so we control expires_at precisely).
138 |     auth.token_file.parent.mkdir(parents=True, exist_ok=True)
139 |     auth.token_file.write_text(
140 |         json.dumps(
141 |             {
142 |                 "access_token": "old-at",
143 |                 "refresh_token": "old-rt",
144 |                 "expires_at": int(time.time()) - 10,
145 |                 "token_type": "Bearer",
146 |             }
147 |         ),
148 |         encoding="utf-8",
149 |     )
150 | 
151 |     token = await auth.get_valid_token()
152 |     assert token == "new-at"
153 | 
```
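
`test_cli_auth_generate_pkce_pair_format` checks the standard RFC 7636 properties: URL-safe base64 without padding, verifier length between 43 and 128. A minimal sketch of a conforming generator (the actual `CLIAuth.generate_pkce_pair` may differ in detail):

```python
import base64
import hashlib
import secrets

def generate_pkce_pair() -> tuple[str, str]:
    # RFC 7636: the verifier is 43-128 chars of unpadded URL-safe base64;
    # the challenge is the base64url-encoded SHA-256 digest of the verifier.
    verifier = base64.urlsafe_b64encode(secrets.token_bytes(32)).rstrip(b"=").decode()
    digest = hashlib.sha256(verifier.encode()).digest()
    challenge = base64.urlsafe_b64encode(digest).rstrip(b"=").decode()
    return verifier, challenge

verifier, challenge = generate_pkce_pair()
assert "=" not in verifier and "=" not in challenge
assert 43 <= len(verifier) <= 128
```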

--------------------------------------------------------------------------------
/src/basic_memory/alembic/versions/5fe1ab1ccebe_add_projects_table.py:
--------------------------------------------------------------------------------

```python
  1 | """add projects table
  2 | 
  3 | Revision ID: 5fe1ab1ccebe
  4 | Revises: cc7172b46608
  5 | Create Date: 2025-05-14 09:05:18.214357
  6 | 
  7 | """
  8 | 
  9 | from typing import Sequence, Union
 10 | 
 11 | from alembic import op
 12 | import sqlalchemy as sa
 13 | 
 14 | 
 15 | # revision identifiers, used by Alembic.
 16 | revision: str = "5fe1ab1ccebe"
 17 | down_revision: Union[str, None] = "cc7172b46608"
 18 | branch_labels: Union[str, Sequence[str], None] = None
 19 | depends_on: Union[str, Sequence[str], None] = None
 20 | 
 21 | 
 22 | def upgrade() -> None:
 23 |     # ### commands auto generated by Alembic - please adjust! ###
 24 | 
 25 |     # SQLite FTS5 virtual table handling is SQLite-specific
 26 |     # For Postgres, search_index is a regular table managed by ORM
 27 |     connection = op.get_bind()
 28 |     is_sqlite = connection.dialect.name == "sqlite"
 29 | 
 30 |     op.create_table(
 31 |         "project",
 32 |         sa.Column("id", sa.Integer(), nullable=False),
 33 |         sa.Column("name", sa.String(), nullable=False),
 34 |         sa.Column("description", sa.Text(), nullable=True),
 35 |         sa.Column("permalink", sa.String(), nullable=False),
 36 |         sa.Column("path", sa.String(), nullable=False),
 37 |         sa.Column("is_active", sa.Boolean(), nullable=False),
 38 |         sa.Column("is_default", sa.Boolean(), nullable=True),
 39 |         sa.Column("created_at", sa.DateTime(), nullable=False),
 40 |         sa.Column("updated_at", sa.DateTime(), nullable=False),
 41 |         sa.PrimaryKeyConstraint("id"),
 42 |         sa.UniqueConstraint("is_default"),
 43 |         sa.UniqueConstraint("name"),
 44 |         sa.UniqueConstraint("permalink"),
 45 |         if_not_exists=True,
 46 |     )
 47 |     with op.batch_alter_table("project", schema=None) as batch_op:
 48 |         batch_op.create_index(
 49 |             "ix_project_created_at", ["created_at"], unique=False, if_not_exists=True
 50 |         )
 51 |         batch_op.create_index("ix_project_name", ["name"], unique=True, if_not_exists=True)
 52 |         batch_op.create_index("ix_project_path", ["path"], unique=False, if_not_exists=True)
 53 |         batch_op.create_index(
 54 |             "ix_project_permalink", ["permalink"], unique=True, if_not_exists=True
 55 |         )
 56 |         batch_op.create_index(
 57 |             "ix_project_updated_at", ["updated_at"], unique=False, if_not_exists=True
 58 |         )
 59 | 
 60 |     with op.batch_alter_table("entity", schema=None) as batch_op:
 61 |         batch_op.add_column(sa.Column("project_id", sa.Integer(), nullable=False))
 62 |         batch_op.drop_index(
 63 |             "uix_entity_permalink",
 64 |             sqlite_where=sa.text("content_type = 'text/markdown' AND permalink IS NOT NULL")
 65 |             if is_sqlite
 66 |             else None,
 67 |         )
 68 |         batch_op.drop_index("ix_entity_file_path")
 69 |         batch_op.create_index(batch_op.f("ix_entity_file_path"), ["file_path"], unique=False)
 70 |         batch_op.create_index("ix_entity_project_id", ["project_id"], unique=False)
 71 |         batch_op.create_index(
 72 |             "uix_entity_file_path_project", ["file_path", "project_id"], unique=True
 73 |         )
 74 |         batch_op.create_index(
 75 |             "uix_entity_permalink_project",
 76 |             ["permalink", "project_id"],
 77 |             unique=True,
 78 |             sqlite_where=sa.text("content_type = 'text/markdown' AND permalink IS NOT NULL")
 79 |             if is_sqlite
 80 |             else None,
 81 |         )
 82 |         batch_op.create_foreign_key("fk_entity_project_id", "project", ["project_id"], ["id"])
 83 | 
 84 |     # Drop the search_index table; it will be recreated.
 85 |     # Only drop for SQLite - Postgres uses a regular table managed by the ORM
 86 |     if is_sqlite:
 87 |         op.drop_table("search_index")
 88 | 
 89 |     # ### end Alembic commands ###
 90 | 
 91 | 
 92 | def downgrade() -> None:
 93 |     # ### commands auto generated by Alembic - please adjust! ###
 94 |     with op.batch_alter_table("entity", schema=None) as batch_op:
 95 |         batch_op.drop_constraint("fk_entity_project_id", type_="foreignkey")
 96 |         batch_op.drop_index(
 97 |             "uix_entity_permalink_project",
 98 |             sqlite_where=sa.text("content_type = 'text/markdown' AND permalink IS NOT NULL"),
 99 |         )
100 |         batch_op.drop_index("uix_entity_file_path_project")
101 |         batch_op.drop_index("ix_entity_project_id")
102 |         batch_op.drop_index(batch_op.f("ix_entity_file_path"))
103 |         batch_op.create_index("ix_entity_file_path", ["file_path"], unique=1)
104 |         batch_op.create_index(
105 |             "uix_entity_permalink",
106 |             ["permalink"],
107 |             unique=1,
108 |             sqlite_where=sa.text("content_type = 'text/markdown' AND permalink IS NOT NULL"),
109 |         )
110 |         batch_op.drop_column("project_id")
111 | 
112 |     with op.batch_alter_table("project", schema=None) as batch_op:
113 |         batch_op.drop_index("ix_project_updated_at")
114 |         batch_op.drop_index("ix_project_permalink")
115 |         batch_op.drop_index("ix_project_path")
116 |         batch_op.drop_index("ix_project_name")
117 |         batch_op.drop_index("ix_project_created_at")
118 | 
119 |     op.drop_table("project")
120 |     # ### end Alembic commands ###
121 | 
```
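
A hedged sketch of applying this revision programmatically with Alembic's command API; the `alembic.ini` path here is an assumption for illustration (the repo keeps its Alembic files under `src/basic_memory/alembic/`):

```python
from alembic import command
from alembic.config import Config

# Path is illustrative - point it at the project's real alembic.ini.
cfg = Config("src/basic_memory/alembic/alembic.ini")
command.upgrade(cfg, "5fe1ab1ccebe")    # runs upgrade() above (and any prior revisions)
command.downgrade(cfg, "cc7172b46608")  # reverts back to the parent revision
```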

--------------------------------------------------------------------------------
/src/basic_memory/schemas/v2/entity.py:
--------------------------------------------------------------------------------

```python
  1 | """V2 entity and project schemas with ID-first design."""
  2 | 
  3 | from datetime import datetime
  4 | from typing import Dict, List, Literal, Optional
  5 | 
  6 | from pydantic import BaseModel, Field, ConfigDict
  7 | 
  8 | from basic_memory.schemas.response import ObservationResponse, RelationResponse
  9 | 
 10 | 
 11 | class EntityResolveRequest(BaseModel):
 12 |     """Request to resolve a string identifier to an entity ID.
 13 | 
 14 |     Supports resolution of:
 15 |     - Permalinks (e.g., "specs/search")
 16 |     - Titles (e.g., "Search Specification")
 17 |     - File paths (e.g., "specs/search.md")
 18 |     """
 19 | 
 20 |     identifier: str = Field(
 21 |         ...,
 22 |         description="Entity identifier to resolve (permalink, title, or file path)",
 23 |         min_length=1,
 24 |         max_length=500,
 25 |     )
 26 | 
 27 | 
 28 | class EntityResolveResponse(BaseModel):
 29 |     """Response from identifier resolution.
 30 | 
 31 |     Returns the entity ID and associated metadata for the resolved entity.
 32 |     """
 33 | 
 34 |     external_id: str = Field(..., description="External UUID (primary API identifier)")
 35 |     entity_id: int = Field(..., description="Numeric entity ID (internal identifier)")
 36 |     permalink: Optional[str] = Field(None, description="Entity permalink")
 37 |     file_path: str = Field(..., description="Relative file path")
 38 |     title: str = Field(..., description="Entity title")
 39 |     resolution_method: Literal["external_id", "permalink", "title", "path", "search"] = Field(
 40 |         ..., description="How the identifier was resolved"
 41 |     )
 42 | 
 43 | 
 44 | class MoveEntityRequestV2(BaseModel):
 45 |     """V2 request schema for moving an entity to a new file location.
 46 | 
 47 |     In V2 API, the entity ID is provided in the URL path, so this request
 48 |     only needs the destination path.
 49 |     """
 50 | 
 51 |     destination_path: str = Field(
 52 |         ...,
 53 |         description="New file path for the entity (relative to project root)",
 54 |         min_length=1,
 55 |         max_length=500,
 56 |     )
 57 | 
 58 | 
 59 | class EntityResponseV2(BaseModel):
 60 |     """V2 entity response with external_id as the primary API identifier.
 61 | 
 62 |     This response format emphasizes the external_id (UUID) as the primary API identifier,
 63 |     with the numeric id maintained for internal reference.
 64 |     """
 65 | 
 66 |     # External UUID first - this is the primary API identifier in v2
 67 |     external_id: str = Field(..., description="External UUID (primary API identifier)")
 68 |     # Internal numeric ID
 69 |     id: int = Field(..., description="Numeric entity ID (internal identifier)")
 70 | 
 71 |     # Core entity fields
 72 |     title: str = Field(..., description="Entity title")
 73 |     entity_type: str = Field(..., description="Entity type")
 74 |     content_type: str = Field(default="text/markdown", description="Content MIME type")
 75 | 
 76 |     # Secondary identifiers (for compatibility and convenience)
 77 |     permalink: Optional[str] = Field(None, description="Entity permalink (may change)")
 78 |     file_path: str = Field(..., description="Relative file path (may change)")
 79 | 
 80 |     # Content and metadata
 81 |     content: Optional[str] = Field(None, description="Entity content")
 82 |     entity_metadata: Optional[Dict] = Field(None, description="Entity metadata")
 83 | 
 84 |     # Relationships
 85 |     observations: List[ObservationResponse] = Field(
 86 |         default_factory=list, description="Entity observations"
 87 |     )
 88 |     relations: List[RelationResponse] = Field(default_factory=list, description="Entity relations")
 89 | 
 90 |     # Timestamps
 91 |     created_at: datetime = Field(..., description="Creation timestamp")
 92 |     updated_at: datetime = Field(..., description="Last update timestamp")
 93 | 
 94 |     # V2-specific metadata
 95 |     api_version: Literal["v2"] = Field(
 96 |         default="v2", description="API version (always 'v2' for this response)"
 97 |     )
 98 | 
 99 |     model_config = ConfigDict(from_attributes=True)
100 | 
101 | 
102 | class ProjectResolveRequest(BaseModel):
103 |     """Request to resolve a project identifier to a project ID.
104 | 
105 |     Supports resolution of:
106 |     - Project names (e.g., "my-project")
107 |     - Permalinks (e.g., "my-project")
108 |     """
109 | 
110 |     identifier: str = Field(
111 |         ...,
112 |         description="Project identifier to resolve (name or permalink)",
113 |         min_length=1,
114 |         max_length=255,
115 |     )
116 | 
117 | 
118 | class ProjectResolveResponse(BaseModel):
119 |     """Response from project identifier resolution.
120 | 
121 |     Returns the project ID and associated metadata for the resolved project.
122 |     """
123 | 
124 |     external_id: str = Field(..., description="External UUID (primary API identifier)")
125 |     project_id: int = Field(..., description="Numeric project ID (internal identifier)")
126 |     name: str = Field(..., description="Project name")
127 |     permalink: str = Field(..., description="Project permalink")
128 |     path: str = Field(..., description="Project file path")
129 |     is_active: bool = Field(..., description="Whether the project is active")
130 |     is_default: bool = Field(..., description="Whether the project is the default")
131 |     resolution_method: Literal["external_id", "name", "permalink"] = Field(
132 |         ..., description="How the identifier was resolved"
133 |     )
134 | 
```
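
A quick illustrative round-trip through these schemas; field values below are made up, not from the repo:

```python
from basic_memory.schemas.v2.entity import (
    EntityResolveRequest,
    EntityResolveResponse,
)

req = EntityResolveRequest(identifier="specs/search")
resp = EntityResolveResponse(
    external_id="00000000-0000-0000-0000-000000000000",  # placeholder UUID
    entity_id=42,
    permalink="specs/search",
    file_path="specs/search.md",
    title="Search Specification",
    resolution_method="permalink",
)
# Pydantic v2 serialization; validation enforces the min/max lengths and
# the Literal resolution_method values declared above.
print(req.identifier, resp.model_dump_json())
```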

--------------------------------------------------------------------------------
/src/basic_memory/api/v2/routers/memory_router.py:
--------------------------------------------------------------------------------

```python
  1 | """V2 routes for memory:// URI operations.
  2 | 
  3 | This router uses external_id UUIDs for stable, API-friendly routing.
  4 | V1 uses string-based project names which are less efficient and less stable.
  5 | """
  6 | 
  7 | from typing import Annotated, Optional
  8 | 
  9 | from fastapi import APIRouter, Query, Path
 10 | from loguru import logger
 11 | 
 12 | from basic_memory.deps import ContextServiceV2ExternalDep, EntityRepositoryV2ExternalDep
 13 | from basic_memory.schemas.base import TimeFrame, parse_timeframe
 14 | from basic_memory.schemas.memory import (
 15 |     GraphContext,
 16 |     normalize_memory_url,
 17 | )
 18 | from basic_memory.schemas.search import SearchItemType
 19 | from basic_memory.api.routers.utils import to_graph_context
 20 | 
 21 | # Note: No prefix here - it's added during registration as /v2/{project_id}/memory
 22 | router = APIRouter(tags=["memory"])
 23 | 
 24 | 
 25 | @router.get("/memory/recent", response_model=GraphContext)
 26 | async def recent(
 27 |     context_service: ContextServiceV2ExternalDep,
 28 |     entity_repository: EntityRepositoryV2ExternalDep,
 29 |     project_id: str = Path(..., description="Project external UUID"),
 30 |     type: Annotated[list[SearchItemType] | None, Query()] = None,
 31 |     depth: int = 1,
 32 |     timeframe: TimeFrame = "7d",
 33 |     page: int = 1,
 34 |     page_size: int = 10,
 35 |     max_related: int = 10,
 36 | ) -> GraphContext:
 37 |     """Get recent activity context for a project.
 38 | 
 39 |     Args:
 40 |         project_id: Project external UUID from URL path
 41 |         context_service: Context service scoped to project
 42 |         entity_repository: Entity repository scoped to project
 43 |         type: Types of items to include (entities, relations, observations)
 44 |         depth: How many levels of related entities to include
 45 |         timeframe: Time window for recent activity (e.g., "7d", "1 week")
 46 |         page: Page number for pagination
 47 |         page_size: Number of items per page
 48 |         max_related: Maximum related entities to include per item
 49 | 
 50 |     Returns:
 51 |         GraphContext with recent activity and related entities
 52 |     """
 53 |     # return all types by default
 54 |     types = (
 55 |         [SearchItemType.ENTITY, SearchItemType.RELATION, SearchItemType.OBSERVATION]
 56 |         if not type
 57 |         else type
 58 |     )
 59 | 
 60 |     logger.debug(
 61 |         f"V2 Getting recent context for project {project_id}: `{types}` depth: `{depth}` timeframe: `{timeframe}` page: `{page}` page_size: `{page_size}` max_related: `{max_related}`"
 62 |     )
 63 |     # Parse timeframe
 64 |     since = parse_timeframe(timeframe)
 65 |     limit = page_size
 66 |     offset = (page - 1) * page_size
 67 | 
 68 |     # Build context
 69 |     context = await context_service.build_context(
 70 |         types=types, depth=depth, since=since, limit=limit, offset=offset, max_related=max_related
 71 |     )
 72 |     recent_context = await to_graph_context(
 73 |         context, entity_repository=entity_repository, page=page, page_size=page_size
 74 |     )
 75 |     logger.debug(f"V2 Recent context: {recent_context.model_dump_json()}")
 76 |     return recent_context
 77 | 
 78 | 
 79 | # get_memory_context must be declared last so more specific routes (e.g. /memory/recent) match first
 80 | 
 81 | 
 82 | @router.get("/memory/{uri:path}", response_model=GraphContext)
 83 | async def get_memory_context(
 84 |     context_service: ContextServiceV2ExternalDep,
 85 |     entity_repository: EntityRepositoryV2ExternalDep,
 86 |     uri: str,
 87 |     project_id: str = Path(..., description="Project external UUID"),
 88 |     depth: int = 1,
 89 |     timeframe: Optional[TimeFrame] = None,
 90 |     page: int = 1,
 91 |     page_size: int = 10,
 92 |     max_related: int = 10,
 93 | ) -> GraphContext:
 94 |     """Get rich context from memory:// URI.
 95 | 
 96 |     V2 supports both legacy path-based URIs and new ID-based URIs:
 97 |     - Legacy: memory://path/to/note
 98 |     - ID-based: memory://id/123 or memory://123
 99 | 
100 |     Args:
101 |         project_id: Project external UUID from URL path
102 |         context_service: Context service scoped to project
103 |         entity_repository: Entity repository scoped to project
104 |         uri: Memory URI path (e.g., "id/123", "123", or "path/to/note")
105 |         depth: How many levels of related entities to include
106 |         timeframe: Optional time window for filtering related content
107 |         page: Page number for pagination
108 |         page_size: Number of items per page
109 |         max_related: Maximum related entities to include
110 | 
111 |     Returns:
112 |         GraphContext with the entity and its related context
113 |     """
114 |     logger.debug(
115 |         f"V2 Getting context for project {project_id}, URI: `{uri}` depth: `{depth}` timeframe: `{timeframe}` page: `{page}` page_size: `{page_size}` max_related: `{max_related}`"
116 |     )
117 |     memory_url = normalize_memory_url(uri)
118 | 
119 |     # Parse timeframe
120 |     since = parse_timeframe(timeframe) if timeframe else None
121 |     limit = page_size
122 |     offset = (page - 1) * page_size
123 | 
124 |     # Build context
125 |     context = await context_service.build_context(
126 |         memory_url, depth=depth, since=since, limit=limit, offset=offset, max_related=max_related
127 |     )
128 |     return await to_graph_context(
129 |         context, entity_repository=entity_repository, page=page, page_size=page_size
130 |     )
131 | 
```
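
A hedged client-side sketch of calling these routes; the base URL and project UUID are placeholders, and the `/v2/{project_id}` mount point follows the registration note at the top of the module:

```python
import asyncio

import httpx


async def main() -> None:
    project_id = "00000000-0000-0000-0000-000000000000"  # placeholder UUID
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        # Recent activity for the project
        recent = await client.get(
            f"/v2/{project_id}/memory/recent", params={"timeframe": "7d", "depth": 1}
        )
        # Context for a specific memory:// URI (path-based form)
        ctx = await client.get(f"/v2/{project_id}/memory/specs/search", params={"depth": 2})
        print(recent.status_code, ctx.status_code)


asyncio.run(main())
```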

--------------------------------------------------------------------------------
/src/basic_memory/cli/commands/cloud/api_client.py:
--------------------------------------------------------------------------------

```python
  1 | """Cloud API client utilities."""
  2 | 
  3 | from collections.abc import AsyncIterator
  4 | from contextlib import asynccontextmanager
  5 | from typing import AsyncContextManager, Callable, Optional
  6 | 
  7 | 
  8 | import httpx
  9 | import typer
 10 | from rich.console import Console
 11 | 
 12 | from basic_memory.cli.auth import CLIAuth
 13 | from basic_memory.config import ConfigManager
 14 | 
 15 | console = Console()
 16 | 
 17 | HttpClientFactory = Callable[[], AsyncContextManager[httpx.AsyncClient]]
 18 | 
 19 | 
 20 | class CloudAPIError(Exception):
 21 |     """Exception raised for cloud API errors."""
 22 | 
 23 |     def __init__(
 24 |         self, message: str, status_code: Optional[int] = None, detail: Optional[dict] = None
 25 |     ):
 26 |         super().__init__(message)
 27 |         self.status_code = status_code
 28 |         self.detail = detail or {}
 29 | 
 30 | 
 31 | class SubscriptionRequiredError(CloudAPIError):
 32 |     """Exception raised when user needs an active subscription."""
 33 | 
 34 |     def __init__(self, message: str, subscribe_url: str):
 35 |         super().__init__(message, status_code=403, detail={"error": "subscription_required"})
 36 |         self.subscribe_url = subscribe_url
 37 | 
 38 | 
 39 | def get_cloud_config() -> tuple[str, str, str]:
 40 |     """Get cloud OAuth configuration from config."""
 41 |     config_manager = ConfigManager()
 42 |     config = config_manager.config
 43 |     return config.cloud_client_id, config.cloud_domain, config.cloud_host
 44 | 
 45 | 
 46 | async def get_authenticated_headers(auth: CLIAuth | None = None) -> dict[str, str]:
 47 |     """
 48 |     Get authentication headers with JWT token.
 49 |     Handles JWT refresh if needed.
 50 |     """
 51 |     client_id, domain, _ = get_cloud_config()
 52 |     auth_obj = auth or CLIAuth(client_id=client_id, authkit_domain=domain)
 53 |     token = await auth_obj.get_valid_token()
 54 |     if not token:
 55 |         console.print("[red]Not authenticated. Please run 'basic-memory cloud login' first.[/red]")
 56 |         raise typer.Exit(1)
 57 | 
 58 |     return {"Authorization": f"Bearer {token}"}
 59 | 
 60 | 
 61 | @asynccontextmanager
 62 | async def _default_http_client(timeout: float) -> AsyncIterator[httpx.AsyncClient]:
 63 |     async with httpx.AsyncClient(timeout=timeout) as client:
 64 |         yield client
 65 | 
 66 | 
 67 | async def make_api_request(
 68 |     method: str,
 69 |     url: str,
 70 |     headers: Optional[dict] = None,
 71 |     json_data: Optional[dict] = None,
 72 |     timeout: float = 30.0,
 73 |     *,
 74 |     auth: CLIAuth | None = None,
 75 |     http_client_factory: HttpClientFactory | None = None,
 76 | ) -> httpx.Response:
 77 |     """Make an API request to the cloud service."""
 78 |     headers = headers or {}
 79 |     auth_headers = await get_authenticated_headers(auth=auth)
 80 |     headers.update(auth_headers)
 81 |     # Add debug headers to help with compression issues
 82 |     headers.setdefault("Accept-Encoding", "identity")  # Disable compression for debugging
 83 | 
 84 |     client_factory = http_client_factory or (lambda: _default_http_client(timeout))
 85 |     async with client_factory() as client:
 86 |         try:
 87 |             response = await client.request(method=method, url=url, headers=headers, json=json_data)
 88 |             response.raise_for_status()
 89 |             return response
 90 |         except httpx.HTTPError as e:
 91 |             # Check if this is a response error with response details
 92 |             if hasattr(e, "response") and e.response is not None:  # pyright: ignore [reportAttributeAccessIssue]
 93 |                 response = e.response  # type: ignore
 94 | 
 95 |                 # Try to parse error detail from response
 96 |                 error_detail = None
 97 |                 try:
 98 |                     error_detail = response.json()
 99 |                 except Exception:
100 |                     # If JSON parsing fails, we'll handle it as a generic error
101 |                     pass
102 | 
103 |                 # Check for subscription_required error (403)
104 |                 if response.status_code == 403 and isinstance(error_detail, dict):
105 |                     # Handle both FastAPI HTTPException format (nested under "detail")
106 |                     # and direct format
107 |                     detail_obj = error_detail.get("detail", error_detail)
108 |                     if (
109 |                         isinstance(detail_obj, dict)
110 |                         and detail_obj.get("error") == "subscription_required"
111 |                     ):
112 |                         message = detail_obj.get("message", "Active subscription required")
113 |                         subscribe_url = detail_obj.get(
114 |                             "subscribe_url", "https://basicmemory.com/subscribe"
115 |                         )
116 |                         raise SubscriptionRequiredError(
117 |                             message=message, subscribe_url=subscribe_url
118 |                         ) from e
119 | 
120 |                 # Raise generic CloudAPIError with status code and detail
121 |                 raise CloudAPIError(
122 |                     f"API request failed: {e}",
123 |                     status_code=response.status_code,
124 |                     detail=error_detail if isinstance(error_detail, dict) else {},
125 |                 ) from e
126 | 
127 |             raise CloudAPIError(f"API request failed: {e}") from e
128 | 
```
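
A sketch of the intended call pattern, including the subscription error branch; the URL is a placeholder and this assumes a logged-in `CLIAuth` token cache is already present:

```python
import asyncio

from basic_memory.cli.commands.cloud.api_client import (
    CloudAPIError,
    SubscriptionRequiredError,
    make_api_request,
)


async def main() -> None:
    try:
        response = await make_api_request("GET", "https://cloud.example.test/api/projects")
        print(response.json())
    except SubscriptionRequiredError as e:
        # 403 with {"error": "subscription_required"} surfaces a subscribe URL
        print(f"Subscribe at {e.subscribe_url}")
    except CloudAPIError as e:
        print(f"Request failed ({e.status_code}): {e.detail}")


asyncio.run(main())
```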

--------------------------------------------------------------------------------
/src/basic_memory/repository/project_repository.py:
--------------------------------------------------------------------------------

```python
  1 | """Repository for managing projects in Basic Memory."""
  2 | 
  3 | from pathlib import Path
  4 | from typing import Optional, Sequence, Union
  5 | 
  6 | 
  7 | from sqlalchemy import text
  8 | from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
  9 | 
 10 | from basic_memory import db
 11 | from basic_memory.models.project import Project
 12 | from basic_memory.repository.repository import Repository
 13 | 
 14 | 
 15 | class ProjectRepository(Repository[Project]):
 16 |     """Repository for Project model.
 17 | 
 18 |     Projects represent collections of knowledge entities grouped together.
 19 |     Each entity, observation, and relation belongs to a specific project.
 20 |     """
 21 | 
 22 |     def __init__(self, session_maker: async_sessionmaker[AsyncSession]):
 23 |         """Initialize with session maker."""
 24 |         super().__init__(session_maker, Project)
 25 | 
 26 |     async def get_by_name(self, name: str) -> Optional[Project]:
 27 |         """Get project by name (exact match).
 28 | 
 29 |         Args:
 30 |             name: Unique name of the project
 31 |         """
 32 |         query = self.select().where(Project.name == name)
 33 |         return await self.find_one(query)
 34 | 
 35 |     async def get_by_name_case_insensitive(self, name: str) -> Optional[Project]:
 36 |         """Get project by name (case-insensitive match).
 37 | 
 38 |         Args:
 39 |             name: Project name (case-insensitive)
 40 | 
 41 |         Returns:
 42 |             Project if found, None otherwise
 43 |         """
 44 |         query = self.select().where(Project.name.ilike(name))
 45 |         return await self.find_one(query)
 46 | 
 47 |     async def get_by_permalink(self, permalink: str) -> Optional[Project]:
 48 |         """Get project by permalink.
 49 | 
 50 |         Args:
 51 |             permalink: URL-friendly identifier for the project
 52 |         """
 53 |         query = self.select().where(Project.permalink == permalink)
 54 |         return await self.find_one(query)
 55 | 
 56 |     async def get_by_path(self, path: Union[Path, str]) -> Optional[Project]:
 57 |         """Get project by filesystem path.
 58 | 
 59 |         Args:
 60 |             path: Path to the project directory (will be converted to string internally)
 61 |         """
 62 |         query = self.select().where(Project.path == Path(path).as_posix())
 63 |         return await self.find_one(query)
 64 | 
 65 |     async def get_by_id(self, project_id: int) -> Optional[Project]:
 66 |         """Get project by numeric ID.
 67 | 
 68 |         Args:
 69 |             project_id: Numeric project ID
 70 | 
 71 |         Returns:
 72 |             Project if found, None otherwise
 73 |         """
 74 |         async with db.scoped_session(self.session_maker) as session:
 75 |             return await self.select_by_id(session, project_id)
 76 | 
 77 |     async def get_by_external_id(self, external_id: str) -> Optional[Project]:
 78 |         """Get project by external UUID.
 79 | 
 80 |         Args:
 81 |             external_id: External UUID identifier
 82 | 
 83 |         Returns:
 84 |             Project if found, None otherwise
 85 |         """
 86 |         query = self.select().where(Project.external_id == external_id)
 87 |         return await self.find_one(query)
 88 | 
 89 |     async def get_default_project(self) -> Optional[Project]:
 90 |         """Get the default project (the one marked as is_default=True)."""
 91 |         query = self.select().where(Project.is_default.is_not(None))
 92 |         return await self.find_one(query)
 93 | 
 94 |     async def get_active_projects(self) -> Sequence[Project]:
 95 |         """Get all active projects."""
 96 |         query = self.select().where(Project.is_active == True)  # noqa: E712
 97 |         result = await self.execute_query(query)
 98 |         return list(result.scalars().all())
 99 | 
100 |     async def set_as_default(self, project_id: int) -> Optional[Project]:
101 |         """Set a project as the default and unset previous default.
102 | 
103 |         Args:
104 |             project_id: ID of the project to set as default
105 | 
106 |         Returns:
107 |             The updated project if found, None otherwise
108 |         """
109 |         async with db.scoped_session(self.session_maker) as session:
110 |             # First, clear the default flag for all projects using direct SQL
111 |             await session.execute(
112 |                 text("UPDATE project SET is_default = NULL WHERE is_default IS NOT NULL")
113 |             )
114 |             await session.flush()
115 | 
116 |             # Set the new default project
117 |             target_project = await self.select_by_id(session, project_id)
118 |             if target_project:
119 |                 target_project.is_default = True
120 |                 await session.flush()
121 |                 return target_project
122 |             return None  # pragma: no cover
123 | 
124 |     async def update_path(self, project_id: int, new_path: str) -> Optional[Project]:
125 |         """Update project path.
126 | 
127 |         Args:
128 |             project_id: ID of the project to update
129 |             new_path: New filesystem path for the project
130 | 
131 |         Returns:
132 |             The updated project if found, None otherwise
133 |         """
134 |         async with db.scoped_session(self.session_maker) as session:
135 |             project = await self.select_by_id(session, project_id)
136 |             if project:
137 |                 project.path = new_path
138 |                 await session.flush()
139 |                 return project
140 |             return None
141 | 
```
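
A minimal usage sketch, assuming an `async_sessionmaker` already wired to the application's database (the engine/session setup itself is elided):

```python
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker

from basic_memory.repository.project_repository import ProjectRepository


async def promote_to_default(
    session_maker: async_sessionmaker[AsyncSession], name: str
) -> None:
    repo = ProjectRepository(session_maker)
    project = await repo.get_by_name(name)
    if project is not None:
        # Clears the previous default (is_default -> NULL) and sets this one
        await repo.set_as_default(project.id)
```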

--------------------------------------------------------------------------------
/src/basic_memory/sync/coordinator.py:
--------------------------------------------------------------------------------

```python
  1 | """SyncCoordinator - centralized sync/watch lifecycle management.
  2 | 
  3 | This module provides a single coordinator that manages the lifecycle of
  4 | file synchronization and watch services across all entry points (API, MCP, CLI).
  5 | 
  6 | The coordinator handles:
  7 | - Starting/stopping watch service
  8 | - Scheduling background sync
  9 | - Reporting status
 10 | - Clean shutdown behavior
 11 | """
 12 | 
 13 | import asyncio
 14 | from dataclasses import dataclass, field
 15 | from enum import Enum, auto
 16 | from typing import Optional
 17 | 
 18 | from loguru import logger
 19 | 
 20 | from basic_memory.config import BasicMemoryConfig
 21 | 
 22 | 
 23 | class SyncStatus(Enum):
 24 |     """Status of the sync coordinator."""
 25 | 
 26 |     NOT_STARTED = auto()
 27 |     STARTING = auto()
 28 |     RUNNING = auto()
 29 |     STOPPING = auto()
 30 |     STOPPED = auto()
 31 |     ERROR = auto()
 32 | 
 33 | 
 34 | @dataclass
 35 | class SyncCoordinator:
 36 |     """Centralized coordinator for sync/watch lifecycle.
 37 | 
 38 |     Manages the lifecycle of file synchronization services, providing:
 39 |     - Unified start/stop interface
 40 |     - Status tracking
 41 |     - Clean shutdown with proper task cancellation
 42 | 
 43 |     Args:
 44 |         config: BasicMemoryConfig with sync settings
 45 |         should_sync: Whether sync should be enabled (from container decision)
 46 |         skip_reason: Human-readable reason if sync is skipped
 47 | 
 48 |     Usage:
 49 |         coordinator = SyncCoordinator(config=config, should_sync=True)
 50 |         await coordinator.start()
 51 |         # ... application runs ...
 52 |         await coordinator.stop()
 53 |     """
 54 | 
 55 |     config: BasicMemoryConfig
 56 |     should_sync: bool = True
 57 |     skip_reason: Optional[str] = None
 58 | 
 59 |     # Internal state (not constructor args)
 60 |     _status: SyncStatus = field(default=SyncStatus.NOT_STARTED, init=False)
 61 |     _sync_task: Optional[asyncio.Task] = field(default=None, init=False)
 62 | 
 63 |     @property
 64 |     def status(self) -> SyncStatus:
 65 |         """Current status of the coordinator."""
 66 |         return self._status
 67 | 
 68 |     @property
 69 |     def is_running(self) -> bool:
 70 |         """Whether sync is currently running."""
 71 |         return self._status == SyncStatus.RUNNING
 72 | 
 73 |     async def start(self) -> None:
 74 |         """Start the sync/watch service if enabled.
 75 | 
 76 |         This is a non-blocking call that starts the sync task in the background.
 77 |         Use stop() to cleanly shut down.
 78 |         """
 79 |         if not self.should_sync:
 80 |             if self.skip_reason:
 81 |                 logger.info(f"{self.skip_reason} - skipping local file sync")
 82 |             self._status = SyncStatus.STOPPED
 83 |             return
 84 | 
 85 |         if self._status in (SyncStatus.RUNNING, SyncStatus.STARTING):
 86 |             logger.warning("Sync coordinator already running or starting")
 87 |             return
 88 | 
 89 |         self._status = SyncStatus.STARTING
 90 |         logger.info("Starting file sync in background")
 91 | 
 92 |         try:
 93 |             # Deferred import to avoid circular dependency
 94 |             from basic_memory.services.initialization import initialize_file_sync
 95 | 
 96 |             async def _file_sync_runner() -> None:  # pragma: no cover
 97 |                 """Run the file sync service."""
 98 |                 try:
 99 |                     await initialize_file_sync(self.config)
100 |                 except asyncio.CancelledError:
101 |                     logger.debug("File sync cancelled")
102 |                     raise
103 |                 except Exception as e:
104 |                     logger.error(f"Error in file sync: {e}")
105 |                     self._status = SyncStatus.ERROR
106 |                     raise
107 | 
108 |             self._sync_task = asyncio.create_task(_file_sync_runner())
109 |             self._status = SyncStatus.RUNNING
110 |             logger.info("Sync coordinator started successfully")
111 | 
112 |         except Exception as e:  # pragma: no cover
113 |             logger.error(f"Failed to start sync coordinator: {e}")
114 |             self._status = SyncStatus.ERROR
115 |             raise
116 | 
117 |     async def stop(self) -> None:
118 |         """Stop the sync/watch service cleanly.
119 | 
120 |         Cancels the background task and waits for it to complete.
121 |         Safe to call even if not running.
122 |         """
123 |         if self._status in (SyncStatus.NOT_STARTED, SyncStatus.STOPPED):
124 |             return
125 | 
126 |         if self._sync_task is None:  # pragma: no cover
127 |             self._status = SyncStatus.STOPPED
128 |             return
129 | 
130 |         self._status = SyncStatus.STOPPING
131 |         logger.info("Stopping sync coordinator...")
132 | 
133 |         self._sync_task.cancel()
134 |         try:
135 |             await self._sync_task
136 |         except asyncio.CancelledError:
137 |             logger.info("File sync task cancelled successfully")
138 | 
139 |         self._sync_task = None
140 |         self._status = SyncStatus.STOPPED
141 |         logger.info("Sync coordinator stopped")
142 | 
143 |     def get_status_info(self) -> dict:
144 |         """Get status information for reporting.
145 | 
146 |         Returns:
147 |             Dictionary with status details for diagnostics
148 |         """
149 |         return {
150 |             "status": self._status.name,
151 |             "should_sync": self.should_sync,
152 |             "skip_reason": self.skip_reason,
153 |             "has_task": self._sync_task is not None,
154 |         }
155 | 
156 | 
157 | __all__ = [
158 |     "SyncCoordinator",
159 |     "SyncStatus",
160 | ]
161 | 
```
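
One plausible wiring of the coordinator into a FastAPI lifespan, following the Usage note in the docstring. `load_config()` is a hypothetical stand-in for however the app actually builds its `BasicMemoryConfig`:

```python
from contextlib import asynccontextmanager

from fastapi import FastAPI

from basic_memory.config import BasicMemoryConfig
from basic_memory.sync.coordinator import SyncCoordinator


def load_config() -> BasicMemoryConfig:
    # Hypothetical helper; assumes the config is constructible from defaults.
    return BasicMemoryConfig()


@asynccontextmanager
async def lifespan(app: FastAPI):
    coordinator = SyncCoordinator(config=load_config(), should_sync=True)
    await coordinator.start()
    try:
        yield
    finally:
        await coordinator.stop()


app = FastAPI(lifespan=lifespan)
```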

--------------------------------------------------------------------------------
/tests/api/test_prompt_router.py:
--------------------------------------------------------------------------------

```python
  1 | """Tests for the prompt router endpoints."""
  2 | 
  3 | import pytest
  4 | import pytest_asyncio
  5 | from httpx import AsyncClient
  6 | 
  7 | from basic_memory.services.context_service import ContextService
  8 | 
  9 | 
 10 | @pytest_asyncio.fixture
 11 | async def context_service(entity_repository, search_service, observation_repository):
 12 |     """Create a real context service for testing."""
 13 |     return ContextService(entity_repository, search_service, observation_repository)
 14 | 
 15 | 
 16 | @pytest.mark.asyncio
 17 | async def test_continue_conversation_endpoint(
 18 |     client: AsyncClient,
 19 |     entity_service,
 20 |     search_service,
 21 |     context_service,
 22 |     entity_repository,
 23 |     test_graph,
 24 |     project_url,
 25 | ):
 26 |     """Test the continue_conversation endpoint with real services."""
 27 |     # Create request data
 28 |     request_data = {
 29 |         "topic": "Root",  # This should match our test entity in test_graph
 30 |         "timeframe": "7d",
 31 |         "depth": 1,
 32 |         "related_items_limit": 2,
 33 |     }
 34 | 
 35 |     # Call the endpoint
 36 |     response = await client.post(f"{project_url}/prompt/continue-conversation", json=request_data)
 37 | 
 38 |     # Verify response
 39 |     assert response.status_code == 200
 40 |     result = response.json()
 41 |     assert "prompt" in result
 42 |     assert "context" in result
 43 | 
 44 |     # Check content of context
 45 |     context = result["context"]
 46 |     assert context["topic"] == "Root"
 47 |     assert context["timeframe"] == "7d"
 48 |     assert context["has_results"] is True
 49 |     assert len(context["hierarchical_results"]) > 0
 50 | 
 51 |     # Check content of prompt
 52 |     prompt = result["prompt"]
 53 |     assert "Continuing conversation on: Root" in prompt
 54 |     assert "memory retrieval session" in prompt
 55 | 
 56 |     # Test without topic - should use recent activity
 57 |     request_data = {"timeframe": "1d", "depth": 1, "related_items_limit": 2}
 58 | 
 59 |     response = await client.post(f"{project_url}/prompt/continue-conversation", json=request_data)
 60 | 
 61 |     assert response.status_code == 200
 62 |     result = response.json()
 63 |     assert "Recent Activity" in result["context"]["topic"]
 64 | 
 65 | 
 66 | @pytest.mark.asyncio
 67 | async def test_search_prompt_endpoint(
 68 |     client: AsyncClient, entity_service, search_service, test_graph, project_url
 69 | ):
 70 |     """Test the search_prompt endpoint with real services."""
 71 |     # Create request data
 72 |     request_data = {
 73 |         "query": "Root",  # This should match our test entity
 74 |         "timeframe": "7d",
 75 |     }
 76 | 
 77 |     # Call the endpoint
 78 |     response = await client.post(f"{project_url}/prompt/search", json=request_data)
 79 | 
 80 |     # Verify response
 81 |     assert response.status_code == 200
 82 |     result = response.json()
 83 |     assert "prompt" in result
 84 |     assert "context" in result
 85 | 
 86 |     # Check content of context
 87 |     context = result["context"]
 88 |     assert context["query"] == "Root"
 89 |     assert context["timeframe"] == "7d"
 90 |     assert context["has_results"] is True
 91 |     assert len(context["results"]) > 0
 92 | 
 93 |     # Check content of prompt
 94 |     prompt = result["prompt"]
 95 |     assert 'Search Results for: "Root"' in prompt
 96 |     assert "This is a memory search session" in prompt
 97 | 
 98 | 
 99 | @pytest.mark.asyncio
100 | async def test_search_prompt_no_results(
101 |     client: AsyncClient, entity_service, search_service, project_url
102 | ):
103 |     """Test the search_prompt endpoint with a query that returns no results."""
104 |     # Create request data with a query that shouldn't match anything
105 |     request_data = {"query": "NonExistentQuery12345", "timeframe": "7d"}
106 | 
107 |     # Call the endpoint
108 |     response = await client.post(f"{project_url}/prompt/search", json=request_data)
109 | 
110 |     # Verify response
111 |     assert response.status_code == 200
112 |     result = response.json()
113 | 
114 |     # Check content of context
115 |     context = result["context"]
116 |     assert context["query"] == "NonExistentQuery12345"
117 |     assert context["has_results"] is False
118 |     assert len(context["results"]) == 0
119 | 
120 |     # Check content of prompt
121 |     prompt = result["prompt"]
122 |     assert 'Search Results for: "NonExistentQuery12345"' in prompt
123 |     assert "I couldn't find any results for this query" in prompt
124 |     assert "Opportunity to Capture Knowledge" in prompt
125 | 
126 | 
127 | @pytest.mark.asyncio
128 | async def test_error_handling(client: AsyncClient, monkeypatch, project_url):
129 |     """Test error handling in the endpoints by breaking the template loader."""
130 | 
131 |     # Patch the template loader to raise an exception
132 |     def mock_render(*args, **kwargs):
133 |         raise Exception("Template error")
134 | 
135 |     # Apply the patch
136 |     monkeypatch.setattr("basic_memory.api.template_loader.TemplateLoader.render", mock_render)
137 | 
138 |     # Test continue_conversation error handling
139 |     response = await client.post(
140 |         f"{project_url}/prompt/continue-conversation",
141 |         json={"topic": "test error", "timeframe": "7d"},
142 |     )
143 | 
144 |     assert response.status_code == 500
145 |     assert "detail" in response.json()
146 |     assert "Template error" in response.json()["detail"]
147 | 
148 |     # Test search_prompt error handling
149 |     response = await client.post(
150 |         f"{project_url}/prompt/search", json={"query": "test error", "timeframe": "7d"}
151 |     )
152 | 
153 |     assert response.status_code == 500
154 |     assert "detail" in response.json()
155 |     assert "Template error" in response.json()["detail"]
156 | 
```

--------------------------------------------------------------------------------
/tests/cli/test_import_memory_json.py:
--------------------------------------------------------------------------------

```python
  1 | """Tests for import_memory_json command."""
  2 | 
  3 | import json
  4 | 
  5 | import pytest
  6 | from typer.testing import CliRunner
  7 | 
  8 | from basic_memory.cli.app import import_app
  9 | from basic_memory.cli.commands import import_memory_json  # noqa
 10 | from basic_memory.markdown import MarkdownProcessor
 11 | from basic_memory.services.file_service import FileService
 12 | 
 13 | # Set up CLI runner
 14 | runner = CliRunner()
 15 | 
 16 | 
 17 | @pytest.fixture
 18 | def sample_entities():
 19 |     """Sample entities for testing."""
 20 |     return [
 21 |         {
 22 |             "type": "entity",
 23 |             "name": "test_entity",
 24 |             "entityType": "test",
 25 |             "observations": ["Test observation 1", "Test observation 2"],
 26 |         },
 27 |         {
 28 |             "type": "relation",
 29 |             "from": "test_entity",
 30 |             "to": "related_entity",
 31 |             "relationType": "test_relation",
 32 |         },
 33 |     ]
 34 | 
 35 | 
 36 | @pytest.fixture
 37 | def sample_json_file(tmp_path, sample_entities):
 38 |     """Create a sample memory.json file."""
 39 |     json_file = tmp_path / "memory.json"
 40 |     with open(json_file, "w", encoding="utf-8") as f:
 41 |         for entity in sample_entities:
 42 |             f.write(json.dumps(entity) + "\n")
 43 |     return json_file
 44 | 
 45 | 
 46 | @pytest.mark.asyncio
 47 | async def test_get_importer_dependencies(tmp_path, monkeypatch):
 48 |     """Test getting importer dependencies (MarkdownProcessor and FileService)."""
 49 |     monkeypatch.setenv("HOME", str(tmp_path))
 50 |     processor, file_service = await import_memory_json.get_importer_dependencies()
 51 |     assert isinstance(processor, MarkdownProcessor)
 52 |     assert isinstance(file_service, FileService)
 53 | 
 54 | 
 55 | def test_import_json_command_file_not_found(tmp_path):
 56 |     """Test error handling for nonexistent file."""
 57 |     nonexistent = tmp_path / "nonexistent.json"
 58 |     result = runner.invoke(import_app, ["memory-json", str(nonexistent)])
 59 |     assert result.exit_code == 1
 60 |     assert "File not found" in result.output
 61 | 
 62 | 
 63 | def test_import_json_command_success(tmp_path, sample_json_file, monkeypatch):
 64 |     """Test successful JSON import via command."""
 65 |     # Set up test environment
 66 |     monkeypatch.setenv("HOME", str(tmp_path))
 67 | 
 68 |     # Run import
 69 |     result = runner.invoke(import_app, ["memory-json", str(sample_json_file)])
 70 |     assert result.exit_code == 0
 71 |     assert "Import complete" in result.output
 72 |     assert "Created 1 entities" in result.output
 73 |     assert "Added 1 relations" in result.output
 74 | 
 75 | 
 76 | def test_import_json_command_invalid_json(tmp_path):
 77 |     """Test error handling for invalid JSON."""
 78 |     # Create invalid JSON file
 79 |     invalid_file = tmp_path / "invalid.json"
 80 |     invalid_file.write_text("not json")
 81 | 
 82 |     result = runner.invoke(import_app, ["memory-json", str(invalid_file)])
 83 |     assert result.exit_code == 1
 84 |     assert "Error during import" in result.output
 85 | 
 86 | 
 87 | def test_import_json_command_handle_old_format(tmp_path, monkeypatch):
 88 |     """Test handling old format JSON with from_id/to_id."""
 89 |     # Create JSON with old format
 90 |     old_format = [
 91 |         {
 92 |             "type": "entity",
 93 |             "name": "test_entity",
 94 |             "entityType": "test",
 95 |             "observations": ["Test observation"],
 96 |         },
 97 |         {
 98 |             "type": "relation",
 99 |             "from_id": "test_entity",
100 |             "to_id": "other_entity",
101 |             "relation_type": "test_relation",
102 |         },
103 |     ]
104 | 
105 |     json_file = tmp_path / "old_format.json"
106 |     with open(json_file, "w", encoding="utf-8") as f:
107 |         for item in old_format:
108 |             f.write(json.dumps(item) + "\n")
109 | 
110 |     # Set up test environment (the monkeypatch fixture undoes the env
111 |     # change automatically after the test)
112 |     monkeypatch.setenv("HOME", str(tmp_path))
113 | 
114 |     # Run import
115 |     result = runner.invoke(import_app, ["memory-json", str(json_file)])
116 |     assert result.exit_code == 0
117 |     assert "Import complete" in result.output
118 | 
119 | 
120 | def test_import_json_command_missing_name_key(tmp_path, monkeypatch):
121 |     """Test handling JSON with missing 'name' key using 'id' instead."""
122 |     # Create JSON with id instead of name (common in Knowledge Graph Memory Server)
123 |     data_with_id = [
124 |         {
125 |             "type": "entity",
126 |             "id": "test_entity_id",
127 |             "entityType": "test",
128 |             "observations": ["Test observation with id"],
129 |         },
130 |         {
131 |             "type": "entity",
132 |             "entityName": "test_entity_2",
133 |             "entityType": "test",
134 |             "observations": ["Test observation with entityName"],
135 |         },
136 |         {
137 |             "type": "entity",
138 |             "name": "test_entity_title",
139 |             "entityType": "test",
140 |             "observations": ["Test observation with name"],
141 |         },
142 |     ]
143 | 
144 |     json_file = tmp_path / "missing_name.json"
145 |     with open(json_file, "w", encoding="utf-8") as f:
146 |         for item in data_with_id:
147 |             f.write(json.dumps(item) + "\n")
148 | 
149 |     # Set up test environment (the monkeypatch fixture undoes the env
150 |     # change automatically after the test)
151 |     monkeypatch.setenv("HOME", str(tmp_path))
152 | 
153 |     # Run import - should not fail even without 'name' key
154 |     result = runner.invoke(import_app, ["memory-json", str(json_file)])
155 |     assert result.exit_code == 0
156 |     assert "Import complete" in result.output
157 |     assert "Created 3 entities" in result.output
158 | 
```
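
The importer consumes newline-delimited JSON (one object per line), exactly as the fixtures above construct by hand. A small helper capturing that format:

```python
import json
from pathlib import Path


def write_memory_json(path: Path, items: list[dict]) -> None:
    # One JSON object per line (JSONL) - the layout memory.json imports expect
    with open(path, "w", encoding="utf-8") as f:
        for item in items:
            f.write(json.dumps(item) + "\n")
```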

--------------------------------------------------------------------------------
/tests/markdown/test_parser_edge_cases.py:
--------------------------------------------------------------------------------

```python
  1 | """Tests for markdown parser edge cases."""
  2 | 
  3 | from pathlib import Path
  4 | from textwrap import dedent
  5 | 
  6 | import pytest
  7 | 
  8 | from basic_memory.markdown.entity_parser import EntityParser
  9 | 
 10 | 
 11 | @pytest.mark.asyncio
 12 | async def test_unicode_content(tmp_path):
 13 |     """Test handling of Unicode content including emoji and non-Latin scripts."""
 14 |     content = dedent("""
 15 |         ---
 16 |         type: test
 17 |         id: test/unicode
 18 |         created: 2024-12-21T14:00:00Z
 19 |         modified: 2024-12-21T14:00:00Z
 20 |         tags: [unicode, 测试]
 21 |         ---
 22 |         
 23 |         # Unicode Test 🧪
 24 |         
 25 |         ## Observations
 26 |         - [test] Emoji test 👍 #emoji #test (Testing emoji)
 27 |         - [中文] Chinese text 测试 #language (Script test)
 28 |         - [русский] Russian привет #language (More scripts)
 29 |         - [note] Emoji in text 😀 #meta (Category test)
 30 |         
 31 |         ## Relations
 32 |         - tested_by [[测试组件]] (Unicode test)
 33 |         - depends_on [[компонент]] (Another test)
 34 |         """)
 35 | 
 36 |     test_file = tmp_path / "unicode.md"
 37 |     test_file.write_text(content, encoding="utf-8")
 38 | 
 39 |     parser = EntityParser(tmp_path)
 40 |     entity = await parser.parse_file(test_file)
 41 | 
 42 |     assert "测试" in entity.frontmatter.metadata["tags"]
 43 |     assert "chinese" not in entity.frontmatter.metadata["tags"]
 44 |     assert "🧪" in entity.content
 45 | 
 46 |     # Verify Unicode in observations
 47 |     assert any(o.content == "Emoji test 👍 #emoji #test" for o in entity.observations)
 48 |     assert any(o.category == "中文" for o in entity.observations)
 49 |     assert any(o.category == "русский" for o in entity.observations)
 50 | 
 51 |     # Verify Unicode in relations
 52 |     assert any(r.target == "测试组件" for r in entity.relations)
 53 |     assert any(r.target == "компонент" for r in entity.relations)
 54 | 
 55 | 
 56 | @pytest.mark.asyncio
 57 | async def test_empty_file(tmp_path):
 58 |     """Test handling of empty files."""
 59 |     empty_file = tmp_path / "empty.md"
 60 |     empty_file.write_text("")
 61 | 
 62 |     parser = EntityParser(tmp_path)
 63 |     entity = await parser.parse_file(empty_file)
 64 |     assert entity.observations == []
 65 |     assert entity.relations == []
 66 | 
 67 | 
 68 | @pytest.mark.asyncio
 69 | async def test_missing_sections(tmp_path):
 70 |     """Test handling of files with missing sections."""
 71 |     content = dedent("""
 72 |         ---
 73 |         type: test
 74 |         id: test/missing
 75 |         created: 2024-01-09
 76 |         modified: 2024-01-09
 77 |         tags: []
 78 |         ---
 79 |         
 80 |         Just some content
 81 |         with [[links]] but no sections
 82 |         """)
 83 | 
 84 |     test_file = tmp_path / "missing.md"
 85 |     test_file.write_text(content)
 86 | 
 87 |     parser = EntityParser(tmp_path)
 88 |     entity = await parser.parse_file(test_file)
 89 |     assert len(entity.relations) == 1
 90 |     assert entity.relations[0].target == "links"
 91 |     assert entity.relations[0].type == "links_to"
 92 | 
 93 | 
 94 | @pytest.mark.asyncio
 95 | async def test_tasks_are_not_observations(tmp_path):
 96 |     """Test handling of plain observations without categories."""
 97 |     content = dedent("""
 98 |         ---
 99 |         type: test
100 |         id: test/missing
101 |         created: 2024-01-09
102 |         modified: 2024-01-09
103 |         tags: []
104 |         ---
105 | 
106 |         - [ ] one
107 |         -[ ] two
108 |         - [x] done
109 |         - [-] not done
110 |         """)
111 | 
112 |     test_file = tmp_path / "missing.md"
113 |     test_file.write_text(content)
114 | 
115 |     parser = EntityParser(tmp_path)
116 |     entity = await parser.parse_file(test_file)
117 |     assert len(entity.observations) == 0
118 | 
119 | 
120 | @pytest.mark.asyncio
121 | async def test_nested_content(tmp_path):
122 |     """Test handling of deeply nested content."""
123 |     content = dedent("""
124 |         ---
125 |         type: test
126 |         id: test/nested
127 |         created: 2024-01-09
128 |         modified: 2024-01-09
129 |         tags: []
130 |         ---
131 |         
132 |         # Test
133 |         
134 |         ## Level 1
135 |         - [test] Level 1 #test (First level)
136 |         - implements [[One]]
137 |             
138 |             ### Level 2
139 |             - [test] Level 2 #test (Second level)
140 |             - uses [[Two]]
141 |                 
142 |                 #### Level 3
143 |                 - [test] Level 3 #test (Third level)
144 |                 - needs [[Three]]
145 |         """)
146 | 
147 |     test_file = tmp_path / "nested.md"
148 |     test_file.write_text(content)
149 | 
150 |     parser = EntityParser(tmp_path)
151 |     entity = await parser.parse_file(test_file)
152 | 
153 |     # Should find all observations and relations regardless of nesting
154 |     assert len(entity.observations) == 3
155 |     assert len(entity.relations) == 3
156 |     assert {r.target for r in entity.relations} == {"One", "Two", "Three"}
157 | 
158 | 
159 | @pytest.mark.asyncio
160 | async def test_malformed_frontmatter(tmp_path):
161 |     """Test handling of malformed frontmatter."""
162 |     # Missing fields
163 |     content = dedent("""
164 |         ---
165 |         type: test
166 |         ---
167 |         
168 |         # Test
169 |         """)
170 | 
171 |     test_file = tmp_path / "malformed.md"
172 |     test_file.write_text(content)
173 | 
174 |     parser = EntityParser(tmp_path)
175 |     entity = await parser.parse_file(test_file)
176 |     assert entity.frontmatter.permalink is None
177 | 
178 | 
179 | @pytest.mark.asyncio
180 | async def test_file_not_found():
181 |     """Test handling of non-existent files."""
182 |     parser = EntityParser(Path("/tmp"))
183 |     with pytest.raises(FileNotFoundError):
184 |         await parser.parse_file(Path("nonexistent.md"))
185 | 
```
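
A quick usage sketch of `EntityParser` outside the test suite; the directory and file names below are placeholders:

```python
import asyncio
from pathlib import Path

from basic_memory.markdown.entity_parser import EntityParser


async def main() -> None:
    parser = EntityParser(Path("notes"))  # project root for resolving relative paths
    entity = await parser.parse_file(Path("notes/example.md"))
    print(entity.frontmatter.metadata)
    print(len(entity.observations), "observations,", len(entity.relations), "relations")


asyncio.run(main())
```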

--------------------------------------------------------------------------------
/tests/mcp/test_tool_build_context.py:
--------------------------------------------------------------------------------

```python
  1 | """Tests for discussion context MCP tool."""
  2 | 
  3 | import pytest
  4 | from datetime import datetime
  5 | 
  6 | from mcp.server.fastmcp.exceptions import ToolError
  7 | 
  8 | from basic_memory.mcp.tools import build_context
  9 | from basic_memory.schemas.memory import (
 10 |     GraphContext,
 11 | )
 12 | 
 13 | 
 14 | @pytest.mark.asyncio
 15 | async def test_get_basic_discussion_context(client, test_graph, test_project):
 16 |     """Test getting basic discussion context."""
 17 |     context = await build_context.fn(project=test_project.name, url="memory://test/root")
 18 | 
 19 |     assert isinstance(context, GraphContext)
 20 |     assert len(context.results) == 1
 21 |     assert context.results[0].primary_result.permalink == "test/root"
 22 |     assert len(context.results[0].related_results) > 0
 23 | 
 24 |     # Verify metadata
 25 |     assert context.metadata.uri == "test/root"
 26 |     assert context.metadata.depth == 1  # default depth
 27 |     assert context.metadata.timeframe is not None
 28 |     assert isinstance(context.metadata.generated_at, datetime)
 29 |     assert context.metadata.primary_count == 1
 30 |     if context.metadata.related_count:
 31 |         assert context.metadata.related_count > 0
 32 | 
 33 | 
 34 | @pytest.mark.asyncio
 35 | async def test_get_discussion_context_pattern(client, test_graph, test_project):
 36 |     """Test getting context with pattern matching."""
 37 |     context = await build_context.fn(project=test_project.name, url="memory://test/*", depth=1)
 38 | 
 39 |     assert isinstance(context, GraphContext)
 40 |     assert len(context.results) > 1  # Should match multiple test/* paths
 41 |     assert all("test/" in item.primary_result.permalink for item in context.results)  # pyright: ignore [reportOperatorIssue]
 42 |     assert context.metadata.depth == 1
 43 | 
 44 | 
 45 | @pytest.mark.asyncio
 46 | async def test_get_discussion_context_timeframe(client, test_graph, test_project):
 47 |     """Test timeframe parameter filtering."""
 48 |     # Get recent context
 49 |     recent_context = await build_context.fn(
 50 |         project=test_project.name,
 51 |         url="memory://test/root",
 52 |         timeframe="1d",  # Last 24 hours
 53 |     )
 54 | 
 55 |     # Get older context
 56 |     older_context = await build_context.fn(
 57 |         project=test_project.name,
 58 |         url="memory://test/root",
 59 |         timeframe="30d",  # Last 30 days
 60 |     )
 61 | 
 62 |     # Calculate total related items
 63 |     total_recent_related = (
 64 |         sum(len(item.related_results) for item in recent_context.results)
 65 |         if recent_context.results
 66 |         else 0
 67 |     )
 68 |     total_older_related = (
 69 |         sum(len(item.related_results) for item in older_context.results)
 70 |         if older_context.results
 71 |         else 0
 72 |     )
 73 | 
 74 |     assert total_older_related >= total_recent_related
 75 | 
 76 | 
 77 | @pytest.mark.asyncio
 78 | async def test_get_discussion_context_not_found(client, test_project):
 79 |     """Test handling of non-existent URIs."""
 80 |     context = await build_context.fn(project=test_project.name, url="memory://test/does-not-exist")
 81 | 
 82 |     assert isinstance(context, GraphContext)
 83 |     assert len(context.results) == 0
 84 |     assert context.metadata.primary_count == 0
 85 |     assert context.metadata.related_count == 0
 86 | 
 87 | 
 88 | # Test data for different timeframe formats
 89 | valid_timeframes = [
 90 |     "7d",  # Standard format
 91 |     "yesterday",  # Natural language
 92 |     "0d",  # Zero duration
 93 | ]
 94 | 
 95 | invalid_timeframes = [
 96 |     "invalid",  # Nonsense string
 97 |     # NOTE: "tomorrow" now returns 1 day ago due to timezone safety - no longer invalid
 98 | ]
 99 | 
100 | 
101 | @pytest.mark.asyncio
102 | async def test_build_context_timeframe_formats(client, test_graph, test_project):
103 |     """Test that build_context accepts various timeframe formats."""
104 |     test_url = "memory://specs/test"
105 | 
106 |     # Test each valid timeframe
107 |     for timeframe in valid_timeframes:
108 |         try:
109 |             result = await build_context.fn(
110 |                 project=test_project.name,
111 |                 url=test_url,
112 |                 timeframe=timeframe,
113 |                 page=1,
114 |                 page_size=10,
115 |                 max_related=10,
116 |             )
117 |             assert result is not None
118 |         except Exception as e:
119 |             pytest.fail(f"Failed with valid timeframe '{timeframe}': {str(e)}")
120 | 
121 |     # Test invalid timeframes - should raise ToolError
122 |     for timeframe in invalid_timeframes:
123 |         with pytest.raises(ToolError):
124 |             await build_context.fn(project=test_project.name, url=test_url, timeframe=timeframe)
125 | 
126 | 
127 | @pytest.mark.asyncio
128 | async def test_build_context_string_depth_parameter(client, test_graph, test_project):
129 |     """Test that build_context handles string depth parameter correctly."""
130 |     test_url = "memory://test/root"
131 | 
132 |     # Test valid string depth parameter - should either raise ToolError or convert to int
133 |     try:
134 |         result = await build_context.fn(url=test_url, depth="2", project=test_project.name)
135 |         # If it succeeds, verify the depth was converted to an integer
136 |         assert isinstance(result.metadata.depth, int)
137 |         assert result.metadata.depth == 2
138 |     except ToolError:
139 |         # This is also acceptable behavior - type validation should catch it
140 |         pass
141 | 
142 |     # Test invalid string depth parameter - should raise ToolError
143 |     with pytest.raises(ToolError):
144 |         await build_context.fn(test_url, depth="invalid", project=test_project.name)
145 | 
```
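
Taken together, the tests above pin down the call shapes `build_context` accepts: an exact `memory://` permalink, a glob pattern, and an optional timeframe. A condensed recap as a sketch (the project name is a placeholder; `.fn` invokes the underlying coroutine directly, exactly as the tests do):

```python
from basic_memory.mcp.tools import build_context

async def demo(project_name: str) -> None:
    # Exact permalink: one primary result plus its related items (depth defaults to 1)
    root = await build_context.fn(project=project_name, url="memory://test/root")

    # Glob pattern: matches every permalink under test/
    many = await build_context.fn(project=project_name, url="memory://test/*", depth=1)

    # Timeframe filter: "7d", "yesterday", and "0d" are all accepted forms
    recent = await build_context.fn(
        project=project_name, url="memory://test/root", timeframe="7d"
    )

    print(len(root.results), len(many.results), len(recent.results))
```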

--------------------------------------------------------------------------------
/tests/api/test_continue_conversation_template.py:
--------------------------------------------------------------------------------

```python
  1 | """Tests for the continue_conversation template rendering."""
  2 | 
  3 | import datetime
  4 | import pytest
  5 | 
  6 | from basic_memory.api.template_loader import TemplateLoader
  7 | from basic_memory.schemas.memory import EntitySummary
  8 | from basic_memory.schemas.search import SearchItemType
  9 | 
 10 | 
 11 | @pytest.fixture
 12 | def template_loader():
 13 |     """Return a TemplateLoader instance for testing."""
 14 |     return TemplateLoader()
 15 | 
 16 | 
 17 | @pytest.fixture
 18 | def entity_summary():
 19 |     """Create a sample EntitySummary for testing."""
 20 |     return EntitySummary(
 21 |         entity_id=1,
 22 |         title="Test Entity",
 23 |         permalink="test/entity",
 24 |         type=SearchItemType.ENTITY,
 25 |         content="This is a test entity with some content.",
 26 |         file_path="/path/to/test/entity.md",
 27 |         created_at=datetime.datetime(2023, 1, 1, 12, 0),
 28 |     )
 29 | 
 30 | 
 31 | @pytest.fixture
 32 | def context_with_results(entity_summary):
 33 |     """Create a sample context with results for testing."""
 34 |     from basic_memory.schemas.memory import ObservationSummary, ContextResult
 35 | 
 36 |     # Create an observation for the entity
 37 |     observation = ObservationSummary(
 38 |         observation_id=1,
 39 |         entity_id=1,
 40 |         title="Test Observation",
 41 |         permalink="test/entity/observations/1",
 42 |         category="test",
 43 |         content="This is a test observation.",
 44 |         file_path="/path/to/test/entity.md",
 45 |         created_at=datetime.datetime(2023, 1, 1, 12, 0),
 46 |     )
 47 | 
 48 |     # Create a context result with primary_result, observations, and related_results
 49 |     context_item = ContextResult(
 50 |         primary_result=entity_summary,
 51 |         observations=[observation],
 52 |         related_results=[entity_summary],
 53 |     )
 54 | 
 55 |     return {
 56 |         "topic": "Test Topic",
 57 |         "timeframe": "7d",
 58 |         "has_results": True,
 59 |         "hierarchical_results": [context_item],
 60 |     }
 61 | 
 62 | 
 63 | @pytest.fixture
 64 | def context_without_results():
 65 |     """Create a sample context without results for testing."""
 66 |     return {
 67 |         "topic": "Empty Topic",
 68 |         "timeframe": "1d",
 69 |         "has_results": False,
 70 |         "hierarchical_results": [],
 71 |     }
 72 | 
 73 | 
 74 | @pytest.mark.asyncio
 75 | async def test_continue_conversation_with_results(template_loader, context_with_results):
 76 |     """Test rendering the continue_conversation template with results."""
 77 |     result = await template_loader.render("prompts/continue_conversation.hbs", context_with_results)
 78 | 
 79 |     # Check that key elements are present
 80 |     assert "Continuing conversation on: Test Topic" in result
 81 |     assert "memory://test/entity" in result
 82 |     assert "Test Entity" in result
 83 |     assert "This is a test entity with some content." in result
 84 |     assert "Related Context" in result
 85 |     assert "read_note" in result
 86 |     assert "Next Steps" in result
 87 |     assert "Knowledge Capture Recommendation" in result
 88 | 
 89 | 
 90 | @pytest.mark.asyncio
 91 | async def test_continue_conversation_without_results(template_loader, context_without_results):
 92 |     """Test rendering the continue_conversation template without results."""
 93 |     result = await template_loader.render(
 94 |         "prompts/continue_conversation.hbs", context_without_results
 95 |     )
 96 | 
 97 |     # Check that key elements are present
 98 |     assert "Continuing conversation on: Empty Topic" in result
 99 |     assert "The supplied query did not return any information" in result
100 |     assert "Opportunity to Capture New Knowledge!" in result
101 |     assert 'title="Empty Topic"' in result
102 |     assert "Next Steps" in result
103 |     assert "Knowledge Capture Recommendation" in result
104 | 
105 | 
106 | @pytest.mark.asyncio
107 | async def test_next_steps_section(template_loader, context_with_results):
108 |     """Test that the next steps section is rendered correctly."""
109 |     result = await template_loader.render("prompts/continue_conversation.hbs", context_with_results)
110 | 
111 |     assert "Next Steps" in result
112 |     assert 'Explore more with: `search_notes("Test Topic")`' in result
113 |     assert (
114 |         f'See what\'s changed: `recent_activity(timeframe="{context_with_results["timeframe"]}")`'
115 |         in result
116 |     )
117 |     assert "Record new learnings or decisions from this conversation" in result
118 | 
119 | 
120 | @pytest.mark.asyncio
121 | async def test_knowledge_capture_recommendation(template_loader, context_with_results):
122 |     """Test that the knowledge capture recommendation is rendered."""
123 |     result = await template_loader.render("prompts/continue_conversation.hbs", context_with_results)
124 | 
125 |     assert "Knowledge Capture Recommendation" in result
126 |     assert "actively look for opportunities to:" in result
127 |     assert "Record key information, decisions, or insights" in result
128 |     assert "Link new knowledge to existing topics" in result
129 |     assert "Suggest capturing important context" in result
130 |     assert "one of the most valuable aspects of Basic Memory" in result
131 | 
132 | 
133 | @pytest.mark.asyncio
134 | async def test_timeframe_default_value(template_loader, context_with_results):
135 |     """Test that the timeframe uses the default value when not provided."""
136 |     # Remove the timeframe from the context
137 |     context_without_timeframe = context_with_results.copy()
138 |     context_without_timeframe["timeframe"] = None
139 | 
140 |     result = await template_loader.render(
141 |         "prompts/continue_conversation.hbs", context_without_timeframe
142 |     )
143 | 
144 |     # Check that the default value is used
145 |     assert 'recent_activity(timeframe="7d")' in result
146 | 
```

--------------------------------------------------------------------------------
/tests/repository/test_entity_upsert_issue_187.py:
--------------------------------------------------------------------------------

```python
  1 | """Tests for issue #187 - UNIQUE constraint violation on file_path during sync."""
  2 | 
  3 | import pytest
  4 | from datetime import datetime, timezone
  5 | 
  6 | from basic_memory.models.knowledge import Entity, Observation
  7 | from basic_memory.repository.entity_repository import EntityRepository
  8 | 
  9 | 
 10 | @pytest.mark.asyncio
 11 | async def test_upsert_entity_with_observations_conflict(entity_repository: EntityRepository):
 12 |     """Test upserting an entity that already exists with observations.
 13 | 
 14 |     This reproduces issue #187 where sync fails with UNIQUE constraint violations
 15 |     when trying to update entities that already exist with observations.
 16 |     """
 17 |     # Create initial entity with observations
 18 |     entity1 = Entity(
 19 |         project_id=entity_repository.project_id,
 20 |         title="Original Title",
 21 |         entity_type="note",
 22 |         permalink="debugging/backup-system/coderabbit-feedback-resolution",
 23 |         file_path="debugging/backup-system/CodeRabbit Feedback Resolution - Backup System Issues.md",
 24 |         content_type="text/markdown",
 25 |         created_at=datetime.now(timezone.utc),
 26 |         updated_at=datetime.now(timezone.utc),
 27 |     )
 28 | 
 29 |     # Add observations to the entity
 30 |     obs1 = Observation(
 31 |         project_id=entity_repository.project_id,
 32 |         content="This is a test observation",
 33 |         category="testing",
 34 |         tags=["test"],
 35 |     )
 36 |     entity1.observations.append(obs1)
 37 | 
 38 |     result1 = await entity_repository.upsert_entity(entity1)
 39 |     original_id = result1.id
 40 | 
 41 |     # Verify entity was created with observations
 42 |     assert result1.id is not None
 43 |     assert len(result1.observations) == 1
 44 | 
 45 |     # Now try to upsert the same file_path with different content/observations
 46 |     # This simulates a file being modified and re-synced
 47 |     entity2 = Entity(
 48 |         project_id=entity_repository.project_id,
 49 |         title="Updated Title",
 50 |         entity_type="note",
 51 |         permalink="debugging/backup-system/coderabbit-feedback-resolution",  # Same permalink
 52 |         file_path="debugging/backup-system/CodeRabbit Feedback Resolution - Backup System Issues.md",  # Same file_path
 53 |         content_type="text/markdown",
 54 |         created_at=datetime.now(timezone.utc),
 55 |         updated_at=datetime.now(timezone.utc),
 56 |     )
 57 | 
 58 |     # Add different observations
 59 |     obs2 = Observation(
 60 |         project_id=entity_repository.project_id,
 61 |         content="This is an updated observation",
 62 |         category="updated",
 63 |         tags=["updated"],
 64 |     )
 65 |     obs3 = Observation(
 66 |         project_id=entity_repository.project_id,
 67 |         content="This is a second observation",
 68 |         category="second",
 69 |         tags=["second"],
 70 |     )
 71 |     entity2.observations.extend([obs2, obs3])
 72 | 
 73 |     # This should UPDATE the existing entity, not fail with IntegrityError
 74 |     result2 = await entity_repository.upsert_entity(entity2)
 75 | 
 76 |     # Should update existing entity (same ID)
 77 |     assert result2.id == original_id
 78 |     assert result2.title == "Updated Title"
 79 |     assert result2.file_path == entity1.file_path
 80 |     assert result2.permalink == entity1.permalink
 81 | 
 82 |     # Observations should be updated
 83 |     assert len(result2.observations) == 2
 84 |     assert result2.observations[0].content == "This is an updated observation"
 85 |     assert result2.observations[1].content == "This is a second observation"
 86 | 
 87 | 
 88 | @pytest.mark.asyncio
 89 | async def test_upsert_entity_repeated_sync_same_file(entity_repository: EntityRepository):
 90 |     """Test that syncing the same file multiple times doesn't cause IntegrityError.
 91 | 
 92 |     This tests the specific scenario from issue #187 where files are being
 93 |     synced repeatedly and hitting UNIQUE constraint violations.
 94 |     """
 95 |     file_path = "processes/Complete Process for Uploading New Training Videos.md"
 96 |     permalink = "processes/complete-process-for-uploading-new-training-videos"
 97 | 
 98 |     # Create initial entity
 99 |     entity1 = Entity(
100 |         project_id=entity_repository.project_id,
101 |         title="Complete Process for Uploading New Training Videos",
102 |         entity_type="note",
103 |         permalink=permalink,
104 |         file_path=file_path,
105 |         content_type="text/markdown",
106 |         checksum="abc123",
107 |         created_at=datetime.now(timezone.utc),
108 |         updated_at=datetime.now(timezone.utc),
109 |     )
110 | 
111 |     result1 = await entity_repository.upsert_entity(entity1)
112 |     first_id = result1.id
113 | 
114 |     # Simulate multiple sync attempts (like the infinite retry loop in the issue)
115 |     for i in range(5):
116 |         entity_new = Entity(
117 |             project_id=entity_repository.project_id,
118 |             title="Complete Process for Uploading New Training Videos",
119 |             entity_type="note",
120 |             permalink=permalink,
121 |             file_path=file_path,
122 |             content_type="text/markdown",
123 |             checksum=f"def{456 + i}",  # Different checksum each time
124 |             created_at=datetime.now(timezone.utc),
125 |             updated_at=datetime.now(timezone.utc),
126 |         )
127 | 
128 |         # Each upsert should succeed and update the existing entity
129 |         result = await entity_repository.upsert_entity(entity_new)
130 | 
131 |         # Should always return the same entity (updated)
132 |         assert result.id == first_id
133 |         assert result.checksum == entity_new.checksum
134 |         assert result.file_path == file_path
135 |         assert result.permalink == permalink
136 | 
```
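
The contract these tests pin down: `upsert_entity` treats `file_path` (scoped to the project) as the identity key, updating the existing row and its observations in place instead of inserting a duplicate. A minimal sketch of that resolution logic, purely illustrative; `find_by_file_path` is a hypothetical helper, and the real repository also manages sessions, relations, and search reindexing:

```python
from basic_memory.models.knowledge import Entity

async def upsert_sketch(session, find_by_file_path, incoming: Entity) -> Entity:
    # Hypothetical lookup on the unique (project_id, file_path) key
    existing = await find_by_file_path(session, incoming.project_id, incoming.file_path)
    if existing is None:
        session.add(incoming)  # new file: plain insert
        return incoming
    # Same file re-synced: update fields and replace observations in place,
    # keeping the primary key stable (the tests assert result.id is unchanged)
    existing.title = incoming.title
    existing.checksum = incoming.checksum
    existing.observations = list(incoming.observations)
    return existing
```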

--------------------------------------------------------------------------------
/tests/api/test_search_template.py:
--------------------------------------------------------------------------------

```python
  1 | """Tests for the search template rendering."""
  2 | 
  3 | import datetime
  4 | import pytest
  5 | 
  6 | from basic_memory.api.template_loader import TemplateLoader
  7 | from basic_memory.schemas.search import SearchItemType, SearchResult
  8 | 
  9 | 
 10 | @pytest.fixture
 11 | def template_loader():
 12 |     """Return a TemplateLoader instance for testing."""
 13 |     return TemplateLoader()
 14 | 
 15 | 
 16 | @pytest.fixture
 17 | def search_result():
 18 |     """Create a sample SearchResult for testing."""
 19 |     return SearchResult(
 20 |         title="Test Search Result",
 21 |         type=SearchItemType.ENTITY,
 22 |         permalink="test/search-result",
 23 |         score=0.95,
 24 |         content="This is a test search result with some content.",
 25 |         file_path="/path/to/test/search-result.md",
 26 |         metadata={"created_at": datetime.datetime(2023, 2, 1, 12, 0)},
 27 |     )
 28 | 
 29 | 
 30 | @pytest.fixture
 31 | def context_with_results(search_result):
 32 |     """Create a sample context with search results."""
 33 |     return {
 34 |         "query": "test query",
 35 |         "timeframe": "30d",
 36 |         "has_results": True,
 37 |         "result_count": 1,
 38 |         "results": [search_result],
 39 |     }
 40 | 
 41 | 
 42 | @pytest.fixture
 43 | def context_without_results():
 44 |     """Create a sample context without search results."""
 45 |     return {
 46 |         "query": "empty query",
 47 |         "timeframe": None,
 48 |         "has_results": False,
 49 |         "result_count": 0,
 50 |         "results": [],
 51 |     }
 52 | 
 53 | 
 54 | @pytest.mark.asyncio
 55 | async def test_search_with_results(template_loader, context_with_results):
 56 |     """Test rendering the search template with results."""
 57 |     result = await template_loader.render("prompts/search.hbs", context_with_results)
 58 | 
 59 |     # Check that key elements are present
 60 |     assert 'Search Results for: "test query" (after 30d)' in result
 61 |     assert "1.0. Test Search Result" in result
 62 |     assert "Type**: entity" in result
 63 |     assert "Relevance Score**: 0.95" in result
 64 |     assert "This is a test search result with some content." in result
 65 |     assert 'read_note("test/search-result")' in result
 66 |     assert "Next Steps" in result
 67 |     assert "Synthesize and Capture Knowledge" in result
 68 | 
 69 | 
 70 | @pytest.mark.asyncio
 71 | async def test_search_without_results(template_loader, context_without_results):
 72 |     """Test rendering the search template without results."""
 73 |     result = await template_loader.render("prompts/search.hbs", context_without_results)
 74 | 
 75 |     # Check that key elements are present
 76 |     assert 'Search Results for: "empty query"' in result
 77 |     assert "I couldn't find any results for this query." in result
 78 |     assert "Opportunity to Capture Knowledge!" in result
 79 |     assert "write_note(" in result
 80 |     assert 'title="Empty query"' in result
 81 |     assert "Other Suggestions" in result
 82 | 
 83 | 
 84 | @pytest.mark.asyncio
 85 | async def test_multiple_search_results(template_loader):
 86 |     """Test rendering the search template with multiple results."""
 87 |     # Create multiple search results
 88 |     results = []
 89 |     for i in range(1, 6):  # Create 5 results
 90 |         results.append(
 91 |             SearchResult(
 92 |                 title=f"Search Result {i}",
 93 |                 type=SearchItemType.ENTITY,
 94 |                 permalink=f"test/result-{i}",
 95 |                 score=1.0 - (i * 0.1),  # Decreasing scores
 96 |                 content=f"Content for result {i}",
 97 |                 file_path=f"/path/to/result-{i}.md",
 98 |                 metadata={},
 99 |             )
100 |         )
101 | 
102 |     context = {
103 |         "query": "multiple results",
104 |         "timeframe": None,
105 |         "has_results": True,
106 |         "result_count": len(results),
107 |         "results": results,
108 |     }
109 | 
110 |     result = await template_loader.render("prompts/search.hbs", context)
111 | 
112 |     # Check that all results are rendered
113 |     for i in range(1, 6):
114 |         assert f"{i}.0. Search Result {i}" in result
115 |         assert f"Content for result {i}" in result
116 |         assert f'read_note("test/result-{i}")' in result
117 | 
118 | 
119 | @pytest.mark.asyncio
120 | async def test_capitalization_in_write_note_template(template_loader, context_with_results):
121 |     """Test that the query is capitalized in the write_note template."""
122 |     result = await template_loader.render("prompts/search.hbs", context_with_results)
123 | 
124 |     # The query should be capitalized in the suggested write_note call
125 |     assert "Synthesis of Test query Information" in result
126 | 
127 | 
128 | @pytest.mark.asyncio
129 | async def test_timeframe_display(template_loader):
130 |     """Test that the timeframe is displayed correctly when present, and not when absent."""
131 |     # Context with timeframe
132 |     context_with_timeframe = {
133 |         "query": "with timeframe",
134 |         "timeframe": "7d",
135 |         "has_results": True,
136 |         "result_count": 0,
137 |         "results": [],
138 |     }
139 | 
140 |     result_with_timeframe = await template_loader.render(
141 |         "prompts/search.hbs", context_with_timeframe
142 |     )
143 |     assert 'Search Results for: "with timeframe" (after 7d)' in result_with_timeframe
144 | 
145 |     # Context without timeframe
146 |     context_without_timeframe = {
147 |         "query": "without timeframe",
148 |         "timeframe": None,
149 |         "has_results": True,
150 |         "result_count": 0,
151 |         "results": [],
152 |     }
153 | 
154 |     result_without_timeframe = await template_loader.render(
155 |         "prompts/search.hbs", context_without_timeframe
156 |     )
157 |     assert 'Search Results for: "without timeframe"' in result_without_timeframe
158 |     assert 'Search Results for: "without timeframe" (after' not in result_without_timeframe
159 | 
```

--------------------------------------------------------------------------------
/src/basic_memory/mcp/async_client.py:
--------------------------------------------------------------------------------

```python
  1 | from contextlib import asynccontextmanager, AbstractAsyncContextManager
  2 | from typing import AsyncIterator, Callable, Optional
  3 | 
  4 | from httpx import ASGITransport, AsyncClient, Timeout
  5 | from loguru import logger
  6 | 
  7 | from basic_memory.api.app import app as fastapi_app
  8 | from basic_memory.config import ConfigManager
  9 | 
 10 | 
 11 | # Optional factory override for dependency injection
 12 | _client_factory: Optional[Callable[[], AbstractAsyncContextManager[AsyncClient]]] = None
 13 | 
 14 | 
 15 | def set_client_factory(factory: Callable[[], AbstractAsyncContextManager[AsyncClient]]) -> None:
 16 |     """Override the default client factory (for cloud app, testing, etc).
 17 | 
 18 |     Args:
 19 |         factory: An async context manager that yields an AsyncClient
 20 | 
 21 |     Example:
 22 |         @asynccontextmanager
 23 |         async def custom_client_factory():
 24 |             async with AsyncClient(...) as client:
 25 |                 yield client
 26 | 
 27 |         set_client_factory(custom_client_factory)
 28 |     """
 29 |     global _client_factory
 30 |     _client_factory = factory
 31 | 
 32 | 
 33 | @asynccontextmanager
 34 | async def get_client() -> AsyncIterator[AsyncClient]:
 35 |     """Get an AsyncClient as a context manager.
 36 | 
 37 |     This function provides proper resource management for HTTP clients,
 38 |     ensuring connections are closed after use. It supports three modes:
 39 | 
 40 |     1. **Factory injection** (cloud app, tests):
 41 |        If a custom factory is set via set_client_factory(), use that.
 42 | 
 43 |     2. **CLI cloud mode**:
 44 |        When cloud_mode_enabled is True, create HTTP client with auth
 45 |        token from CLIAuth for requests to cloud proxy endpoint.
 46 | 
 47 |     3. **Local mode** (default):
 48 |        Use ASGI transport for in-process requests to local FastAPI app.
 49 | 
 50 |     Usage:
 51 |         async with get_client() as client:
 52 |             response = await client.get("/path")
 53 | 
 54 |     Yields:
 55 |         AsyncClient: Configured HTTP client for the current mode
 56 | 
 57 |     Raises:
 58 |         RuntimeError: If cloud mode is enabled but user is not authenticated
 59 |     """
 60 |     if _client_factory:
 61 |         # Use injected factory (cloud app, tests)
 62 |         async with _client_factory() as client:
 63 |             yield client
 64 |     else:
 65 |         # Default: create based on config
 66 |         config = ConfigManager().config
 67 |         timeout = Timeout(
 68 |             connect=10.0,  # 10 seconds for connection
 69 |             read=30.0,  # 30 seconds for reading response
 70 |             write=30.0,  # 30 seconds for writing request
 71 |             pool=30.0,  # 30 seconds for connection pool
 72 |         )
 73 | 
 74 |         if config.cloud_mode_enabled:
 75 |             # CLI cloud mode: inject auth when creating client
 76 |             from basic_memory.cli.auth import CLIAuth
 77 | 
 78 |             auth = CLIAuth(client_id=config.cloud_client_id, authkit_domain=config.cloud_domain)
 79 |             token = await auth.get_valid_token()
 80 | 
 81 |             if not token:
 82 |                 raise RuntimeError(
 83 |                     "Cloud mode enabled but not authenticated. "
 84 |                     "Run 'basic-memory cloud login' first."
 85 |                 )
 86 | 
 87 |             # Auth header set ONCE at client creation
 88 |             proxy_base_url = f"{config.cloud_host}/proxy"
 89 |             logger.info(f"Creating HTTP client for cloud proxy at: {proxy_base_url}")
 90 |             async with AsyncClient(
 91 |                 base_url=proxy_base_url,
 92 |                 headers={"Authorization": f"Bearer {token}"},
 93 |                 timeout=timeout,
 94 |             ) as client:
 95 |                 yield client
 96 |         else:
 97 |             # Local mode: ASGI transport for in-process calls
 98 |             # Note: ASGI transport does NOT trigger FastAPI lifespan, so no special handling needed
 99 |             logger.info("Creating ASGI client for local Basic Memory API")
100 |             async with AsyncClient(
101 |                 transport=ASGITransport(app=fastapi_app), base_url="http://test", timeout=timeout
102 |             ) as client:
103 |                 yield client
104 | 
105 | 
106 | def create_client() -> AsyncClient:
107 |     """Create an HTTP client based on configuration.
108 | 
109 |     DEPRECATED: Use get_client() context manager instead for proper resource management.
110 | 
111 |     This function is kept for backward compatibility but will be removed in a future version.
112 |     The returned client should be closed manually by calling await client.aclose().
113 | 
114 |     Returns:
115 |         AsyncClient configured for either local ASGI or remote proxy
116 |     """
117 |     config_manager = ConfigManager()
118 |     config = config_manager.config
119 | 
120 |     # Configure timeout for longer operations like write_note
121 |     # Default httpx timeout is 5 seconds which is too short for file operations
122 |     timeout = Timeout(
123 |         connect=10.0,  # 10 seconds for connection
124 |         read=30.0,  # 30 seconds for reading response
125 |         write=30.0,  # 30 seconds for writing request
126 |         pool=30.0,  # 30 seconds for connection pool
127 |     )
128 | 
129 |     if config.cloud_mode_enabled:
130 |         # Use HTTP transport to proxy endpoint
131 |         proxy_base_url = f"{config.cloud_host}/proxy"
132 |         logger.info(f"Creating HTTP client for proxy at: {proxy_base_url}")
133 |         return AsyncClient(base_url=proxy_base_url, timeout=timeout)
134 |     else:
135 |         # Default: use ASGI transport for local API (development mode)
136 |         logger.info("Creating ASGI client for local Basic Memory API")
137 |         return AsyncClient(
138 |             transport=ASGITransport(app=fastapi_app), base_url="http://test", timeout=timeout
139 |         )
140 | 
```
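
The factory hook above is how tests (and the cloud app) swap in a preconfigured client without touching config. A minimal sketch using httpx's `MockTransport`; the handler and base URL here are illustrative:

```python
from contextlib import asynccontextmanager

import httpx

from basic_memory.mcp.async_client import get_client, set_client_factory

def handler(request: httpx.Request) -> httpx.Response:
    return httpx.Response(200, json={"ok": True})  # canned response for every request

@asynccontextmanager
async def mock_client_factory():
    async with httpx.AsyncClient(
        transport=httpx.MockTransport(handler), base_url="http://test"
    ) as client:
        yield client

set_client_factory(mock_client_factory)

# From here on, `async with get_client() as client:` yields the mocked client.
```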

--------------------------------------------------------------------------------
/src/basic_memory/alembic/versions/314f1ea54dc4_add_postgres_full_text_search_support_.py:
--------------------------------------------------------------------------------

```python
  1 | """Add Postgres full-text search support with tsvector and GIN indexes
  2 | 
  3 | Revision ID: 314f1ea54dc4
  4 | Revises: e7e1f4367280
  5 | Create Date: 2025-11-15 18:05:01.025405
  6 | 
  7 | """
  8 | 
  9 | from typing import Sequence, Union
 10 | 
 11 | from alembic import op
 12 | import sqlalchemy as sa
 13 | 
 14 | 
 15 | # revision identifiers, used by Alembic.
 16 | revision: str = "314f1ea54dc4"
 17 | down_revision: Union[str, None] = "e7e1f4367280"
 18 | branch_labels: Union[str, Sequence[str], None] = None
 19 | depends_on: Union[str, Sequence[str], None] = None
 20 | 
 21 | 
 22 | def upgrade() -> None:
 23 |     """Add PostgreSQL full-text search support.
 24 | 
 25 |     This migration:
 26 |     1. Creates search_index table for Postgres (SQLite uses FTS5 virtual table)
 27 |     2. Adds generated tsvector column for full-text search
 28 |     3. Creates GIN index on the tsvector column for fast text queries
 29 |     4. Creates GIN index on metadata JSONB column for fast containment queries
 30 | 
 31 |     Note: These changes only apply to Postgres. SQLite continues to use FTS5 virtual tables.
 32 |     """
 33 |     # Check if we're using Postgres
 34 |     connection = op.get_bind()
 35 |     if connection.dialect.name == "postgresql":
 36 |         # Create search_index table for Postgres
  37 |         # For SQLite, this is an FTS5 virtual table created elsewhere
 38 |         from sqlalchemy.dialects.postgresql import JSONB
 39 | 
 40 |         op.create_table(
 41 |             "search_index",
 42 |             sa.Column("id", sa.Integer(), nullable=False),  # Entity IDs are integers
 43 |             sa.Column("project_id", sa.Integer(), nullable=False),  # Multi-tenant isolation
 44 |             sa.Column("title", sa.Text(), nullable=True),
 45 |             sa.Column("content_stems", sa.Text(), nullable=True),
 46 |             sa.Column("content_snippet", sa.Text(), nullable=True),
 47 |             sa.Column("permalink", sa.String(), nullable=True),  # Nullable for non-markdown files
 48 |             sa.Column("file_path", sa.String(), nullable=True),
 49 |             sa.Column("type", sa.String(), nullable=True),
 50 |             sa.Column("from_id", sa.Integer(), nullable=True),  # Relation IDs are integers
 51 |             sa.Column("to_id", sa.Integer(), nullable=True),  # Relation IDs are integers
 52 |             sa.Column("relation_type", sa.String(), nullable=True),
 53 |             sa.Column("entity_id", sa.Integer(), nullable=True),  # Entity IDs are integers
 54 |             sa.Column("category", sa.String(), nullable=True),
 55 |             sa.Column("metadata", JSONB(), nullable=True),  # Use JSONB for Postgres
 56 |             sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
 57 |             sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True),
 58 |             sa.PrimaryKeyConstraint(
 59 |                 "id", "type", "project_id"
 60 |             ),  # Composite key: id can repeat across types
 61 |             sa.ForeignKeyConstraint(
 62 |                 ["project_id"],
 63 |                 ["project.id"],
 64 |                 name="fk_search_index_project_id",
 65 |                 ondelete="CASCADE",
 66 |             ),
 67 |             if_not_exists=True,
 68 |         )
 69 | 
 70 |         # Create index on project_id for efficient multi-tenant queries
 71 |         op.create_index(
 72 |             "ix_search_index_project_id",
 73 |             "search_index",
 74 |             ["project_id"],
 75 |             unique=False,
 76 |         )
 77 | 
 78 |         # Create unique partial index on permalink for markdown files
 79 |         # Non-markdown files don't have permalinks, so we use a partial index
 80 |         op.execute("""
 81 |             CREATE UNIQUE INDEX uix_search_index_permalink_project
 82 |             ON search_index (permalink, project_id)
 83 |             WHERE permalink IS NOT NULL
 84 |         """)
 85 | 
 86 |         # Add tsvector column as a GENERATED ALWAYS column
 87 |         # This automatically updates when title or content_stems change
 88 |         op.execute("""
 89 |             ALTER TABLE search_index
 90 |             ADD COLUMN textsearchable_index_col tsvector
 91 |             GENERATED ALWAYS AS (
 92 |                 to_tsvector('english',
 93 |                     coalesce(title, '') || ' ' ||
 94 |                     coalesce(content_stems, '')
 95 |                 )
 96 |             ) STORED
 97 |         """)
 98 | 
 99 |         # Create GIN index on tsvector column for fast full-text search
100 |         op.create_index(
101 |             "idx_search_index_fts",
102 |             "search_index",
103 |             ["textsearchable_index_col"],
104 |             unique=False,
105 |             postgresql_using="gin",
106 |         )
107 | 
108 |         # Create GIN index on metadata JSONB for fast containment queries
109 |         # Using jsonb_path_ops for smaller index size and better performance
110 |         op.execute("""
111 |             CREATE INDEX idx_search_index_metadata_gin
112 |             ON search_index
113 |             USING GIN (metadata jsonb_path_ops)
114 |         """)
115 | 
116 | 
117 | def downgrade() -> None:
118 |     """Remove PostgreSQL full-text search support."""
119 |     connection = op.get_bind()
120 |     if connection.dialect.name == "postgresql":
121 |         # Drop indexes first
122 |         op.execute("DROP INDEX IF EXISTS idx_search_index_metadata_gin")
123 |         op.drop_index("idx_search_index_fts", table_name="search_index")
124 |         op.execute("DROP INDEX IF EXISTS uix_search_index_permalink_project")
125 |         op.drop_index("ix_search_index_project_id", table_name="search_index")
126 | 
127 |         # Drop the generated column
128 |         op.execute("ALTER TABLE search_index DROP COLUMN IF EXISTS textsearchable_index_col")
129 | 
130 |         # Drop the search_index table
131 |         op.drop_table("search_index")
132 | 
```
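
To illustrate what the generated column and the two GIN indexes serve, here is a hedged sketch of the corresponding read queries (standard Postgres operators; table and column names match the migration, the DSN is a placeholder):

```python
from sqlalchemy import create_engine, text

engine = create_engine("postgresql+psycopg://user:pass@localhost/basic_memory")  # placeholder DSN

with engine.connect() as conn:
    # Full-text search: tsvector @@ tsquery is served by idx_search_index_fts
    hits = conn.execute(
        text(
            "SELECT title, permalink FROM search_index "
            "WHERE project_id = :pid "
            "AND textsearchable_index_col @@ websearch_to_tsquery('english', :q)"
        ),
        {"pid": 1, "q": "full text search"},
    ).fetchall()

    # JSONB containment: @> with jsonb_path_ops is served by idx_search_index_metadata_gin
    tagged = conn.execute(
        text("SELECT permalink FROM search_index WHERE metadata @> CAST(:doc AS jsonb)"),
        {"doc": '{"tags": ["postgres"]}'},
    ).fetchall()
```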

--------------------------------------------------------------------------------
/tests/cli/test_project_add_with_local_path.py:
--------------------------------------------------------------------------------

```python
  1 | """Tests for bm project add with --local-path flag."""
  2 | 
  3 | import json
  4 | from pathlib import Path
  5 | from contextlib import asynccontextmanager
  6 | 
  7 | import pytest
  8 | from typer.testing import CliRunner
  9 | 
 10 | from basic_memory.cli.app import app
 11 | 
 12 | 
 13 | @pytest.fixture
 14 | def runner():
 15 |     return CliRunner()
 16 | 
 17 | 
 18 | @pytest.fixture
 19 | def mock_config(tmp_path, monkeypatch):
 20 |     """Create a mock config in cloud mode using environment variables."""
 21 |     # Invalidate config cache to ensure clean state for each test
 22 |     from basic_memory import config as config_module
 23 | 
 24 |     config_module._CONFIG_CACHE = None
 25 | 
 26 |     config_dir = tmp_path / ".basic-memory"
 27 |     config_dir.mkdir(parents=True, exist_ok=True)
 28 |     config_file = config_dir / "config.json"
 29 | 
 30 |     config_data = {
 31 |         "env": "dev",
 32 |         "projects": {},
 33 |         "default_project": "main",
 34 |         "cloud_mode": True,
 35 |         "cloud_projects": {},
 36 |     }
 37 | 
 38 |     config_file.write_text(json.dumps(config_data, indent=2))
 39 | 
 40 |     # Set HOME to tmp_path so ConfigManager uses our test config
 41 |     monkeypatch.setenv("HOME", str(tmp_path))
 42 | 
 43 |     yield config_file
 44 | 
 45 | 
 46 | @pytest.fixture
 47 | def mock_api_client(monkeypatch):
 48 |     """Stub the API client for project add without stdlib mocks."""
 49 |     import basic_memory.cli.commands.project as project_cmd
 50 | 
 51 |     @asynccontextmanager
 52 |     async def fake_get_client():
 53 |         yield object()
 54 | 
 55 |     class _Resp:
 56 |         def json(self):
 57 |             return {
 58 |                 "message": "Project 'test-project' added successfully",
 59 |                 "status": "success",
 60 |                 "default": False,
 61 |                 "old_project": None,
 62 |                 "new_project": {
 63 |                     "id": 1,
 64 |                     "external_id": "12345678-1234-1234-1234-123456789012",
 65 |                     "name": "test-project",
 66 |                     "path": "/test-project",
 67 |                     "is_default": False,
 68 |                 },
 69 |             }
 70 | 
 71 |     calls: list[tuple[str, dict]] = []
 72 | 
 73 |     async def fake_call_post(client, path: str, json: dict, **kwargs):
 74 |         calls.append((path, json))
 75 |         return _Resp()
 76 | 
 77 |     monkeypatch.setattr(project_cmd, "get_client", fake_get_client)
 78 |     monkeypatch.setattr(project_cmd, "call_post", fake_call_post)
 79 | 
 80 |     return calls
 81 | 
 82 | 
 83 | def test_project_add_with_local_path_saves_to_config(
 84 |     runner, mock_config, mock_api_client, tmp_path
 85 | ):
 86 |     """Test that bm project add --local-path saves sync path to config."""
 87 |     local_sync_dir = tmp_path / "sync" / "test-project"
 88 | 
 89 |     result = runner.invoke(
 90 |         app,
 91 |         [
 92 |             "project",
 93 |             "add",
 94 |             "test-project",
 95 |             "--local-path",
 96 |             str(local_sync_dir),
 97 |         ],
 98 |     )
 99 | 
100 |     assert result.exit_code == 0, f"Exit code: {result.exit_code}, Stdout: {result.stdout}"
101 |     assert "Project 'test-project' added successfully" in result.stdout
102 |     assert "Local sync path configured" in result.stdout
103 |     # Check path is present (may be line-wrapped in output)
104 |     assert "test-project" in result.stdout
105 |     assert "sync" in result.stdout
106 | 
107 |     # Verify config was updated
108 |     config_data = json.loads(mock_config.read_text())
109 |     assert "test-project" in config_data["cloud_projects"]
110 |     # Use as_posix() for cross-platform compatibility (Windows uses backslashes)
111 |     assert config_data["cloud_projects"]["test-project"]["local_path"] == local_sync_dir.as_posix()
112 |     assert config_data["cloud_projects"]["test-project"]["last_sync"] is None
113 |     assert config_data["cloud_projects"]["test-project"]["bisync_initialized"] is False
114 | 
115 |     # Verify local directory was created
116 |     assert local_sync_dir.exists()
117 |     assert local_sync_dir.is_dir()
118 | 
119 | 
120 | def test_project_add_without_local_path_no_config_entry(runner, mock_config, mock_api_client):
121 |     """Test that bm project add without --local-path doesn't save to config."""
122 |     result = runner.invoke(
123 |         app,
124 |         ["project", "add", "test-project"],
125 |     )
126 | 
127 |     assert result.exit_code == 0
128 |     assert "Project 'test-project' added successfully" in result.stdout
129 |     assert "Local sync path configured" not in result.stdout
130 | 
131 |     # Verify config was NOT updated with cloud_projects entry
132 |     config_data = json.loads(mock_config.read_text())
133 |     assert "test-project" not in config_data.get("cloud_projects", {})
134 | 
135 | 
136 | def test_project_add_local_path_expands_tilde(runner, mock_config, mock_api_client):
137 |     """Test that --local-path ~/path expands to absolute path."""
138 |     result = runner.invoke(
139 |         app,
140 |         ["project", "add", "test-project", "--local-path", "~/test-sync"],
141 |     )
142 | 
143 |     assert result.exit_code == 0
144 | 
145 |     # Verify config has expanded path
146 |     config_data = json.loads(mock_config.read_text())
147 |     local_path = config_data["cloud_projects"]["test-project"]["local_path"]
148 |     # Path should be absolute (starts with / on Unix or drive letter on Windows)
149 |     assert Path(local_path).is_absolute()
150 |     assert "~" not in local_path
151 |     assert local_path.endswith("/test-sync")
152 | 
153 | 
154 | def test_project_add_local_path_creates_nested_directories(
155 |     runner, mock_config, mock_api_client, tmp_path
156 | ):
157 |     """Test that --local-path creates nested directories."""
158 |     nested_path = tmp_path / "a" / "b" / "c" / "test-project"
159 | 
160 |     result = runner.invoke(
161 |         app,
162 |         ["project", "add", "test-project", "--local-path", str(nested_path)],
163 |     )
164 | 
165 |     assert result.exit_code == 0
166 |     assert nested_path.exists()
167 |     assert nested_path.is_dir()
168 | 
```
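
For reference, the shape the assertions above pin down: after `bm project add test-project --local-path <dir>` succeeds, the `cloud_projects` section of config.json carries three keys per project. A sketch with the values the tests expect (the comments on `last_sync` and `bisync_initialized` are inferences, not asserted here):

```python
# Excerpt of ~/.basic-memory/config.json after the command (shape per the assertions)
cloud_projects = {
    "test-project": {
        "local_path": "/home/user/sync/test-project",  # absolute, stored via Path.as_posix()
        "last_sync": None,            # presumably populated by later sync runs
        "bisync_initialized": False,  # presumably flipped once a bisync baseline exists
    }
}
```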

--------------------------------------------------------------------------------
/src/basic_memory/mcp/clients/knowledge.py:
--------------------------------------------------------------------------------

```python
  1 | """Typed client for knowledge/entity API operations.
  2 | 
  3 | Encapsulates all /v2/projects/{project_id}/knowledge/* endpoints.
  4 | """
  5 | 
  6 | from typing import Any
  7 | 
  8 | from httpx import AsyncClient
  9 | 
 10 | from basic_memory.mcp.tools.utils import call_get, call_post, call_put, call_patch, call_delete
 11 | from basic_memory.schemas.response import EntityResponse, DeleteEntitiesResponse
 12 | 
 13 | 
 14 | class KnowledgeClient:
 15 |     """Typed client for knowledge graph entity operations.
 16 | 
 17 |     Centralizes:
 18 |     - API path construction for /v2/projects/{project_id}/knowledge/*
 19 |     - Response validation via Pydantic models
 20 |     - Consistent error handling through call_* utilities
 21 | 
 22 |     Usage:
 23 |         async with get_client() as http_client:
 24 |             client = KnowledgeClient(http_client, project_id)
 25 |             entity = await client.create_entity(entity_data)
 26 |     """
 27 | 
 28 |     def __init__(self, http_client: AsyncClient, project_id: str):
 29 |         """Initialize the knowledge client.
 30 | 
 31 |         Args:
 32 |             http_client: HTTPX AsyncClient for making requests
 33 |             project_id: Project external_id (UUID) for API calls
 34 |         """
 35 |         self.http_client = http_client
 36 |         self.project_id = project_id
 37 |         self._base_path = f"/v2/projects/{project_id}/knowledge"
 38 | 
 39 |     # --- Entity CRUD Operations ---
 40 | 
 41 |     async def create_entity(self, entity_data: dict[str, Any]) -> EntityResponse:
 42 |         """Create a new entity.
 43 | 
 44 |         Args:
 45 |             entity_data: Entity data including title, content, folder, etc.
 46 | 
 47 |         Returns:
 48 |             EntityResponse with created entity details
 49 | 
 50 |         Raises:
 51 |             ToolError: If the request fails
 52 |         """
 53 |         response = await call_post(
 54 |             self.http_client,
 55 |             f"{self._base_path}/entities",
 56 |             json=entity_data,
 57 |         )
 58 |         return EntityResponse.model_validate(response.json())
 59 | 
 60 |     async def update_entity(self, entity_id: str, entity_data: dict[str, Any]) -> EntityResponse:
 61 |         """Update an existing entity (full replacement).
 62 | 
 63 |         Args:
 64 |             entity_id: Entity external_id (UUID)
 65 |             entity_data: Complete entity data for replacement
 66 | 
 67 |         Returns:
 68 |             EntityResponse with updated entity details
 69 | 
 70 |         Raises:
 71 |             ToolError: If the request fails
 72 |         """
 73 |         response = await call_put(
 74 |             self.http_client,
 75 |             f"{self._base_path}/entities/{entity_id}",
 76 |             json=entity_data,
 77 |         )
 78 |         return EntityResponse.model_validate(response.json())
 79 | 
 80 |     async def get_entity(self, entity_id: str) -> EntityResponse:
 81 |         """Get an entity by ID.
 82 | 
 83 |         Args:
 84 |             entity_id: Entity external_id (UUID)
 85 | 
 86 |         Returns:
 87 |             EntityResponse with entity details
 88 | 
 89 |         Raises:
 90 |             ToolError: If the entity is not found or request fails
 91 |         """
 92 |         response = await call_get(
 93 |             self.http_client,
 94 |             f"{self._base_path}/entities/{entity_id}",
 95 |         )
 96 |         return EntityResponse.model_validate(response.json())
 97 | 
 98 |     async def patch_entity(self, entity_id: str, patch_data: dict[str, Any]) -> EntityResponse:
 99 |         """Partially update an entity.
100 | 
101 |         Args:
102 |             entity_id: Entity external_id (UUID)
103 |             patch_data: Partial entity data to update
104 | 
105 |         Returns:
106 |             EntityResponse with updated entity details
107 | 
108 |         Raises:
109 |             ToolError: If the request fails
110 |         """
111 |         response = await call_patch(
112 |             self.http_client,
113 |             f"{self._base_path}/entities/{entity_id}",
114 |             json=patch_data,
115 |         )
116 |         return EntityResponse.model_validate(response.json())
117 | 
118 |     async def delete_entity(self, entity_id: str) -> DeleteEntitiesResponse:
119 |         """Delete an entity.
120 | 
121 |         Args:
122 |             entity_id: Entity external_id (UUID)
123 | 
124 |         Returns:
125 |             DeleteEntitiesResponse confirming deletion
126 | 
127 |         Raises:
128 |             ToolError: If the entity is not found or request fails
129 |         """
130 |         response = await call_delete(
131 |             self.http_client,
132 |             f"{self._base_path}/entities/{entity_id}",
133 |         )
134 |         return DeleteEntitiesResponse.model_validate(response.json())
135 | 
136 |     async def move_entity(self, entity_id: str, destination_path: str) -> EntityResponse:
137 |         """Move an entity to a new location.
138 | 
139 |         Args:
140 |             entity_id: Entity external_id (UUID)
141 |             destination_path: New file path for the entity
142 | 
143 |         Returns:
144 |             EntityResponse with updated entity details
145 | 
146 |         Raises:
147 |             ToolError: If the request fails
148 |         """
149 |         response = await call_put(
150 |             self.http_client,
151 |             f"{self._base_path}/entities/{entity_id}/move",
152 |             json={"destination_path": destination_path},
153 |         )
154 |         return EntityResponse.model_validate(response.json())
155 | 
156 |     # --- Resolution ---
157 | 
158 |     async def resolve_entity(self, identifier: str) -> str:
159 |         """Resolve a string identifier to an entity external_id.
160 | 
161 |         Args:
162 |             identifier: The identifier to resolve (permalink, title, or path)
163 | 
164 |         Returns:
165 |             The resolved entity external_id (UUID)
166 | 
167 |         Raises:
168 |             ToolError: If the identifier cannot be resolved
169 |         """
170 |         response = await call_post(
171 |             self.http_client,
172 |             f"{self._base_path}/resolve",
173 |             json={"identifier": identifier},
174 |         )
175 |         data = response.json()
176 |         return data["external_id"]
177 | 
```
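
A typical flow chains `resolve_entity` with one of the CRUD methods, so callers can hand in a permalink or title instead of a UUID. A short sketch (the project id, identifier, and title change are placeholders):

```python
from basic_memory.mcp.async_client import get_client
from basic_memory.mcp.clients.knowledge import KnowledgeClient

async def rename_note(project_id: str, identifier: str, new_title: str):
    async with get_client() as http_client:
        client = KnowledgeClient(http_client, project_id)
        entity_id = await client.resolve_entity(identifier)  # permalink/title/path -> UUID
        return await client.patch_entity(entity_id, {"title": new_title})
```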

--------------------------------------------------------------------------------
/src/basic_memory/importers/memory_json_importer.py:
--------------------------------------------------------------------------------

```python
  1 | """Memory JSON import service for Basic Memory."""
  2 | 
  3 | import logging
  4 | from typing import Any, Dict, List, Optional
  5 | 
  6 | from basic_memory.markdown.schemas import EntityFrontmatter, EntityMarkdown, Observation, Relation
  7 | from basic_memory.importers.base import Importer
  8 | from basic_memory.schemas.importer import EntityImportResult
  9 | 
 10 | logger = logging.getLogger(__name__)
 11 | 
 12 | 
 13 | class MemoryJsonImporter(Importer[EntityImportResult]):
 14 |     """Service for importing memory.json format data."""
 15 | 
 16 |     def handle_error(  # pragma: no cover
 17 |         self, message: str, error: Optional[Exception] = None
 18 |     ) -> EntityImportResult:
 19 |         """Return a failed EntityImportResult with an error message."""
 20 |         error_msg = f"{message}: {error}" if error else message
 21 |         return EntityImportResult(
 22 |             import_count={},
 23 |             success=False,
 24 |             error_message=error_msg,
 25 |             entities=0,
 26 |             relations=0,
 27 |             skipped_entities=0,
 28 |         )
 29 | 
 30 |     async def import_data(
 31 |         self, source_data, destination_folder: str = "", **kwargs: Any
 32 |     ) -> EntityImportResult:
 33 |         """Import entities and relations from a memory.json file.
 34 | 
 35 |         Args:
  36 |             source_data: Iterable of parsed JSON objects, one per line of a memory.json file.
 37 |             destination_folder: Optional destination folder within the project.
 38 |             **kwargs: Additional keyword arguments.
 39 | 
 40 |         Returns:
 41 |             EntityImportResult containing statistics and status of the import.
 42 |         """
 43 |         try:
 44 |             # First pass - collect all relations by source entity
 45 |             entity_relations: Dict[str, List[Relation]] = {}
 46 |             entities: Dict[str, Dict[str, Any]] = {}
 47 |             skipped_entities: int = 0
 48 | 
 49 |             # Ensure the destination folder exists if provided
 50 |             if destination_folder:  # pragma: no cover
 51 |                 await self.ensure_folder_exists(destination_folder)
 52 | 
 53 |             # First pass - collect entities and relations
  54 |             for data in source_data:
  55 |                 # Each item is one parsed JSON object from the memory.json stream
 56 |                 if data["type"] == "entity":
 57 |                     # Handle different possible name keys
 58 |                     entity_name = data.get("name") or data.get("entityName") or data.get("id")
 59 |                     if not entity_name:
 60 |                         logger.warning(f"Entity missing name field: {data}")  # pragma: no cover
 61 |                         skipped_entities += 1  # pragma: no cover
 62 |                         continue  # pragma: no cover
 63 |                     entities[entity_name] = data
 64 |                 elif data["type"] == "relation":
 65 |                     # Store relation with its source entity
 66 |                     source = data.get("from") or data.get("from_id")
 67 |                     if source not in entity_relations:
 68 |                         entity_relations[source] = []
 69 |                     entity_relations[source].append(
 70 |                         Relation(
 71 |                             type=data.get("relationType") or data.get("relation_type"),
 72 |                             target=data.get("to") or data.get("to_id"),
 73 |                         )
 74 |                     )
 75 | 
 76 |             # Second pass - create and write entities
 77 |             entities_created = 0
 78 |             for name, entity_data in entities.items():
 79 |                 # Get entity type with fallback
 80 |                 entity_type = entity_data.get("entityType") or entity_data.get("type") or "entity"
 81 | 
 82 |                 # Build permalink with optional destination folder prefix
 83 |                 permalink = (
 84 |                     f"{destination_folder}/{entity_type}/{name}"
 85 |                     if destination_folder
 86 |                     else f"{entity_type}/{name}"
 87 |                 )
 88 | 
 89 |                 # Ensure entity type directory exists using FileService with relative path
 90 |                 entity_type_dir = (
 91 |                     f"{destination_folder}/{entity_type}" if destination_folder else entity_type
 92 |                 )
 93 |                 await self.file_service.ensure_directory(entity_type_dir)
 94 | 
 95 |                 # Get observations with fallback to empty list
 96 |                 observations = entity_data.get("observations", [])
 97 | 
 98 |                 entity = EntityMarkdown(
 99 |                     frontmatter=EntityFrontmatter(
100 |                         metadata={
101 |                             "type": entity_type,
102 |                             "title": name,
103 |                             "permalink": permalink,
104 |                         }
105 |                     ),
106 |                     content=f"# {name}\n",
107 |                     observations=[Observation(content=obs) for obs in observations],
108 |                     relations=entity_relations.get(name, []),
109 |                 )
110 | 
111 |                 # Write file using relative path - FileService handles base_path
112 |                 file_path = f"{entity.frontmatter.metadata['permalink']}.md"
113 |                 await self.write_entity(entity, file_path)
114 |                 entities_created += 1
115 | 
116 |             relations_count = sum(len(rels) for rels in entity_relations.values())
117 | 
118 |             return EntityImportResult(
119 |                 import_count={"entities": entities_created, "relations": relations_count},
120 |                 success=True,
121 |                 entities=entities_created,
122 |                 relations=relations_count,
123 |                 skipped_entities=skipped_entities,
124 |             )
125 | 
126 |         except Exception as e:  # pragma: no cover
127 |             logger.exception("Failed to import memory.json")
128 |             return self.handle_error("Failed to import memory.json", e)
129 | 
```
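
For reference, the importer iterates over already-parsed JSON objects in the classic memory.json (JSON Lines) shape. A minimal example of the two record types it handles; the alternate keys in the comments are the fallbacks the code checks:

```python
source_data = [
    {
        "type": "entity",
        "name": "Alice",                      # "entityName" or "id" also accepted
        "entityType": "person",               # falls back to "type", then "entity"
        "observations": ["Works on Basic Memory"],
    },
    {
        "type": "relation",
        "from": "Alice",                      # "from_id" also accepted
        "to": "Basic Memory",                 # "to_id" also accepted
        "relationType": "works_on",           # "relation_type" also accepted
    },
]

# result = await MemoryJsonImporter(...).import_data(source_data)
```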

--------------------------------------------------------------------------------
/tests/api/test_memory_router.py:
--------------------------------------------------------------------------------

```python
  1 | """Tests for memory router endpoints."""
  2 | 
  3 | from datetime import datetime
  4 | 
  5 | import pytest
  6 | 
  7 | from basic_memory.schemas.memory import GraphContext
  8 | 
  9 | 
 10 | @pytest.mark.asyncio
 11 | async def test_get_memory_context(client, test_graph, project_url):
 12 |     """Test getting context from memory URL."""
 13 |     response = await client.get(f"{project_url}/memory/test/root")
 14 |     assert response.status_code == 200
 15 | 
 16 |     context = GraphContext(**response.json())
 17 |     assert len(context.results) == 1
 18 |     assert context.results[0].primary_result.permalink == "test/root"
 19 |     assert len(context.results[0].related_results) > 0
 20 | 
 21 |     # Verify metadata
 22 |     assert context.metadata.uri == "test/root"
 23 |     assert context.metadata.depth == 1  # default depth
 24 |     assert isinstance(context.metadata.generated_at, datetime)
 25 |     assert context.metadata.primary_count + context.metadata.related_count > 0
 26 |     assert context.metadata.total_results is not None  # Backwards compatibility field
 27 | 
 28 | 
 29 | @pytest.mark.asyncio
 30 | async def test_get_memory_context_pagination(client, test_graph, project_url):
 31 |     """Test getting context from memory URL."""
 32 |     response = await client.get(f"{project_url}/memory/test/root?page=1&page_size=1")
 33 |     assert response.status_code == 200
 34 | 
 35 |     context = GraphContext(**response.json())
 36 |     assert len(context.results) == 1
 37 |     assert context.results[0].primary_result.permalink == "test/root"
 38 |     assert len(context.results[0].related_results) > 0
 39 | 
 40 |     # Verify metadata
 41 |     assert context.metadata.uri == "test/root"
 42 |     assert context.metadata.depth == 1  # default depth
 43 |     assert isinstance(context.metadata.generated_at, datetime)
 44 |     assert context.metadata.primary_count > 0
 45 | 
 46 | 
 47 | @pytest.mark.asyncio
 48 | async def test_get_memory_context_pattern(client, test_graph, project_url):
 49 |     """Test getting context with pattern matching."""
 50 |     response = await client.get(f"{project_url}/memory/test/*")
 51 |     assert response.status_code == 200
 52 | 
 53 |     context = GraphContext(**response.json())
 54 |     assert len(context.results) > 1  # Should match multiple test/* paths
 55 |     assert all("test/" in item.primary_result.permalink for item in context.results)
 56 | 
 57 | 
 58 | @pytest.mark.asyncio
 59 | async def test_get_memory_context_depth(client, test_graph, project_url):
 60 |     """Test depth parameter affects relation traversal."""
 61 |     # With depth=1, should only get immediate connections
 62 |     response = await client.get(f"{project_url}/memory/test/root?depth=1&max_results=20")
 63 |     assert response.status_code == 200
 64 |     context1 = GraphContext(**response.json())
 65 | 
 66 |     # With depth=3, should get deeper connections
 67 |     response = await client.get(f"{project_url}/memory/test/root?depth=3&max_results=20")
 68 |     assert response.status_code == 200
 69 |     context2 = GraphContext(**response.json())
 70 | 
 71 |     # Calculate total related items in all result items
 72 |     total_related1 = sum(len(item.related_results) for item in context1.results)
 73 |     total_related2 = sum(len(item.related_results) for item in context2.results)
 74 | 
 75 |     assert total_related2 > total_related1
 76 | 
 77 | 
 78 | @pytest.mark.asyncio
 79 | async def test_get_memory_context_timeframe(client, test_graph, project_url):
 80 |     """Test timeframe parameter filters by date."""
 81 |     # Recent timeframe
 82 |     response = await client.get(f"{project_url}/memory/test/root?timeframe=1d")
 83 |     assert response.status_code == 200
 84 |     recent = GraphContext(**response.json())
 85 | 
 86 |     # Longer timeframe
 87 |     response = await client.get(f"{project_url}/memory/test/root?timeframe=30d")
 88 |     assert response.status_code == 200
 89 |     older = GraphContext(**response.json())
 90 | 
 91 |     # Calculate total related items
 92 |     total_recent_related = (
 93 |         sum(len(item.related_results) for item in recent.results) if recent.results else 0
 94 |     )
 95 |     total_older_related = (
 96 |         sum(len(item.related_results) for item in older.results) if older.results else 0
 97 |     )
 98 | 
 99 |     assert total_older_related >= total_recent_related
100 | 
101 | 
102 | @pytest.mark.asyncio
103 | async def test_not_found(client, project_url):
104 |     """Test handling of non-existent paths."""
105 |     response = await client.get(f"{project_url}/memory/test/does-not-exist")
106 |     assert response.status_code == 200
107 | 
108 |     context = GraphContext(**response.json())
109 |     assert len(context.results) == 0
110 | 
111 | 
112 | @pytest.mark.asyncio
113 | async def test_recent_activity(client, test_graph, project_url):
114 |     """Test handling of recent activity."""
115 |     response = await client.get(f"{project_url}/memory/recent")
116 |     assert response.status_code == 200
117 | 
118 |     context = GraphContext(**response.json())
119 |     assert len(context.results) > 0
120 |     assert context.metadata.primary_count > 0
121 | 
122 | 
123 | @pytest.mark.asyncio
124 | async def test_recent_activity_pagination(client, test_graph, project_url):
125 |     """Test pagination for recent activity."""
126 |     response = await client.get(f"{project_url}/memory/recent?page=1&page_size=1")
127 |     assert response.status_code == 200
128 | 
129 |     context = GraphContext(**response.json())
130 |     assert len(context.results) == 1
131 |     assert context.page == 1
132 |     assert context.page_size == 1
133 | 
134 | 
135 | @pytest.mark.asyncio
136 | async def test_recent_activity_by_type(client, test_graph, project_url):
137 |     """Test filtering recent activity by type."""
138 |     response = await client.get(f"{project_url}/memory/recent?type=relation&type=observation")
139 |     assert response.status_code == 200
140 | 
141 |     context = GraphContext(**response.json())
142 |     assert len(context.results) > 0
143 | 
144 |     # Check for relation and observation types in primary results
145 |     primary_types = [item.primary_result.type for item in context.results]
146 |     assert "relation" in primary_types or "observation" in primary_types
147 | 
```
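
Beyond regression coverage, these tests document the memory context API surface: permalink and wildcard URLs plus `depth`, `timeframe`, `max_results`, `page`/`page_size`, and repeated `type` query parameters. As a minimal sketch of calling the same endpoints outside the test client (the base URL and project prefix are hypothetical stand-ins for the fixture-provided values):

```python
# Minimal sketch of querying the memory context endpoints directly.
# The base URL and project prefix are hypothetical; the query parameters
# mirror the ones exercised by the tests above.
import asyncio

import httpx

from basic_memory.schemas.memory import GraphContext


async def main() -> None:
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        # Deeper traversal of a single permalink, capped at 20 results.
        resp = await client.get(
            "/my-project/memory/test/root",
            params={"depth": 2, "max_results": 20},
        )
        resp.raise_for_status()
        context = GraphContext(**resp.json())
        print(context.metadata.primary_count, context.metadata.related_count)

        # Recent activity, filtered by item type and paginated. Repeated
        # "type" keys are passed as a list of tuples.
        resp = await client.get(
            "/my-project/memory/recent",
            params=[("type", "relation"), ("type", "observation"), ("page", 1), ("page_size", 10)],
        )
        resp.raise_for_status()
        print(GraphContext(**resp.json()).page)


asyncio.run(main())
```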

--------------------------------------------------------------------------------
/tests/markdown/test_markdown_processor.py:
--------------------------------------------------------------------------------

```python
  1 | """Tests for MarkdownProcessor.
  2 | 
  3 | Tests focus on the Read -> Modify -> Write pattern and content preservation.
  4 | """
  5 | 
  6 | from datetime import datetime
  7 | from pathlib import Path
  8 | 
  9 | import pytest
 10 | 
 11 | from basic_memory.markdown.markdown_processor import DirtyFileError, MarkdownProcessor
 12 | from basic_memory.markdown.schemas import (
 13 |     EntityFrontmatter,
 14 |     EntityMarkdown,
 15 |     Observation,
 16 |     Relation,
 17 | )
 18 | 
 19 | 
 20 | @pytest.mark.asyncio
 21 | async def test_write_new_minimal_file(markdown_processor: MarkdownProcessor, tmp_path: Path):
 22 |     """Test creating new file with just title."""
 23 |     path = tmp_path / "test.md"
 24 | 
 25 |     # Create minimal markdown schema
 26 |     metadata = {}
 27 |     metadata["title"] = "Test Note"
 28 |     metadata["type"] = "note"
 29 |     metadata["permalink"] = "test"
 30 |     metadata["created"] = datetime(2024, 1, 1)
 31 |     metadata["modified"] = datetime(2024, 1, 1)
 32 |     metadata["tags"] = ["test"]
 33 |     markdown = EntityMarkdown(
 34 |         frontmatter=EntityFrontmatter(
 35 |             metadata=metadata,
 36 |         ),
 37 |         content="",
 38 |     )
 39 | 
 40 |     # Write file
 41 |     await markdown_processor.write_file(path, markdown)
 42 | 
 43 |     # Read back and verify
 44 |     content = path.read_text(encoding="utf-8")
 45 |     assert "---" in content  # Has frontmatter
 46 |     assert "type: note" in content
 47 |     assert "permalink: test" in content
 48 |     assert "# Test Note" in content  # Added title
 49 |     assert "tags:" in content
 50 |     assert "- test" in content
 51 | 
 52 |     # Should not have empty sections
 53 |     assert "## Observations" not in content
 54 |     assert "## Relations" not in content
 55 | 
 56 | 
 57 | @pytest.mark.asyncio
 58 | async def test_write_new_file_with_content(markdown_processor: MarkdownProcessor, tmp_path: Path):
 59 |     """Test creating new file with content and sections."""
 60 |     path = tmp_path / "test.md"
 61 | 
 62 |     # Create markdown with content and sections
 63 |     markdown = EntityMarkdown(
 64 |         frontmatter=EntityFrontmatter(
 65 |             type="note",
 66 |             permalink="test",
 67 |             title="Test Note",
 68 |             created=datetime(2024, 1, 1),
 69 |             modified=datetime(2024, 1, 1),
 70 |         ),
 71 |         content="# Custom Title\n\nMy content here.\nMultiple lines.",
 72 |         observations=[
 73 |             Observation(
 74 |                 content="Test observation #test",
 75 |                 category="tech",
 76 |                 tags=["test"],
 77 |                 context="test context",
 78 |             ),
 79 |         ],
 80 |         relations=[
 81 |             Relation(
 82 |                 type="relates_to",
 83 |                 target="other-note",
 84 |                 context="test relation",
 85 |             ),
 86 |         ],
 87 |     )
 88 | 
 89 |     # Write file
 90 |     await markdown_processor.write_file(path, markdown)
 91 | 
 92 |     # Read back and verify
 93 |     content = path.read_text(encoding="utf-8")
 94 | 
 95 |     # Check content preserved exactly
 96 |     assert "# Custom Title" in content
 97 |     assert "My content here." in content
 98 |     assert "Multiple lines." in content
 99 | 
100 |     # Check sections formatted correctly
101 |     assert "- [tech] Test observation #test (test context)" in content
102 |     assert "- relates_to [[other-note]] (test relation)" in content
103 | 
104 | 
105 | @pytest.mark.asyncio
106 | async def test_update_preserves_content(markdown_processor: MarkdownProcessor, tmp_path: Path):
107 |     """Test that updating file preserves existing content."""
108 |     path = tmp_path / "test.md"
109 | 
110 |     # Create initial file
111 |     initial = EntityMarkdown(
112 |         frontmatter=EntityFrontmatter(
113 |             type="note",
114 |             permalink="test",
115 |             title="Test Note",
116 |             created=datetime(2024, 1, 1),
117 |             modified=datetime(2024, 1, 1),
118 |         ),
119 |         content="# My Note\n\nOriginal content here.",
120 |         observations=[
121 |             Observation(content="First observation", category="note"),
122 |         ],
123 |     )
124 | 
125 |     checksum = await markdown_processor.write_file(path, initial)
126 | 
127 |     # Update with new observation
128 |     updated = EntityMarkdown(
129 |         frontmatter=initial.frontmatter,
130 |         content=initial.content,  # Preserve original content
131 |         observations=[
132 |             initial.observations[0],  # Keep original observation
133 |             Observation(content="Second observation", category="tech"),  # Add new one
134 |         ],
135 |     )
136 | 
137 |     # Update file
138 |     await markdown_processor.write_file(path, updated, expected_checksum=checksum)
139 | 
140 |     # Read back and verify
141 |     result = await markdown_processor.read_file(path)
142 | 
143 |     # Original content preserved
144 |     assert "Original content here." in result.content
145 | 
146 |     # Both observations present
147 |     assert len(result.observations) == 2
148 |     assert any(o.content == "First observation" for o in result.observations)
149 |     assert any(o.content == "Second observation" for o in result.observations)
150 | 
151 | 
152 | @pytest.mark.asyncio
153 | async def test_dirty_file_detection(markdown_processor: MarkdownProcessor, tmp_path: Path):
154 |     """Test detection of file modifications."""
155 |     path = tmp_path / "test.md"
156 | 
157 |     # Create initial file
158 |     initial = EntityMarkdown(
159 |         frontmatter=EntityFrontmatter(
160 |             type="note",
161 |             permalink="test",
162 |             title="Test Note",
163 |             created=datetime(2024, 1, 1),
164 |             modified=datetime(2024, 1, 1),
165 |         ),
166 |         content="Initial content",
167 |     )
168 | 
169 |     checksum = await markdown_processor.write_file(path, initial)
170 | 
171 |     # Modify file directly
172 |     path.write_text(path.read_text(encoding="utf-8") + "\nModified!", encoding="utf-8")
173 | 
174 |     # Try to update with old checksum
175 |     update = EntityMarkdown(
176 |         frontmatter=initial.frontmatter,
177 |         content="New content",
178 |     )
179 | 
180 |     # Should raise DirtyFileError
181 |     with pytest.raises(DirtyFileError):
182 |         await markdown_processor.write_file(path, update, expected_checksum=checksum)
183 | 
184 |     # Should succeed without checksum
185 |     new_checksum = await markdown_processor.write_file(path, update)
186 |     assert new_checksum != checksum
187 | 
```
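
Together these tests pin down the concurrency contract: `write_file` returns the new checksum, a later write that passes `expected_checksum` raises `DirtyFileError` if the file changed on disk in the meantime, and omitting the checksum forces the write. A minimal sketch of a Read -> Modify -> Write loop built on that contract follows; the processor and path are stand-ins for the fixtures, and the `EntityMarkdown` field names match those used in the tests above.

```python
# Sketch of the Read -> Modify -> Write pattern verified by the tests above.
# Assumes the same MarkdownProcessor API the tests exercise: read_file(),
# write_file(path, markdown, expected_checksum=...), and DirtyFileError.
from pathlib import Path

from basic_memory.markdown.markdown_processor import MarkdownProcessor
from basic_memory.markdown.schemas import EntityMarkdown, Observation


async def append_observation(
    processor: MarkdownProcessor,
    path: Path,
    note: str,
    expected_checksum: str | None,
) -> str:
    # Read: load the current document state.
    current = await processor.read_file(path)

    # Modify: preserve content and prior observations, append the new one.
    updated = EntityMarkdown(
        frontmatter=current.frontmatter,
        content=current.content,
        observations=[*current.observations, Observation(content=note)],
        relations=current.relations,
    )

    # Write: with expected_checksum set (e.g. from a prior write_file call),
    # the write is conditional and raises DirtyFileError if the file changed
    # underneath us; with None it overwrites unconditionally.
    return await processor.write_file(path, updated, expected_checksum=expected_checksum)
```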

--------------------------------------------------------------------------------
/src/basic_memory/deps/repositories.py:
--------------------------------------------------------------------------------

```python
  1 | """Repository dependency injection for basic-memory.
  2 | 
  3 | This module provides repository dependencies:
  4 | - EntityRepository
  5 | - ObservationRepository
  6 | - RelationRepository
  7 | - SearchRepository
  8 | 
  9 | Each repository is scoped to a project ID from the request.
 10 | """
 11 | 
 12 | from typing import Annotated
 13 | 
 14 | from fastapi import Depends
 15 | 
 16 | from basic_memory.deps.db import SessionMakerDep
 17 | from basic_memory.deps.projects import (
 18 |     ProjectIdDep,
 19 |     ProjectIdPathDep,
 20 |     ProjectExternalIdPathDep,
 21 | )
 22 | from basic_memory.repository.entity_repository import EntityRepository
 23 | from basic_memory.repository.observation_repository import ObservationRepository
 24 | from basic_memory.repository.relation_repository import RelationRepository
 25 | from basic_memory.repository.search_repository import SearchRepository, create_search_repository
 26 | 
 27 | 
 28 | # --- Entity Repository ---
 29 | 
 30 | 
 31 | async def get_entity_repository(
 32 |     session_maker: SessionMakerDep,
 33 |     project_id: ProjectIdDep,
 34 | ) -> EntityRepository:
 35 |     """Create an EntityRepository instance for the current project."""
 36 |     return EntityRepository(session_maker, project_id=project_id)
 37 | 
 38 | 
 39 | EntityRepositoryDep = Annotated[EntityRepository, Depends(get_entity_repository)]
 40 | 
 41 | 
 42 | async def get_entity_repository_v2(  # pragma: no cover
 43 |     session_maker: SessionMakerDep,
 44 |     project_id: ProjectIdPathDep,
 45 | ) -> EntityRepository:
 46 |     """Create an EntityRepository instance for v2 API (uses integer project_id from path)."""
 47 |     return EntityRepository(session_maker, project_id=project_id)
 48 | 
 49 | 
 50 | EntityRepositoryV2Dep = Annotated[EntityRepository, Depends(get_entity_repository_v2)]
 51 | 
 52 | 
 53 | async def get_entity_repository_v2_external(
 54 |     session_maker: SessionMakerDep,
 55 |     project_id: ProjectExternalIdPathDep,
 56 | ) -> EntityRepository:
 57 |     """Create an EntityRepository instance for v2 API (uses external_id from path)."""
 58 |     return EntityRepository(session_maker, project_id=project_id)
 59 | 
 60 | 
 61 | EntityRepositoryV2ExternalDep = Annotated[
 62 |     EntityRepository, Depends(get_entity_repository_v2_external)
 63 | ]
 64 | 
 65 | 
 66 | # --- Observation Repository ---
 67 | 
 68 | 
 69 | async def get_observation_repository(
 70 |     session_maker: SessionMakerDep,
 71 |     project_id: ProjectIdDep,
 72 | ) -> ObservationRepository:
 73 |     """Create an ObservationRepository instance for the current project."""
 74 |     return ObservationRepository(session_maker, project_id=project_id)
 75 | 
 76 | 
 77 | ObservationRepositoryDep = Annotated[ObservationRepository, Depends(get_observation_repository)]
 78 | 
 79 | 
 80 | async def get_observation_repository_v2(  # pragma: no cover
 81 |     session_maker: SessionMakerDep,
 82 |     project_id: ProjectIdPathDep,
 83 | ) -> ObservationRepository:
 84 |     """Create an ObservationRepository instance for v2 API."""
 85 |     return ObservationRepository(session_maker, project_id=project_id)
 86 | 
 87 | 
 88 | ObservationRepositoryV2Dep = Annotated[
 89 |     ObservationRepository, Depends(get_observation_repository_v2)
 90 | ]
 91 | 
 92 | 
 93 | async def get_observation_repository_v2_external(
 94 |     session_maker: SessionMakerDep,
 95 |     project_id: ProjectExternalIdPathDep,
 96 | ) -> ObservationRepository:
 97 |     """Create an ObservationRepository instance for v2 API (uses external_id)."""
 98 |     return ObservationRepository(session_maker, project_id=project_id)
 99 | 
100 | 
101 | ObservationRepositoryV2ExternalDep = Annotated[
102 |     ObservationRepository, Depends(get_observation_repository_v2_external)
103 | ]
104 | 
105 | 
106 | # --- Relation Repository ---
107 | 
108 | 
109 | async def get_relation_repository(
110 |     session_maker: SessionMakerDep,
111 |     project_id: ProjectIdDep,
112 | ) -> RelationRepository:
113 |     """Create a RelationRepository instance for the current project."""
114 |     return RelationRepository(session_maker, project_id=project_id)
115 | 
116 | 
117 | RelationRepositoryDep = Annotated[RelationRepository, Depends(get_relation_repository)]
118 | 
119 | 
120 | async def get_relation_repository_v2(  # pragma: no cover
121 |     session_maker: SessionMakerDep,
122 |     project_id: ProjectIdPathDep,
123 | ) -> RelationRepository:
124 |     """Create a RelationRepository instance for v2 API."""
125 |     return RelationRepository(session_maker, project_id=project_id)
126 | 
127 | 
128 | RelationRepositoryV2Dep = Annotated[RelationRepository, Depends(get_relation_repository_v2)]
129 | 
130 | 
131 | async def get_relation_repository_v2_external(
132 |     session_maker: SessionMakerDep,
133 |     project_id: ProjectExternalIdPathDep,
134 | ) -> RelationRepository:
135 |     """Create a RelationRepository instance for v2 API (uses external_id)."""
136 |     return RelationRepository(session_maker, project_id=project_id)
137 | 
138 | 
139 | RelationRepositoryV2ExternalDep = Annotated[
140 |     RelationRepository, Depends(get_relation_repository_v2_external)
141 | ]
142 | 
143 | 
144 | # --- Search Repository ---
145 | 
146 | 
147 | async def get_search_repository(
148 |     session_maker: SessionMakerDep,
149 |     project_id: ProjectIdDep,
150 | ) -> SearchRepository:
151 |     """Create a backend-specific SearchRepository instance for the current project.
152 | 
153 |     Uses factory function to return SQLiteSearchRepository or PostgresSearchRepository
154 |     based on database backend configuration.
155 |     """
156 |     return create_search_repository(session_maker, project_id=project_id)
157 | 
158 | 
159 | SearchRepositoryDep = Annotated[SearchRepository, Depends(get_search_repository)]
160 | 
161 | 
162 | async def get_search_repository_v2(  # pragma: no cover
163 |     session_maker: SessionMakerDep,
164 |     project_id: ProjectIdPathDep,
165 | ) -> SearchRepository:
166 |     """Create a SearchRepository instance for v2 API."""
167 |     return create_search_repository(session_maker, project_id=project_id)
168 | 
169 | 
170 | SearchRepositoryV2Dep = Annotated[SearchRepository, Depends(get_search_repository_v2)]
171 | 
172 | 
173 | async def get_search_repository_v2_external(
174 |     session_maker: SessionMakerDep,
175 |     project_id: ProjectExternalIdPathDep,
176 | ) -> SearchRepository:
177 |     """Create a SearchRepository instance for v2 API (uses external_id)."""
178 |     return create_search_repository(session_maker, project_id=project_id)
179 | 
180 | 
181 | SearchRepositoryV2ExternalDep = Annotated[
182 |     SearchRepository, Depends(get_search_repository_v2_external)
183 | ]
184 | 
```
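
As a usage sketch, these `Annotated` aliases let route handlers pull project-scoped repositories straight from their signatures; FastAPI resolves the session maker and project ID per request. The route path and the `get_by_permalink()` helper below are assumptions for illustration, not taken from this repository's routers.

```python
# Hypothetical route showing how the Annotated dependency aliases are consumed.
# FastAPI resolves the session maker and project ID per request and hands the
# handler a repository already scoped to that project.
from fastapi import APIRouter

from basic_memory.deps.repositories import EntityRepositoryDep

router = APIRouter()


@router.get("/example/entities/{permalink}")
async def read_entity(permalink: str, entity_repository: EntityRepositoryDep) -> dict:
    # The repository was constructed with the current project's ID, so this
    # lookup cannot return rows from another project.
    entity = await entity_repository.get_by_permalink(permalink)  # assumed method name
    return {"found": entity is not None}
```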

--------------------------------------------------------------------------------
/tests/services/test_project_removal_bug.py:
--------------------------------------------------------------------------------

```python
  1 | """Test for project removal bug #254."""
  2 | 
  3 | import os
  4 | import tempfile
  5 | from datetime import datetime, timezone
  6 | from pathlib import Path
  7 | 
  8 | import pytest
  9 | 
 10 | from basic_memory.services.project_service import ProjectService
 11 | 
 12 | 
 13 | @pytest.mark.asyncio
 14 | async def test_remove_project_with_related_entities(project_service: ProjectService):
 15 |     """Test removing a project that has related entities (reproduces issue #254).
 16 | 
 17 |     This test verifies that projects with related entities (entities, observations, relations)
 18 |     can be properly deleted without foreign key constraint violations.
 19 | 
 20 |     The bug was caused by missing foreign key constraints with CASCADE DELETE after
 21 |     the project table was recreated in migration 647e7a75e2cd.
 22 |     """
 23 |     test_project_name = f"test-remove-with-entities-{os.urandom(4).hex()}"
 24 |     with tempfile.TemporaryDirectory() as temp_dir:
 25 |         test_root = Path(temp_dir)
 26 |         test_project_path = str(test_root / "test-remove-with-entities")
 27 | 
 28 |         # Make sure the test directory exists
 29 |         os.makedirs(test_project_path, exist_ok=True)
 30 | 
 31 |         try:
 32 |             # Step 1: Add the test project
 33 |             await project_service.add_project(test_project_name, test_project_path)
 34 | 
 35 |             # Verify project exists
 36 |             project = await project_service.get_project(test_project_name)
 37 |             assert project is not None
 38 | 
 39 |             # Step 2: Create related entities for this project
 40 |             from basic_memory.repository.entity_repository import EntityRepository
 41 | 
 42 |             entity_repo = EntityRepository(
 43 |                 project_service.repository.session_maker, project_id=project.id
 44 |             )
 45 | 
 46 |             entity_data = {
 47 |                 "title": "Test Entity for Deletion",
 48 |                 "entity_type": "note",
 49 |                 "content_type": "text/markdown",
 50 |                 "project_id": project.id,
 51 |                 "permalink": "test-deletion-entity",
 52 |                 "file_path": "test-deletion-entity.md",
 53 |                 "checksum": "test123",
 54 |                 "created_at": datetime.now(timezone.utc),
 55 |                 "updated_at": datetime.now(timezone.utc),
 56 |             }
 57 |             entity = await entity_repo.create(entity_data)
 58 |             assert entity is not None
 59 | 
 60 |             # Step 3: Create observations for the entity
 61 |             from basic_memory.repository.observation_repository import ObservationRepository
 62 | 
 63 |             obs_repo = ObservationRepository(
 64 |                 project_service.repository.session_maker, project_id=project.id
 65 |             )
 66 | 
 67 |             observation_data = {
 68 |                 "entity_id": entity.id,
 69 |                 "content": "This is a test observation",
 70 |                 "category": "note",
 71 |             }
 72 |             observation = await obs_repo.create(observation_data)
 73 |             assert observation is not None
 74 | 
 75 |             # Step 4: Create relations involving the entity
 76 |             from basic_memory.repository.relation_repository import RelationRepository
 77 | 
 78 |             rel_repo = RelationRepository(
 79 |                 project_service.repository.session_maker, project_id=project.id
 80 |             )
 81 | 
 82 |             relation_data = {
 83 |                 "from_id": entity.id,
 84 |                 "to_name": "some-target-entity",
 85 |                 "relation_type": "relates-to",
 86 |             }
 87 |             relation = await rel_repo.create(relation_data)
 88 |             assert relation is not None
 89 | 
 90 |             # Step 5: Attempt to remove the project
 91 |             # This should succeed with proper cascade deletes, or fail with a foreign key violation
 92 |             await project_service.remove_project(test_project_name)
 93 | 
 94 |             # Step 6: Verify everything was properly deleted
 95 | 
 96 |             # Project should be gone
 97 |             removed_project = await project_service.get_project(test_project_name)
 98 |             assert removed_project is None, "Project should have been removed"
 99 | 
100 |             # Related entities should be cascade deleted
101 |             remaining_entity = await entity_repo.find_by_id(entity.id)
102 |             assert remaining_entity is None, "Entity should have been cascade deleted"
103 | 
104 |             # Observations should be cascade deleted
105 |             remaining_obs = await obs_repo.find_by_id(observation.id)
106 |             assert remaining_obs is None, "Observation should have been cascade deleted"
107 | 
108 |             # Relations should be cascade deleted
109 |             remaining_rel = await rel_repo.find_by_id(relation.id)
110 |             assert remaining_rel is None, "Relation should have been cascade deleted"
111 | 
112 |         except Exception as e:
113 |             # Check if this is the specific foreign key constraint error from the bug report
114 |             if "FOREIGN KEY constraint failed" in str(e):
115 |                 pytest.fail(
116 |                     f"Bug #254 reproduced: {e}. "
117 |                     "This indicates missing foreign key constraints with CASCADE DELETE. "
118 |                     "Run migration a1b2c3d4e5f6_fix_project_foreign_keys.py to fix this."
119 |                 )
120 |             else:
121 |                 # Re-raise other unexpected errors
122 |                 raise e
123 | 
124 |         finally:
125 |             # Clean up - remove project if it still exists
126 |             if test_project_name in project_service.projects:
127 |                 try:
128 |                     await project_service.remove_project(test_project_name)
129 |                 except Exception:
130 |                     # Manual cleanup if remove_project fails
131 |                     try:
132 |                         project_service.config_manager.remove_project(test_project_name)
133 |                     except Exception:
134 |                         pass
135 | 
136 |                     project = await project_service.get_project(test_project_name)
137 |                     if project:
138 |                         await project_service.repository.delete(project.id)
139 | 
```
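
The fix the docstring refers to hinges on child tables declaring `ON DELETE CASCADE` on their foreign keys, so deleting a project row removes dependent entities, observations, and relations instead of tripping the constraint. Below is a minimal self-contained sketch of that behavior with illustrative table names, not the actual basic-memory schema:

```python
# Minimal sketch of the CASCADE DELETE behavior whose absence caused bug #254.
# Table and column names are illustrative, not the actual basic-memory schema.
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine, event
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Project(Base):
    __tablename__ = "project"
    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False)


class Entity(Base):
    __tablename__ = "entity"
    id = Column(Integer, primary_key=True)
    # ondelete="CASCADE" is the crucial part: without it, deleting a project
    # that still has entities fails with "FOREIGN KEY constraint failed".
    project_id = Column(Integer, ForeignKey("project.id", ondelete="CASCADE"), nullable=False)


engine = create_engine("sqlite://")


# SQLite enforces (and cascades) foreign keys only when the pragma is on,
# so enable it on every new connection.
@event.listens_for(engine, "connect")
def _enable_fks(dbapi_conn, _connection_record):
    dbapi_conn.execute("PRAGMA foreign_keys=ON")


Base.metadata.create_all(engine)

with engine.begin() as conn:
    conn.exec_driver_sql("INSERT INTO project (id, name) VALUES (1, 'demo')")
    conn.exec_driver_sql("INSERT INTO entity (id, project_id) VALUES (1, 1)")
    conn.exec_driver_sql("DELETE FROM project WHERE id = 1")
    # The child row went with the parent: cascade delete worked.
    assert conn.exec_driver_sql("SELECT COUNT(*) FROM entity").scalar_one() == 0
```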
Page 4/27