This is page 20 of 27. Use http://codebase.md/basicmachines-co/basic-memory?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .claude
│   ├── commands
│   │   ├── release
│   │   │   ├── beta.md
│   │   │   ├── changelog.md
│   │   │   ├── release-check.md
│   │   │   └── release.md
│   │   ├── spec.md
│   │   └── test-live.md
│   └── settings.json
├── .dockerignore
├── .env.example
├── .github
│   ├── dependabot.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   ├── config.yml
│   │   ├── documentation.md
│   │   └── feature_request.md
│   └── workflows
│       ├── claude-code-review.yml
│       ├── claude-issue-triage.yml
│       ├── claude.yml
│       ├── dev-release.yml
│       ├── docker.yml
│       ├── pr-title.yml
│       ├── release.yml
│       └── test.yml
├── .gitignore
├── .python-version
├── CHANGELOG.md
├── CITATION.cff
├── CLA.md
├── CLAUDE.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── docker-compose-postgres.yml
├── docker-compose.yml
├── Dockerfile
├── docs
│   ├── ai-assistant-guide-extended.md
│   ├── ARCHITECTURE.md
│   ├── character-handling.md
│   ├── cloud-cli.md
│   ├── Docker.md
│   └── testing-coverage.md
├── justfile
├── LICENSE
├── llms-install.md
├── pyproject.toml
├── README.md
├── SECURITY.md
├── smithery.yaml
├── specs
│   ├── SPEC-1 Specification-Driven Development Process.md
│   ├── SPEC-10 Unified Deployment Workflow and Event Tracking.md
│   ├── SPEC-11 Basic Memory API Performance Optimization.md
│   ├── SPEC-12 OpenTelemetry Observability.md
│   ├── SPEC-13 CLI Authentication with Subscription Validation.md
│   ├── SPEC-14 Cloud Git Versioning & GitHub Backup.md
│   ├── SPEC-14- Cloud Git Versioning & GitHub Backup.md
│   ├── SPEC-15 Configuration Persistence via Tigris for Cloud Tenants.md
│   ├── SPEC-16 MCP Cloud Service Consolidation.md
│   ├── SPEC-17 Semantic Search with ChromaDB.md
│   ├── SPEC-18 AI Memory Management Tool.md
│   ├── SPEC-19 Sync Performance and Memory Optimization.md
│   ├── SPEC-2 Slash Commands Reference.md
│   ├── SPEC-20 Simplified Project-Scoped Rclone Sync.md
│   ├── SPEC-3 Agent Definitions.md
│   ├── SPEC-4 Notes Web UI Component Architecture.md
│   ├── SPEC-5 CLI Cloud Upload via WebDAV.md
│   ├── SPEC-6 Explicit Project Parameter Architecture.md
│   ├── SPEC-7 POC to spike Tigris Turso for local access to cloud data.md
│   ├── SPEC-8 TigrisFS Integration.md
│   ├── SPEC-9 Multi-Project Bidirectional Sync Architecture.md
│   ├── SPEC-9 Signed Header Tenant Information.md
│   └── SPEC-9-1 Follow-Ups- Conflict, Sync, and Observability.md
├── src
│   └── basic_memory
│       ├── __init__.py
│       ├── alembic
│       │   ├── alembic.ini
│       │   ├── env.py
│       │   ├── migrations.py
│       │   ├── script.py.mako
│       │   └── versions
│       │       ├── 314f1ea54dc4_add_postgres_full_text_search_support_.py
│       │       ├── 3dae7c7b1564_initial_schema.py
│       │       ├── 502b60eaa905_remove_required_from_entity_permalink.py
│       │       ├── 5fe1ab1ccebe_add_projects_table.py
│       │       ├── 647e7a75e2cd_project_constraint_fix.py
│       │       ├── 6830751f5fb6_merge_multiple_heads.py
│       │       ├── 9d9c1cb7d8f5_add_mtime_and_size_columns_to_entity_.py
│       │       ├── a1b2c3d4e5f6_fix_project_foreign_keys.py
│       │       ├── a2b3c4d5e6f7_add_search_index_entity_cascade.py
│       │       ├── b3c3938bacdb_relation_to_name_unique_index.py
│       │       ├── cc7172b46608_update_search_index_schema.py
│       │       ├── e7e1f4367280_add_scan_watermark_tracking_to_project.py
│       │       ├── f8a9b2c3d4e5_add_pg_trgm_for_fuzzy_link_resolution.py
│       │       └── g9a0b3c4d5e6_add_external_id_to_project_and_entity.py
│       ├── api
│       │   ├── __init__.py
│       │   ├── app.py
│       │   ├── container.py
│       │   ├── routers
│       │   │   ├── __init__.py
│       │   │   ├── directory_router.py
│       │   │   ├── importer_router.py
│       │   │   ├── knowledge_router.py
│       │   │   ├── management_router.py
│       │   │   ├── memory_router.py
│       │   │   ├── project_router.py
│       │   │   ├── prompt_router.py
│       │   │   ├── resource_router.py
│       │   │   ├── search_router.py
│       │   │   └── utils.py
│       │   ├── template_loader.py
│       │   └── v2
│       │       ├── __init__.py
│       │       └── routers
│       │           ├── __init__.py
│       │           ├── directory_router.py
│       │           ├── importer_router.py
│       │           ├── knowledge_router.py
│       │           ├── memory_router.py
│       │           ├── project_router.py
│       │           ├── prompt_router.py
│       │           ├── resource_router.py
│       │           └── search_router.py
│       ├── cli
│       │   ├── __init__.py
│       │   ├── app.py
│       │   ├── auth.py
│       │   ├── commands
│       │   │   ├── __init__.py
│       │   │   ├── cloud
│       │   │   │   ├── __init__.py
│       │   │   │   ├── api_client.py
│       │   │   │   ├── bisync_commands.py
│       │   │   │   ├── cloud_utils.py
│       │   │   │   ├── core_commands.py
│       │   │   │   ├── rclone_commands.py
│       │   │   │   ├── rclone_config.py
│       │   │   │   ├── rclone_installer.py
│       │   │   │   ├── upload_command.py
│       │   │   │   └── upload.py
│       │   │   ├── command_utils.py
│       │   │   ├── db.py
│       │   │   ├── format.py
│       │   │   ├── import_chatgpt.py
│       │   │   ├── import_claude_conversations.py
│       │   │   ├── import_claude_projects.py
│       │   │   ├── import_memory_json.py
│       │   │   ├── mcp.py
│       │   │   ├── project.py
│       │   │   ├── status.py
│       │   │   ├── telemetry.py
│       │   │   └── tool.py
│       │   ├── container.py
│       │   └── main.py
│       ├── config.py
│       ├── db.py
│       ├── deps
│       │   ├── __init__.py
│       │   ├── config.py
│       │   ├── db.py
│       │   ├── importers.py
│       │   ├── projects.py
│       │   ├── repositories.py
│       │   └── services.py
│       ├── deps.py
│       ├── file_utils.py
│       ├── ignore_utils.py
│       ├── importers
│       │   ├── __init__.py
│       │   ├── base.py
│       │   ├── chatgpt_importer.py
│       │   ├── claude_conversations_importer.py
│       │   ├── claude_projects_importer.py
│       │   ├── memory_json_importer.py
│       │   └── utils.py
│       ├── markdown
│       │   ├── __init__.py
│       │   ├── entity_parser.py
│       │   ├── markdown_processor.py
│       │   ├── plugins.py
│       │   ├── schemas.py
│       │   └── utils.py
│       ├── mcp
│       │   ├── __init__.py
│       │   ├── async_client.py
│       │   ├── clients
│       │   │   ├── __init__.py
│       │   │   ├── directory.py
│       │   │   ├── knowledge.py
│       │   │   ├── memory.py
│       │   │   ├── project.py
│       │   │   ├── resource.py
│       │   │   └── search.py
│       │   ├── container.py
│       │   ├── project_context.py
│       │   ├── prompts
│       │   │   ├── __init__.py
│       │   │   ├── ai_assistant_guide.py
│       │   │   ├── continue_conversation.py
│       │   │   ├── recent_activity.py
│       │   │   ├── search.py
│       │   │   └── utils.py
│       │   ├── resources
│       │   │   ├── ai_assistant_guide.md
│       │   │   └── project_info.py
│       │   ├── server.py
│       │   └── tools
│       │       ├── __init__.py
│       │       ├── build_context.py
│       │       ├── canvas.py
│       │       ├── chatgpt_tools.py
│       │       ├── delete_note.py
│       │       ├── edit_note.py
│       │       ├── list_directory.py
│       │       ├── move_note.py
│       │       ├── project_management.py
│       │       ├── read_content.py
│       │       ├── read_note.py
│       │       ├── recent_activity.py
│       │       ├── search.py
│       │       ├── utils.py
│       │       ├── view_note.py
│       │       └── write_note.py
│       ├── models
│       │   ├── __init__.py
│       │   ├── base.py
│       │   ├── knowledge.py
│       │   ├── project.py
│       │   └── search.py
│       ├── project_resolver.py
│       ├── repository
│       │   ├── __init__.py
│       │   ├── entity_repository.py
│       │   ├── observation_repository.py
│       │   ├── postgres_search_repository.py
│       │   ├── project_info_repository.py
│       │   ├── project_repository.py
│       │   ├── relation_repository.py
│       │   ├── repository.py
│       │   ├── search_index_row.py
│       │   ├── search_repository_base.py
│       │   ├── search_repository.py
│       │   └── sqlite_search_repository.py
│       ├── runtime.py
│       ├── schemas
│       │   ├── __init__.py
│       │   ├── base.py
│       │   ├── cloud.py
│       │   ├── delete.py
│       │   ├── directory.py
│       │   ├── importer.py
│       │   ├── memory.py
│       │   ├── project_info.py
│       │   ├── prompt.py
│       │   ├── request.py
│       │   ├── response.py
│       │   ├── search.py
│       │   ├── sync_report.py
│       │   └── v2
│       │       ├── __init__.py
│       │       ├── entity.py
│       │       └── resource.py
│       ├── services
│       │   ├── __init__.py
│       │   ├── context_service.py
│       │   ├── directory_service.py
│       │   ├── entity_service.py
│       │   ├── exceptions.py
│       │   ├── file_service.py
│       │   ├── initialization.py
│       │   ├── link_resolver.py
│       │   ├── project_service.py
│       │   ├── search_service.py
│       │   └── service.py
│       ├── sync
│       │   ├── __init__.py
│       │   ├── background_sync.py
│       │   ├── coordinator.py
│       │   ├── sync_service.py
│       │   └── watch_service.py
│       ├── telemetry.py
│       ├── templates
│       │   └── prompts
│       │       ├── continue_conversation.hbs
│       │       └── search.hbs
│       └── utils.py
├── test-int
│   ├── BENCHMARKS.md
│   ├── cli
│   │   ├── test_project_commands_integration.py
│   │   └── test_version_integration.py
│   ├── conftest.py
│   ├── mcp
│   │   ├── test_build_context_underscore.py
│   │   ├── test_build_context_validation.py
│   │   ├── test_chatgpt_tools_integration.py
│   │   ├── test_default_project_mode_integration.py
│   │   ├── test_delete_note_integration.py
│   │   ├── test_edit_note_integration.py
│   │   ├── test_lifespan_shutdown_sync_task_cancellation_integration.py
│   │   ├── test_list_directory_integration.py
│   │   ├── test_move_note_integration.py
│   │   ├── test_project_management_integration.py
│   │   ├── test_project_state_sync_integration.py
│   │   ├── test_read_content_integration.py
│   │   ├── test_read_note_integration.py
│   │   ├── test_search_integration.py
│   │   ├── test_single_project_mcp_integration.py
│   │   └── test_write_note_integration.py
│   ├── test_db_wal_mode.py
│   └── test_disable_permalinks_integration.py
├── tests
│   ├── __init__.py
│   ├── api
│   │   ├── conftest.py
│   │   ├── test_api_container.py
│   │   ├── test_async_client.py
│   │   ├── test_continue_conversation_template.py
│   │   ├── test_directory_router.py
│   │   ├── test_importer_router.py
│   │   ├── test_knowledge_router.py
│   │   ├── test_management_router.py
│   │   ├── test_memory_router.py
│   │   ├── test_project_router_operations.py
│   │   ├── test_project_router.py
│   │   ├── test_prompt_router.py
│   │   ├── test_relation_background_resolution.py
│   │   ├── test_resource_router.py
│   │   ├── test_search_router.py
│   │   ├── test_search_template.py
│   │   ├── test_template_loader_helpers.py
│   │   ├── test_template_loader.py
│   │   └── v2
│   │       ├── __init__.py
│   │       ├── conftest.py
│   │       ├── test_directory_router.py
│   │       ├── test_importer_router.py
│   │       ├── test_knowledge_router.py
│   │       ├── test_memory_router.py
│   │       ├── test_project_router.py
│   │       ├── test_prompt_router.py
│   │       ├── test_resource_router.py
│   │       └── test_search_router.py
│   ├── cli
│   │   ├── cloud
│   │   │   ├── test_cloud_api_client_and_utils.py
│   │   │   ├── test_rclone_config_and_bmignore_filters.py
│   │   │   └── test_upload_path.py
│   │   ├── conftest.py
│   │   ├── test_auth_cli_auth.py
│   │   ├── test_cli_container.py
│   │   ├── test_cli_exit.py
│   │   ├── test_cli_tool_exit.py
│   │   ├── test_cli_tools.py
│   │   ├── test_cloud_authentication.py
│   │   ├── test_ignore_utils.py
│   │   ├── test_import_chatgpt.py
│   │   ├── test_import_claude_conversations.py
│   │   ├── test_import_claude_projects.py
│   │   ├── test_import_memory_json.py
│   │   ├── test_project_add_with_local_path.py
│   │   └── test_upload.py
│   ├── conftest.py
│   ├── db
│   │   └── test_issue_254_foreign_key_constraints.py
│   ├── importers
│   │   ├── test_conversation_indexing.py
│   │   ├── test_importer_base.py
│   │   └── test_importer_utils.py
│   ├── markdown
│   │   ├── __init__.py
│   │   ├── test_date_frontmatter_parsing.py
│   │   ├── test_entity_parser_error_handling.py
│   │   ├── test_entity_parser.py
│   │   ├── test_markdown_plugins.py
│   │   ├── test_markdown_processor.py
│   │   ├── test_observation_edge_cases.py
│   │   ├── test_parser_edge_cases.py
│   │   ├── test_relation_edge_cases.py
│   │   └── test_task_detection.py
│   ├── mcp
│   │   ├── clients
│   │   │   ├── __init__.py
│   │   │   └── test_clients.py
│   │   ├── conftest.py
│   │   ├── test_async_client_modes.py
│   │   ├── test_mcp_container.py
│   │   ├── test_obsidian_yaml_formatting.py
│   │   ├── test_permalink_collision_file_overwrite.py
│   │   ├── test_project_context.py
│   │   ├── test_prompts.py
│   │   ├── test_recent_activity_prompt_modes.py
│   │   ├── test_resources.py
│   │   ├── test_server_lifespan_branches.py
│   │   ├── test_tool_build_context.py
│   │   ├── test_tool_canvas.py
│   │   ├── test_tool_delete_note.py
│   │   ├── test_tool_edit_note.py
│   │   ├── test_tool_list_directory.py
│   │   ├── test_tool_move_note.py
│   │   ├── test_tool_project_management.py
│   │   ├── test_tool_read_content.py
│   │   ├── test_tool_read_note.py
│   │   ├── test_tool_recent_activity.py
│   │   ├── test_tool_resource.py
│   │   ├── test_tool_search.py
│   │   ├── test_tool_utils.py
│   │   ├── test_tool_view_note.py
│   │   ├── test_tool_write_note_kebab_filenames.py
│   │   ├── test_tool_write_note.py
│   │   └── tools
│   │       └── test_chatgpt_tools.py
│   ├── Non-MarkdownFileSupport.pdf
│   ├── README.md
│   ├── repository
│   │   ├── test_entity_repository_upsert.py
│   │   ├── test_entity_repository.py
│   │   ├── test_entity_upsert_issue_187.py
│   │   ├── test_observation_repository.py
│   │   ├── test_postgres_search_repository.py
│   │   ├── test_project_info_repository.py
│   │   ├── test_project_repository.py
│   │   ├── test_relation_repository.py
│   │   ├── test_repository.py
│   │   ├── test_search_repository_edit_bug_fix.py
│   │   └── test_search_repository.py
│   ├── schemas
│   │   ├── test_base_timeframe_minimum.py
│   │   ├── test_memory_serialization.py
│   │   ├── test_memory_url_validation.py
│   │   ├── test_memory_url.py
│   │   ├── test_relation_response_reference_resolution.py
│   │   ├── test_schemas.py
│   │   └── test_search.py
│   ├── Screenshot.png
│   ├── services
│   │   ├── test_context_service.py
│   │   ├── test_directory_service.py
│   │   ├── test_entity_service_disable_permalinks.py
│   │   ├── test_entity_service.py
│   │   ├── test_file_service.py
│   │   ├── test_initialization_cloud_mode_branches.py
│   │   ├── test_initialization.py
│   │   ├── test_link_resolver.py
│   │   ├── test_project_removal_bug.py
│   │   ├── test_project_service_operations.py
│   │   ├── test_project_service.py
│   │   └── test_search_service.py
│   ├── sync
│   │   ├── test_character_conflicts.py
│   │   ├── test_coordinator.py
│   │   ├── test_sync_service_incremental.py
│   │   ├── test_sync_service.py
│   │   ├── test_sync_wikilink_issue.py
│   │   ├── test_tmp_files.py
│   │   ├── test_watch_service_atomic_adds.py
│   │   ├── test_watch_service_edge_cases.py
│   │   ├── test_watch_service_reload.py
│   │   └── test_watch_service.py
│   ├── test_config.py
│   ├── test_deps.py
│   ├── test_production_cascade_delete.py
│   ├── test_project_resolver.py
│   ├── test_rclone_commands.py
│   ├── test_runtime.py
│   ├── test_telemetry.py
│   └── utils
│       ├── test_file_utils.py
│       ├── test_frontmatter_obsidian_compatible.py
│       ├── test_parse_tags.py
│       ├── test_permalink_formatting.py
│       ├── test_timezone_utils.py
│       ├── test_utf8_handling.py
│       └── test_validate_project_path.py
└── uv.lock
```

# Files

--------------------------------------------------------------------------------
/src/basic_memory/cli/commands/project.py:
--------------------------------------------------------------------------------

```python
  1 | """Command module for basic-memory project management."""
  2 | 
  3 | import json
  4 | import os
  5 | from datetime import datetime
  6 | from pathlib import Path
  7 | 
  8 | import typer
  9 | from rich.console import Console
 10 | from rich.panel import Panel
 11 | from rich.table import Table
 12 | 
 13 | from basic_memory.cli.app import app
 14 | from basic_memory.cli.commands.command_utils import get_project_info, run_with_cleanup
 15 | from basic_memory.config import ConfigManager
 16 | from basic_memory.mcp.async_client import get_client
 17 | from basic_memory.mcp.tools.utils import call_get, call_post, call_delete, call_put, call_patch
 18 | from basic_memory.schemas.project_info import ProjectList, ProjectStatusResponse
 19 | from basic_memory.utils import generate_permalink, normalize_project_path
 20 | 
 21 | 
 22 | # Import rclone commands for project sync
 23 | from basic_memory.cli.commands.cloud.rclone_commands import (
 24 |     SyncProject,
 25 |     RcloneError,
 26 |     project_sync,
 27 |     project_bisync,
 28 |     project_check,
 29 |     project_ls,
 30 | )
 31 | from basic_memory.cli.commands.cloud.bisync_commands import get_mount_info
 32 | 
 33 | console = Console()
 34 | 
 35 | # Create a project subcommand
 36 | project_app = typer.Typer(help="Manage multiple Basic Memory projects")
 37 | app.add_typer(project_app, name="project")
 38 | 
 39 | 
 40 | def format_path(path: str) -> str:
 41 |     """Format a path for display, using ~ for home directory."""
 42 |     home = str(Path.home())
 43 |     if path.startswith(home):
 44 |         return path.replace(home, "~", 1)  # pragma: no cover
 45 |     return path
 46 | 
 47 | 
 48 | @project_app.command("list")
 49 | def list_projects() -> None:
 50 |     """List all Basic Memory projects."""
 51 | 
 52 |     async def _list_projects():
 53 |         async with get_client() as client:
 54 |             response = await call_get(client, "/projects/projects")
 55 |             return ProjectList.model_validate(response.json())
 56 | 
 57 |     try:
 58 |         result = run_with_cleanup(_list_projects())
 59 |         config = ConfigManager().config
 60 | 
 61 |         table = Table(title="Basic Memory Projects")
 62 |         table.add_column("Name", style="cyan")
 63 |         table.add_column("Path", style="green")
 64 | 
 65 |         # Add Local Path column if in cloud mode
 66 |         if config.cloud_mode_enabled:
 67 |             table.add_column("Local Path", style="yellow", no_wrap=True, overflow="fold")
 68 | 
 69 |         # Show Default column in local mode or if default_project_mode is enabled in cloud mode
 70 |         show_default_column = not config.cloud_mode_enabled or config.default_project_mode
 71 |         if show_default_column:
 72 |             table.add_column("Default", style="magenta")
 73 | 
 74 |         for project in result.projects:
 75 |             is_default = "[X]" if project.is_default else ""
 76 |             normalized_path = normalize_project_path(project.path)
 77 | 
 78 |             # Build row based on mode
 79 |             row = [project.name, format_path(normalized_path)]
 80 | 
 81 |             # Add local path if in cloud mode
 82 |             if config.cloud_mode_enabled:
 83 |                 local_path = ""
 84 |                 if project.name in config.cloud_projects:
 85 |                     local_path = config.cloud_projects[project.name].local_path or ""
 86 |                     local_path = format_path(local_path)
 87 |                 row.append(local_path)
 88 | 
 89 |             # Add default indicator if showing default column
 90 |             if show_default_column:
 91 |                 row.append(is_default)
 92 | 
 93 |             table.add_row(*row)
 94 | 
 95 |         console.print(table)
 96 |     except Exception as e:
 97 |         console.print(f"[red]Error listing projects: {str(e)}[/red]")
 98 |         raise typer.Exit(1)
 99 | 
100 | 
101 | @project_app.command("add")
102 | def add_project(
103 |     name: str = typer.Argument(..., help="Name of the project"),
104 |     path: str = typer.Argument(
105 |         None, help="Path to the project directory (required for local mode)"
106 |     ),
107 |     local_path: str = typer.Option(
108 |         None, "--local-path", help="Local sync path for cloud mode (optional)"
109 |     ),
110 |     set_default: bool = typer.Option(False, "--default", help="Set as default project"),
111 | ) -> None:
112 |     """Add a new project.
113 | 
114 |     Cloud mode examples:\n
115 |         bm project add research                           # No local sync\n
116 |         bm project add research --local-path ~/docs       # With local sync\n
117 | 
118 |     Local mode example:\n
119 |         bm project add research ~/Documents/research
120 |     """
121 |     config = ConfigManager().config
122 | 
123 |     # Resolve local sync path early (needed for both cloud and local mode)
124 |     local_sync_path: str | None = None
125 |     if local_path:
126 |         local_sync_path = Path(os.path.abspath(os.path.expanduser(local_path))).as_posix()
127 | 
128 |     if config.cloud_mode_enabled:
129 |         # Cloud mode: path auto-generated from name, local sync is optional
130 | 
131 |         async def _add_project():
132 |             async with get_client() as client:
133 |                 data = {
134 |                     "name": name,
135 |                     "path": generate_permalink(name),
136 |                     "local_sync_path": local_sync_path,
137 |                     "set_default": set_default,
138 |                 }
139 |                 response = await call_post(client, "/projects/projects", json=data)
140 |                 return ProjectStatusResponse.model_validate(response.json())
141 |     else:
142 |         # Local mode: path is required
143 |         if path is None:
144 |             console.print("[red]Error: path argument is required in local mode[/red]")
145 |             raise typer.Exit(1)
146 | 
147 |         # Resolve to absolute path
148 |         resolved_path = Path(os.path.abspath(os.path.expanduser(path))).as_posix()
149 | 
150 |         async def _add_project():
151 |             async with get_client() as client:
152 |                 data = {"name": name, "path": resolved_path, "set_default": set_default}
153 |                 response = await call_post(client, "/projects/projects", json=data)
154 |                 return ProjectStatusResponse.model_validate(response.json())
155 | 
156 |     try:
157 |         result = run_with_cleanup(_add_project())
158 |         console.print(f"[green]{result.message}[/green]")
159 | 
160 |         # Save local sync path to config if in cloud mode
161 |         if config.cloud_mode_enabled and local_sync_path:
162 |             from basic_memory.config import CloudProjectConfig
163 | 
164 |             # Create local directory if it doesn't exist
165 |             local_dir = Path(local_sync_path)
166 |             local_dir.mkdir(parents=True, exist_ok=True)
167 | 
168 |             # Update config with sync path
169 |             config.cloud_projects[name] = CloudProjectConfig(
170 |                 local_path=local_sync_path,
171 |                 last_sync=None,
172 |                 bisync_initialized=False,
173 |             )
174 |             ConfigManager().save_config(config)
175 | 
176 |             console.print(f"\n[green]Local sync path configured: {local_sync_path}[/green]")
177 |             console.print("\nNext steps:")
178 |             console.print(f"  1. Preview: bm project bisync --name {name} --resync --dry-run")
179 |             console.print(f"  2. Sync: bm project bisync --name {name} --resync")
180 |     except Exception as e:
181 |         console.print(f"[red]Error adding project: {str(e)}[/red]")
182 |         raise typer.Exit(1)
183 | 
184 | 
185 | @project_app.command("sync-setup")
186 | def setup_project_sync(
187 |     name: str = typer.Argument(..., help="Project name"),
188 |     local_path: str = typer.Argument(..., help="Local sync directory"),
189 | ) -> None:
190 |     """Configure local sync for an existing cloud project.
191 | 
192 |     Example:
193 |       bm project sync-setup research ~/Documents/research
194 |     """
195 |     config_manager = ConfigManager()
196 |     config = config_manager.config
197 | 
198 |     if not config.cloud_mode_enabled:
199 |         console.print("[red]Error: sync-setup only available in cloud mode[/red]")
200 |         raise typer.Exit(1)
201 | 
202 |     async def _verify_project_exists():
203 |         """Verify the project exists on cloud by listing all projects."""
204 |         async with get_client() as client:
205 |             response = await call_get(client, "/projects/projects")
206 |             project_list = response.json()
207 |             project_names = [p["name"] for p in project_list["projects"]]
208 |             if name not in project_names:
209 |                 raise ValueError(f"Project '{name}' not found on cloud")
210 |             return True
211 | 
212 |     try:
213 |         # Verify project exists on cloud
214 |         run_with_cleanup(_verify_project_exists())
215 | 
216 |         # Resolve and create local path
217 |         resolved_path = Path(os.path.abspath(os.path.expanduser(local_path)))
218 |         resolved_path.mkdir(parents=True, exist_ok=True)
219 | 
220 |         # Update local config with sync path
221 |         from basic_memory.config import CloudProjectConfig
222 | 
223 |         config.cloud_projects[name] = CloudProjectConfig(
224 |             local_path=resolved_path.as_posix(),
225 |             last_sync=None,
226 |             bisync_initialized=False,
227 |         )
228 |         config_manager.save_config(config)
229 | 
230 |         console.print(f"[green]Sync configured for project '{name}'[/green]")
231 |         console.print(f"\nLocal sync path: {resolved_path}")
232 |         console.print("\nNext steps:")
233 |         console.print(f"  1. Preview: bm project bisync --name {name} --resync --dry-run")
234 |         console.print(f"  2. Sync: bm project bisync --name {name} --resync")
235 |     except Exception as e:
236 |         console.print(f"[red]Error configuring sync: {str(e)}[/red]")
237 |         raise typer.Exit(1)
238 | 
239 | 
240 | @project_app.command("remove")
241 | def remove_project(
242 |     name: str = typer.Argument(..., help="Name of the project to remove"),
243 |     delete_notes: bool = typer.Option(
244 |         False, "--delete-notes", help="Delete project files from disk"
245 |     ),
246 | ) -> None:
247 |     """Remove a project."""
248 | 
249 |     async def _remove_project():
250 |         async with get_client() as client:
251 |             # Convert name to permalink for efficient resolution
252 |             project_permalink = generate_permalink(name)
253 | 
254 |             # Use v2 project resolver to find project ID by permalink
255 |             resolve_data = {"identifier": project_permalink}
256 |             response = await call_post(client, "/v2/projects/resolve", json=resolve_data)
257 |             target_project = response.json()
258 | 
259 |             # Use v2 API with project ID
260 |             response = await call_delete(
261 |                 client, f"/v2/projects/{target_project['external_id']}?delete_notes={delete_notes}"
262 |             )
263 |             return ProjectStatusResponse.model_validate(response.json())
264 | 
265 |     try:
266 |         # Get config to check for local sync path and bisync state
267 |         config = ConfigManager().config
268 |         local_path = None
269 |         has_bisync_state = False
270 | 
271 |         if config.cloud_mode_enabled and name in config.cloud_projects:
272 |             local_path = config.cloud_projects[name].local_path
273 | 
274 |             # Check for bisync state
275 |             from basic_memory.cli.commands.cloud.rclone_commands import get_project_bisync_state
276 | 
277 |             bisync_state_path = get_project_bisync_state(name)
278 |             has_bisync_state = bisync_state_path.exists()
279 | 
280 |         # Remove project from cloud/API
281 |         result = run_with_cleanup(_remove_project())
282 |         console.print(f"[green]{result.message}[/green]")
283 | 
284 |         # Clean up local sync directory if it exists and delete_notes is True
285 |         if delete_notes and local_path:
286 |             local_dir = Path(local_path)
287 |             if local_dir.exists():
288 |                 import shutil
289 | 
290 |                 shutil.rmtree(local_dir)
291 |                 console.print(f"[green]Removed local sync directory: {local_path}[/green]")
292 | 
293 |         # Clean up bisync state if it exists
294 |         if has_bisync_state:
295 |             from basic_memory.cli.commands.cloud.rclone_commands import get_project_bisync_state
296 |             import shutil
297 | 
298 |             bisync_state_path = get_project_bisync_state(name)
299 |             if bisync_state_path.exists():
300 |                 shutil.rmtree(bisync_state_path)
301 |                 console.print("[green]Removed bisync state[/green]")
302 | 
303 |         # Clean up cloud_projects config entry
304 |         if config.cloud_mode_enabled and name in config.cloud_projects:
305 |             del config.cloud_projects[name]
306 |             ConfigManager().save_config(config)
307 | 
308 |         # Show informative message if files were not deleted
309 |         if not delete_notes:
310 |             if local_path:
311 |                 console.print(f"[yellow]Note: Local files remain at {local_path}[/yellow]")
312 | 
313 |     except Exception as e:
314 |         console.print(f"[red]Error removing project: {str(e)}[/red]")
315 |         raise typer.Exit(1)
316 | 
317 | 
318 | @project_app.command("default")
319 | def set_default_project(
320 |     name: str = typer.Argument(..., help="Name of the project to set as CLI default"),
321 | ) -> None:
 322 |     """Set the default project used when 'config.default_project_mode' is enabled.
323 | 
324 |     Note: This command is only available in local mode.
325 |     """
326 |     config = ConfigManager().config
327 | 
328 |     if config.cloud_mode_enabled:
329 |         console.print("[red]Error: 'default' command is not available in cloud mode[/red]")
330 |         raise typer.Exit(1)
331 | 
332 |     async def _set_default():
333 |         async with get_client() as client:
334 |             # Convert name to permalink for efficient resolution
335 |             project_permalink = generate_permalink(name)
336 | 
337 |             # Use v2 project resolver to find project ID by permalink
338 |             resolve_data = {"identifier": project_permalink}
339 |             response = await call_post(client, "/v2/projects/resolve", json=resolve_data)
340 |             target_project = response.json()
341 | 
342 |             # Use v2 API with project ID
343 |             response = await call_put(
344 |                 client, f"/v2/projects/{target_project['external_id']}/default"
345 |             )
346 |             return ProjectStatusResponse.model_validate(response.json())
347 | 
348 |     try:
349 |         result = run_with_cleanup(_set_default())
350 |         console.print(f"[green]{result.message}[/green]")
351 |     except Exception as e:
352 |         console.print(f"[red]Error setting default project: {str(e)}[/red]")
353 |         raise typer.Exit(1)
354 | 
355 | 
356 | @project_app.command("sync-config")
357 | def synchronize_projects() -> None:
358 |     """Synchronize project config between configuration file and database.
359 | 
360 |     Note: This command is only available in local mode.
361 |     """
362 |     config = ConfigManager().config
363 | 
364 |     if config.cloud_mode_enabled:
365 |         console.print("[red]Error: 'sync-config' command is not available in cloud mode[/red]")
366 |         raise typer.Exit(1)
367 | 
368 |     async def _sync_config():
369 |         async with get_client() as client:
370 |             response = await call_post(client, "/projects/config/sync")
371 |             return ProjectStatusResponse.model_validate(response.json())
372 | 
373 |     try:
374 |         result = run_with_cleanup(_sync_config())
375 |         console.print(f"[green]{result.message}[/green]")
376 |     except Exception as e:  # pragma: no cover
377 |         console.print(f"[red]Error synchronizing projects: {str(e)}[/red]")
378 |         raise typer.Exit(1)
379 | 
380 | 
381 | @project_app.command("move")
382 | def move_project(
383 |     name: str = typer.Argument(..., help="Name of the project to move"),
384 |     new_path: str = typer.Argument(..., help="New absolute path for the project"),
385 | ) -> None:
386 |     """Move a project to a new location.
387 | 
388 |     Note: This command is only available in local mode.
389 |     """
390 |     config = ConfigManager().config
391 | 
392 |     if config.cloud_mode_enabled:
393 |         console.print("[red]Error: 'move' command is not available in cloud mode[/red]")
394 |         raise typer.Exit(1)
395 | 
396 |     # Resolve to absolute path
397 |     resolved_path = Path(os.path.abspath(os.path.expanduser(new_path))).as_posix()
398 | 
399 |     async def _move_project():
400 |         async with get_client() as client:
401 |             data = {"path": resolved_path}
402 |             project_permalink = generate_permalink(name)
403 | 
404 |             # TODO fix route to use ProjectPathDep
405 |             response = await call_patch(client, f"/{name}/project/{project_permalink}", json=data)
406 |             return ProjectStatusResponse.model_validate(response.json())
407 | 
408 |     try:
409 |         result = run_with_cleanup(_move_project())
410 |         console.print(f"[green]{result.message}[/green]")
411 | 
412 |         # Show important file movement reminder
413 |         console.print()  # Empty line for spacing
414 |         console.print(
415 |             Panel(
416 |                 "[bold red]IMPORTANT:[/bold red] Project configuration updated successfully.\n\n"
417 |                 "[yellow]You must manually move your project files from the old location to:[/yellow]\n"
418 |                 f"[cyan]{resolved_path}[/cyan]\n\n"
419 |                 "[dim]Basic Memory has only updated the configuration - your files remain in their original location.[/dim]",
420 |                 title="Manual File Movement Required",
421 |                 border_style="yellow",
422 |                 expand=False,
423 |             )
424 |         )
425 | 
426 |     except Exception as e:
427 |         console.print(f"[red]Error moving project: {str(e)}[/red]")
428 |         raise typer.Exit(1)
429 | 
430 | 
431 | @project_app.command("sync")
432 | def sync_project_command(
433 |     name: str = typer.Option(..., "--name", help="Project name to sync"),
434 |     dry_run: bool = typer.Option(False, "--dry-run", help="Preview changes without syncing"),
435 |     verbose: bool = typer.Option(False, "--verbose", "-v", help="Show detailed output"),
436 | ) -> None:
437 |     """One-way sync: local -> cloud (make cloud identical to local).
438 | 
439 |     Example:
440 |       bm project sync --name research
441 |       bm project sync --name research --dry-run
442 |     """
443 |     config = ConfigManager().config
444 |     if not config.cloud_mode_enabled:
445 |         console.print("[red]Error: sync only available in cloud mode[/red]")
446 |         raise typer.Exit(1)
447 | 
448 |     try:
449 |         # Get tenant info for bucket name
450 |         tenant_info = run_with_cleanup(get_mount_info())
451 |         bucket_name = tenant_info.bucket_name
452 | 
453 |         # Get project info
454 |         async def _get_project():
455 |             async with get_client() as client:
456 |                 response = await call_get(client, "/projects/projects")
457 |                 projects_list = ProjectList.model_validate(response.json())
458 |                 for proj in projects_list.projects:
459 |                     if generate_permalink(proj.name) == generate_permalink(name):
460 |                         return proj
461 |                 return None
462 | 
463 |         project_data = run_with_cleanup(_get_project())
464 |         if not project_data:
465 |             console.print(f"[red]Error: Project '{name}' not found[/red]")
466 |             raise typer.Exit(1)
467 | 
468 |         # Get local_sync_path from cloud_projects config
469 |         local_sync_path = None
470 |         if name in config.cloud_projects:
471 |             local_sync_path = config.cloud_projects[name].local_path
472 | 
473 |         if not local_sync_path:
474 |             console.print(f"[red]Error: Project '{name}' has no local_sync_path configured[/red]")
475 |             console.print(f"\nConfigure sync with: bm project sync-setup {name} ~/path/to/local")
476 |             raise typer.Exit(1)
477 | 
478 |         # Create SyncProject
479 |         sync_project = SyncProject(
480 |             name=project_data.name,
481 |             path=normalize_project_path(project_data.path),
482 |             local_sync_path=local_sync_path,
483 |         )
484 | 
485 |         # Run sync
486 |         console.print(f"[blue]Syncing {name} (local -> cloud)...[/blue]")
487 |         success = project_sync(sync_project, bucket_name, dry_run=dry_run, verbose=verbose)
488 | 
489 |         if success:
490 |             console.print(f"[green]{name} synced successfully[/green]")
491 | 
492 |             # Trigger database sync if not a dry run
493 |             if not dry_run:
494 | 
495 |                 async def _trigger_db_sync():
496 |                     async with get_client() as client:
497 |                         permalink = generate_permalink(name)
498 |                         response = await call_post(
499 |                             client, f"/{permalink}/project/sync?force_full=true", json={}
500 |                         )
501 |                         return response.json()
502 | 
503 |                 try:
504 |                     result = run_with_cleanup(_trigger_db_sync())
505 |                     console.print(f"[dim]Database sync initiated: {result.get('message')}[/dim]")
506 |                 except Exception as e:
507 |                     console.print(f"[yellow]Warning: Could not trigger database sync: {e}[/yellow]")
508 |         else:
509 |             console.print(f"[red]{name} sync failed[/red]")
510 |             raise typer.Exit(1)
511 | 
512 |     except RcloneError as e:
513 |         console.print(f"[red]Sync error: {e}[/red]")
514 |         raise typer.Exit(1)
515 |     except Exception as e:
516 |         console.print(f"[red]Error: {e}[/red]")
517 |         raise typer.Exit(1)
518 | 
519 | 
520 | @project_app.command("bisync")
521 | def bisync_project_command(
522 |     name: str = typer.Option(..., "--name", help="Project name to bisync"),
523 |     dry_run: bool = typer.Option(False, "--dry-run", help="Preview changes without syncing"),
524 |     resync: bool = typer.Option(False, "--resync", help="Force new baseline"),
525 |     verbose: bool = typer.Option(False, "--verbose", "-v", help="Show detailed output"),
526 | ) -> None:
527 |     """Two-way sync: local <-> cloud (bidirectional sync).
528 | 
529 |     Examples:
530 |       bm project bisync --name research --resync  # First time
531 |       bm project bisync --name research           # Subsequent syncs
532 |       bm project bisync --name research --dry-run # Preview changes
533 |     """
534 |     config = ConfigManager().config
535 |     if not config.cloud_mode_enabled:
536 |         console.print("[red]Error: bisync only available in cloud mode[/red]")
537 |         raise typer.Exit(1)
538 | 
539 |     try:
540 |         # Get tenant info for bucket name
541 |         tenant_info = run_with_cleanup(get_mount_info())
542 |         bucket_name = tenant_info.bucket_name
543 | 
544 |         # Get project info
545 |         async def _get_project():
546 |             async with get_client() as client:
547 |                 response = await call_get(client, "/projects/projects")
548 |                 projects_list = ProjectList.model_validate(response.json())
549 |                 for proj in projects_list.projects:
550 |                     if generate_permalink(proj.name) == generate_permalink(name):
551 |                         return proj
552 |                 return None
553 | 
554 |         project_data = run_with_cleanup(_get_project())
555 |         if not project_data:
556 |             console.print(f"[red]Error: Project '{name}' not found[/red]")
557 |             raise typer.Exit(1)
558 | 
559 |         # Get local_sync_path from cloud_projects config
560 |         local_sync_path = None
561 |         if name in config.cloud_projects:
562 |             local_sync_path = config.cloud_projects[name].local_path
563 | 
564 |         if not local_sync_path:
565 |             console.print(f"[red]Error: Project '{name}' has no local_sync_path configured[/red]")
566 |             console.print(f"\nConfigure sync with: bm project sync-setup {name} ~/path/to/local")
567 |             raise typer.Exit(1)
568 | 
569 |         # Create SyncProject
570 |         sync_project = SyncProject(
571 |             name=project_data.name,
572 |             path=normalize_project_path(project_data.path),
573 |             local_sync_path=local_sync_path,
574 |         )
575 | 
576 |         # Run bisync
577 |         console.print(f"[blue]Bisync {name} (local <-> cloud)...[/blue]")
578 |         success = project_bisync(
579 |             sync_project, bucket_name, dry_run=dry_run, resync=resync, verbose=verbose
580 |         )
581 | 
582 |         if success:
583 |             console.print(f"[green]{name} bisync completed successfully[/green]")
584 | 
585 |             # Update config
586 |             config.cloud_projects[name].last_sync = datetime.now()
587 |             config.cloud_projects[name].bisync_initialized = True
588 |             ConfigManager().save_config(config)
589 | 
590 |             # Trigger database sync if not a dry run
591 |             if not dry_run:
592 | 
593 |                 async def _trigger_db_sync():
594 |                     async with get_client() as client:
595 |                         permalink = generate_permalink(name)
596 |                         response = await call_post(
597 |                             client, f"/{permalink}/project/sync?force_full=true", json={}
598 |                         )
599 |                         return response.json()
600 | 
601 |                 try:
602 |                     result = run_with_cleanup(_trigger_db_sync())
603 |                     console.print(f"[dim]Database sync initiated: {result.get('message')}[/dim]")
604 |                 except Exception as e:
605 |                     console.print(f"[yellow]Warning: Could not trigger database sync: {e}[/yellow]")
606 |         else:
607 |             console.print(f"[red]{name} bisync failed[/red]")
608 |             raise typer.Exit(1)
609 | 
610 |     except RcloneError as e:
611 |         console.print(f"[red]Bisync error: {e}[/red]")
612 |         raise typer.Exit(1)
613 |     except Exception as e:
614 |         console.print(f"[red]Error: {e}[/red]")
615 |         raise typer.Exit(1)
616 | 
617 | 
618 | @project_app.command("check")
619 | def check_project_command(
620 |     name: str = typer.Option(..., "--name", help="Project name to check"),
621 |     one_way: bool = typer.Option(False, "--one-way", help="Check one direction only (faster)"),
622 | ) -> None:
623 |     """Verify file integrity between local and cloud.
624 | 
625 |     Example:
626 |       bm project check --name research
627 |     """
628 |     config = ConfigManager().config
629 |     if not config.cloud_mode_enabled:
630 |         console.print("[red]Error: check only available in cloud mode[/red]")
631 |         raise typer.Exit(1)
632 | 
633 |     try:
634 |         # Get tenant info for bucket name
635 |         tenant_info = run_with_cleanup(get_mount_info())
636 |         bucket_name = tenant_info.bucket_name
637 | 
638 |         # Get project info
639 |         async def _get_project():
640 |             async with get_client() as client:
641 |                 response = await call_get(client, "/projects/projects")
642 |                 projects_list = ProjectList.model_validate(response.json())
643 |                 for proj in projects_list.projects:
644 |                     if generate_permalink(proj.name) == generate_permalink(name):
645 |                         return proj
646 |                 return None
647 | 
648 |         project_data = run_with_cleanup(_get_project())
649 |         if not project_data:
650 |             console.print(f"[red]Error: Project '{name}' not found[/red]")
651 |             raise typer.Exit(1)
652 | 
653 |         # Get local_sync_path from cloud_projects config
654 |         local_sync_path = None
655 |         if name in config.cloud_projects:
656 |             local_sync_path = config.cloud_projects[name].local_path
657 | 
658 |         if not local_sync_path:
659 |             console.print(f"[red]Error: Project '{name}' has no local_sync_path configured[/red]")
660 |             console.print(f"\nConfigure sync with: bm project sync-setup {name} ~/path/to/local")
661 |             raise typer.Exit(1)
662 | 
663 |         # Create SyncProject
664 |         sync_project = SyncProject(
665 |             name=project_data.name,
666 |             path=normalize_project_path(project_data.path),
667 |             local_sync_path=local_sync_path,
668 |         )
669 | 
670 |         # Run check
671 |         console.print(f"[blue]Checking {name} integrity...[/blue]")
672 |         match = project_check(sync_project, bucket_name, one_way=one_way)
673 | 
674 |         if match:
675 |             console.print(f"[green]{name} files match[/green]")
676 |         else:
677 |             console.print(f"[yellow]!{name} has differences[/yellow]")
678 | 
679 |     except RcloneError as e:
680 |         console.print(f"[red]Check error: {e}[/red]")
681 |         raise typer.Exit(1)
682 |     except Exception as e:
683 |         console.print(f"[red]Error: {e}[/red]")
684 |         raise typer.Exit(1)
685 | 
686 | 
687 | @project_app.command("bisync-reset")
688 | def bisync_reset(
689 |     name: str = typer.Argument(..., help="Project name to reset bisync state for"),
690 | ) -> None:
691 |     """Clear bisync state for a project.
692 | 
693 |     This removes the bisync metadata files, forcing a fresh --resync on next bisync.
694 |     Useful when bisync gets into an inconsistent state or when remote path changes.
695 |     """
696 |     from basic_memory.cli.commands.cloud.rclone_commands import get_project_bisync_state
697 |     import shutil
698 | 
699 |     try:
700 |         state_path = get_project_bisync_state(name)
701 | 
702 |         if not state_path.exists():
703 |             console.print(f"[yellow]No bisync state found for project '{name}'[/yellow]")
704 |             return
705 | 
706 |         # Remove the entire state directory
707 |         shutil.rmtree(state_path)
708 |         console.print(f"[green]Cleared bisync state for project '{name}'[/green]")
709 |         console.print("\nNext steps:")
710 |         console.print(f"  1. Preview: bm project bisync --name {name} --resync --dry-run")
711 |         console.print(f"  2. Sync: bm project bisync --name {name} --resync")
712 | 
713 |     except Exception as e:
714 |         console.print(f"[red]Error clearing bisync state: {str(e)}[/red]")
715 |         raise typer.Exit(1)
716 | 
717 | 
718 | @project_app.command("ls")
719 | def ls_project_command(
720 |     name: str = typer.Option(..., "--name", help="Project name to list files from"),
721 |     path: str = typer.Argument(None, help="Path within project (optional)"),
722 | ) -> None:
 723 |     """List files in a remote project.
724 | 
725 |     Examples:
726 |       bm project ls --name research
727 |       bm project ls --name research subfolder
728 |     """
729 |     config = ConfigManager().config
730 |     if not config.cloud_mode_enabled:
731 |         console.print("[red]Error: ls only available in cloud mode[/red]")
732 |         raise typer.Exit(1)
733 | 
734 |     try:
735 |         # Get tenant info for bucket name
736 |         tenant_info = run_with_cleanup(get_mount_info())
737 |         bucket_name = tenant_info.bucket_name
738 | 
739 |         # Get project info
740 |         async def _get_project():
741 |             async with get_client() as client:
742 |                 response = await call_get(client, "/projects/projects")
743 |                 projects_list = ProjectList.model_validate(response.json())
744 |                 for proj in projects_list.projects:
745 |                     if generate_permalink(proj.name) == generate_permalink(name):
746 |                         return proj
747 |                 return None
748 | 
749 |         project_data = run_with_cleanup(_get_project())
750 |         if not project_data:
751 |             console.print(f"[red]Error: Project '{name}' not found[/red]")
752 |             raise typer.Exit(1)
753 | 
754 |         # Create SyncProject (local_sync_path not needed for ls)
755 |         sync_project = SyncProject(
756 |             name=project_data.name,
757 |             path=normalize_project_path(project_data.path),
758 |         )
759 | 
760 |         # List files
761 |         files = project_ls(sync_project, bucket_name, path=path)
762 | 
763 |         if files:
764 |             console.print(f"\n[bold]Files in {name}" + (f"/{path}" if path else "") + ":[/bold]")
765 |             for file in files:
766 |                 console.print(f"  {file}")
767 |             console.print(f"\n[dim]Total: {len(files)} files[/dim]")
768 |         else:
769 |             console.print(
770 |                 f"[yellow]No files found in {name}" + (f"/{path}" if path else "") + "[/yellow]"
771 |             )
772 | 
773 |     except Exception as e:
774 |         console.print(f"[red]Error: {e}[/red]")
775 |         raise typer.Exit(1)
776 | 
777 | 
778 | @project_app.command("info")
779 | def display_project_info(
780 |     name: str = typer.Argument(..., help="Name of the project"),
781 |     json_output: bool = typer.Option(False, "--json", help="Output in JSON format"),
782 | ):
 783 |     """Display detailed information and statistics about a project."""
784 |     try:
785 |         # Get project info
786 |         info = run_with_cleanup(get_project_info(name))
787 | 
788 |         if json_output:
789 |             # Convert to JSON and print
790 |             print(json.dumps(info.model_dump(), indent=2, default=str))
791 |         else:
792 |             # Project configuration section
793 |             console.print(
794 |                 Panel(
795 |                     f"Basic Memory version: [bold green]{info.system.version}[/bold green]\n"
796 |                     f"[bold]Project:[/bold] {info.project_name}\n"
797 |                     f"[bold]Path:[/bold] {info.project_path}\n"
798 |                     f"[bold]Default Project:[/bold] {info.default_project}\n",
799 |                     title="Basic Memory Project Info",
800 |                     expand=False,
801 |                 )
802 |             )
803 | 
804 |             # Statistics section
805 |             stats_table = Table(title="Statistics")
806 |             stats_table.add_column("Metric", style="cyan")
807 |             stats_table.add_column("Count", style="green")
808 | 
809 |             stats_table.add_row("Entities", str(info.statistics.total_entities))
810 |             stats_table.add_row("Observations", str(info.statistics.total_observations))
811 |             stats_table.add_row("Relations", str(info.statistics.total_relations))
812 |             stats_table.add_row(
813 |                 "Unresolved Relations", str(info.statistics.total_unresolved_relations)
814 |             )
815 |             stats_table.add_row("Isolated Entities", str(info.statistics.isolated_entities))
816 | 
817 |             console.print(stats_table)
818 | 
819 |             # Entity types
820 |             if info.statistics.entity_types:
821 |                 entity_types_table = Table(title="Entity Types")
822 |                 entity_types_table.add_column("Type", style="blue")
823 |                 entity_types_table.add_column("Count", style="green")
824 | 
825 |                 for entity_type, count in info.statistics.entity_types.items():
826 |                     entity_types_table.add_row(entity_type, str(count))
827 | 
828 |                 console.print(entity_types_table)
829 | 
830 |             # Most connected entities
831 |             if info.statistics.most_connected_entities:  # pragma: no cover
832 |                 connected_table = Table(title="Most Connected Entities")
833 |                 connected_table.add_column("Title", style="blue")
834 |                 connected_table.add_column("Permalink", style="cyan")
835 |                 connected_table.add_column("Relations", style="green")
836 | 
837 |                 for entity in info.statistics.most_connected_entities:
838 |                     connected_table.add_row(
839 |                         entity["title"], entity["permalink"], str(entity["relation_count"])
840 |                     )
841 | 
842 |                 console.print(connected_table)
843 | 
844 |             # Recent activity
845 |             if info.activity.recently_updated:  # pragma: no cover
846 |                 recent_table = Table(title="Recent Activity")
847 |                 recent_table.add_column("Title", style="blue")
848 |                 recent_table.add_column("Type", style="cyan")
849 |                 recent_table.add_column("Last Updated", style="green")
850 | 
851 |                 for entity in info.activity.recently_updated[:5]:  # Show top 5
852 |                     updated_at = (
853 |                         datetime.fromisoformat(entity["updated_at"])
854 |                         if isinstance(entity["updated_at"], str)
855 |                         else entity["updated_at"]
856 |                     )
857 |                     recent_table.add_row(
858 |                         entity["title"],
859 |                         entity["entity_type"],
860 |                         updated_at.strftime("%Y-%m-%d %H:%M"),
861 |                     )
862 | 
863 |                 console.print(recent_table)
864 | 
865 |             # Available projects
866 |             projects_table = Table(title="Available Projects")
867 |             projects_table.add_column("Name", style="blue")
868 |             projects_table.add_column("Path", style="cyan")
869 |             projects_table.add_column("Default", style="green")
870 | 
871 |             for name, proj_info in info.available_projects.items():
872 |                 is_default = name == info.default_project
873 |                 project_path = proj_info["path"]
874 |                 projects_table.add_row(name, project_path, "[X]" if is_default else "")
875 | 
876 |             console.print(projects_table)
877 | 
878 |             # Timestamp
879 |             current_time = (
880 |                 datetime.fromisoformat(str(info.system.timestamp))
881 |                 if isinstance(info.system.timestamp, str)
882 |                 else info.system.timestamp
883 |             )
884 |             console.print(f"\nTimestamp: [cyan]{current_time.strftime('%Y-%m-%d %H:%M:%S')}[/cyan]")
885 | 
886 |     except Exception as e:  # pragma: no cover
887 |         typer.echo(f"Error getting project info: {e}", err=True)
888 |         raise typer.Exit(1)
889 | 
```
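
The two timestamp branches in the command above repeat the same isinstance/`fromisoformat` normalization. A small helper would deduplicate the pattern; this is a sketch for illustration (the name `coerce_datetime` is hypothetical, not code from this file):

```python
from datetime import datetime


def coerce_datetime(value: datetime | str) -> datetime:
    """Normalize values that may arrive as ISO-8601 strings or datetimes."""
    return datetime.fromisoformat(value) if isinstance(value, str) else value


# Usage mirroring the CLI code above:
# current_time = coerce_datetime(info.system.timestamp)
# console.print(current_time.strftime("%Y-%m-%d %H:%M:%S"))
```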

--------------------------------------------------------------------------------
/tests/repository/test_search_repository.py:
--------------------------------------------------------------------------------

```python
  1 | """Tests for the SearchRepository."""
  2 | 
  3 | from datetime import datetime, timezone
  4 | 
  5 | import pytest
  6 | import pytest_asyncio
  7 | from sqlalchemy import text
  8 | 
  9 | from basic_memory import db
 10 | from basic_memory.models import Entity
 11 | from basic_memory.models.project import Project
 12 | from basic_memory.repository.search_repository import SearchIndexRow
 13 | from basic_memory.repository.postgres_search_repository import PostgresSearchRepository
 14 | from basic_memory.schemas.search import SearchItemType
 15 | 
 16 | 
 17 | def is_postgres_backend(search_repository):
 18 |     """Helper to check if search repository is Postgres-based."""
 19 |     return isinstance(search_repository, PostgresSearchRepository)
 20 | 
 21 | 
 22 | @pytest_asyncio.fixture
 23 | async def search_entity(session_maker, test_project: Project):
 24 |     """Create a test entity for search testing."""
 25 |     async with db.scoped_session(session_maker) as session:
 26 |         entity = Entity(
 27 |             project_id=test_project.id,
 28 |             title="Search Test Entity",
 29 |             entity_type="test",
 30 |             permalink="test/search-test-entity",
 31 |             file_path="test/search_test_entity.md",
 32 |             content_type="text/markdown",
 33 |             created_at=datetime.now(timezone.utc),
 34 |             updated_at=datetime.now(timezone.utc),
 35 |         )
 36 |         session.add(entity)
 37 |         await session.flush()
 38 |         return entity
 39 | 
 40 | 
 41 | @pytest_asyncio.fixture
 42 | async def second_project(project_repository):
 43 |     """Create a second project for testing project isolation."""
 44 |     project_data = {
 45 |         "name": "Second Test Project",
 46 |         "description": "Another project for testing",
 47 |         "path": "/second/project/path",
 48 |         "is_active": True,
 49 |         "is_default": None,
 50 |     }
 51 |     return await project_repository.create(project_data)
 52 | 
 53 | 
 54 | @pytest_asyncio.fixture
 55 | async def second_project_repository(session_maker, second_project, search_repository):
 56 |     """Create a backend-appropriate repository for the second project.
 57 | 
 58 |     Uses the same type as search_repository to ensure backend consistency.
 59 |     """
 60 |     # Use the same repository class as the main search_repository
 61 |     return type(search_repository)(session_maker, project_id=second_project.id)
 62 | 
 63 | 
 64 | @pytest_asyncio.fixture
 65 | async def second_entity(session_maker, second_project: Project):
 66 |     """Create a test entity in the second project."""
 67 |     async with db.scoped_session(session_maker) as session:
 68 |         entity = Entity(
 69 |             project_id=second_project.id,
 70 |             title="Second Project Entity",
 71 |             entity_type="test",
 72 |             permalink="test/second-project-entity",
 73 |             file_path="test/second_project_entity.md",
 74 |             content_type="text/markdown",
 75 |             created_at=datetime.now(timezone.utc),
 76 |             updated_at=datetime.now(timezone.utc),
 77 |         )
 78 |         session.add(entity)
 79 |         await session.flush()
 80 |         return entity
 81 | 
 82 | 
 83 | @pytest.mark.asyncio
 84 | async def test_init_search_index(search_repository, app_config):
 85 |     """Test that search index can be initialized."""
 86 |     from basic_memory.config import DatabaseBackend
 87 | 
 88 |     await search_repository.init_search_index()
 89 | 
 90 |     # Verify search_index table exists (backend-specific query)
 91 |     async with db.scoped_session(search_repository.session_maker) as session:
 92 |         if app_config.database_backend == DatabaseBackend.POSTGRES:
 93 |             # For Postgres, query information_schema
 94 |             result = await session.execute(
 95 |                 text(
 96 |                     "SELECT table_name FROM information_schema.tables "
 97 |                     "WHERE table_schema = 'public' AND table_name = 'search_index';"
 98 |                 )
 99 |             )
100 |         else:
101 |             # For SQLite, query sqlite_master
102 |             result = await session.execute(
103 |                 text("SELECT name FROM sqlite_master WHERE type='table' AND name='search_index';")
104 |             )
105 | 
106 |         table_name = result.scalar()
107 |         assert table_name == "search_index"
108 | 
109 | 
110 | @pytest.mark.asyncio
111 | async def test_init_search_index_preserves_data(search_repository, search_entity):
112 |     """Regression test: calling init_search_index() twice should preserve indexed data.
113 | 
114 |     This test prevents regression of the bug fixed in PR #503 where
115 |     init_search_index() was dropping existing data on every call due to
116 |     an unconditional DROP TABLE statement.
117 | 
118 |     The bug caused search to work immediately after creating notes, but
119 |     return empty results after MCP server restarts (~30 minutes in Claude Desktop).
120 |     """
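    # A hedged sketch of the fix, not the repository's exact DDL: the buggy
    # init ran an unconditional DROP-and-recreate, while the fix only creates
    # the index when it is missing, e.g.
    #   CREATE VIRTUAL TABLE IF NOT EXISTS search_index USING fts5(...)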
121 |     # Create and index a search item
122 |     search_row = SearchIndexRow(
123 |         id=search_entity.id,
124 |         type=SearchItemType.ENTITY.value,
125 |         title=search_entity.title,
126 |         content_stems="regression test content for server restart",
127 |         content_snippet="This content should persist across init_search_index calls",
128 |         permalink=search_entity.permalink,
129 |         file_path=search_entity.file_path,
130 |         entity_id=search_entity.id,
131 |         metadata={"entity_type": search_entity.entity_type},
132 |         created_at=search_entity.created_at,
133 |         updated_at=search_entity.updated_at,
134 |         project_id=search_repository.project_id,
135 |     )
136 |     await search_repository.index_item(search_row)
137 | 
138 |     # Verify it's searchable
139 |     results = await search_repository.search(search_text="regression test")
140 |     assert len(results) == 1
141 |     assert results[0].title == search_entity.title
142 | 
143 |     # Re-initialize the search index (simulates MCP server restart)
144 |     await search_repository.init_search_index()
145 | 
146 |     # Verify data is still there after re-initialization
147 |     results_after = await search_repository.search(search_text="regression test")
148 |     assert len(results_after) == 1, "Search index data was lost after init_search_index()"
149 |     assert results_after[0].id == search_entity.id
150 | 
151 | 
152 | @pytest.mark.asyncio
153 | async def test_index_item(search_repository, search_entity):
154 |     """Test indexing an item with project_id."""
155 |     # Create search index row for the entity
156 |     search_row = SearchIndexRow(
157 |         id=search_entity.id,
158 |         type=SearchItemType.ENTITY.value,
159 |         title=search_entity.title,
160 |         content_stems="search test entity content",
161 |         content_snippet="This is a test entity for search",
162 |         permalink=search_entity.permalink,
163 |         file_path=search_entity.file_path,
164 |         entity_id=search_entity.id,
165 |         metadata={"entity_type": search_entity.entity_type},
166 |         created_at=search_entity.created_at,
167 |         updated_at=search_entity.updated_at,
168 |         project_id=search_repository.project_id,
169 |     )
170 | 
171 |     # Index the item
172 |     await search_repository.index_item(search_row)
173 | 
174 |     # Search for the item
175 |     results = await search_repository.search(search_text="search test")
176 | 
177 |     # Verify we found the item
178 |     assert len(results) == 1
179 |     assert results[0].title == search_entity.title
180 |     assert results[0].project_id == search_repository.project_id
181 | 
182 | 
183 | @pytest.mark.asyncio
184 | async def test_index_item_upsert_on_duplicate_permalink(search_repository, search_entity):
185 |     """Test that indexing the same permalink twice uses upsert instead of failing.
186 | 
187 |     This tests the fix for the race condition where parallel entity indexing
188 |     could cause IntegrityError on the unique permalink constraint.
189 |     """
190 |     # First insert
191 |     search_row1 = SearchIndexRow(
192 |         id=search_entity.id,
193 |         type=SearchItemType.ENTITY.value,
194 |         title="Original Title",
195 |         content_stems="original content",
196 |         content_snippet="Original content snippet",
197 |         permalink=search_entity.permalink,
198 |         file_path=search_entity.file_path,
199 |         entity_id=search_entity.id,
200 |         metadata={"entity_type": search_entity.entity_type},
201 |         created_at=search_entity.created_at,
202 |         updated_at=search_entity.updated_at,
203 |         project_id=search_repository.project_id,
204 |     )
205 |     await search_repository.index_item(search_row1)
206 | 
207 |     # Verify first insert worked
208 |     results = await search_repository.search(search_text="original")
209 |     assert len(results) == 1
210 |     assert results[0].title == "Original Title"
211 | 
212 |     # Second insert with same permalink but different content (simulates race condition)
213 |     # This should NOT raise IntegrityError - it should upsert (update) instead
214 |     search_row2 = SearchIndexRow(
215 |         id=search_entity.id,
216 |         type=SearchItemType.ENTITY.value,
217 |         title="Updated Title",
218 |         content_stems="updated content",
219 |         content_snippet="Updated content snippet",
220 |         permalink=search_entity.permalink,  # Same permalink!
221 |         file_path=search_entity.file_path,
222 |         entity_id=search_entity.id,
223 |         metadata={"entity_type": search_entity.entity_type},
224 |         created_at=search_entity.created_at,
225 |         updated_at=search_entity.updated_at,
226 |         project_id=search_repository.project_id,
227 |     )
228 |     # This should succeed without raising IntegrityError
229 |     await search_repository.index_item(search_row2)
230 | 
231 |     # Verify the row was updated, not duplicated
232 |     results_after = await search_repository.search(search_text="updated")
233 |     assert len(results_after) == 1
234 |     assert results_after[0].title == "Updated Title"
235 | 
236 |     # Verify old content is gone (was replaced)
237 |     results_old = await search_repository.search(search_text="original")
238 |     assert len(results_old) == 0
239 | 
240 | 
241 | @pytest.mark.asyncio
242 | async def test_bulk_index_items_upsert_on_duplicate_permalink(search_repository, search_entity):
243 |     """Test that bulk_index_items uses upsert for duplicate permalinks.
244 | 
245 |     This tests the fix for race conditions during bulk entity indexing.
246 |     """
247 |     # First bulk insert
248 |     search_row1 = SearchIndexRow(
249 |         id=search_entity.id,
250 |         type=SearchItemType.ENTITY.value,
251 |         title="Bulk Original Title",
252 |         content_stems="bulk original content",
253 |         content_snippet="Bulk original content snippet",
254 |         permalink=search_entity.permalink,
255 |         file_path=search_entity.file_path,
256 |         entity_id=search_entity.id,
257 |         metadata={"entity_type": search_entity.entity_type},
258 |         created_at=search_entity.created_at,
259 |         updated_at=search_entity.updated_at,
260 |         project_id=search_repository.project_id,
261 |     )
262 |     await search_repository.bulk_index_items([search_row1])
263 | 
264 |     # Verify first insert worked
265 |     results = await search_repository.search(search_text="bulk original")
266 |     assert len(results) == 1
267 |     assert results[0].title == "Bulk Original Title"
268 | 
269 |     # Second bulk insert with same permalink (simulates race condition)
270 |     search_row2 = SearchIndexRow(
271 |         id=search_entity.id,
272 |         type=SearchItemType.ENTITY.value,
273 |         title="Bulk Updated Title",
274 |         content_stems="bulk updated content",
275 |         content_snippet="Bulk updated content snippet",
276 |         permalink=search_entity.permalink,  # Same permalink!
277 |         file_path=search_entity.file_path,
278 |         entity_id=search_entity.id,
279 |         metadata={"entity_type": search_entity.entity_type},
280 |         created_at=search_entity.created_at,
281 |         updated_at=search_entity.updated_at,
282 |         project_id=search_repository.project_id,
283 |     )
284 |     # This should succeed without raising IntegrityError
285 |     await search_repository.bulk_index_items([search_row2])
286 | 
287 |     # Verify the row was updated
288 |     results_after = await search_repository.search(search_text="bulk updated")
289 |     assert len(results_after) == 1
290 |     assert results_after[0].title == "Bulk Updated Title"
291 | 
292 | 
293 | @pytest.mark.asyncio
294 | async def test_project_isolation(
295 |     search_repository, second_project_repository, search_entity, second_entity
296 | ):
297 |     """Test that search is isolated by project."""
298 |     # Index entities in both projects
299 |     search_row1 = SearchIndexRow(
300 |         id=search_entity.id,
301 |         type=SearchItemType.ENTITY.value,
302 |         title=search_entity.title,
303 |         content_stems="unique first project content",
304 |         content_snippet="This is a test entity in the first project",
305 |         permalink=search_entity.permalink,
306 |         file_path=search_entity.file_path,
307 |         entity_id=search_entity.id,
308 |         metadata={"entity_type": search_entity.entity_type},
309 |         created_at=search_entity.created_at,
310 |         updated_at=search_entity.updated_at,
311 |         project_id=search_repository.project_id,
312 |     )
313 | 
314 |     search_row2 = SearchIndexRow(
315 |         id=second_entity.id,
316 |         type=SearchItemType.ENTITY.value,
317 |         title=second_entity.title,
318 |         content_stems="unique second project content",
319 |         content_snippet="This is a test entity in the second project",
320 |         permalink=second_entity.permalink,
321 |         file_path=second_entity.file_path,
322 |         entity_id=second_entity.id,
323 |         metadata={"entity_type": second_entity.entity_type},
324 |         created_at=second_entity.created_at,
325 |         updated_at=second_entity.updated_at,
326 |         project_id=second_project_repository.project_id,
327 |     )
328 | 
329 |     # Index items in their respective repositories
330 |     await search_repository.index_item(search_row1)
331 |     await second_project_repository.index_item(search_row2)
332 | 
333 |     # Search in first project
334 |     results1 = await search_repository.search(search_text="unique first")
335 |     assert len(results1) == 1
336 |     assert results1[0].title == search_entity.title
337 |     assert results1[0].project_id == search_repository.project_id
338 | 
339 |     # Search in second project
340 |     results2 = await second_project_repository.search(search_text="unique second")
341 |     assert len(results2) == 1
342 |     assert results2[0].title == second_entity.title
343 |     assert results2[0].project_id == second_project_repository.project_id
344 | 
345 |     # Make sure first project can't see second project's content
346 |     results_cross1 = await search_repository.search(search_text="unique second")
347 |     assert len(results_cross1) == 0
348 | 
349 |     # Make sure second project can't see first project's content
350 |     results_cross2 = await second_project_repository.search(search_text="unique first")
351 |     assert len(results_cross2) == 0
352 | 
353 | 
354 | @pytest.mark.asyncio
355 | async def test_delete_by_permalink(search_repository, search_entity):
356 |     """Test deleting an item by permalink respects project isolation."""
357 |     # Index the item
358 |     search_row = SearchIndexRow(
359 |         id=search_entity.id,
360 |         type=SearchItemType.ENTITY.value,
361 |         title=search_entity.title,
362 |         content_stems="content to delete",
363 |         content_snippet="This content should be deleted",
364 |         permalink=search_entity.permalink,
365 |         file_path=search_entity.file_path,
366 |         entity_id=search_entity.id,
367 |         metadata={"entity_type": search_entity.entity_type},
368 |         created_at=search_entity.created_at,
369 |         updated_at=search_entity.updated_at,
370 |         project_id=search_repository.project_id,
371 |     )
372 | 
373 |     await search_repository.index_item(search_row)
374 | 
375 |     # Verify it exists
376 |     results = await search_repository.search(search_text="content to delete")
377 |     assert len(results) == 1
378 | 
379 |     # Delete by permalink
380 |     await search_repository.delete_by_permalink(search_entity.permalink)
381 | 
382 |     # Verify it's gone
383 |     results_after = await search_repository.search(search_text="content to delete")
384 |     assert len(results_after) == 0
385 | 
386 | 
387 | @pytest.mark.asyncio
388 | async def test_delete_by_entity_id(search_repository, search_entity):
389 |     """Test deleting an item by entity_id respects project isolation."""
390 |     # Index the item
391 |     search_row = SearchIndexRow(
392 |         id=search_entity.id,
393 |         type=SearchItemType.ENTITY.value,
394 |         title=search_entity.title,
395 |         content_stems="entity to delete",
396 |         content_snippet="This entity should be deleted",
397 |         permalink=search_entity.permalink,
398 |         file_path=search_entity.file_path,
399 |         entity_id=search_entity.id,
400 |         metadata={"entity_type": search_entity.entity_type},
401 |         created_at=search_entity.created_at,
402 |         updated_at=search_entity.updated_at,
403 |         project_id=search_repository.project_id,
404 |     )
405 | 
406 |     await search_repository.index_item(search_row)
407 | 
408 |     # Verify it exists
409 |     results = await search_repository.search(search_text="entity to delete")
410 |     assert len(results) == 1
411 | 
412 |     # Delete by entity_id
413 |     await search_repository.delete_by_entity_id(search_entity.id)
414 | 
415 |     # Verify it's gone
416 |     results_after = await search_repository.search(search_text="entity to delete")
417 |     assert len(results_after) == 0
418 | 
419 | 
420 | @pytest.mark.asyncio
421 | async def test_to_insert_includes_project_id(search_repository):
422 |     """Test that the to_insert method includes project_id."""
423 |     # Create a search index row with project_id
424 |     row = SearchIndexRow(
425 |         id=1234,
426 |         type=SearchItemType.ENTITY.value,
427 |         title="Test Title",
428 |         content_stems="test content",
429 |         content_snippet="test snippet",
430 |         permalink="test/permalink",
431 |         file_path="test/file.md",
432 |         metadata={"test": "metadata"},
433 |         created_at=datetime.now(timezone.utc),
434 |         updated_at=datetime.now(timezone.utc),
435 |         project_id=search_repository.project_id,
436 |     )
437 | 
438 |     # Get insert data
439 |     insert_data = row.to_insert()
440 | 
441 |     # Verify project_id is included
442 |     assert "project_id" in insert_data
443 |     assert insert_data["project_id"] == search_repository.project_id
444 | 
445 | 
446 | def test_directory_property():
447 |     """Test the directory property of SearchIndexRow."""
448 |     # Test a file in a nested directory
449 |     row1 = SearchIndexRow(
450 |         id=1,
451 |         type=SearchItemType.ENTITY.value,
452 |         file_path="projects/notes/ideas.md",
453 |         created_at=datetime.now(timezone.utc),
454 |         updated_at=datetime.now(timezone.utc),
455 |         project_id=1,
456 |     )
457 |     assert row1.directory == "/projects/notes"
458 | 
459 |     # Test a file at the root level
460 |     row2 = SearchIndexRow(
461 |         id=2,
462 |         type=SearchItemType.ENTITY.value,
463 |         file_path="README.md",
464 |         created_at=datetime.now(timezone.utc),
465 |         updated_at=datetime.now(timezone.utc),
466 |         project_id=1,
467 |     )
468 |     assert row2.directory == "/"
469 | 
470 |     # Test a non-entity type with empty file_path
471 |     row3 = SearchIndexRow(
472 |         id=3,
473 |         type=SearchItemType.OBSERVATION.value,
474 |         file_path="",
475 |         created_at=datetime.now(timezone.utc),
476 |         updated_at=datetime.now(timezone.utc),
477 |         project_id=1,
478 |     )
479 |     assert row3.directory == ""
480 | 
481 | 
482 | class TestSearchTermPreparation:
483 |     """Test cases for search term preparation.
484 | 
485 |     Note: Tests with `[sqlite]` marker test SQLite FTS5-specific implementation details.
486 |     Tests with `[asyncio-sqlite]` or `[asyncio-postgres]` test backend-agnostic functionality.
487 |     """
488 | 
489 |     def test_simple_terms_get_prefix_wildcard(self, search_repository):
490 |         """Simple alphanumeric terms should get prefix matching."""
491 |         from basic_memory.repository.postgres_search_repository import PostgresSearchRepository
492 | 
493 |         if isinstance(search_repository, PostgresSearchRepository):
494 |             # Postgres tsquery uses :* for prefix matching
495 |             assert search_repository._prepare_search_term("hello") == "hello:*"
496 |             assert search_repository._prepare_search_term("project") == "project:*"
497 |             assert search_repository._prepare_search_term("test123") == "test123:*"
498 |         else:
499 |             # SQLite FTS5 uses * for prefix matching
500 |             assert search_repository._prepare_search_term("hello") == "hello*"
501 |             assert search_repository._prepare_search_term("project") == "project*"
502 |             assert search_repository._prepare_search_term("test123") == "test123*"
503 | 
504 |     def test_terms_with_existing_wildcard_unchanged(self, search_repository):
505 |         """Terms that already contain * should keep prefix-match semantics on each backend."""
506 |         if is_postgres_backend(search_repository):
507 |             # Postgres uses different syntax (:* instead of *)
508 |             assert search_repository._prepare_search_term("hello*") == "hello:*"
509 |             assert search_repository._prepare_search_term("test*world") == "test:*world"
510 |         else:
511 |             assert search_repository._prepare_search_term("hello*") == "hello*"
512 |             assert search_repository._prepare_search_term("test*world") == "test*world"
513 | 
514 |     def test_boolean_operators_preserved(self, search_repository):
515 |         """Boolean operators should keep their meaning; Postgres translates them to tsquery syntax."""
516 |         if is_postgres_backend(search_repository):
517 |             # Postgres converts AND/OR/NOT to &/|/!
518 |             assert search_repository._prepare_search_term("hello AND world") == "hello & world"
519 |             assert search_repository._prepare_search_term("cat OR dog") == "cat | dog"
520 |             # NOT must be converted to "& !" for proper tsquery syntax
521 |             assert (
522 |                 search_repository._prepare_search_term("project NOT meeting")
523 |                 == "project & !meeting"
524 |             )
525 |             assert (
526 |                 search_repository._prepare_search_term("(hello AND world) OR test")
527 |                 == "(hello & world) | test"
528 |             )
529 |         else:
530 |             assert search_repository._prepare_search_term("hello AND world") == "hello AND world"
531 |             assert search_repository._prepare_search_term("cat OR dog") == "cat OR dog"
532 |             assert (
533 |                 search_repository._prepare_search_term("project NOT meeting")
534 |                 == "project NOT meeting"
535 |             )
536 |             assert (
537 |                 search_repository._prepare_search_term("(hello AND world) OR test")
538 |                 == "(hello AND world) OR test"
539 |             )
540 | 
541 |     def test_hyphenated_terms_with_boolean_operators(self, search_repository):
542 |         """Hyphenated terms with Boolean operators should be properly quoted."""
543 |         if is_postgres_backend(search_repository):
544 |             pytest.skip("This test is for SQLite FTS5-specific quoting behavior")
545 | 
546 |         # Test the specific case from the GitHub issue
547 |         result = search_repository._prepare_search_term("tier1-test AND unicode")
548 |         assert result == '"tier1-test" AND unicode'
549 | 
550 |         # Test other hyphenated Boolean combinations
551 |         assert (
552 |             search_repository._prepare_search_term("multi-word OR single")
553 |             == '"multi-word" OR single'
554 |         )
555 |         assert (
556 |             search_repository._prepare_search_term("well-formed NOT badly-formed")
557 |             == '"well-formed" NOT "badly-formed"'
558 |         )
559 |         assert (
560 |             search_repository._prepare_search_term("test-case AND (hello OR world)")
561 |             == '"test-case" AND (hello OR world)'
562 |         )
563 | 
564 |         # Test mixed special characters with Boolean operators
565 |         assert (
566 |             search_repository._prepare_search_term("config.json AND test-file")
567 |             == '"config.json" AND "test-file"'
568 |         )
569 |         assert (
570 |             search_repository._prepare_search_term("C++ OR python-script")
571 |             == '"C++" OR "python-script"'
572 |         )
573 | 
574 |     def test_programming_terms_should_work(self, search_repository):
575 |         """Programming-related terms with special chars should be searchable."""
576 |         if is_postgres_backend(search_repository):
577 |             pytest.skip("This test is for SQLite FTS5-specific behavior")
578 | 
579 |         # These should be quoted to handle special characters safely
580 |         assert search_repository._prepare_search_term("C++") == '"C++"*'
581 |         assert search_repository._prepare_search_term("function()") == '"function()"*'
582 |         assert search_repository._prepare_search_term("[email protected]") == '"[email protected]"*'
583 |         assert search_repository._prepare_search_term("array[index]") == '"array[index]"*'
584 |         assert search_repository._prepare_search_term("config.json") == '"config.json"*'
585 | 
586 |     def test_malformed_fts5_syntax_quoted(self, search_repository):
587 |         """Malformed FTS5 syntax should be quoted to prevent errors."""
588 |         if is_postgres_backend(search_repository):
589 |             pytest.skip("This test is for SQLite FTS5-specific behavior")
590 | 
591 |         # Multiple operators without proper syntax
592 |         assert search_repository._prepare_search_term("+++invalid+++") == '"+++invalid+++"*'
593 |         assert search_repository._prepare_search_term("!!!error!!!") == '"!!!error!!!"*'
594 |         assert search_repository._prepare_search_term("@#$%^&*()") == '"@#$%^&*()"*'
595 | 
596 |     def test_quoted_strings_handled_properly(self, search_repository):
597 |         """Strings with quotes should have quotes escaped."""
598 |         if is_postgres_backend(search_repository):
599 |             pytest.skip("This test is for SQLite FTS5-specific behavior")
600 | 
601 |         assert search_repository._prepare_search_term('say "hello"') == '"say ""hello"""*'
602 |         assert search_repository._prepare_search_term("it's working") == '"it\'s working"*'
603 | 
604 |     def test_file_paths_no_prefix_wildcard(self, search_repository):
605 |         """File paths should not get prefix wildcards."""
606 |         if is_postgres_backend(search_repository):
607 |             pytest.skip("This test is for SQLite FTS5-specific behavior")
608 | 
609 |         assert (
610 |             search_repository._prepare_search_term("config.json", is_prefix=False)
611 |             == '"config.json"'
612 |         )
613 |         assert (
614 |             search_repository._prepare_search_term("docs/readme.md", is_prefix=False)
615 |             == '"docs/readme.md"'
616 |         )
617 | 
618 |     def test_spaces_handled_correctly(self, search_repository):
619 |         """Terms with spaces should use boolean AND for word order independence."""
620 |         if is_postgres_backend(search_repository):
621 |             pytest.skip("This test is for SQLite FTS5-specific behavior")
622 | 
623 |         assert search_repository._prepare_search_term("hello world") == "hello* AND world*"
624 |         assert (
625 |             search_repository._prepare_search_term("project planning") == "project* AND planning*"
626 |         )
627 | 
628 |     def test_version_strings_with_dots_handled_correctly(self, search_repository):
629 |         """Version strings with dots should be quoted to prevent FTS5 syntax errors."""
630 |         if is_postgres_backend(search_repository):
631 |             pytest.skip("This test is for SQLite FTS5-specific behavior")
632 | 
633 |         # This reproduces the bug where "Basic Memory v0.13.0b2" becomes "Basic* AND Memory* AND v0.13.0b2*"
634 |         # which causes FTS5 syntax errors because v0.13.0b2* is not valid FTS5 syntax
635 |         result = search_repository._prepare_search_term("Basic Memory v0.13.0b2")
636 |         # Should be quoted because of dots in v0.13.0b2
637 |         assert result == '"Basic Memory v0.13.0b2"*'
638 | 
639 |     def test_mixed_special_characters_in_multi_word_queries(self, search_repository):
640 |         """Multi-word queries with special characters in any word should be fully quoted."""
641 |         if is_postgres_backend(search_repository):
642 |             pytest.skip("This test is for SQLite FTS5-specific behavior")
643 | 
644 |         # Any word containing special characters should cause the entire phrase to be quoted
645 |         assert search_repository._prepare_search_term("config.json file") == '"config.json file"*'
646 |         assert (
647 |             search_repository._prepare_search_term("[email protected] account")
648 |             == '"[email protected] account"*'
649 |         )
650 |         assert search_repository._prepare_search_term("node.js and react") == '"node.js and react"*'
651 | 
652 |     @pytest.mark.asyncio
653 |     async def test_search_with_special_characters_returns_results(self, search_repository):
654 |         """Integration test: search with special characters should work gracefully."""
655 |         # This test ensures the search doesn't crash with FTS5 syntax errors
656 | 
657 |         # These should all return empty results gracefully, not crash
658 |         results1 = await search_repository.search(search_text="C++")
659 |         assert isinstance(results1, list)  # Should not crash
660 | 
661 |         results2 = await search_repository.search(search_text="function()")
662 |         assert isinstance(results2, list)  # Should not crash
663 | 
664 |         results3 = await search_repository.search(search_text="+++malformed+++")
665 |         assert isinstance(results3, list)  # Should not crash, return empty results
666 | 
667 |         results4 = await search_repository.search(search_text="[email protected]")
668 |         assert isinstance(results4, list)  # Should not crash
669 | 
670 |     @pytest.mark.asyncio
671 |     async def test_boolean_search_still_works(self, search_repository):
672 |         """Boolean search operations should continue to work."""
673 |         # These should not crash and should respect boolean logic
674 |         results1 = await search_repository.search(search_text="hello AND world")
675 |         assert isinstance(results1, list)
676 | 
677 |         results2 = await search_repository.search(search_text="cat OR dog")
678 |         assert isinstance(results2, list)
679 | 
680 |         results3 = await search_repository.search(search_text="project NOT meeting")
681 |         assert isinstance(results3, list)
682 | 
683 |     @pytest.mark.asyncio
684 |     async def test_permalink_match_exact_with_slash(self, search_repository):
685 |         """Test exact permalink matching with slash (line 249 coverage)."""
686 |         # This tests the exact match path: if "/" in permalink_text:
687 |         results = await search_repository.search(permalink_match="test/path")
688 |         assert isinstance(results, list)
689 |         # Should use exact equality matching for paths with slashes
690 | 
691 |     @pytest.mark.asyncio
692 |     async def test_permalink_match_simple_term(self, search_repository):
693 |         """Test permalink matching with simple term (no slash)."""
694 |         # This tests the simple term path that goes through _prepare_search_term
695 |         results = await search_repository.search(permalink_match="simpleterm")
696 |         assert isinstance(results, list)
697 |         # Should use FTS5 MATCH for simple terms
698 | 
699 |     @pytest.mark.asyncio
700 |     async def test_fts5_error_handling_database_error(self, search_repository):
701 |         """Test that non-FTS5 database errors are properly re-raised."""
702 |         # Force a real database error (not an FTS5 syntax error) by removing the search index.
703 |         # The repository should re-raise the error rather than returning an empty list.
704 |         async with db.scoped_session(search_repository.session_maker) as session:
705 |             await session.execute(text("DROP TABLE IF EXISTS search_index"))
706 |             await session.commit()
707 | 
708 |         try:
709 |             with pytest.raises(Exception):
710 |                 await search_repository.search(search_text="test")
711 |         finally:
712 |             # Restore index so later tests in this module keep working.
713 |             await search_repository.init_search_index()
714 | 
715 |     @pytest.mark.asyncio
716 |     async def test_version_string_search_integration(self, search_repository, search_entity):
717 |         """Integration test: searching for version strings should work without FTS5 errors."""
718 |         # Index an entity with version information
719 |         search_row = SearchIndexRow(
720 |             id=search_entity.id,
721 |             type=SearchItemType.ENTITY.value,
722 |             title="Basic Memory v0.13.0b2 Release",
723 |             content_stems="basic memory version 0.13.0b2 beta release notes features",
724 |             content_snippet="Basic Memory v0.13.0b2 is a beta release with new features",
725 |             permalink=search_entity.permalink,
726 |             file_path=search_entity.file_path,
727 |             entity_id=search_entity.id,
728 |             metadata={"entity_type": search_entity.entity_type},
729 |             created_at=search_entity.created_at,
730 |             updated_at=search_entity.updated_at,
731 |             project_id=search_repository.project_id,
732 |         )
733 | 
734 |         await search_repository.index_item(search_row)
735 | 
736 |         # This should not cause FTS5 syntax errors and should find the entity
737 |         results = await search_repository.search(search_text="Basic Memory v0.13.0b2")
738 |         assert len(results) == 1
739 |         assert results[0].title == "Basic Memory v0.13.0b2 Release"
740 | 
741 |         # Test other version-like patterns
742 |         results2 = await search_repository.search(search_text="v0.13.0b2")
743 |         assert len(results2) == 1  # Should still find it due to content_stems
744 | 
745 |         # Test with other problematic patterns
746 |         results3 = await search_repository.search(search_text="node.js version")
747 |         assert isinstance(results3, list)  # Should not crash
748 | 
749 |     @pytest.mark.asyncio
750 |     async def test_wildcard_only_search(self, search_repository, search_entity):
751 |         """Test that wildcard-only search '*' doesn't cause FTS5 errors (line 243 coverage)."""
752 |         # Index an entity for testing
753 |         search_row = SearchIndexRow(
754 |             id=search_entity.id,
755 |             type=SearchItemType.ENTITY.value,
756 |             title="Test Entity",
757 |             content_stems="test entity content",
758 |             content_snippet="This is a test entity",
759 |             permalink=search_entity.permalink,
760 |             file_path=search_entity.file_path,
761 |             entity_id=search_entity.id,
762 |             metadata={"entity_type": search_entity.entity_type},
763 |             created_at=search_entity.created_at,
764 |             updated_at=search_entity.updated_at,
765 |             project_id=search_repository.project_id,
766 |         )
767 | 
768 |         await search_repository.index_item(search_row)
769 | 
770 |         # Test wildcard-only search - should not crash and should return results
771 |         results = await search_repository.search(search_text="*")
772 |         assert isinstance(results, list)  # Should not crash
773 |         assert len(results) >= 1  # Should return all results, including our test entity
774 | 
775 |         # Test empty string search - should also not crash
776 |         results_empty = await search_repository.search(search_text="")
777 |         assert isinstance(results_empty, list)  # Should not crash
778 | 
779 |         # Test whitespace-only search
780 |         results_whitespace = await search_repository.search(search_text="   ")
781 |         assert isinstance(results_whitespace, list)  # Should not crash
782 | 
783 |     def test_boolean_query_empty_parts_coverage(self, search_repository):
784 |         """Test Boolean query parsing with empty parts (line 143 coverage)."""
785 |         # Create queries that will result in empty parts after splitting
786 |         result1 = search_repository._prepare_boolean_query(
787 |             "hello AND  AND world"
788 |         )  # Double operator
789 |         assert "hello" in result1 and "world" in result1
790 | 
791 |         result2 = search_repository._prepare_boolean_query("  OR test")  # Leading operator
792 |         assert "test" in result2
793 | 
794 |         result3 = search_repository._prepare_boolean_query("test OR  ")  # Trailing operator
795 |         assert "test" in result3
796 | 
797 |     def test_parenthetical_term_quote_escaping(self, search_repository):
798 |         """Test quote escaping in parenthetical terms (lines 190-191 coverage)."""
799 |         if is_postgres_backend(search_repository):
800 |             pytest.skip("This test is for SQLite FTS5-specific behavior")
801 | 
802 |         # Test term with quotes that needs escaping
803 |         result = search_repository._prepare_parenthetical_term('(say "hello" world)')
804 |         # Should escape quotes by doubling them
805 |         assert '""hello""' in result
806 | 
807 |         # Test term with single quotes
808 |         result2 = search_repository._prepare_parenthetical_term("(it's working)")
809 |         assert "it's working" in result2
810 | 
811 |     def test_needs_quoting_empty_input(self, search_repository):
812 |         """Test _needs_quoting with empty inputs (line 207 coverage)."""
813 |         if is_postgres_backend(search_repository):
814 |             pytest.skip("This test is for SQLite FTS5-specific behavior")
815 | 
816 |         # Test empty string
817 |         assert not search_repository._needs_quoting("")
818 | 
819 |         # Test whitespace-only string
820 |         assert not search_repository._needs_quoting("   ")
821 | 
822 |         # Test None-like cases
823 |         assert not search_repository._needs_quoting("\t")
824 | 
825 |     def test_prepare_single_term_empty_input(self, search_repository):
826 |         """Test _prepare_single_term with empty inputs (line 227 coverage)."""
827 |         # Test empty string
828 |         result1 = search_repository._prepare_single_term("")
829 |         assert result1 == ""
830 | 
831 |         # Test whitespace-only string
832 |         result2 = search_repository._prepare_single_term("   ")
833 |         assert result2 == "   "  # Should return as-is
834 | 
835 |         # Test string that becomes empty after strip
836 |         result3 = search_repository._prepare_single_term("\t\n")
837 |         assert result3 == "\t\n"  # Should return original
838 | 
```
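
Taken together, the SQLite-marked tests above pin down a small set of FTS5 quoting rules. Below is a minimal reconstruction of those rules for illustration only; it is not the repository's `_prepare_search_term`, and hyphenated-term quoting inside Boolean queries is deliberately omitted:

```python
import re

# Characters outside [A-Za-z0-9, whitespace, *] force quoting in FTS5.
_FTS5_SPECIAL = re.compile(r"[^A-Za-z0-9\s*]")


def prepare_fts5_term(term: str, is_prefix: bool = True) -> str:
    """Sketch of the quoting rules the SQLite tests encode."""
    if not term.strip():
        return term  # empty/whitespace queries pass through untouched
    if any(op in term.split() for op in ("AND", "OR", "NOT")):
        return term  # Boolean queries pass through (hyphen quoting omitted)
    if "*" in term:
        return term  # caller-supplied wildcards are kept as-is
    if _FTS5_SPECIAL.search(term):
        quoted = '"' + term.replace('"', '""') + '"'  # double embedded quotes
        return quoted + "*" if is_prefix else quoted
    words = term.split()
    if len(words) > 1:
        return " AND ".join(w + "*" for w in words)  # word-order independent
    return term + "*" if is_prefix else term


assert prepare_fts5_term("hello world") == "hello* AND world*"
assert prepare_fts5_term("config.json", is_prefix=False) == '"config.json"'
```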

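The Postgres branches imply a parallel translation from user-facing Boolean syntax to tsquery operators. A minimal sketch of that mapping, consistent with the assertions above (again an assumption, not the repository's code):

```python
import re


def to_tsquery_syntax(term: str) -> str:
    """Map Boolean search syntax onto Postgres tsquery operators."""
    if re.search(r"\b(AND|OR|NOT)\b", term):
        # AND -> &, OR -> |, and "NOT x" -> "& !x" to stay well-formed.
        translated = term.replace(" AND ", " & ").replace(" OR ", " | ")
        return re.sub(r"\s+NOT\s+", " & !", translated)
    if "*" in term:
        return term.replace("*", ":*", 1)  # adapt FTS5-style wildcards
    return f"{term}:*"  # bare terms get prefix matching


assert to_tsquery_syntax("project NOT meeting") == "project & !meeting"
assert to_tsquery_syntax("(hello AND world) OR test") == "(hello & world) | test"
```
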
--------------------------------------------------------------------------------
/specs/SPEC-13 CLI Authentication with Subscription Validation.md:
--------------------------------------------------------------------------------

```markdown
  1 | ---
  2 | title: 'SPEC-13: CLI Authentication with Subscription Validation'
  3 | type: spec
  4 | permalink: specs/spec-13-cli-auth-subscription-validation
  5 | tags:
  6 | - authentication
  7 | - security
  8 | - cli
  9 | - subscription
 10 | status: draft
 11 | created: 2025-10-02
 12 | ---
 13 | 
 14 | # SPEC-13: CLI Authentication with Subscription Validation
 15 | 
 16 | ## Why
 17 | 
 18 | The Basic Memory Cloud CLI currently has a security gap in authentication that allows unauthorized access:
 19 | 
 20 | **Current Web Flow (Secure)**:
 21 | 1. User signs up via WorkOS AuthKit
 22 | 2. User creates Polar subscription
 23 | 3. Web app validates subscription before calling `POST /tenants/setup`
 24 | 4. Tenant provisioned only after subscription validation ✅
 25 | 
 26 | **Current CLI Flow (Insecure)**:
 27 | 1. User signs up via WorkOS AuthKit (OAuth device flow)
 28 | 2. User runs `bm cloud login`
 29 | 3. CLI receives JWT token from WorkOS
 30 | 4. CLI can access all cloud endpoints without subscription check ❌
 31 | 
 32 | **Problem**: Anyone can sign up with WorkOS and immediately access cloud infrastructure via CLI without having an active Polar subscription. This creates:
 33 | - Revenue loss (free resource consumption)
 34 | - Security risk (unauthorized data access)
 35 | - Support burden (users accessing features they haven't paid for)
 36 | 
 37 | **Root Cause**: The CLI authentication flow validates JWT tokens but doesn't verify subscription status before granting access to cloud resources.
 38 | 
 39 | ## What
 40 | 
 41 | Add subscription validation to the authentication flow to ensure that only users with active Polar subscriptions can access cloud resources across all access methods (CLI, MCP, Web App, Direct API).
 42 | 
 43 | **Affected Components**:
 44 | 
 45 | ### basic-memory-cloud (Cloud Service)
 46 | - `apps/cloud/src/basic_memory_cloud/deps.py` - Add subscription validation dependency
 47 | - `apps/cloud/src/basic_memory_cloud/services/subscription_service.py` - Add subscription check method
 48 | - `apps/cloud/src/basic_memory_cloud/api/tenant_mount.py` - Protect mount endpoints
 49 | - `apps/cloud/src/basic_memory_cloud/api/proxy.py` - Protect proxy endpoints
 50 | 
 51 | ### basic-memory (CLI)
 52 | - `src/basic_memory/cli/commands/cloud/core_commands.py` - Handle 403 errors
 53 | - `src/basic_memory/cli/commands/cloud/api_client.py` - Parse subscription errors
 54 | - `docs/cloud-cli.md` - Document subscription requirement
 55 | 
 56 | **Endpoints to Protect**:
 57 | - `GET /tenant/mount/info` - Used by CLI bisync setup
 58 | - `POST /tenant/mount/credentials` - Used by CLI bisync credentials
 59 | - `GET /proxy/{path:path}` - Used by Web App, MCP tools, CLI tools, Direct API
 60 | - All other `/proxy/*` endpoints - Centralized access point for all user operations
 61 | 
 62 | ## Complete Authentication Flow Analysis
 63 | 
 64 | ### Overview of All Access Flows
 65 | 
 66 | Basic Memory Cloud has **7 distinct authentication flows**. This spec closes subscription validation gaps in flows 2-4 and 6, which all converge on the `/proxy/*` endpoints.
 67 | 
 68 | ### Flow 1: Polar Webhook → Registration ✅ SECURE
 69 | ```
 70 | Polar webhook → POST /api/webhooks/polar
 71 | → Validates Polar webhook signature
 72 | → Creates/updates subscription in database
 73 | → No direct user access - webhook only
 74 | ```
 75 | **Auth**: Polar webhook signature validation
 76 | **Subscription Check**: N/A (webhook creates subscriptions)
 77 | **Status**: ✅ Secure - webhook validated, no user JWT involved
 78 | 
 79 | ### Flow 2: Web App Login ❌ NEEDS FIX
 80 | ```
 81 | User → apps/web (Vue.js/Nuxt)
 82 | → WorkOS AuthKit magic link authentication
 83 | → JWT stored in browser session
 84 | → Web app calls /proxy/{project}/... endpoints (memory, directory, projects)
 85 | → proxy.py validates JWT but does NOT check subscription
 86 | → Access granted without subscription ❌
 87 | ```
 88 | **Auth**: WorkOS JWT via `CurrentUserProfileHybridJwtDep`
 89 | **Subscription Check**: ❌ Missing
 90 | **Fixed By**: Task 1.4 (protect `/proxy/*` endpoints)
 91 | 
 92 | ### Flow 3: MCP (Model Context Protocol) ❌ NEEDS FIX
 93 | ```
 94 | AI Agent (Claude, Cursor, etc.) → https://mcp.basicmemory.com
 95 | → AuthKit OAuth device flow
 96 | → JWT stored in AI agent
 97 | → MCP tools call {cloud_host}/proxy/{endpoint} with Authorization header
 98 | → proxy.py validates JWT but does NOT check subscription
 99 | → MCP tools can access all cloud resources without subscription ❌
100 | ```
101 | **Auth**: AuthKit JWT via `CurrentUserProfileHybridJwtDep`
102 | **Subscription Check**: ❌ Missing
103 | **Fixed By**: Task 1.4 (protect `/proxy/*` endpoints)
104 | 
105 | ### Flow 4: CLI Auth (basic-memory) ❌ NEEDS FIX
106 | ```
107 | User → bm cloud login
108 | → AuthKit OAuth device flow
109 | → JWT stored in ~/.basic-memory/tokens.json
110 | → CLI calls:
111 |   - {cloud_host}/tenant/mount/info (for bisync setup)
112 |   - {cloud_host}/tenant/mount/credentials (for bisync credentials)
113 |   - {cloud_host}/proxy/{endpoint} (for all MCP tools)
114 | → tenant_mount.py and proxy.py validate JWT but do NOT check subscription
115 | → Access granted without subscription ❌
116 | ```
117 | **Auth**: AuthKit JWT via `CurrentUserProfileHybridJwtDep`
118 | **Subscription Check**: ❌ Missing
119 | **Fixed By**: Task 1.3 (protect `/tenant/mount/*`) + Task 1.4 (protect `/proxy/*`)
120 | 
121 | ### Flow 5: Cloud CLI (Admin Tasks) ✅ SECURE
122 | ```
123 | Admin → python -m basic_memory_cloud.cli.tenant_cli
124 | → Uses CLIAuth with admin WorkOS OAuth client
125 | → Gets JWT token with admin org membership
126 | → Calls /tenants/* endpoints (create, list, delete tenants)
127 | → tenants.py validates JWT AND admin org membership via AdminUserHybridDep
128 | → Access granted only to admin organization members ✅
129 | ```
130 | **Auth**: AuthKit JWT + Admin org validation via `AdminUserHybridDep`
131 | **Subscription Check**: N/A (admins bypass subscription requirement)
132 | **Status**: ✅ Secure - admin-only endpoints, separate from user flows
133 | 
134 | ### Flow 6: Direct API Calls ❌ NEEDS FIX
135 | ```
136 | Any HTTP client → {cloud_host}/proxy/{endpoint}
137 | → Sends Authorization: Bearer {jwt} header
138 | → proxy.py validates JWT but does NOT check subscription
139 | → Direct API access without subscription ❌
140 | ```
141 | **Auth**: WorkOS or AuthKit JWT via `CurrentUserProfileHybridJwtDep`
142 | **Subscription Check**: ❌ Missing
143 | **Fixed By**: Task 1.4 (protect `/proxy/*` endpoints)
144 | 
145 | ### Flow 7: Tenant API Instance (Internal) ✅ SECURE
146 | ```
147 | /proxy/* → Tenant API (basic-memory-{tenant_id}.fly.dev)
148 | → Validates signed header from proxy (tenant_id + signature)
149 | → Direct external access will be disabled in production
150 | → Only accessible via /proxy endpoints
151 | ```
152 | **Auth**: Signed header validation from proxy
153 | **Subscription Check**: N/A (internal only, validated at proxy layer)
154 | **Status**: ✅ Secure - validates proxy signature, not directly accessible
155 | 
156 | ### Authentication Flow Summary Matrix
157 | 
158 | | Flow | Access Method | Current Auth | Subscription Check | Fixed By SPEC-13 |
159 | |------|---------------|--------------|-------------------|------------------|
160 | | 1. Polar Webhook | Polar webhook → `/api/webhooks/polar` | Polar signature | N/A (webhook) | N/A |
161 | | 2. Web App | Browser → `/proxy/*` | WorkOS JWT ✅ | ❌ Missing | ✅ Task 1.4 |
162 | | 3. MCP | AI Agent → `/proxy/*` | AuthKit JWT ✅ | ❌ Missing | ✅ Task 1.4 |
163 | | 4. CLI | `bm cloud` → `/tenant/mount/*` + `/proxy/*` | AuthKit JWT ✅ | ❌ Missing | ✅ Task 1.3 + 1.4 |
164 | | 5. Cloud CLI (Admin) | `tenant_cli` → `/tenants/*` | AuthKit JWT ✅ + Admin org | N/A (admin) | N/A (admin bypass) |
165 | | 6. Direct API | HTTP client → `/proxy/*` | WorkOS/AuthKit JWT ✅ | ❌ Missing | ✅ Task 1.4 |
166 | | 7. Tenant API | Proxy → tenant instance | Proxy signature ✅ | N/A (internal) | N/A |
167 | 
168 | ### Key Insights
169 | 
170 | 1. **Single Point of Failure**: All user access (Web, MCP, CLI, Direct API) converges on `/proxy/*` endpoints
171 | 2. **Centralized Fix**: Protecting `/proxy/*` with subscription validation closes gaps in flows 2, 3, 4, and 6 simultaneously
172 | 3. **Admin Bypass**: Cloud CLI admin tasks use separate `/tenants/*` endpoints with admin-only access (no subscription needed)
173 | 4. **Defense in Depth**: `/tenant/mount/*` endpoints also protected for CLI bisync operations
174 | 
175 | ### Architecture Benefits
176 | 
177 | The `/proxy` layer serves as the **single centralized authorization point** for all user access:
178 | - ✅ One place to validate JWT tokens
179 | - ✅ One place to check subscription status
180 | - ✅ One place to handle tenant routing
181 | - ✅ Protects Web App, MCP, CLI, and Direct API simultaneously
182 | 
183 | This architecture makes the fix comprehensive and maintainable.
184 | 
185 | ## How (High Level)
186 | 
187 | ### Option A: Database Subscription Check (Recommended)
188 | 
189 | **Approach**: Add a FastAPI dependency that validates subscription status against the database before allowing access.
190 | 
191 | **Implementation**:
192 | 
193 | 1. **Create Subscription Validation Dependency** (`deps.py`)
194 |    ```python
195 |    async def get_authorized_cli_user_profile(
196 |        credentials: Annotated[HTTPAuthorizationCredentials, Depends(security)],
197 |        session: DatabaseSessionDep,
198 |        user_profile_repo: UserProfileRepositoryDep,
199 |        subscription_service: SubscriptionServiceDep,
200 |    ) -> UserProfile:
201 |        """
202 |        Hybrid authentication with subscription validation for CLI access.
203 | 
204 |        Validates JWT (WorkOS or AuthKit) and checks for active subscription.
205 |        Returns UserProfile if both checks pass.
206 |        """
207 |        # Try WorkOS JWT first (faster validation path)
208 |        try:
209 |            user_context = await validate_workos_jwt(credentials.credentials)
210 |        except HTTPException:
211 |            # Fall back to AuthKit JWT validation
212 |            try:
213 |                user_context = await validate_authkit_jwt(credentials.credentials)
214 |            except HTTPException as e:
215 |                raise HTTPException(
216 |                    status_code=401,
217 |                    detail="Invalid JWT token. Authentication required.",
218 |                ) from e
219 | 
220 |        # Check subscription status
221 |        has_subscription = await subscription_service.check_user_has_active_subscription(
222 |            session, user_context.workos_user_id
223 |        )
224 | 
225 |        if not has_subscription:
226 |            raise HTTPException(
227 |                status_code=403,
228 |                detail={
229 |                    "error": "subscription_required",
230 |                    "message": "Active subscription required for CLI access",
231 |                    "subscribe_url": "https://basicmemory.com/subscribe"
232 |                }
233 |            )
234 | 
235 |        # Look up and return user profile
236 |        user_profile = await user_profile_repo.get_user_profile_by_workos_user_id(
237 |            session, user_context.workos_user_id
238 |        )
239 |        if not user_profile:
240 |            raise HTTPException(401, detail="User profile not found")
241 | 
242 |        return user_profile
243 |    ```
244 | 
245 |    ```python
246 |    AuthorizedCLIUserProfileDep = Annotated[UserProfile, Depends(get_authorized_cli_user_profile)]
247 |    ```
248 | 
249 | 2. **Add Subscription Check Method** (`subscription_service.py`)
250 |    ```python
251 |    async def check_user_has_active_subscription(
252 |        self, session: AsyncSession, workos_user_id: str
253 |    ) -> bool:
254 |        """Check if user has active subscription."""
255 |        # Use existing repository method to get subscription by workos_user_id
256 |        # This joins UserProfile -> Subscription in a single query
257 |        subscription = await self.subscription_repository.get_subscription_by_workos_user_id(
258 |            session, workos_user_id
259 |        )
260 | 
261 |        return subscription is not None and subscription.status == "active"
262 |    ```
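
   For reference, the single-JOIN lookup that comment describes could look like the following SQLAlchemy sketch (model and column names here are assumptions, not the actual cloud repository code):

   ```python
   from sqlalchemy import select
   from sqlalchemy.ext.asyncio import AsyncSession

   # Subscription and UserProfile stand in for the cloud service's ORM models.
   async def get_subscription_by_workos_user_id(
       session: AsyncSession, workos_user_id: str
   ) -> "Subscription | None":
       stmt = (
           select(Subscription)
           .join(UserProfile, Subscription.user_profile_id == UserProfile.id)
           .where(UserProfile.workos_user_id == workos_user_id)
       )
       return (await session.execute(stmt)).scalar_one_or_none()
   ```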
263 | 
264 | 3. **Protect Endpoints** (Replace `CurrentUserProfileHybridJwtDep` with `AuthorizedCLIUserProfileDep`)
265 |    ```python
266 |    # Before
267 |    @router.get("/mount/info")
268 |    async def get_mount_info(
269 |        user_profile: CurrentUserProfileHybridJwtDep,
270 |        session: DatabaseSessionDep,
271 |    ):
272 |        tenant_id = user_profile.tenant_id
273 |        ...
274 | 
275 |    # After
276 |    @router.get("/mount/info")
277 |    async def get_mount_info(
278 |        user_profile: AuthorizedCLIUserProfileDep,  # Now includes subscription check
279 |        session: DatabaseSessionDep,
280 |    ):
281 |        tenant_id = user_profile.tenant_id  # No changes needed to endpoint logic
282 |        ...
283 |    ```
284 | 
285 | 4. **Update CLI Error Handling**
286 |    ```python
287 |    # In core_commands.py login()
288 |    try:
289 |        success = await auth.login()
290 |        if success:
291 |            # Test subscription by calling protected endpoint
292 |            await make_api_request("GET", f"{host_url}/tenant/mount/info")
293 |    except CloudAPIError as e:
294 |        if e.status_code == 403 and e.detail.get("error") == "subscription_required":
295 |            console.print("[red]Subscription required[/red]")
296 |            console.print(f"Subscribe at: {e.detail['subscribe_url']}")
297 |            raise typer.Exit(1)
298 |    ```
299 | 
300 | **Pros**:
301 | - Simple to implement
302 | - Fast (single database query)
303 | - Clear error messages
304 | - Works with existing subscription flow
305 | 
306 | **Cons**:
307 | - Database is source of truth (could get out of sync with Polar)
308 | - Adds one extra subscription lookup query per request (lightweight JOIN query)
309 | 
310 | ### Option B: WorkOS Organizations
311 | 
312 | **Approach**: Add users to a "beta-users" organization in WorkOS after subscription creation and validate org membership via JWT claims.
313 | 
314 | **Implementation**:
315 | 1. After Polar subscription webhook, add user to WorkOS org via API
316 | 2. Validate `org_id` claim in JWT matches authorized org
317 | 3. Use existing `get_admin_workos_jwt` pattern
318 | 
319 | **Pros**:
320 | - WorkOS as single source of truth
321 | - No database queries needed
322 | - More secure (harder to bypass)
323 | 
324 | **Cons**:
325 | - More complex (requires WorkOS API integration)
326 | - Requires managing WorkOS org membership
327 | - Less control over error messages
328 | - Additional API calls during registration
329 | 
330 | ### Recommendation
331 | 
332 | **Start with Option A (Database Check)** for:
333 | - Faster implementation
334 | - Clearer error messages
335 | - Easier testing
336 | - Existing subscription infrastructure
337 | 
338 | **Consider Option B later** if:
339 | - Need tighter security
340 | - Want to reduce database dependency
341 | - Scale requires fewer database queries
342 | 
343 | ## How to Evaluate
344 | 
345 | ### Success Criteria
346 | 
347 | **1. Unauthorized Users Blocked**
348 | - [ ] User without subscription cannot complete `bm cloud login`
349 | - [ ] User without subscription receives clear error with subscribe link
350 | - [ ] User without subscription cannot run `bm cloud setup`
351 | - [ ] User without subscription cannot run `bm sync` in cloud mode
352 | 
353 | **2. Authorized Users Work**
354 | - [ ] User with active subscription can login successfully
355 | - [ ] User with active subscription can setup bisync
356 | - [ ] User with active subscription can sync files
357 | - [ ] User with active subscription can use all MCP tools via proxy
358 | 
359 | **3. Subscription State Changes**
360 | - [ ] Expired subscription blocks access with clear error
361 | - [ ] Renewed subscription immediately restores access
362 | - [ ] Cancelled subscription blocks access after grace period
363 | 
364 | **4. Error Messages**
365 | - [ ] 403 errors include "subscription_required" error code
366 | - [ ] Error messages include subscribe URL
367 | - [ ] CLI displays user-friendly messages
368 | - [ ] Errors logged appropriately for debugging
369 | 
370 | **5. No Regressions**
371 | - [ ] Web app login/subscription flow unaffected
372 | - [ ] Admin endpoints still work (bypass check)
373 | - [ ] Tenant provisioning workflow unchanged
374 | - [ ] Performance not degraded
375 | 
376 | ### Test Cases
377 | 
378 | **Manual Testing**:
379 | ```text
380 | # Test 1: Unauthorized user
381 | 1. Create new WorkOS account (no subscription)
382 | 2. Run `bm cloud login`
383 | 3. Verify: Login succeeds but shows subscription required error
384 | 4. Verify: Cannot run `bm cloud setup`
385 | 5. Verify: Clear error message with subscribe link
386 | 
387 | # Test 2: Authorized user
388 | 1. Use account with active Polar subscription
389 | 2. Run `bm cloud login`
390 | 3. Verify: Login succeeds without errors
391 | 4. Run `bm cloud setup`
392 | 5. Verify: Setup completes successfully
393 | 6. Run `bm sync`
394 | 7. Verify: Sync works normally
395 | 
396 | # Test 3: Subscription expiration
397 | 1. Use account with active subscription
398 | 2. Manually expire subscription in database
399 | 3. Run `bm cloud login`
400 | 4. Verify: Blocked with clear error
401 | 5. Renew subscription
402 | 6. Run `bm cloud login` again
403 | 7. Verify: Access restored
404 | ```
405 | 
406 | **Automated Tests**:
407 | ```python
408 | # Test subscription validation dependency
409 | async def test_authorized_user_allowed(
410 |     db_session,
411 |     user_profile_repo,
412 |     subscription_service,
413 |     mock_jwt_credentials
414 | ):
415 |     # Create user with active subscription
416 |     user_profile = await create_user_with_subscription(db_session, status="active")
417 | 
418 |     # Mock JWT credentials for the user
419 |     credentials = mock_jwt_credentials(user_profile.workos_user_id)
420 | 
421 |     # Should not raise exception
422 |     result = await get_authorized_cli_user_profile(
423 |         credentials, db_session, user_profile_repo, subscription_service
424 |     )
425 |     assert result.id == user_profile.id
426 |     assert result.workos_user_id == user_profile.workos_user_id
427 | 
428 | async def test_unauthorized_user_blocked(
429 |     db_session,
430 |     user_profile_repo,
431 |     subscription_service,
432 |     mock_jwt_credentials
433 | ):
434 |     # Create user without subscription
435 |     user_profile = await create_user_without_subscription(db_session)
436 |     credentials = mock_jwt_credentials(user_profile.workos_user_id)
437 | 
438 |     # Should raise 403
439 |     with pytest.raises(HTTPException) as exc:
440 |         await get_authorized_cli_user_profile(
441 |             credentials, db_session, user_profile_repo, subscription_service
442 |         )
443 | 
444 |     assert exc.value.status_code == 403
445 |     assert exc.value.detail["error"] == "subscription_required"
446 | 
447 | async def test_inactive_subscription_blocked(
448 |     db_session,
449 |     user_profile_repo,
450 |     subscription_service,
451 |     mock_jwt_credentials
452 | ):
453 |     # Create user with cancelled/inactive subscription
454 |     user_profile = await create_user_with_subscription(db_session, status="cancelled")
455 |     credentials = mock_jwt_credentials(user_profile.workos_user_id)
456 | 
457 |     # Should raise 403
458 |     with pytest.raises(HTTPException) as exc:
459 |         await get_authorized_cli_user_profile(
460 |             credentials, db_session, user_profile_repo, subscription_service
461 |         )
462 | 
463 |     assert exc.value.status_code == 403
464 |     assert exc.value.detail["error"] == "subscription_required"
465 | ```
466 | 
467 | ## Implementation Tasks
468 | 
469 | ### Phase 1: Cloud Service (basic-memory-cloud)
470 | 
471 | #### Task 1.1: Add subscription check method to SubscriptionService ✅
472 | **File**: `apps/cloud/src/basic_memory_cloud/services/subscription_service.py`
473 | 
474 | - [x] Add method `check_subscription(session: AsyncSession, workos_user_id: str) -> bool`
475 | - [x] Use existing `self.subscription_repository.get_subscription_by_workos_user_id(session, workos_user_id)`
476 | - [x] Check both `status == "active"` AND `current_period_end >= now()`
477 | - [x] Log both values when check fails
478 | - [x] Add docstring explaining the method
479 | - [x] Run `just typecheck` to verify types
480 | 
481 | **Actual implementation**:
482 | ```python
483 | async def check_subscription(
484 |     self, session: AsyncSession, workos_user_id: str
485 | ) -> bool:
486 |     """Check if user has active subscription with valid period."""
487 |     subscription = await self.subscription_repository.get_subscription_by_workos_user_id(
488 |         session, workos_user_id
489 |     )
490 | 
491 |     if subscription is None:
492 |         return False
493 | 
494 |     if subscription.status != "active":
495 |         logger.warning("Subscription inactive", workos_user_id=workos_user_id,
496 |                       status=subscription.status, current_period_end=subscription.current_period_end)
497 |         return False
498 | 
499 |     now = datetime.now(timezone.utc)
500 |     if subscription.current_period_end is None or subscription.current_period_end < now:
501 |         logger.warning("Subscription expired", workos_user_id=workos_user_id,
502 |                       status=subscription.status, current_period_end=subscription.current_period_end)
503 |         return False
504 | 
505 |     return True
506 | ```
507 | 
508 | #### Task 1.2: Add subscription validation dependency ✅
509 | **File**: `apps/cloud/src/basic_memory_cloud/deps.py`
510 | 
511 | - [x] Import necessary types at top of file (if not already present)
512 | - [x] Add `authorized_user_profile()` async function
513 | - [x] Implement hybrid JWT validation (WorkOS first, AuthKit fallback)
514 | - [x] Add subscription check using `subscription_service.check_subscription()`
515 | - [x] Raise `HTTPException(403)` with structured error detail if no active subscription
516 | - [x] Look up and return `UserProfile` after validation
517 | - [x] Add `AuthorizedCLIUserProfileDep` type annotation
518 | - [x] Use `settings.subscription_url` from config (env var)
519 | - [x] Run `just typecheck` to verify types
520 | 
521 | **Expected code**:
522 | ```python
523 | async def get_authorized_cli_user_profile(
524 |     credentials: Annotated[HTTPAuthorizationCredentials, Depends(security)],
525 |     session: DatabaseSessionDep,
526 |     user_profile_repo: UserProfileRepositoryDep,
527 |     subscription_service: SubscriptionServiceDep,
528 | ) -> UserProfile:
529 |     """
530 |     Hybrid authentication with subscription validation for CLI access.
531 | 
532 |     Validates JWT (WorkOS or AuthKit) and checks for active subscription.
533 |     Returns UserProfile if both checks pass.
534 | 
535 |     Raises:
536 |         HTTPException(401): Invalid JWT token
537 |         HTTPException(403): No active subscription
538 |     """
539 |     # Try WorkOS JWT first (faster validation path)
540 |     try:
541 |         user_context = await validate_workos_jwt(credentials.credentials)
542 |     except HTTPException:
543 |         # Fall back to AuthKit JWT validation
544 |         try:
545 |             user_context = await validate_authkit_jwt(credentials.credentials)
546 |         except HTTPException as e:
547 |             raise HTTPException(
548 |                 status_code=401,
549 |                 detail="Invalid JWT token. Authentication required.",
550 |             ) from e
551 | 
552 |     # Check subscription status
553 |    has_subscription = await subscription_service.check_subscription(
554 |         session, user_context.workos_user_id
555 |     )
556 | 
557 |     if not has_subscription:
558 |         logger.warning(
559 |             "CLI access denied: no active subscription",
560 |             workos_user_id=user_context.workos_user_id,
561 |         )
562 |         raise HTTPException(
563 |             status_code=403,
564 |             detail={
565 |                 "error": "subscription_required",
566 |                 "message": "Active subscription required for CLI access",
567 |                 "subscribe_url": "https://basicmemory.com/subscribe"
568 |             }
569 |         )
570 | 
571 |     # Look up and return user profile
572 |     user_profile = await user_profile_repo.get_user_profile_by_workos_user_id(
573 |         session, user_context.workos_user_id
574 |     )
575 |     if not user_profile:
576 |         logger.error(
577 |             "User profile not found after successful auth",
578 |             workos_user_id=user_context.workos_user_id,
579 |         )
580 |         raise HTTPException(401, detail="User profile not found")
581 | 
582 |     logger.info(
583 |         "CLI access granted",
584 |         workos_user_id=user_context.workos_user_id,
585 |         user_profile_id=str(user_profile.id),
586 |     )
587 |     return user_profile
588 | 
589 | 
590 | AuthorizedCLIUserProfileDep = Annotated[UserProfile, Depends(get_authorized_cli_user_profile)]
591 | ```
592 | 
593 | #### Task 1.3: Protect tenant mount endpoints ✅
594 | **File**: `apps/cloud/src/basic_memory_cloud/api/tenant_mount.py`
595 | 
596 | - [x] Update import: add `AuthorizedCLIUserProfileDep` from `..deps`
597 | - [x] Replace `user_profile: CurrentUserProfileHybridJwtDep` with `user_profile: AuthorizedCLIUserProfileDep` in:
598 |   - [x] `get_tenant_mount_info()` (line ~23)
599 |   - [x] `create_tenant_mount_credentials()` (line ~88)
600 |   - [x] `revoke_tenant_mount_credentials()` (line ~244)
601 |   - [x] `list_tenant_mount_credentials()` (line ~326)
602 | - [x] Verify no other code changes needed (parameter name and usage stay the same)
603 | - [x] Run `just typecheck` to verify types
604 | 
605 | #### Task 1.4: Protect proxy endpoints ✅
606 | **File**: `apps/cloud/src/basic_memory_cloud/api/proxy.py`
607 | 
608 | - [x] Update import: add `AuthorizedCLIUserProfileDep` from `..deps`
609 | - [x] Replace `user_profile: CurrentUserProfileHybridJwtDep` with `user_profile: AuthorizedCLIUserProfileDep` in:
610 |   - [x] `check_tenant_health()` (line ~21)
611 |   - [x] `proxy_to_tenant()` (line ~63)
612 | - [x] Verify no other code changes needed (parameter name and usage stay the same)
613 | - [x] Run `just typecheck` to verify types
614 | 
615 | **Why Keep /proxy Architecture:**
616 | 
617 | The proxy layer is valuable because it:
618 | 1. **Centralizes authorization** - Single place for JWT + subscription validation (closes both CLI and MCP auth gaps)
619 | 2. **Handles tenant routing** - Maps tenant_id → fly_app_name without exposing infrastructure details
620 | 3. **Abstracts infrastructure** - MCP and CLI don't need to know about Fly.io naming conventions
621 | 4. **Enables features** - Can add rate limiting, caching, request logging, etc. at proxy layer
622 | 5. **Supports both flows** - CLI tools and MCP tools both use /proxy endpoints
623 | 
624 | The extra HTTP hop adds minimal latency (< 10ms) and is worth it for these architectural benefits; the tenant-routing concern in isolation is sketched below.
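
A hedged sketch of that routing concern; the app-name pattern is an assumed placeholder, not the real Fly.io naming convention:

```python
# Hedged sketch: tenant_id -> upstream URL mapping done at the proxy layer.
def fly_app_name(tenant_id: str) -> str:
    # Assumed naming pattern for illustration only.
    return f"basic-memory-tenant-{tenant_id}"

def upstream_url(tenant_id: str, path: str) -> str:
    """Build the tenant's upstream URL without exposing it to CLI/MCP clients."""
    return f"https://{fly_app_name(tenant_id)}.fly.dev{path}"
```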
625 | 
626 | **Performance Note:** The cloud app has Redis available, so subscription status can be cached to reduce database queries if needed (a minimal caching sketch follows). The initial implementation uses a direct database query (simple, with acceptable ~5-10ms latency).
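
A minimal caching sketch, assuming a `redis.asyncio` client; the key scheme and TTL are illustrative choices:

```python
# Hedged sketch: short-TTL Redis cache in front of the subscription check.
SUBSCRIPTION_CACHE_TTL = 60  # seconds; bounds how stale the answer can be

async def check_subscription_cached(redis, subscription_service, session, workos_user_id: str) -> bool:
    key = f"subscription:active:{workos_user_id}"
    cached = await redis.get(key)
    if cached is not None:
        return cached == b"1"
    active = await subscription_service.check_subscription(session, workos_user_id)
    await redis.set(key, b"1" if active else b"0", ex=SUBSCRIPTION_CACHE_TTL)
    return active
```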
627 | 
628 | #### Task 1.5: Add unit tests for subscription service
629 | **File**: `apps/cloud/tests/services/test_subscription_service.py` (create if doesn't exist)
630 | 
631 | - [ ] Create test file if it doesn't exist
632 | - [ ] Add test: `test_check_subscription_returns_true_for_active()`
633 |   - Create user with active subscription
634 |   - Call `check_subscription()`
635 |   - Assert returns `True`
636 | - [ ] Add test: `test_check_subscription_returns_false_for_pending()`
637 |   - Create user with pending subscription
638 |   - Assert returns `False`
639 | - [ ] Add test: `test_check_subscription_returns_false_for_cancelled()`
640 |   - Create user with cancelled subscription
641 |   - Assert returns `False`
642 | - [ ] Add test: `test_check_subscription_returns_false_for_no_subscription()`
643 |   - Create user without subscription
644 |   - Assert returns `False`
- [ ] Add test: `test_check_subscription_returns_false_for_expired_period()`
  - Create user with active status but a past `current_period_end`
  - Assert returns `False`
645 | - [ ] Run `just test` to verify tests pass
646 | 
647 | #### Task 1.6: Add integration tests for dependency
648 | **File**: `apps/cloud/tests/test_deps.py` (create if doesn't exist)
649 | 
650 | - [ ] Create test file if it doesn't exist
651 | - [ ] Add fixtures for mocking JWT credentials
652 | - [ ] Add test: `test_authorized_cli_user_profile_with_active_subscription()`
653 |   - Mock valid JWT + active subscription
654 |   - Call dependency
655 |   - Assert returns UserProfile
656 | - [ ] Add test: `test_authorized_cli_user_profile_without_subscription_raises_403()`
657 |   - Mock valid JWT + no subscription
658 |   - Assert raises HTTPException(403) with correct error detail
659 | - [ ] Add test: `test_authorized_cli_user_profile_with_inactive_subscription_raises_403()`
660 |   - Mock valid JWT + cancelled subscription
661 |   - Assert raises HTTPException(403)
662 | - [ ] Add test: `test_authorized_cli_user_profile_with_invalid_jwt_raises_401()`
663 |   - Mock invalid JWT
664 |   - Assert raises HTTPException(401)
665 | - [ ] Run `just test` to verify tests pass
666 | 
667 | #### Task 1.7: Deploy and verify cloud service
668 | - [ ] Run `just check` to verify all quality checks pass
669 | - [ ] Commit changes with message: "feat: add subscription validation to CLI endpoints"
670 | - [ ] Deploy to preview environment: `flyctl deploy --config apps/cloud/fly.toml`
671 | - [ ] Test manually (a small httpx helper is sketched below):
672 |   - [ ] Call `/tenant/mount/info` with valid JWT but no subscription → expect 403
673 |   - [ ] Call `/tenant/mount/info` with valid JWT and active subscription → expect 200
674 |   - [ ] Verify error response structure matches spec
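
A small helper for those manual checks; the host URL and token acquisition are assumptions:

```python
# Hedged sketch: manual verification of the protected endpoint.
import httpx

HOST = "https://<preview-deployment>"  # assumed placeholder

def check_mount_info(token: str) -> None:
    r = httpx.get(f"{HOST}/tenant/mount/info", headers={"Authorization": f"Bearer {token}"})
    # Expect 200 for an active subscriber; 403 with
    # {"detail": {"error": "subscription_required", ...}} otherwise.
    print(r.status_code, r.text)
```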
675 | 
676 | ### Phase 2: CLI (basic-memory)
677 | 
678 | #### Task 2.1: Review and understand CLI authentication flow
679 | **Files**: `src/basic_memory/cli/commands/cloud/`
680 | 
681 | - [ ] Read `core_commands.py` to understand current login flow
682 | - [ ] Read `api_client.py` to understand current error handling
683 | - [ ] Identify where 403 errors should be caught
684 | - [ ] Identify what error messages should be displayed
685 | - [ ] Document current behavior in spec if needed
686 | 
687 | #### Task 2.2: Update API client error handling
688 | **File**: `src/basic_memory/cli/commands/cloud/api_client.py`
689 | 
690 | - [ ] Add custom exception class `SubscriptionRequiredError` (or similar)
691 | - [ ] Update HTTP error handling to parse 403 responses
692 | - [ ] Extract `error`, `message`, and `subscribe_url` from error detail
693 | - [ ] Raise specific exception for subscription_required errors (see the sketch below)
694 | - [ ] Run `just typecheck` in basic-memory repo to verify types
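
One way this could look, as a hedged sketch; the helper name is an assumption, and the parsed shape matches this spec's 403 detail:

```python
# Hedged sketch: typed error carrying the parsed 403 detail.
class SubscriptionRequiredError(Exception):
    def __init__(self, message: str, subscribe_url: str):
        super().__init__(message)
        self.message = message
        self.subscribe_url = subscribe_url

def raise_for_subscription_required(response) -> None:
    """Translate a 403 subscription_required response into a typed error."""
    if response.status_code == 403:
        detail = response.json().get("detail", {})
        if detail.get("error") == "subscription_required":
            raise SubscriptionRequiredError(
                message=detail.get("message", "Active subscription required"),
                subscribe_url=detail.get("subscribe_url", ""),
            )
```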
695 | 
696 | #### Task 2.3: Update CLI login command error handling
697 | **File**: `src/basic_memory/cli/commands/cloud/core_commands.py`
698 | 
699 | - [ ] Import the subscription error exception
700 | - [ ] Wrap login flow with try/except for subscription errors
701 | - [ ] Display user-friendly error message with rich console
702 | - [ ] Show subscribe URL prominently
703 | - [ ] Provide actionable next steps
704 | - [ ] Run `just typecheck` to verify types
705 | 
706 | **Expected error handling**:
707 | ```python
708 | try:
709 |     # Existing login logic
710 |     success = await auth.login()
711 |     if success:
712 |         # Test access to protected endpoint
713 |         await api_client.test_connection()
714 | except SubscriptionRequiredError as e:
715 |     console.print("\n[red]✗ Subscription Required[/red]\n")
716 |     console.print(f"[yellow]{e.message}[/yellow]\n")
717 |     console.print(f"Subscribe at: [blue underline]{e.subscribe_url}[/blue underline]\n")
718 |     console.print("[dim]Once you have an active subscription, run [bold]bm cloud login[/bold] again.[/dim]")
719 |     raise typer.Exit(1)
720 | ```
721 | 
722 | #### Task 2.4: Update CLI tests
723 | **File**: `tests/cli/test_cloud_commands.py`
724 | 
725 | - [ ] Add test: `test_login_without_subscription_shows_error()` (see the sketch below)
726 |   - Mock 403 subscription_required response
727 |   - Call login command
728 |   - Assert error message displayed
729 |   - Assert subscribe URL shown
730 | - [ ] Add test: `test_login_with_subscription_succeeds()`
731 |   - Mock successful authentication + subscription check
732 |   - Call login command
733 |   - Assert success message
734 | - [ ] Run `just test` to verify tests pass
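
A hedged sketch of the first test; the app import path, auth class, and patch target are assumptions about the CLI's internals:

```python
# Hedged sketch: drive the login command through Typer's test runner.
from typer.testing import CliRunner
from unittest.mock import patch

from basic_memory.cli.app import app  # assumed app location
from basic_memory.cli.commands.cloud.api_client import SubscriptionRequiredError

runner = CliRunner()

def test_login_without_subscription_shows_error():
    error = SubscriptionRequiredError(
        "Active subscription required", "https://basicmemory.com/subscribe"
    )
    # Patch target assumed; point it at whatever the login command awaits.
    with patch(
        "basic_memory.cli.commands.cloud.core_commands.CLIAuth", autospec=True
    ) as mock_auth:
        mock_auth.return_value.login.side_effect = error
        result = runner.invoke(app, ["cloud", "login"])
    assert result.exit_code == 1
    assert "Subscription Required" in result.output
```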
735 | 
736 | #### Task 2.5: Update CLI documentation
737 | **File**: `docs/cloud-cli.md` (in basic-memory-docs repo)
738 | 
739 | - [ ] Add "Prerequisites" section if not present
740 | - [ ] Document subscription requirement
741 | - [ ] Add "Troubleshooting" section
742 | - [ ] Document "Subscription Required" error
743 | - [ ] Provide subscribe URL
744 | - [ ] Add FAQ entry about subscription errors
745 | - [ ] Build docs locally to verify formatting
746 | 
747 | ### Phase 3: End-to-End Testing
748 | 
749 | #### Task 3.1: Create test user accounts
750 | **Prerequisites**: Access to WorkOS admin and database
751 | 
752 | - [ ] Create test user WITHOUT subscription:
753 |   - [ ] Sign up via WorkOS AuthKit
754 |   - [ ] Get workos_user_id from database
755 |   - [ ] Verify no subscription record exists
756 |   - [ ] Save credentials for testing
757 | - [ ] Create test user WITH active subscription:
758 |   - [ ] Sign up via WorkOS AuthKit
759 |   - [ ] Create subscription via Polar or dev endpoint
760 |   - [ ] Verify subscription.status = "active" in database
761 |   - [ ] Save credentials for testing
762 | 
763 | #### Task 3.2: Manual testing - User without subscription
764 | **Environment**: Preview/staging deployment
765 | 
766 | - [ ] Run `bm cloud login` with no-subscription user
767 | - [ ] Verify: Login shows "Subscription Required" error
768 | - [ ] Verify: Subscribe URL is displayed
769 | - [ ] Verify: Cannot run `bm cloud setup`
770 | - [ ] Verify: Cannot call `/tenant/mount/info` directly via curl
771 | - [ ] Document any issues found
772 | 
773 | #### Task 3.3: Manual testing - User with active subscription
774 | **Environment**: Preview/staging deployment
775 | 
776 | - [ ] Run `bm cloud login` with active-subscription user
777 | - [ ] Verify: Login succeeds without errors
778 | - [ ] Verify: Can run `bm cloud setup`
779 | - [ ] Verify: Can call `/tenant/mount/info` successfully
780 | - [ ] Verify: Can call `/proxy/*` endpoints successfully
781 | - [ ] Document any issues found
782 | 
783 | #### Task 3.4: Test subscription state transitions
784 | **Environment**: Preview/staging deployment + database access
785 | 
786 | - [ ] Start with active subscription user
787 | - [ ] Verify: All operations work
788 | - [ ] Update subscription.status to "cancelled" in database (a direct-update sketch follows this list)
789 | - [ ] Verify: Login now shows "Subscription Required" error
790 | - [ ] Verify: Existing tokens are rejected with 403
791 | - [ ] Update subscription.status back to "active"
792 | - [ ] Verify: Access restored immediately
793 | - [ ] Document any issues found
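
For those status flips, a direct update along these lines could be used; the table and column names are assumptions about the cloud schema:

```python
# Hedged sketch: flip a test user's subscription status for transition testing.
from sqlalchemy import text

async def set_subscription_status(session, workos_user_id: str, status: str) -> None:
    await session.execute(
        text("UPDATE subscription SET status = :status WHERE workos_user_id = :uid"),
        {"status": status, "uid": workos_user_id},
    )
    await session.commit()
```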
794 | 
795 | #### Task 3.5: Integration test suite
796 | **File**: `apps/cloud/tests/integration/test_cli_subscription_flow.py` (create if doesn't exist)
797 | 
798 | - [ ] Create integration test file
799 | - [ ] Add test: `test_cli_flow_without_subscription()`
800 |   - Simulate full CLI flow without subscription
801 |   - Assert 403 at appropriate points
802 | - [ ] Add test: `test_cli_flow_with_active_subscription()`
803 |   - Simulate full CLI flow with active subscription
804 |   - Assert all operations succeed
805 | - [ ] Add test: `test_subscription_expiration_blocks_access()`
806 |   - Start with active subscription
807 |   - Change status to cancelled
808 |   - Assert access denied
809 | - [ ] Run tests in CI/CD pipeline
810 | - [ ] Document test coverage
811 | 
812 | #### Task 3.6: Load/performance testing (optional)
813 | **Environment**: Staging environment
814 | 
815 | - [ ] Test subscription check performance under load
816 | - [ ] Measure latency added by subscription check
817 | - [ ] Verify database query performance
818 | - [ ] Document any performance concerns
819 | - [ ] Optimize if needed
820 | 
821 | ## Implementation Summary Checklist
822 | 
823 | Use this high-level checklist to track overall progress:
824 | 
825 | ### Phase 1: Cloud Service 🔄
826 | - [x] Add subscription check method to SubscriptionService
827 | - [x] Add subscription validation dependency to deps.py
828 | - [x] Add subscription_url config (env var)
829 | - [x] Protect tenant mount endpoints (4 endpoints)
830 | - [x] Protect proxy endpoints (2 endpoints)
831 | - [ ] Add unit tests for subscription service
832 | - [ ] Add integration tests for dependency
833 | - [ ] Deploy and verify cloud service
834 | 
835 | ### Phase 2: CLI Updates 🔄
836 | - [ ] Review CLI authentication flow
837 | - [ ] Update API client error handling
838 | - [ ] Update CLI login command error handling
839 | - [ ] Add CLI tests
840 | - [ ] Update CLI documentation
841 | 
842 | ### Phase 3: End-to-End Testing 🧪
843 | - [ ] Create test user accounts
844 | - [ ] Manual testing - user without subscription
845 | - [ ] Manual testing - user with active subscription
846 | - [ ] Test subscription state transitions
847 | - [ ] Integration test suite
848 | - [ ] Load/performance testing (optional)
849 | 
850 | ## Questions to Resolve
851 | 
852 | ### Resolved ✅
853 | 
854 | 1. **Admin Access**
855 |    - ✅ **Decision**: Admin users bypass subscription check
856 |    - **Rationale**: Admin endpoints already use `AdminUserHybridDep`, which is separate from CLI user endpoints
857 |    - **Implementation**: No changes needed to admin endpoints
858 | 
859 | 2. **Subscription Check Implementation**
860 |    - ✅ **Decision**: Use Option A (Database Check)
861 |    - **Rationale**: Simpler, faster to implement, works with existing infrastructure
862 |    - **Implementation**: Single JOIN query via `get_subscription_by_workos_user_id()`
863 | 
864 | 3. **Dependency Return Type**
865 |    - ✅ **Decision**: Return `UserProfile` (not `UserContext`)
866 |    - **Rationale**: Drop-in compatibility with existing endpoints, no refactoring needed
867 |    - **Implementation**: `AuthorizedCLIUserProfileDep` returns `UserProfile`
868 | 
869 | ### To Be Resolved ⏳
870 | 
871 | 1. **Subscription Check Frequency**
872 |    - **Options**:
873 |      - Check on every API call (slower, more secure) ✅ **RECOMMENDED**
874 |      - Cache subscription status (faster, risk of stale data)
875 |      - Check only on login/setup (fast, but allows expired subscriptions temporarily)
876 |    - **Recommendation**: Check on every call via dependency injection (simple, secure, acceptable performance)
877 |    - **Impact**: ~5-10ms per request (single indexed JOIN query)
878 | 
879 | 2. **Grace Period**
880 |    - **Options**:
881 |      - No grace period - immediate block when the subscription check fails ✅ **RECOMMENDED**
882 |      - 7-day grace period after period_end
883 |      - 14-day grace period after period_end
884 |    - **Recommendation**: No grace period initially; add one later if customer feedback warrants it
885 |    - **Implementation**: The implemented `check_subscription()` blocks as soon as `status != "active"` or `current_period_end` has passed; a grace period would relax the period check
886 | 
887 | 3. **Subscription Expiration Handling**
888 |    - **Question**: Should we check `current_period_end < now()` in addition to `status == "active"`?
889 |    - **Options**:
890 |      - Only check status field (rely on Polar webhooks to update status)
891 |      - Check both status and current_period_end (more defensive) ✅ **IMPLEMENTED**
892 |    - **Resolution**: Task 1.1's `check_subscription()` checks both, so a failed Polar webhook cannot leave an expired subscription with access
893 |    - **Risk**: If webhooks lag, a renewed subscription may stay blocked until `current_period_end` is updated
894 | 
895 | 4. **Subscribe URL**
896 |    - **Question**: What's the actual subscription URL?
897 |    - **Current**: Spec uses `https://basicmemory.com/subscribe`
898 |    - **Action Required**: Verify correct URL before implementation
899 | 
900 | 5. **Dev Mode / Testing Bypass**
901 |    - **Question**: Support bypass for development/testing?
902 |    - **Options**:
903 |      - Environment variable: `DISABLE_SUBSCRIPTION_CHECK=true`
904 |      - Always enforce (more realistic testing) ✅ **RECOMMENDED**
905 |    - **Recommendation**: No bypass - use test users with real subscriptions for realistic testing
906 |    - **Implementation**: Create dev endpoint to activate subscriptions for testing (sketched below)
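
   A hedged sketch of such an endpoint; the route, dependency names, and guard are assumptions, and it must never ship enabled in production:

   ```python
   # Hedged sketch: dev-only subscription activation for test accounts.
   from fastapi import APIRouter, HTTPException

   router = APIRouter(prefix="/dev", tags=["dev"])

   @router.post("/subscriptions/{workos_user_id}/activate")
   async def activate_test_subscription(
       workos_user_id: str,
       session: DatabaseSessionDep,  # assumed existing dependency
       subscription_repo: SubscriptionRepositoryDep,  # assumed existing dependency
   ):
       subscription = await subscription_repo.get_subscription_by_workos_user_id(
           session, workos_user_id
       )
       if subscription is None:
           raise HTTPException(404, detail="No subscription record for user")
       subscription.status = "active"
       await session.commit()
       return {"status": subscription.status}
   ```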
907 | 
908 | ## Related Specs
909 | 
910 | - SPEC-9: Multi-Project Bidirectional Sync Architecture (CLI affected by this change)
911 | - SPEC-8: TigrisFS Integration (Mount endpoints protected)
912 | 
913 | ## Notes
914 | 
915 | - This spec prioritizes security over convenience - better to block unauthorized access than risk revenue loss
916 | - Clear error messages are critical - users should understand why they're blocked and how to resolve it
917 | - Consider adding telemetry to track subscription_required errors for monitoring signup conversion
918 | 
```

--------------------------------------------------------------------------------
/src/basic_memory/services/project_service.py:
--------------------------------------------------------------------------------

```python
  1 | """Project management service for Basic Memory."""
  2 | 
  3 | import asyncio
  4 | import json
  5 | import os
  6 | import shutil
  7 | from datetime import datetime
  8 | from pathlib import Path
  9 | from typing import Dict, Optional, Sequence
 10 | 
 11 | 
 12 | from loguru import logger
 13 | from sqlalchemy import text
 14 | 
 15 | from basic_memory.models import Project
 16 | from basic_memory.repository.project_repository import ProjectRepository
 17 | from basic_memory.schemas import (
 18 |     ActivityMetrics,
 19 |     ProjectInfoResponse,
 20 |     ProjectStatistics,
 21 |     SystemStatus,
 22 | )
 23 | from basic_memory.config import WATCH_STATUS_JSON, ConfigManager, get_project_config, ProjectConfig
 24 | from basic_memory.utils import generate_permalink
 25 | 
 26 | 
 27 | class ProjectService:
 28 |     """Service for managing Basic Memory projects."""
 29 | 
 30 |     repository: ProjectRepository
 31 | 
 32 |     def __init__(self, repository: ProjectRepository):
 33 |         """Initialize the project service."""
 34 |         super().__init__()
 35 |         self.repository = repository
 36 | 
 37 |     @property
 38 |     def config_manager(self) -> ConfigManager:
 39 |         """Get a ConfigManager instance.
 40 | 
 41 |         Returns:
 42 |             Fresh ConfigManager instance for each access
 43 |         """
 44 |         return ConfigManager()
 45 | 
 46 |     @property
 47 |     def config(self) -> ProjectConfig:  # pragma: no cover
 48 |         """Get the current project configuration.
 49 | 
 50 |         Returns:
 51 |             Current project configuration
 52 |         """
 53 |         return get_project_config()
 54 | 
 55 |     @property
 56 |     def projects(self) -> Dict[str, str]:
 57 |         """Get all configured projects.
 58 | 
 59 |         Returns:
 60 |             Dict mapping project names to their file paths
 61 |         """
 62 |         return self.config_manager.projects
 63 | 
 64 |     @property
 65 |     def default_project(self) -> str:
 66 |         """Get the name of the default project.
 67 | 
 68 |         Returns:
 69 |             The name of the default project
 70 |         """
 71 |         return self.config_manager.default_project
 72 | 
 73 |     @property
 74 |     def current_project(self) -> str:
 75 |         """Get the name of the currently active project.
 76 | 
 77 |         Returns:
 78 |             The name of the current project
 79 |         """
 80 |         return os.environ.get("BASIC_MEMORY_PROJECT", self.config_manager.default_project)
 81 | 
 82 |     async def list_projects(self) -> Sequence[Project]:
 83 |         """List all projects without loading entity relationships.
 84 | 
 85 |         Returns only basic project fields (name, path, etc.) without
 86 |         eager loading the entities relationship which could load thousands
 87 |         of entities for large knowledge bases.
 88 |         """
 89 |         return await self.repository.find_all(use_load_options=False)
 90 | 
 91 |     async def get_project(self, name: str) -> Optional[Project]:
 92 |         """Get the file path for a project by name or permalink."""
 93 |         return await self.repository.get_by_name(name) or await self.repository.get_by_permalink(
 94 |             name
 95 |         )
 96 | 
 97 |     def _check_nested_paths(self, path1: str, path2: str) -> bool:
 98 |         """Check if two paths are nested (one is a prefix of the other).
 99 | 
100 |         Args:
101 |             path1: First path to compare
102 |             path2: Second path to compare
103 | 
104 |         Returns:
105 |             True if one path is nested within the other, False otherwise
106 | 
107 |         Examples:
108 |             _check_nested_paths("/foo", "/foo/bar")     # True (child under parent)
109 |             _check_nested_paths("/foo/bar", "/foo")     # True (parent over child)
110 |             _check_nested_paths("/foo", "/bar")         # False (siblings)
111 |         """
112 |         # Normalize paths to ensure proper comparison
113 |         p1 = Path(path1).resolve()
114 |         p2 = Path(path2).resolve()
115 | 
116 |         # Check if either path is a parent of the other
117 |         try:
118 |             # Check if p2 is under p1
119 |             p2.relative_to(p1)
120 |             return True
121 |         except ValueError:
122 |             # Not nested in this direction, check the other
123 |             try:
124 |                 # Check if p1 is under p2
125 |                 p1.relative_to(p2)
126 |                 return True
127 |             except ValueError:
128 |                 # Not nested in either direction
129 |                 return False
130 | 
131 |     async def add_project(self, name: str, path: str, set_default: bool = False) -> None:
132 |         """Add a new project to the configuration and database.
133 | 
134 |         Args:
135 |             name: The name of the project
136 |             path: The file path to the project directory
137 |             set_default: Whether to set this project as the default
138 | 
139 |         Raises:
140 |             ValueError: If the project already exists or path collides with existing project
141 |         """
142 |         # If project_root is set, constrain all projects to that directory
143 |         project_root = self.config_manager.config.project_root
144 |         sanitized_name = None
145 |         if project_root:
146 |             base_path = Path(project_root)
147 | 
148 |             # In cloud mode (when project_root is set), ignore user's path completely
149 |             # and use sanitized project name as the directory name
150 |             # This ensures flat structure: /app/data/test-bisync instead of /app/data/documents/test bisync
151 |             sanitized_name = generate_permalink(name)
152 | 
153 |             # Construct path using sanitized project name only
154 |             resolved_path = (base_path / sanitized_name).resolve().as_posix()
155 | 
156 |             # Verify the resolved path is actually under project_root
157 |             if not resolved_path.startswith(base_path.resolve().as_posix()):  # pragma: no cover
158 |                 raise ValueError(
159 |                     f"BASIC_MEMORY_PROJECT_ROOT is set to {project_root}. "
160 |                     f"All projects must be created under this directory. Invalid path: {path}"
161 |                 )  # pragma: no cover
162 | 
163 |             # Check for case-insensitive path collisions with existing projects
164 |             existing_projects = await self.list_projects()
165 |             for existing in existing_projects:
166 |                 if (
167 |                     existing.path.lower() == resolved_path.lower()
168 |                     and existing.path != resolved_path
169 |                 ):
170 |                     raise ValueError(  # pragma: no cover
171 |                         f"Path collision detected: '{resolved_path}' conflicts with existing project "
172 |                         f"'{existing.name}' at '{existing.path}'. "
173 |                         f"In cloud mode, paths are normalized to lowercase to prevent case-sensitivity issues."
174 |                     )  # pragma: no cover
175 |         else:
176 |             resolved_path = Path(os.path.abspath(os.path.expanduser(path))).as_posix()
177 | 
178 |         # Check for nested paths with existing projects
179 |         existing_projects = await self.list_projects()
180 |         for existing in existing_projects:
181 |             if self._check_nested_paths(resolved_path, existing.path):
182 |                 # Determine which path is nested within which for appropriate error message
183 |                 p_new = Path(resolved_path).resolve()
184 |                 p_existing = Path(existing.path).resolve()
185 | 
186 |                 # Check if new path is nested under existing project
187 |                 if p_new.is_relative_to(p_existing):
188 |                     raise ValueError(
189 |                         f"Cannot create project at '{resolved_path}': "
190 |                         f"path is nested within existing project '{existing.name}' at '{existing.path}'. "
191 |                         f"Projects cannot share directory trees."
192 |                     )
193 |                 else:
194 |                     # Existing project is nested under new path
195 |                     raise ValueError(
196 |                         f"Cannot create project at '{resolved_path}': "
197 |                         f"existing project '{existing.name}' at '{existing.path}' is nested within this path. "
198 |                         f"Projects cannot share directory trees."
199 |                     )
200 | 
201 |         if not self.config_manager.config.cloud_mode:
202 |             # First add to config file (this will validate the project doesn't exist)
203 |             self.config_manager.add_project(name, resolved_path)
204 | 
205 |         # Then add to database
206 |         project_data = {
207 |             "name": name,
208 |             "path": resolved_path,
209 |             "permalink": sanitized_name,
210 |             "is_active": True,
211 |             # Don't set is_default=False to avoid UNIQUE constraint issues
212 |             # Let it default to NULL, only set to True when explicitly making default
213 |         }
214 |         created_project = await self.repository.create(project_data)
215 | 
216 |         # If this should be the default project, ensure only one default exists
217 |         if set_default:
218 |             await self.repository.set_as_default(created_project.id)
219 |             self.config_manager.set_default_project(name)
220 |             logger.info(f"Project '{name}' set as default")
221 | 
222 |         logger.info(f"Project '{name}' added at {resolved_path}")
223 | 
224 |     async def remove_project(self, name: str, delete_notes: bool = False) -> None:
225 |         """Remove a project from configuration and database.
226 | 
227 |         Args:
228 |             name: The name of the project to remove
229 |             delete_notes: If True, delete the project directory from filesystem
230 | 
231 |         Raises:
232 |             ValueError: If the project doesn't exist or is the default project
233 |         """
234 |         if not self.repository:  # pragma: no cover
235 |             raise ValueError("Repository is required for remove_project")
236 | 
237 |         # Get project from database first
238 |         project = await self.get_project(name)
239 |         if not project:
240 |             raise ValueError(f"Project '{name}' not found")  # pragma: no cover
241 | 
242 |         project_path = project.path
243 | 
244 |         # Check if project is default (in cloud mode, check database; in local mode, check config)
245 |         if project.is_default or name == self.config_manager.config.default_project:
246 |             raise ValueError(f"Cannot remove the default project '{name}'")  # pragma: no cover
247 | 
248 |         # Remove from config if it exists there (may not exist in cloud mode)
249 |         try:
250 |             self.config_manager.remove_project(name)
251 |         except ValueError:  # pragma: no cover
252 |             # Project not in config - that's OK in cloud mode, continue with database deletion
253 |             logger.debug(  # pragma: no cover
254 |                 f"Project '{name}' not found in config, removing from database only"
255 |             )
256 | 
257 |         # Remove from database
258 |         await self.repository.delete(project.id)
259 | 
260 |         logger.info(f"Project '{name}' removed from configuration and database")
261 | 
262 |         # Optionally delete the project directory
263 |         if delete_notes and project_path:
264 |             try:
265 |                 path_obj = Path(project_path)
266 |                 if path_obj.exists() and path_obj.is_dir():
267 |                     await asyncio.to_thread(shutil.rmtree, project_path)
268 |                     logger.info(f"Deleted project directory: {project_path}")
269 |                 else:
270 |                     logger.warning(  # pragma: no cover
271 |                         f"Project directory not found or not a directory: {project_path}"
272 |                     )  # pragma: no cover
273 |             except Exception as e:  # pragma: no cover
274 |                 logger.warning(  # pragma: no cover
275 |                     f"Failed to delete project directory {project_path}: {e}"
276 |                 )
277 | 
278 |     async def set_default_project(self, name: str) -> None:
279 |         """Set the default project in configuration and database.
280 | 
281 |         Args:
282 |             name: The name of the project to set as default
283 | 
284 |         Raises:
285 |             ValueError: If the project doesn't exist
286 |         """
287 |         if not self.repository:  # pragma: no cover
288 |             raise ValueError("Repository is required for set_default_project")
289 | 
290 |         # Look up project in database first to validate it exists
291 |         project = await self.get_project(name)
292 |         if not project:
293 |             raise ValueError(f"Project '{name}' not found")
294 | 
295 |         # Update database
296 |         await self.repository.set_as_default(project.id)
297 | 
298 |         # Update config file only in local mode (cloud mode uses database only)
299 |         if not self.config_manager.config.cloud_mode:
300 |             self.config_manager.set_default_project(name)
301 | 
302 |         logger.info(f"Project '{name}' set as default in configuration and database")
303 | 
304 |     async def _ensure_single_default_project(self) -> None:
305 |         """Ensure only one project has is_default=True.
306 | 
307 |         This method validates the database state and fixes any issues where
308 |         multiple projects might have is_default=True or no project is marked as default.
309 |         """
310 |         if not self.repository:
311 |             raise ValueError(
312 |                 "Repository is required for _ensure_single_default_project"
313 |             )  # pragma: no cover
314 | 
315 |         # Get all projects with is_default=True
316 |         db_projects = await self.repository.find_all()
317 |         default_projects = [p for p in db_projects if p.is_default is True]
318 | 
319 |         if len(default_projects) > 1:  # pragma: no cover
320 |             # Multiple defaults found - fix by keeping the first one and clearing others
321 |             # This is defensive code that should rarely execute due to business logic enforcement
322 |             logger.warning(  # pragma: no cover
323 |                 f"Found {len(default_projects)} projects with is_default=True, fixing..."
324 |             )
325 |             keep_default = default_projects[0]  # pragma: no cover
326 | 
327 |             # Clear all defaults first, then set only the first one as default
328 |             await self.repository.set_as_default(keep_default.id)  # pragma: no cover
329 | 
330 |             logger.info(
331 |                 f"Fixed default project conflicts, kept '{keep_default.name}' as default"
332 |             )  # pragma: no cover
333 | 
334 |         elif len(default_projects) == 0:  # pragma: no cover
335 |             # No default project - set the config default as default
336 |             # This is defensive code for edge cases where no default exists
337 |             config_default = self.config_manager.default_project  # pragma: no cover
338 |             config_project = await self.repository.get_by_name(config_default)  # pragma: no cover
339 |             if config_project:  # pragma: no cover
340 |                 await self.repository.set_as_default(config_project.id)  # pragma: no cover
341 |                 logger.info(
342 |                     f"Set '{config_default}' as default project (was missing)"
343 |                 )  # pragma: no cover
344 | 
345 |     async def synchronize_projects(self) -> None:  # pragma: no cover
346 |         """Synchronize projects between database and configuration.
347 | 
348 |         Ensures that all projects in the configuration file exist in the database
349 |         and vice versa. This should be called during initialization to reconcile
350 |         any differences between the two sources.
351 |         """
352 |         if not self.repository:
353 |             raise ValueError("Repository is required for synchronize_projects")
354 | 
355 |         logger.info("Synchronizing projects between database and configuration")
356 | 
357 |         # Get all projects from database
358 |         db_projects = await self.repository.get_active_projects()
359 |         db_projects_by_permalink = {p.permalink: p for p in db_projects}
360 | 
361 |         # Get all projects from configuration and normalize names if needed
362 |         config_projects = self.config_manager.projects.copy()
363 |         updated_config = {}
364 |         config_updated = False
365 | 
366 |         for name, path in config_projects.items():
367 |             # Generate normalized name (what the database expects)
368 |             normalized_name = generate_permalink(name)
369 | 
370 |             if normalized_name != name:
371 |                 logger.info(f"Normalizing project name in config: '{name}' -> '{normalized_name}'")
372 |                 config_updated = True
373 | 
374 |             updated_config[normalized_name] = path
375 | 
376 |         # Update the configuration if any changes were made
377 |         if config_updated:
378 |             config = self.config_manager.load_config()
379 |             config.projects = updated_config
380 |             self.config_manager.save_config(config)
381 |             logger.info("Config updated with normalized project names")
382 | 
383 |         # Use the normalized config for further processing
384 |         config_projects = updated_config
385 | 
386 |         # Add projects that exist in config but not in DB
387 |         for name, path in config_projects.items():
388 |             if name not in db_projects_by_permalink:
389 |                 logger.info(f"Adding project '{name}' to database")
390 |                 project_data = {
391 |                     "name": name,
392 |                     "path": path,
393 |                     "permalink": generate_permalink(name),
394 |                     "is_active": True,
395 |                     # Don't set is_default here - let the enforcement logic handle it
396 |                 }
397 |                 await self.repository.create(project_data)
398 | 
399 |         # Remove projects that exist in DB but not in config
400 |         # Config is the source of truth - if a project was deleted from config,
401 |         # it should be deleted from DB too (fixes issue #193)
402 |         for name, project in db_projects_by_permalink.items():
403 |             if name not in config_projects:
404 |                 logger.info(
405 |                     f"Removing project '{name}' from database (deleted from config, source of truth)"
406 |                 )
407 |                 await self.repository.delete(project.id)
408 | 
409 |         # Ensure database default project state is consistent
410 |         await self._ensure_single_default_project()
411 | 
412 |         # Make sure default project is synchronized between config and database
413 |         db_default = await self.repository.get_default_project()
414 |         config_default = self.config_manager.default_project
415 | 
416 |         if db_default and db_default.name != config_default:
417 |             # Update config to match DB default
418 |             logger.info(f"Updating default project in config to '{db_default.name}'")
419 |             self.config_manager.set_default_project(db_default.name)
420 |         elif not db_default and config_default:
421 |             # Update DB to match config default (if the project exists)
422 |             project = await self.repository.get_by_name(config_default)
423 |             if project:
424 |                 logger.info(f"Updating default project in database to '{config_default}'")
425 |                 await self.repository.set_as_default(project.id)
426 | 
427 |         logger.info("Project synchronization complete")
428 | 
429 |     async def move_project(self, name: str, new_path: str) -> None:
430 |         """Move a project to a new location.
431 | 
432 |         Args:
433 |             name: The name of the project to move
434 |             new_path: The new absolute path for the project
435 | 
436 |         Raises:
437 |             ValueError: If the project doesn't exist or repository isn't initialized
438 |         """
439 |         if not self.repository:  # pragma: no cover
440 |             raise ValueError("Repository is required for move_project")  # pragma: no cover
441 | 
442 |         # Resolve to absolute path
443 |         resolved_path = Path(os.path.abspath(os.path.expanduser(new_path))).as_posix()
444 | 
445 |         # Validate project exists in config
446 |         if name not in self.config_manager.projects:
447 |             raise ValueError(f"Project '{name}' not found in configuration")
448 | 
449 |         # Create the new directory if it doesn't exist
450 |         Path(resolved_path).mkdir(parents=True, exist_ok=True)
451 | 
452 |         # Update in configuration
453 |         config = self.config_manager.load_config()
454 |         old_path = config.projects[name]
455 |         config.projects[name] = resolved_path
456 |         self.config_manager.save_config(config)
457 | 
458 |         # Update in database using robust lookup
459 |         project = await self.get_project(name)
460 |         if project:
461 |             await self.repository.update_path(project.id, resolved_path)
462 |             logger.info(f"Moved project '{name}' from {old_path} to {resolved_path}")
463 |         else:
464 |             logger.error(f"Project '{name}' exists in config but not in database")
465 |             # Restore the old path in config since DB update failed
466 |             config.projects[name] = old_path
467 |             self.config_manager.save_config(config)
468 |             raise ValueError(f"Project '{name}' not found in database")
469 | 
470 |     async def update_project(  # pragma: no cover
471 |         self, name: str, updated_path: Optional[str] = None, is_active: Optional[bool] = None
472 |     ) -> None:
473 |         """Update project information in both config and database.
474 | 
475 |         Args:
476 |             name: The name of the project to update
477 |             updated_path: Optional new path for the project
478 |             is_active: Optional flag to set project active status
479 | 
480 |         Raises:
481 |             ValueError: If project doesn't exist or repository isn't initialized
482 |         """
483 |         if not self.repository:
484 |             raise ValueError("Repository is required for update_project")
485 | 
486 |         # Validate project exists in config
487 |         if name not in self.config_manager.projects:
488 |             raise ValueError(f"Project '{name}' not found in configuration")
489 | 
490 |         # Get project from database using robust lookup
491 |         project = await self.get_project(name)
492 |         if not project:
493 |             logger.error(f"Project '{name}' exists in config but not in database")
494 |             return
495 | 
496 |         # Update path if provided
497 |         if updated_path:
498 |             resolved_path = Path(os.path.abspath(os.path.expanduser(updated_path))).as_posix()
499 | 
500 |             # Update in config
501 |             config = self.config_manager.load_config()
502 |             config.projects[name] = resolved_path
503 |             self.config_manager.save_config(config)
504 | 
505 |             # Update in database
506 |             project.path = resolved_path
507 |             await self.repository.update(project.id, project)
508 | 
509 |             logger.info(f"Updated path for project '{name}' to {resolved_path}")
510 | 
511 |         # Update active status if provided
512 |         if is_active is not None:
513 |             project.is_active = is_active
514 |             await self.repository.update(project.id, project)
515 |             logger.info(f"Set active status for project '{name}' to {is_active}")
516 | 
517 |         # If project was made inactive and it was the default, we need to pick a new default
518 |         if is_active is False and project.is_default:
519 |             # Find another active project
520 |             active_projects = await self.repository.get_active_projects()
521 |             if active_projects:
522 |                 new_default = active_projects[0]
523 |                 await self.repository.set_as_default(new_default.id)
524 |                 self.config_manager.set_default_project(new_default.name)
525 |                 logger.info(
526 |                     f"Changed default project to '{new_default.name}' as '{name}' was deactivated"
527 |                 )
528 | 
529 |     async def get_project_info(self, project_name: Optional[str] = None) -> ProjectInfoResponse:
530 |         """Get comprehensive information about the specified Basic Memory project.
531 | 
532 |         Args:
533 |             project_name: Name of the project to get info for. If None, uses the current config project.
534 | 
535 |         Returns:
536 |             Comprehensive project information and statistics
537 |         """
538 |         if not self.repository:  # pragma: no cover
539 |             raise ValueError("Repository is required for get_project_info")
540 | 
541 |         # Use specified project or fall back to config project
542 |         project_name = project_name or self.config.project
543 |         # Get project path from configuration
544 |         name, project_path = self.config_manager.get_project(project_name)
545 |         if not name:  # pragma: no cover
546 |             raise ValueError(f"Project '{project_name}' not found in configuration")
547 | 
548 |         assert project_path is not None
549 |         project_permalink = generate_permalink(project_name)
550 | 
551 |         # Get project from database to get project_id
552 |         db_project = await self.repository.get_by_permalink(project_permalink)
553 |         if not db_project:  # pragma: no cover
554 |             raise ValueError(f"Project '{project_name}' not found in database")
555 | 
556 |         # Get statistics for the specified project
557 |         statistics = await self.get_statistics(db_project.id)
558 | 
559 |         # Get activity metrics for the specified project
560 |         activity = await self.get_activity_metrics(db_project.id)
561 | 
562 |         # Get system status
563 |         system = self.get_system_status()
564 | 
565 |         # Get enhanced project information from database
566 |         db_projects = await self.repository.get_active_projects()
567 |         db_projects_by_permalink = {p.permalink: p for p in db_projects}
568 | 
569 |         # Get default project info
570 |         default_project = self.config_manager.default_project
571 | 
572 |         # Convert config projects to include database info
573 |         enhanced_projects = {}
574 |         for name, path in self.config_manager.projects.items():
575 |             config_permalink = generate_permalink(name)
576 |             db_project = db_projects_by_permalink.get(config_permalink)
577 |             enhanced_projects[name] = {
578 |                 "path": path,
579 |                 "active": db_project.is_active if db_project else True,
580 |                 "id": db_project.id if db_project else None,
581 |                 "is_default": (name == default_project),
582 |                 "permalink": db_project.permalink if db_project else name.lower().replace(" ", "-"),
583 |             }
584 | 
585 |         # Construct the response
586 |         return ProjectInfoResponse(
587 |             project_name=project_name,
588 |             project_path=project_path,
589 |             available_projects=enhanced_projects,
590 |             default_project=default_project,
591 |             statistics=statistics,
592 |             activity=activity,
593 |             system=system,
594 |         )
595 | 
596 |     async def get_statistics(self, project_id: int) -> ProjectStatistics:
597 |         """Get statistics about the specified project.
598 | 
599 |         Args:
600 |             project_id: ID of the project to get statistics for (required).
601 |         """
602 |         if not self.repository:  # pragma: no cover
603 |             raise ValueError("Repository is required for get_statistics")
604 | 
605 |         # Get basic counts
606 |         entity_count_result = await self.repository.execute_query(
607 |             text("SELECT COUNT(*) FROM entity WHERE project_id = :project_id"),
608 |             {"project_id": project_id},
609 |         )
610 |         total_entities = entity_count_result.scalar() or 0
611 | 
612 |         observation_count_result = await self.repository.execute_query(
613 |             text(
614 |                 "SELECT COUNT(*) FROM observation o JOIN entity e ON o.entity_id = e.id WHERE e.project_id = :project_id"
615 |             ),
616 |             {"project_id": project_id},
617 |         )
618 |         total_observations = observation_count_result.scalar() or 0
619 | 
620 |         relation_count_result = await self.repository.execute_query(
621 |             text(
622 |                 "SELECT COUNT(*) FROM relation r JOIN entity e ON r.from_id = e.id WHERE e.project_id = :project_id"
623 |             ),
624 |             {"project_id": project_id},
625 |         )
626 |         total_relations = relation_count_result.scalar() or 0
627 | 
628 |         unresolved_count_result = await self.repository.execute_query(
629 |             text(
630 |                 "SELECT COUNT(*) FROM relation r JOIN entity e ON r.from_id = e.id WHERE r.to_id IS NULL AND e.project_id = :project_id"
631 |             ),
632 |             {"project_id": project_id},
633 |         )
634 |         total_unresolved = unresolved_count_result.scalar() or 0
635 | 
636 |         # Get entity counts by type
637 |         entity_types_result = await self.repository.execute_query(
638 |             text(
639 |                 "SELECT entity_type, COUNT(*) FROM entity WHERE project_id = :project_id GROUP BY entity_type"
640 |             ),
641 |             {"project_id": project_id},
642 |         )
643 |         entity_types = {row[0]: row[1] for row in entity_types_result.fetchall()}
644 | 
645 |         # Get observation counts by category
646 |         category_result = await self.repository.execute_query(
647 |             text(
648 |                 "SELECT o.category, COUNT(*) FROM observation o JOIN entity e ON o.entity_id = e.id WHERE e.project_id = :project_id GROUP BY o.category"
649 |             ),
650 |             {"project_id": project_id},
651 |         )
652 |         observation_categories = {row[0]: row[1] for row in category_result.fetchall()}
653 | 
654 |         # Get relation counts by type
655 |         relation_types_result = await self.repository.execute_query(
656 |             text(
657 |                 "SELECT r.relation_type, COUNT(*) FROM relation r JOIN entity e ON r.from_id = e.id WHERE e.project_id = :project_id GROUP BY r.relation_type"
658 |             ),
659 |             {"project_id": project_id},
660 |         )
661 |         relation_types = {row[0]: row[1] for row in relation_types_result.fetchall()}
662 | 
663 |         # Find most connected entities (most outgoing relations) - project filtered
664 |         connected_result = await self.repository.execute_query(
665 |             text("""
666 |             SELECT e.id, e.title, e.permalink, COUNT(r.id) AS relation_count, e.file_path
667 |             FROM entity e
668 |             JOIN relation r ON e.id = r.from_id
669 |             WHERE e.project_id = :project_id
670 |             GROUP BY e.id
671 |             ORDER BY relation_count DESC
672 |             LIMIT 10
673 |         """),
674 |             {"project_id": project_id},
675 |         )
676 |         most_connected = [
677 |             {
678 |                 "id": row[0],
679 |                 "title": row[1],
680 |                 "permalink": row[2],
681 |                 "relation_count": row[3],
682 |                 "file_path": row[4],
683 |             }
684 |             for row in connected_result.fetchall()
685 |         ]
686 | 
687 |         # Count isolated entities (no relations) - project filtered
688 |         isolated_result = await self.repository.execute_query(
689 |             text("""
690 |             SELECT COUNT(e.id)
691 |             FROM entity e
692 |             LEFT JOIN relation r1 ON e.id = r1.from_id
693 |             LEFT JOIN relation r2 ON e.id = r2.to_id
694 |             WHERE e.project_id = :project_id AND r1.id IS NULL AND r2.id IS NULL
695 |         """),
696 |             {"project_id": project_id},
697 |         )
698 |         isolated_count = isolated_result.scalar() or 0
699 | 
700 |         return ProjectStatistics(
701 |             total_entities=total_entities,
702 |             total_observations=total_observations,
703 |             total_relations=total_relations,
704 |             total_unresolved_relations=total_unresolved,
705 |             entity_types=entity_types,
706 |             observation_categories=observation_categories,
707 |             relation_types=relation_types,
708 |             most_connected_entities=most_connected,
709 |             isolated_entities=isolated_count,
710 |         )
711 | 
712 |     async def get_activity_metrics(self, project_id: int) -> ActivityMetrics:
713 |         """Get activity metrics for the specified project.
714 | 
715 |         Args:
716 |             project_id: ID of the project to get activity metrics for (required).
717 |         """
718 |         if not self.repository:  # pragma: no cover
719 |             raise ValueError("Repository is required for get_activity_metrics")
720 | 
721 |         # Get recently created entities (project filtered)
722 |         created_result = await self.repository.execute_query(
723 |             text("""
724 |             SELECT id, title, permalink, entity_type, created_at, file_path
725 |             FROM entity
726 |             WHERE project_id = :project_id
727 |             ORDER BY created_at DESC
728 |             LIMIT 10
729 |         """),
730 |             {"project_id": project_id},
731 |         )
732 |         recently_created = [
733 |             {
734 |                 "id": row[0],
735 |                 "title": row[1],
736 |                 "permalink": row[2],
737 |                 "entity_type": row[3],
738 |                 "created_at": row[4],
739 |                 "file_path": row[5],
740 |             }
741 |             for row in created_result.fetchall()
742 |         ]
743 | 
744 |         # Get recently updated entities (project filtered)
745 |         updated_result = await self.repository.execute_query(
746 |             text("""
747 |             SELECT id, title, permalink, entity_type, updated_at, file_path
748 |             FROM entity
749 |             WHERE project_id = :project_id
750 |             ORDER BY updated_at DESC
751 |             LIMIT 10
752 |         """),
753 |             {"project_id": project_id},
754 |         )
755 |         recently_updated = [
756 |             {
757 |                 "id": row[0],
758 |                 "title": row[1],
759 |                 "permalink": row[2],
760 |                 "entity_type": row[3],
761 |                 "updated_at": row[4],
762 |                 "file_path": row[5],
763 |             }
764 |             for row in updated_result.fetchall()
765 |         ]
766 | 
767 |         # Get monthly growth over the last 6 months
768 |         # First day of the month six months back, wrapping across the year boundary
769 |         now = datetime.now()
770 |         six_months_ago = datetime(
771 |             now.year - (1 if now.month <= 6 else 0), ((now.month - 6) % 12) or 12, 1
772 |         )
773 | 
774 |         # Query for monthly entity creation (project filtered)
775 |         # Use different date formatting for SQLite vs Postgres
776 |         from basic_memory.config import DatabaseBackend
777 | 
778 |         is_postgres = self.config_manager.config.database_backend == DatabaseBackend.POSTGRES
779 |         date_format = (
780 |             "to_char(created_at, 'YYYY-MM')" if is_postgres else "strftime('%Y-%m', created_at)"
781 |         )
782 | 
783 |         # Postgres needs datetime objects, SQLite needs ISO strings
784 |         six_months_param = six_months_ago if is_postgres else six_months_ago.isoformat()
785 | 
786 |         entity_growth_result = await self.repository.execute_query(
787 |             text(f"""
788 |             SELECT
789 |                 {date_format} AS month,
790 |                 COUNT(*) AS count
791 |             FROM entity
792 |             WHERE created_at >= :six_months_ago AND project_id = :project_id
793 |             GROUP BY month
794 |             ORDER BY month
795 |         """),
796 |             {"six_months_ago": six_months_param, "project_id": project_id},
797 |         )
798 |         entity_growth = {row[0]: row[1] for row in entity_growth_result.fetchall()}
799 | 
800 |         # Query for monthly observation creation (project filtered)
801 |         date_format_entity = (
802 |             "to_char(entity.created_at, 'YYYY-MM')"
803 |             if is_postgres
804 |             else "strftime('%Y-%m', entity.created_at)"
805 |         )
806 | 
807 |         observation_growth_result = await self.repository.execute_query(
808 |             text(f"""
809 |             SELECT
810 |                 {date_format_entity} AS month,
811 |                 COUNT(*) AS count
812 |             FROM observation
813 |             INNER JOIN entity ON observation.entity_id = entity.id
814 |             WHERE entity.created_at >= :six_months_ago AND entity.project_id = :project_id
815 |             GROUP BY month
816 |             ORDER BY month
817 |         """),
818 |             {"six_months_ago": six_months_param, "project_id": project_id},
819 |         )
820 |         observation_growth = {row[0]: row[1] for row in observation_growth_result.fetchall()}
821 | 
822 |         # Query for monthly relation creation (project filtered)
823 |         relation_growth_result = await self.repository.execute_query(
824 |             text(f"""
825 |             SELECT
826 |                 {date_format_entity} AS month,
827 |                 COUNT(*) AS count
828 |             FROM relation
829 |             INNER JOIN entity ON relation.from_id = entity.id
830 |             WHERE entity.created_at >= :six_months_ago AND entity.project_id = :project_id
831 |             GROUP BY month
832 |             ORDER BY month
833 |         """),
834 |             {"six_months_ago": six_months_param, "project_id": project_id},
835 |         )
836 |         relation_growth = {row[0]: row[1] for row in relation_growth_result.fetchall()}
837 | 
838 |         # Combine all monthly growth data
839 |         monthly_growth = {}
840 |         for month in set(
841 |             list(entity_growth.keys())
842 |             + list(observation_growth.keys())
843 |             + list(relation_growth.keys())
844 |         ):
845 |             monthly_growth[month] = {
846 |                 "entities": entity_growth.get(month, 0),
847 |                 "observations": observation_growth.get(month, 0),
848 |                 "relations": relation_growth.get(month, 0),
849 |                 "total": (
850 |                     entity_growth.get(month, 0)
851 |                     + observation_growth.get(month, 0)
852 |                     + relation_growth.get(month, 0)
853 |                 ),
854 |             }
855 | 
856 |         return ActivityMetrics(
857 |             recently_created=recently_created,
858 |             recently_updated=recently_updated,
859 |             monthly_growth=monthly_growth,
860 |         )
861 | 
862 |     def get_system_status(self) -> SystemStatus:
863 |         """Get system status information."""
864 |         import basic_memory
865 | 
866 |         # Get database information
867 |         db_path = self.config_manager.config.database_path
868 |         db_size = db_path.stat().st_size if db_path.exists() else 0
869 |         db_size_readable = f"{db_size / (1024 * 1024):.2f} MB"
870 | 
871 |         # Get watch service status if available
872 |         watch_status = None
873 |         watch_status_path = Path.home() / ".basic-memory" / WATCH_STATUS_JSON
874 |         if watch_status_path.exists():
875 |             try:  # pragma: no cover
876 |                 watch_status = json.loads(
877 |                     watch_status_path.read_text(encoding="utf-8")
878 |                 )
879 |             except Exception:  # pragma: no cover
880 |                 pass
881 | 
882 |         return SystemStatus(
883 |             version=basic_memory.__version__,
884 |             database_path=str(db_path),
885 |             database_size=db_size_readable,
886 |             watch_status=watch_status,
887 |             timestamp=datetime.now(),
888 |         )
889 | 
```
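
A few hedged notes on the trickier spots in this file follow; none of the helpers or queries below ship in the codebase, they are illustrative sketches only.

The isolated-entity count in `get_statistics` uses a double `LEFT JOIN ... IS NULL` anti-join. An equivalent `NOT EXISTS` formulation (valid on both SQLite and Postgres) reads closer to the intent, assuming the same `entity`/`relation` schema:

```python
from sqlalchemy import text

# Hypothetical alternative to the shipped double LEFT JOIN; semantically
# equivalent: count entities with neither outgoing nor incoming relations.
ISOLATED_COUNT_SQL = text("""
    SELECT COUNT(*)
    FROM entity e
    WHERE e.project_id = :project_id
      AND NOT EXISTS (SELECT 1 FROM relation r WHERE r.from_id = e.id)
      AND NOT EXISTS (SELECT 1 FROM relation r WHERE r.to_id = e.id)
""")
```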
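
The `six_months_ago` expression in `get_activity_metrics` is compact enough to misread. A minimal standalone sketch of the same arithmetic, with spot checks (the helper name is illustrative, not part of the service):

```python
from datetime import datetime

def six_months_ago(now: datetime) -> datetime:
    # Same expression as the service: (month - 6) % 12 maps January..June
    # to July..December of the previous year, and "or 12" handles June,
    # where the modulo yields 0. The day is clamped to the 1st.
    return datetime(
        now.year - (1 if now.month <= 6 else 0), ((now.month - 6) % 12) or 12, 1
    )

assert six_months_ago(datetime(2025, 3, 15)) == datetime(2024, 9, 1)
assert six_months_ago(datetime(2025, 6, 1)) == datetime(2024, 12, 1)  # modulo hits 0
assert six_months_ago(datetime(2025, 7, 31)) == datetime(2025, 1, 1)  # same-year case
```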
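
The SQLite/Postgres branch for month bucketing (and for the `:six_months_ago` bind value) is repeated across the three growth queries. A sketch of how the two dialects line up, using hypothetical helper names not taken from the codebase:

```python
from datetime import datetime

def month_bucket_sql(column: str, is_postgres: bool) -> str:
    # Both expressions render a timestamp as 'YYYY-MM', so growth dicts
    # keyed on "month" merge identically on either backend.
    if is_postgres:
        return f"to_char({column}, 'YYYY-MM')"
    return f"strftime('%Y-%m', {column})"

def month_floor_bind(dt: datetime, is_postgres: bool):
    # Postgres drivers bind datetime objects natively; SQLite compares
    # stored text, so an ISO-8601 string keeps the >= comparison correct.
    return dt if is_postgres else dt.isoformat()
```

Under these assumptions, `month_bucket_sql("entity.created_at", is_postgres)` reproduces the `date_format_entity` string interpolated into the observation and relation growth queries above.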
Page 20/27