This is page 1 of 38. Use http://codebase.md/steveyegge/beads?page={x} to view the full context.
# Directory Structure
```
├── .agent
│ └── workflows
│ └── resolve-beads-conflict.md
├── .beads
│ ├── .gitignore
│ ├── BD_GUIDE.md
│ ├── config.yaml
│ ├── cost_state.json
│ ├── deletions.jsonl
│ ├── issues.jsonl
│ ├── metadata.json
│ └── README.md
├── .beads-hooks
│ ├── post-checkout
│ ├── post-merge
│ ├── pre-commit
│ └── pre-push
├── .claude
│ └── test-strategy.md
├── .claude-plugin
│ ├── agents
│ │ └── task-agent.md
│ ├── marketplace.json
│ └── plugin.json
├── .devcontainer
│ ├── devcontainer.json
│ ├── README.md
│ └── setup.sh
├── .gitattributes
├── .github
│ ├── copilot-instructions.md
│ ├── dependabot.yml
│ ├── images
│ │ └── agent-using-beads.jpg
│ └── workflows
│ ├── ci.yml
│ ├── nightly.yml
│ ├── release.yml
│ ├── test-pypi.yml
│ └── update-homebrew.yml
├── .gitignore
├── .golangci.yml
├── .goreleaser.yml
├── .test-skip
├── AGENT_INSTRUCTIONS.md
├── AGENTS.md
├── beads_test.go
├── beads.go
├── CHANGELOG.md
├── CLAUDE.md
├── cmd
│ └── bd
│ ├── autoflush.go
│ ├── autoimport_test.go
│ ├── autoimport.go
│ ├── autostart_test.go
│ ├── clean_security_test.go
│ ├── clean.go
│ ├── cleanup.go
│ ├── cli_fast_test.go
│ ├── comments_test.go
│ ├── comments.go
│ ├── compact_test.go
│ ├── compact.go
│ ├── config_test.go
│ ├── config.go
│ ├── count.go
│ ├── create_test.go
│ ├── create.go
│ ├── daemon_autoimport_test.go
│ ├── daemon_autostart.go
│ ├── daemon_config.go
│ ├── daemon_crash_test.go
│ ├── daemon_debouncer_test.go
│ ├── daemon_debouncer.go
│ ├── daemon_event_loop.go
│ ├── daemon_health_unix.go
│ ├── daemon_health_wasm.go
│ ├── daemon_health_windows.go
│ ├── daemon_lifecycle.go
│ ├── daemon_local_test.go
│ ├── daemon_lock_test.go
│ ├── daemon_lock_unix.go
│ ├── daemon_lock_wasm.go
│ ├── daemon_lock_windows.go
│ ├── daemon_lock.go
│ ├── daemon_logger.go
│ ├── daemon_parent_test.go
│ ├── daemon_rotation_test.go
│ ├── daemon_server.go
│ ├── daemon_sync_branch_test.go
│ ├── daemon_sync_branch.go
│ ├── daemon_sync_test.go
│ ├── daemon_sync.go
│ ├── daemon_test.go
│ ├── daemon_unix_test.go
│ ├── daemon_unix.go
│ ├── daemon_wasm.go
│ ├── daemon_watcher_platform_test.go
│ ├── daemon_watcher_test.go
│ ├── daemon_watcher.go
│ ├── daemon_windows.go
│ ├── daemon.go
│ ├── daemons_test.go
│ ├── daemons.go
│ ├── delete_recording_test.go
│ ├── delete_test.go
│ ├── delete.go
│ ├── deleted.go
│ ├── deletion_propagation_test.go
│ ├── deletion_tracking_test.go
│ ├── deletion_tracking.go
│ ├── dep_test.go
│ ├── dep.go
│ ├── detect_pollution.go
│ ├── direct_mode_test.go
│ ├── direct_mode.go
│ ├── doctor
│ │ ├── claude_test.go
│ │ ├── claude.go
│ │ ├── config_values_test.go
│ │ ├── config_values.go
│ │ ├── fix
│ │ │ ├── common_test.go
│ │ │ ├── common.go
│ │ │ ├── daemon.go
│ │ │ ├── database_config_test.go
│ │ │ ├── database_config.go
│ │ │ ├── deletions.go
│ │ │ ├── hooks.go
│ │ │ ├── merge_driver.go
│ │ │ ├── migrate.go
│ │ │ ├── permissions.go
│ │ │ ├── symlink_test.go
│ │ │ ├── sync_branch.go
│ │ │ ├── sync.go
│ │ │ └── untracked.go
│ │ ├── gitignore_test.go
│ │ ├── gitignore.go
│ │ ├── legacy_test.go
│ │ ├── legacy.go
│ │ └── perf.go
│ ├── doctor_test.go
│ ├── doctor.go
│ ├── duplicates_test.go
│ ├── duplicates.go
│ ├── epic_test.go
│ ├── epic.go
│ ├── errors.go
│ ├── export_import_test.go
│ ├── export_integrity_integration_test.go
│ ├── export_mtime_test.go
│ ├── export_staleness_test.go
│ ├── export_test.go
│ ├── export.go
│ ├── flags.go
│ ├── flush_manager_test.go
│ ├── flush_manager.go
│ ├── git_sync_test.go
│ ├── helpers_test.go
│ ├── hooks_test.go
│ ├── hooks.go
│ ├── import_bug_test.go
│ ├── import_cancellation_test.go
│ ├── import_idempotent_test.go
│ ├── import_mtime_test.go
│ ├── import_multipart_id_test.go
│ ├── import_profile_test.go
│ ├── import_shared.go
│ ├── import_uncommitted_test.go
│ ├── import.go
│ ├── info_test.go
│ ├── info.go
│ ├── init_contributor.go
│ ├── init_hooks_test.go
│ ├── init_team.go
│ ├── init_test.go
│ ├── init.go
│ ├── integrity_content_test.go
│ ├── integrity_test.go
│ ├── integrity.go
│ ├── jira_test.go
│ ├── jira.go
│ ├── jsonl_integrity_test.go
│ ├── label_test.go
│ ├── label.go
│ ├── list_test.go
│ ├── list.go
│ ├── main_test.go
│ ├── main.go
│ ├── markdown_test.go
│ ├── markdown.go
│ ├── merge_security_test.go
│ ├── merge.go
│ ├── message.go
│ ├── migrate_hash_ids_test.go
│ ├── migrate_hash_ids.go
│ ├── migrate_issues.go
│ ├── migrate_sync_test.go
│ ├── migrate_sync.go
│ ├── migrate_test.go
│ ├── migrate_tombstones_test.go
│ ├── migrate_tombstones.go
│ ├── migrate.go
│ ├── nodb_test.go
│ ├── nodb.go
│ ├── onboard_test.go
│ ├── onboard.go
│ ├── output_test.go
│ ├── prime_test.go
│ ├── prime.go
│ ├── quickstart.go
│ ├── readonly_test.go
│ ├── ready_test.go
│ ├── ready.go
│ ├── reinit_test.go
│ ├── rename_prefix_repair_test.go
│ ├── rename_prefix_test.go
│ ├── rename_prefix.go
│ ├── reopen_test.go
│ ├── reopen.go
│ ├── repair_deps_test.go
│ ├── repair_deps.go
│ ├── repo.go
│ ├── restore_test.go
│ ├── restore.go
│ ├── sandbox_test.go
│ ├── sandbox_unix.go
│ ├── sandbox_windows.go
│ ├── scripttest_test.go
│ ├── search_test.go
│ ├── search.go
│ ├── setup
│ │ ├── aider.go
│ │ ├── claude_test.go
│ │ ├── claude.go
│ │ ├── constants.go
│ │ ├── cursor.go
│ │ ├── utils_test.go
│ │ └── utils.go
│ ├── setup.go
│ ├── show.go
│ ├── simple_helpers_test.go
│ ├── snapshot_manager.go
│ ├── stale_test.go
│ ├── stale.go
│ ├── staleness.go
│ ├── status_test.go
│ ├── status.go
│ ├── sync_local_only_test.go
│ ├── sync_merge_test.go
│ ├── sync_test.go
│ ├── sync.go
│ ├── template_security_test.go
│ ├── template_test.go
│ ├── template.go
│ ├── templates
│ │ ├── bug.yaml
│ │ ├── epic.yaml
│ │ ├── feature.yaml
│ │ └── hooks
│ │ ├── post-checkout
│ │ ├── post-merge
│ │ ├── pre-commit
│ │ └── pre-push
│ ├── test_helpers_test.go
│ ├── test_wait_helper.go
│ ├── testdata
│ │ ├── blocked.txt
│ │ ├── close.txt
│ │ ├── create.txt
│ │ ├── dep_add.txt
│ │ ├── dep_remove.txt
│ │ ├── dep_tree.txt
│ │ ├── export.txt
│ │ ├── help.txt
│ │ ├── import.txt
│ │ ├── init.txt
│ │ ├── list.txt
│ │ ├── quickstart.txt
│ │ ├── ready.txt
│ │ ├── show.txt
│ │ ├── stats.txt
│ │ ├── update.txt
│ │ └── version.txt
│ ├── tips_example_test.go
│ ├── tips_test.go
│ ├── tips.go
│ ├── upgrade.go
│ ├── validate_test.go
│ ├── validate.go
│ ├── version_test.go
│ ├── version_tracking_test.go
│ ├── version_tracking.go
│ ├── version.go
│ ├── worktree_test.go
│ └── worktree.go
├── commands
│ ├── blocked.md
│ ├── close.md
│ ├── comments.md
│ ├── compact.md
│ ├── create.md
│ ├── daemon.md
│ ├── daemons.md
│ ├── delete.md
│ ├── dep.md
│ ├── epic.md
│ ├── export.md
│ ├── import.md
│ ├── init.md
│ ├── label.md
│ ├── list.md
│ ├── prime.md
│ ├── quickstart.md
│ ├── ready.md
│ ├── rename-prefix.md
│ ├── reopen.md
│ ├── restore.md
│ ├── search.md
│ ├── show.md
│ ├── stats.md
│ ├── sync.md
│ ├── template.md
│ ├── update.md
│ ├── version.md
│ └── workflow.md
├── CONTRIBUTING.md
├── default.nix
├── docs
│ ├── ADAPTIVE_IDS.md
│ ├── adr
│ │ └── 002-agent-mail-integration.md
│ ├── ADVANCED.md
│ ├── AGENT_MAIL_DEPLOYMENT.md
│ ├── AGENT_MAIL_MULTI_WORKSPACE_SETUP.md
│ ├── AGENT_MAIL_QUICKSTART.md
│ ├── AGENT_MAIL.md
│ ├── AIDER_INTEGRATION.md
│ ├── ANTIVIRUS.md
│ ├── ARCHITECTURE.md
│ ├── ATTRIBUTION.md
│ ├── CLAUDE_INTEGRATION.md
│ ├── CLAUDE.md
│ ├── CLI_REFERENCE.md
│ ├── COLLISION_MATH.md
│ ├── CONFIG.md
│ ├── DAEMON.md
│ ├── DELETIONS.md
│ ├── dev-notes
│ │ ├── ERROR_HANDLING_AUDIT.md
│ │ ├── MAIN_TEST_CLEANUP_PLAN.md
│ │ ├── MAIN_TEST_REFACTOR_NOTES.md
│ │ └── TEST_SUITE_AUDIT.md
│ ├── ERROR_HANDLING.md
│ ├── EXCLUSIVE_LOCK.md
│ ├── EXTENDING.md
│ ├── FAQ.md
│ ├── GIT_INTEGRATION.md
│ ├── INSTALLING.md
│ ├── INTERNALS.md
│ ├── LABELS.md
│ ├── LINTING.md
│ ├── MULTI_REPO_AGENTS.md
│ ├── MULTI_REPO_HYDRATION.md
│ ├── MULTI_REPO_MIGRATION.md
│ ├── PERFORMANCE_TESTING.md
│ ├── PLUGIN.md
│ ├── PROTECTED_BRANCHES.md
│ ├── QUICKSTART.md
│ ├── README_TESTING.md
│ ├── RELEASING.md
│ ├── ROUTING.md
│ ├── TESTING.md
│ ├── TROUBLESHOOTING.md
│ └── UNINSTALLING.md
├── examples
│ ├── bash-agent
│ │ ├── agent.sh
│ │ └── README.md
│ ├── bd-example-extension-go
│ │ ├── .gitignore
│ │ ├── go.mod
│ │ ├── go.sum
│ │ ├── main.go
│ │ ├── README.md
│ │ └── schema.sql
│ ├── claude-code-skill
│ │ ├── README.md
│ │ ├── references
│ │ │ ├── BOUNDARIES.md
│ │ │ ├── CLI_REFERENCE.md
│ │ │ ├── DEPENDENCIES.md
│ │ │ ├── ISSUE_CREATION.md
│ │ │ ├── RESUMABILITY.md
│ │ │ ├── STATIC_DATA.md
│ │ │ └── WORKFLOWS.md
│ │ └── SKILL.md
│ ├── claude-desktop-mcp
│ │ └── README.md
│ ├── compaction
│ │ ├── auto-compact.sh
│ │ ├── cron-compact.sh
│ │ ├── README.md
│ │ └── workflow.sh
│ ├── contributor-workflow
│ │ └── README.md
│ ├── git-hooks
│ │ ├── post-checkout
│ │ ├── post-merge
│ │ ├── pre-commit
│ │ ├── pre-push
│ │ └── README.md
│ ├── github-import
│ │ ├── example-issues.json
│ │ ├── gh2jsonl.py
│ │ └── README.md
│ ├── go-agent
│ │ ├── go.mod
│ │ ├── main.go
│ │ └── README.md
│ ├── jira-import
│ │ ├── jira2jsonl.py
│ │ ├── jsonl2jira.py
│ │ └── README.md
│ ├── library-usage
│ │ ├── go.mod
│ │ ├── go.sum
│ │ ├── main_test.go
│ │ ├── main.go
│ │ └── README.md
│ ├── markdown-to-jsonl
│ │ ├── example-feature.md
│ │ ├── md2jsonl.py
│ │ └── README.md
│ ├── monitor-webui
│ │ ├── go.mod
│ │ ├── go.sum
│ │ ├── main.go
│ │ ├── README.md
│ │ └── web
│ │ ├── index.html
│ │ └── static
│ │ ├── css
│ │ │ └── styles.css
│ │ └── js
│ │ └── app.js
│ ├── multi-phase-development
│ │ └── README.md
│ ├── multiple-personas
│ │ └── README.md
│ ├── protected-branch
│ │ └── README.md
│ ├── python-agent
│ │ ├── AGENT_MAIL_EXAMPLE.md
│ │ ├── agent_with_mail.py
│ │ ├── agent.py
│ │ └── README.md
│ ├── README.md
│ ├── startup-hooks
│ │ ├── bd-version-check.sh
│ │ └── README.md
│ └── team-workflow
│ └── README.md
├── flake.lock
├── flake.nix
├── go.mod
├── go.sum
├── install.ps1
├── integrations
│ └── beads-mcp
│ ├── .gitignore
│ ├── .python-version
│ ├── CONTEXT_MANAGEMENT.md
│ ├── LICENSE
│ ├── PYPI.md
│ ├── pyproject.toml
│ ├── README.md
│ ├── SETUP_DAEMON.md
│ ├── src
│ │ └── beads_mcp
│ │ ├── __init__.py
│ │ ├── __main__.py
│ │ ├── bd_client.py
│ │ ├── bd_daemon_client.py
│ │ ├── config.py
│ │ ├── mail_tools.py
│ │ ├── mail.py
│ │ ├── models.py
│ │ ├── py.typed
│ │ ├── server.py
│ │ └── tools.py
│ ├── test_multi_repo.py
│ ├── tests
│ │ ├── __init__.py
│ │ ├── conftest.py
│ │ ├── test_bd_client_integration.py
│ │ ├── test_bd_client.py
│ │ ├── test_config.py
│ │ ├── test_daemon_health_check.py
│ │ ├── test_lifecycle.py
│ │ ├── test_mail.py
│ │ ├── test_mcp_server_integration.py
│ │ ├── test_multi_project_switching.py
│ │ ├── test_subprocess_stdin.py
│ │ ├── test_tools.py
│ │ ├── test_workspace_auto_detect.py
│ │ └── test_worktree_separate_dbs.py
│ └── uv.lock
├── internal
│ ├── autoimport
│ │ ├── autoimport_test.go
│ │ ├── autoimport.go
│ │ └── symlink_test.go
│ ├── beads
│ │ ├── beads_hash_multiclone_test.go
│ │ ├── beads_integration_test.go
│ │ ├── beads_multidb_test.go
│ │ ├── beads_symlink_test.go
│ │ ├── beads_test.go
│ │ ├── beads.go
│ │ ├── fingerprint.go
│ │ └── routing_integration_test.go
│ ├── compact
│ │ ├── compactor_test.go
│ │ ├── compactor.go
│ │ ├── git.go
│ │ ├── haiku_test.go
│ │ └── haiku.go
│ ├── config
│ │ ├── config_test.go
│ │ └── config.go
│ ├── configfile
│ │ ├── configfile_test.go
│ │ └── configfile.go
│ ├── daemon
│ │ ├── discovery_test.go
│ │ ├── discovery.go
│ │ ├── kill_unix.go
│ │ ├── kill_wasm.go
│ │ ├── kill_windows.go
│ │ ├── registry_test.go
│ │ └── registry.go
│ ├── debug
│ │ ├── debug_test.go
│ │ └── debug.go
│ ├── deletions
│ │ ├── deletions_test.go
│ │ └── deletions.go
│ ├── export
│ │ ├── config.go
│ │ ├── executor.go
│ │ ├── manifest.go
│ │ ├── policy_test.go
│ │ └── policy.go
│ ├── git
│ │ ├── worktree_test.go
│ │ └── worktree.go
│ ├── importer
│ │ ├── external_ref_test.go
│ │ ├── importer_githistory_test.go
│ │ ├── importer_integration_test.go
│ │ ├── importer_test.go
│ │ ├── importer.go
│ │ ├── purge_test.go
│ │ ├── sort_test.go
│ │ ├── sort.go
│ │ ├── timestamp_test.go
│ │ └── utils.go
│ ├── lockfile
│ │ ├── lock_test.go
│ │ ├── lock_unix.go
│ │ ├── lock_wasm.go
│ │ ├── lock_windows.go
│ │ ├── lock.go
│ │ ├── process_unix.go
│ │ ├── process_wasm.go
│ │ └── process_windows.go
│ ├── merge
│ │ ├── merge_test.go
│ │ └── merge.go
│ ├── routing
│ │ ├── routing_test.go
│ │ └── routing.go
│ ├── rpc
│ │ ├── bench_test.go
│ │ ├── client_selfheal_test.go
│ │ ├── client.go
│ │ ├── comments_test.go
│ │ ├── coverage_test.go
│ │ ├── isolation_test.go
│ │ ├── limits_test.go
│ │ ├── list_filters_test.go
│ │ ├── metrics_test.go
│ │ ├── metrics.go
│ │ ├── protocol_test.go
│ │ ├── protocol.go
│ │ ├── rpc_test.go
│ │ ├── server_compact.go
│ │ ├── server_core.go
│ │ ├── server_export_import_auto.go
│ │ ├── server_issues_epics.go
│ │ ├── server_labels_deps_comments.go
│ │ ├── server_lifecycle_conn.go
│ │ ├── server_mutations_test.go
│ │ ├── server_routing_validation_diagnostics.go
│ │ ├── server.go
│ │ ├── signals_unix.go
│ │ ├── signals_windows.go
│ │ ├── status_test.go
│ │ ├── test_helpers.go
│ │ ├── transport_unix.go
│ │ ├── transport_windows.go
│ │ └── version_test.go
│ ├── storage
│ │ ├── memory
│ │ │ ├── hyphenated_test.go
│ │ │ ├── memory_test.go
│ │ │ └── memory.go
│ │ ├── sqlite
│ │ │ ├── adaptive_e2e_test.go
│ │ │ ├── adaptive_length_test.go
│ │ │ ├── adaptive_length.go
│ │ │ ├── batch_ops_test.go
│ │ │ ├── batch_ops.go
│ │ │ ├── batch_test.go
│ │ │ ├── bench_helpers_test.go
│ │ │ ├── blocked_cache_test.go
│ │ │ ├── blocked_cache.go
│ │ │ ├── child_counters_migration_test.go
│ │ │ ├── child_counters_test.go
│ │ │ ├── child_id_test.go
│ │ │ ├── collision_test.go
│ │ │ ├── collision.go
│ │ │ ├── comments_test.go
│ │ │ ├── comments.go
│ │ │ ├── compact_bench_test.go
│ │ │ ├── compact_test.go
│ │ │ ├── compact.go
│ │ │ ├── config.go
│ │ │ ├── cycle_bench_test.go
│ │ │ ├── cycle_detection_test.go
│ │ │ ├── delete_test.go
│ │ │ ├── dependencies_test.go
│ │ │ ├── dependencies.go
│ │ │ ├── dirty_helpers.go
│ │ │ ├── dirty_test.go
│ │ │ ├── dirty.go
│ │ │ ├── epics_test.go
│ │ │ ├── epics.go
│ │ │ ├── errors_test.go
│ │ │ ├── errors.go
│ │ │ ├── events_helpers.go
│ │ │ ├── events_test.go
│ │ │ ├── events.go
│ │ │ ├── external_ref_test.go
│ │ │ ├── hash_id_test.go
│ │ │ ├── hash_ids.go
│ │ │ ├── hash.go
│ │ │ ├── ids.go
│ │ │ ├── issues.go
│ │ │ ├── labels_test.go
│ │ │ ├── labels.go
│ │ │ ├── migration_invariants_test.go
│ │ │ ├── migration_invariants.go
│ │ │ ├── migrations
│ │ │ │ ├── 001_dirty_issues_table.go
│ │ │ │ ├── 002_external_ref_column.go
│ │ │ │ ├── 003_composite_indexes.go
│ │ │ │ ├── 004_closed_at_constraint.go
│ │ │ │ ├── 005_compaction_columns.go
│ │ │ │ ├── 006_snapshots_table.go
│ │ │ │ ├── 007_compaction_config.go
│ │ │ │ ├── 008_compacted_at_commit_column.go
│ │ │ │ ├── 009_export_hashes_table.go
│ │ │ │ ├── 010_content_hash_column.go
│ │ │ │ ├── 011_external_ref_unique.go
│ │ │ │ ├── 012_source_repo_column.go
│ │ │ │ ├── 013_repo_mtimes_table.go
│ │ │ │ ├── 014_child_counters_table.go
│ │ │ │ ├── 015_blocked_issues_cache.go
│ │ │ │ ├── 016_orphan_detection.go
│ │ │ │ ├── 017_close_reason_column.go
│ │ │ │ └── 018_tombstone_columns.go
│ │ │ ├── migrations_test.go
│ │ │ ├── migrations.go
│ │ │ ├── multirepo_export.go
│ │ │ ├── multirepo_test.go
│ │ │ ├── multirepo.go
│ │ │ ├── orphan_handling_test.go
│ │ │ ├── prefix_validation_test.go
│ │ │ ├── queries.go
│ │ │ ├── ready_test.go
│ │ │ ├── ready.go
│ │ │ ├── resurrection_test.go
│ │ │ ├── resurrection.go
│ │ │ ├── schema_probe_test.go
│ │ │ ├── schema_probe.go
│ │ │ ├── schema.go
│ │ │ ├── sqlite_bench_test.go
│ │ │ ├── sqlite_test.go
│ │ │ ├── sqlite.go
│ │ │ ├── store.go
│ │ │ ├── test_helpers.go
│ │ │ ├── tombstone_test.go
│ │ │ ├── transaction_test.go
│ │ │ ├── transaction.go
│ │ │ ├── underlying_db_test.go
│ │ │ ├── util_test.go
│ │ │ ├── util.go
│ │ │ ├── validators_test.go
│ │ │ └── validators.go
│ │ └── storage.go
│ ├── syncbranch
│ │ ├── syncbranch_test.go
│ │ ├── syncbranch.go
│ │ ├── worktree_divergence_test.go
│ │ └── worktree.go
│ ├── testutil
│ │ ├── fixtures
│ │ │ ├── fixtures_test.go
│ │ │ └── fixtures.go
│ │ ├── tmpfs_test.go
│ │ └── tmpfs.go
│ ├── types
│ │ ├── id_generator_test.go
│ │ ├── id_generator.go
│ │ ├── lock_check_test.go
│ │ ├── lock_check.go
│ │ ├── lock_test.go
│ │ ├── lock.go
│ │ ├── process_test.go
│ │ ├── process.go
│ │ ├── types_test.go
│ │ └── types.go
│ ├── util
│ │ ├── strings_test.go
│ │ └── strings.go
│ ├── utils
│ │ ├── id_parser_test.go
│ │ ├── id_parser.go
│ │ ├── issue_446_test.go
│ │ ├── issue_id.go
│ │ ├── path_test.go
│ │ └── path.go
│ └── validation
│ ├── bead_test.go
│ └── bead.go
├── lib
│ ├── beads_mail_adapter.py
│ ├── README.md
│ └── test_beads_mail_adapter.py
├── LICENSE
├── Makefile
├── npm-package
│ ├── .npmignore
│ ├── bin
│ │ └── bd.js
│ ├── CLAUDE_CODE_WEB.md
│ ├── INTEGRATION_GUIDE.md
│ ├── LAUNCH.md
│ ├── LICENSE
│ ├── package.json
│ ├── PUBLISHING.md
│ ├── README.md
│ ├── scripts
│ │ ├── postinstall.js
│ │ └── test.js
│ ├── SUMMARY.md
│ ├── test
│ │ └── integration.test.js
│ └── TESTING.md
├── README.md
├── RELEASING.md
├── scripts
│ ├── bump-version.sh
│ ├── hooks
│ │ └── post-push
│ ├── install-hooks.sh
│ ├── install.sh
│ ├── README.md
│ ├── release.sh
│ ├── test.sh
│ └── update-homebrew.sh
├── SECURITY.md
└── tests
├── benchmarks
│ ├── git_traffic_50_issues.md
│ ├── git_traffic.py
│ └── README.md
└── integration
├── AGENT_MAIL_TEST_COVERAGE.md
├── README.md
├── test_agent_race.py
├── test_mail_failures.py
├── test_multi_agent_coordination.py
└── test_reservation_ttl.py
```
# Files
--------------------------------------------------------------------------------
/integrations/beads-mcp/.python-version:
--------------------------------------------------------------------------------
```
3.13
```
--------------------------------------------------------------------------------
/examples/bd-example-extension-go/.gitignore:
--------------------------------------------------------------------------------
```
bd-example-extension-go
```
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
```
.beads/issues.jsonl merge=beads
```
--------------------------------------------------------------------------------
/integrations/beads-mcp/.gitignore:
--------------------------------------------------------------------------------
```
# Python-generated files
__pycache__/
build/
dist/
wheels/
*.egg-info
__pycache__
# Testing artifacts
.pytest_cache/
.coverage
# Virtual environments
.venv
/.env
/CLAUDE.md
/TODO.md
```
--------------------------------------------------------------------------------
/.test-skip:
--------------------------------------------------------------------------------
```
# Tests to skip due to known issues
# Format: one test name per line (regex patterns supported)
# Issue #356: Expects wrong JSONL filename (issues.jsonl vs beads.jsonl)
TestFindJSONLPathDefault
```
--------------------------------------------------------------------------------
/npm-package/.npmignore:
--------------------------------------------------------------------------------
```
# Don't include these in the published package
*.log
*.tar.gz
node_modules/
.DS_Store
.git/
.github/
test/
tests/
*.test.js
coverage/
# Include only the necessary files (see package.json "files" field)
```
--------------------------------------------------------------------------------
/.beads/.gitignore:
--------------------------------------------------------------------------------
```
# SQLite databases
*.db
*.db?*
*.db-journal
*.db-wal
*.db-shm
# Daemon runtime files
daemon.lock
daemon.log
daemon.pid
bd.sock
# Local version tracking (prevents upgrade notification spam after git operations)
# bd-tok: Store version locally instead of in tracked metadata.json
.local_version
# Legacy database files
db.sqlite
bd.db
# Merge artifacts (temporary files from 3-way merge)
beads.base.jsonl
beads.base.meta.json
beads.left.jsonl
beads.left.meta.json
beads.right.jsonl
beads.right.meta.json
# Keep JSONL exports and config (source of truth for git)
!issues.jsonl
!metadata.json
!config.json
```
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
```
# Binaries
/beads
/bd
*.exe
*.dll
*.so
*.dylib
# Test binaries
*.test
*.out
/bd-test
# Go workspace file
go.work
# Go build cache
pkg/
# IDE
.vscode/
.idea/
*.swp
*.swo
*~
# Claude Code
.claude/settings.local.json
# OS
.DS_Store
Thumbs.db
# SQLite databases (now using JSONL as source of truth)
*.db
*.db-journal
*.db-wal
*.db-shm
# Daemon runtime files
.beads/daemon.log
.beads/daemon.pid
.beads/daemon.lock
.beads/daemon-error
.beads/bd.sock
.beads/.exclusive-lock
# .beads directory files (keep JSONL only)
.beads/.gitignore
.beads/db.sqlite
.beads/bd.db
# Keep JSONL exports (source of truth for git)
!.beads/*.jsonl
# 3-way merge snapshot files (local-only, for deletion tracking)
.beads/beads.base.jsonl
.beads/beads.left.jsonl
.beads/beads.base.meta.json
.beads/beads.left.meta.json
# Note: .beads/deletions.jsonl is intentionally NOT ignored
# It must be tracked in git for cross-clone deletion propagation (bd-imj)
# Git merge driver temp files (created during conflicts with numbered extensions)
.beads/*.json[0-9]
.beads/*.jsonl[0-9]
# Ignore nix result
result
# direnv (machine-specific environment)
.envrc
# GoReleaser build artifacts
dist/
Formula/bd.rb
# Git worktrees
.worktrees/
.beads/pollution-backup.jsonl
# npm package - exclude downloaded binaries and archives
npm-package/bin/bd
npm-package/bin/*.tar.gz
npm-package/bin/*.zip
npm-package/bin/CHANGELOG.md
npm-package/bin/LICENSE
npm-package/bin/README.md
npm-package/node_modules/
npm-package/package-lock.json
.beads/beads.db?*
bd-original
mcp_agent_mail/
bd-test
bd-fixed
.cursor/
# Python cache files
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
.envrc
# Performance profiling files (benchmarks, bd doctor --perf, and bd --profile)
*.prof
*.out
beads-perf-*.prof
bench-cpu-*.prof
bd-profile-*.prof
bd-trace-*.out
```
--------------------------------------------------------------------------------
/.golangci.yml:
--------------------------------------------------------------------------------
```yaml
version: "2"
run:
timeout: 5m
tests: false
linters:
default: 'none'
enable:
- errcheck
- gosec
- misspell
- unconvert
- unparam
settings:
errcheck:
exclude-functions:
- (*database/sql.DB).Close
- (*database/sql.Rows).Close
- (*database/sql.Tx).Rollback
- (*database/sql.Stmt).Close
- (*database/sql.Conn).Close
- (*os.File).Close
- (os).RemoveAll
- (os).Remove
- (os).Setenv
- (os).Unsetenv
- (os).Chdir
- (os).MkdirAll
- (fmt).Sscanf
misspell:
locale: US
exclusions:
rules:
# G304: File inclusion via variable in tests is safe (test data)
- path: '_test\.go'
linters:
- gosec
text: "G304"
# G306: File permissions 0644 in tests are acceptable (test fixtures)
- path: '_test\.go'
linters:
- gosec
text: "G306"
# G304: Safe file reads from known JSONL and error paths
- path: 'cmd/bd/autoflush\.go|internal/beads/beads\.go|internal/daemon/discovery\.go|internal/daemonrunner/sync\.go|internal/syncbranch/worktree\.go'
linters:
- gosec
text: "G304"
    # G302/G301: Directory/file permissions 0700/0750 are acceptable
- linters:
- gosec
text: "G302.*0700|G301.*0750"
    # G306: JSONL files and error logs need 0644 for debugging/sharing
- path: 'cmd/bd/autoflush\.go|cmd/bd/daemon\.go|cmd/bd/daemon_sync_branch\.go|internal/daemon/registry\.go|internal/daemonrunner/daemon\.go|internal/git/worktree\.go'
linters:
- gosec
text: "G306"
# G306: Git hooks must be executable (0700)
- path: 'cmd/bd/init\.go'
linters:
- gosec
text: "G306.*0700"
# G204: Safe subprocess launches with validated arguments
- path: 'cmd/bd/daemon_autostart\.go|cmd/bd/daemon_sync_branch\.go|cmd/bd/doctor\.go|cmd/bd/doctor/fix/sync_branch\.go|cmd/bd/jira\.go|cmd/bd/migrate_sync\.go|cmd/bd/show\.go|cmd/bd/sync\.go|internal/git/worktree\.go|internal/syncbranch/worktree\.go'
linters:
- gosec
text: 'G204'
# G104: Deferred file closes - errors are non-critical
- path: 'cmd/bd/show\.go'
linters:
- gosec
text: "G104.*Close"
# G115: Safe integer conversions in backoff calculations
- path: 'cmd/bd/daemon_autostart\.go'
linters:
- gosec
text: "G115"
# G201: SQL with fmt.Sprintf using placeholders (IN clause expansion)
- path: 'internal/storage/sqlite/dependencies\.go'
linters:
- gosec
text: "G201"
# errcheck: Ignore unchecked errors in test files for common cleanup patterns
- path: '_test\.go'
linters:
- errcheck
text: "Error return value of .*(Close|Rollback|RemoveAll|Setenv|Unsetenv|Chdir|MkdirAll|Remove|Write|SetReadDeadline|SetDeadline|Start|Stop).* is not checked"
# unparam: Placeholder functions that may return errors in future implementation
- path: 'cmd/bd/jira\.go'
linters:
- unparam
text: 'reimportConflicts|resolveConflictsByTimestamp'
issues:
uniq-by-line: true
```
--------------------------------------------------------------------------------
/.goreleaser.yml:
--------------------------------------------------------------------------------
```yaml
# GoReleaser configuration for beads
# See https://goreleaser.com for documentation
version: 2
before:
hooks:
# Ensure dependencies are up to date
- go mod tidy
builds:
- id: bd-linux-amd64
main: ./cmd/bd
binary: bd
env:
- CGO_ENABLED=1
goos:
- linux
goarch:
- amd64
ldflags:
- -s -w
- -X main.Version={{.Version}}
- -X main.Build={{.ShortCommit}}
- id: bd-linux-arm64
main: ./cmd/bd
binary: bd
env:
- CGO_ENABLED=1
- CC=aarch64-linux-gnu-gcc
- CXX=aarch64-linux-gnu-g++
goos:
- linux
goarch:
- arm64
ldflags:
- -s -w
- -X main.Version={{.Version}}
- -X main.Build={{.ShortCommit}}
- id: bd-darwin-amd64
main: ./cmd/bd
binary: bd
env:
- CGO_ENABLED=1
goos:
- darwin
goarch:
- amd64
ldflags:
- -s -w
- -X main.Version={{.Version}}
- -X main.Build={{.ShortCommit}}
- id: bd-darwin-arm64
main: ./cmd/bd
binary: bd
env:
- CGO_ENABLED=1
goos:
- darwin
goarch:
- arm64
ldflags:
- -s -w
- -X main.Version={{.Version}}
- -X main.Build={{.ShortCommit}}
- id: bd-windows-amd64
main: ./cmd/bd
binary: bd
env:
- CGO_ENABLED=1
- CC=x86_64-w64-mingw32-gcc
- CXX=x86_64-w64-mingw32-g++
goos:
- windows
goarch:
- amd64
ldflags:
- -s -w
- -X main.Version={{.Version}}
- -X main.Build={{.ShortCommit}}
- -buildmode=exe
archives:
- id: bd-archive
format: tar.gz
name_template: "{{ .ProjectName }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}"
format_overrides:
- goos: windows
format: zip
files:
- LICENSE
- README.md
- CHANGELOG.md
checksum:
name_template: "checksums.txt"
algorithm: sha256
snapshot:
version_template: "{{ incpatch .Version }}-next"
changelog:
sort: asc
filters:
exclude:
- "^docs:"
- "^test:"
- "^chore:"
- "Merge pull request"
- "Merge branch"
groups:
- title: "Features"
regexp: '^.*feat(\(\w+\))?:.*$'
order: 0
- title: "Bug Fixes"
regexp: '^.*fix(\(\w+\))?:.*$'
order: 1
- title: "Others"
order: 999
release:
github:
owner: steveyegge
name: beads
draft: false
prerelease: auto
name_template: "v{{.Version}}"
header: |
## beads v{{.Version}}
Pre-compiled binaries for Linux, macOS (Intel & Apple Silicon), and Windows.
### Installation
**Homebrew (macOS/Linux):**
```bash
brew install steveyegge/beads/bd
```
**Quick Install (macOS/Linux):**
```bash
curl -sSL https://raw.githubusercontent.com/steveyegge/beads/main/scripts/install.sh | bash
```
**Windows (PowerShell):**
```powershell
irm https://raw.githubusercontent.com/steveyegge/beads/main/install.ps1 | iex
```
**Manual Install:**
Download the appropriate binary for your platform below, extract it, and place it in your PATH.
# Homebrew tap - disabled, handled by separate workflow
# brews:
# - name: bd
# repository:
# owner: steveyegge
# name: homebrew-beads
# branch: main
# directory: Formula
# homepage: https://github.com/steveyegge/beads
# description: "AI-supervised issue tracker for coding workflows"
# license: MIT
# test: |
# system "#{bin}/bd", "version"
# install: |
# bin.install "bd"
# Announce the release
announce:
skip: false
```
--------------------------------------------------------------------------------
/tests/benchmarks/README.md:
--------------------------------------------------------------------------------
```markdown
# Beads Benchmarks
Automated benchmarks for measuring Beads performance and Agent Mail coordination efficiency.
## Git Traffic Reduction Benchmark
**File:** `git_traffic.py`
### Purpose
Measures the reduction in git operations (pulls, commits, pushes) when using Agent Mail for multi-agent coordination compared to pure git-based synchronization.
### Usage
```bash
# Run with default settings (50 issues)
python3 tests/benchmarks/git_traffic.py
# Customize number of issues
python3 tests/benchmarks/git_traffic.py -n 100
# Verbose output
python3 tests/benchmarks/git_traffic.py -v
# Save report to file
python3 tests/benchmarks/git_traffic.py -o report.md
```
### How It Works
The benchmark compares two workflows:
**Without Agent Mail (Git-only mode):**
- Each issue update requires git pull + commit + push
- Other agents pull to check for updates
- Total: ~4 git operations per issue
**With Agent Mail:**
- Coordination via HTTP messages (no git operations)
- Status updates, reservations, notifications via Agent Mail
- Single batched commit/push at end of workflow
- Total: 3 git operations for entire batch
### Expected Results
For 50 issues:
- **Without Agent Mail:** ~200 git operations
- **With Agent Mail:** 3 git operations
- **Reduction:** ≥70% (typically 95-98%)
### Exit Codes
- `0`: Success - achieved ≥70% reduction
- `1`: Failure - regression detected
### Example Output
```
======================================================================
SUMMARY
======================================================================
Without Agent Mail: 200 git operations
With Agent Mail: 3 git operations
Reduction: 98.5%
Target: 70%
Status: ✅ PASS
======================================================================
```
## Requirements
- Python 3.7+
- bd (beads) CLI installed
- git
- Agent Mail server (optional - falls back to simulation if unavailable)
## CI Integration
This benchmark can be used in CI to detect regressions in Agent Mail coordination efficiency:
```bash
python3 tests/benchmarks/git_traffic.py -n 50
# Exits with status 1 if reduction < 70%
```
```
--------------------------------------------------------------------------------
/.beads/README.md:
--------------------------------------------------------------------------------
```markdown
# Beads - AI-Native Issue Tracking
Welcome to Beads! This repository uses **Beads** for issue tracking - a modern, AI-native tool designed to live directly in your codebase alongside your code.
## What is Beads?
Beads is issue tracking that lives in your repo, making it perfect for AI coding agents and developers who want their issues close to their code. No web UI required - everything works through the CLI and integrates seamlessly with git.
**Learn more:** [github.com/steveyegge/beads](https://github.com/steveyegge/beads)
## Quick Start
### Essential Commands
```bash
# Create new issues
bd create "Add user authentication"
# View all issues
bd list
# View issue details
bd show <issue-id>
# Update issue status
bd update <issue-id> --status in_progress
bd update <issue-id> --status done
# Sync with git remote
bd sync
```
### Working with Issues
Issues in Beads are:
- **Git-native**: Stored in `.beads/issues.jsonl` and synced like code
- **AI-friendly**: CLI-first design works perfectly with AI coding agents
- **Branch-aware**: Issues can follow your branch workflow
- **Always in sync**: Auto-syncs with your commits
## Why Beads?
✨ **AI-Native Design**
- Built specifically for AI-assisted development workflows
- CLI-first interface works seamlessly with AI coding agents
- No context switching to web UIs
🚀 **Developer Focused**
- Issues live in your repo, right next to your code
- Works offline, syncs when you push
- Fast, lightweight, and stays out of your way
🔧 **Git Integration**
- Automatic sync with git commits
- Branch-aware issue tracking
- Intelligent JSONL merge resolution
## Get Started with Beads
Try Beads in your own projects:
```bash
# Install Beads
curl -sSL https://raw.githubusercontent.com/steveyegge/beads/main/scripts/install.sh | bash
# Initialize in your repo
bd init
# Create your first issue
bd create "Try out Beads"
```
## Learn More
- **Documentation**: [github.com/steveyegge/beads/docs](https://github.com/steveyegge/beads/tree/main/docs)
- **Quick Start Guide**: Run `bd quickstart`
- **Examples**: [github.com/steveyegge/beads/examples](https://github.com/steveyegge/beads/tree/main/examples)
---
*Beads: Issue tracking that moves at the speed of thought* ⚡
```
--------------------------------------------------------------------------------
/.devcontainer/README.md:
--------------------------------------------------------------------------------
```markdown
# beads Development Container
This devcontainer configuration provides a fully-configured development environment for beads with:
- Go 1.23 development environment
- bd CLI built and installed from source
- Git hooks automatically installed
- All dependencies pre-installed
## Quick Start
### GitHub Codespaces
1. Click the "Code" button on GitHub
2. Select "Create codespace on main"
3. Wait for the container to build (~2-3 minutes)
4. The environment will be ready with bd installed and configured
### VS Code Remote Containers
1. Install the [Remote - Containers](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) extension
2. Open the beads repository in VS Code
3. Click "Reopen in Container" when prompted (or use Command Palette: "Remote-Containers: Reopen in Container")
4. Wait for the container to build
## What Gets Installed
The `setup.sh` script automatically:
1. Builds bd from source (`go build ./cmd/bd`)
2. Installs bd to `/usr/local/bin/bd`
3. Runs `bd init --quiet` (non-interactive initialization)
4. Installs git hooks from `examples/git-hooks/`
5. Downloads Go module dependencies
## Verification
After the container starts, verify everything works:
```bash
# Check bd is installed
bd --version
# Check for ready tasks
bd ready
# View project stats
bd stats
```
## Git Configuration
Your local `.gitconfig` is mounted into the container so your git identity is preserved. If you need to configure git:
```bash
git config --global user.name "Your Name"
git config --global user.email "you@example.com"
```
## Troubleshooting
**bd command not found:**
- The setup script should install bd automatically
- Manually run: `bash .devcontainer/setup.sh`
**Git hooks not working:**
- Check if hooks are installed: `ls -la .git/hooks/`
- Manually install: `bash examples/git-hooks/install.sh`
**Container fails to build:**
- Check the container logs for specific errors
- Ensure Docker/Podman is running and has sufficient resources
- Try rebuilding: Command Palette → "Remote-Containers: Rebuild Container"
## Related Issues
- GitHub Issue [#229](https://github.com/steveyegge/beads/issues/229): Git hooks not available in devcontainers
- bd-ry1u: Publish official devcontainer configuration
```
--------------------------------------------------------------------------------
/examples/python-agent/README.md:
--------------------------------------------------------------------------------
```markdown
# Python Agent Example
A simple Python script demonstrating how an AI agent can use bd to manage tasks.
## Features
- Finds ready work using `bd ready --json`
- Claims tasks by updating status
- Simulates discovering new issues during work
- Links discovered issues with `discovered-from` dependency
- Completes tasks and moves to the next one
## Prerequisites
- Python 3.7+
- bd installed: `go install github.com/steveyegge/beads/cmd/bd@latest`
- A beads database initialized: `bd init`
## Usage
```bash
# Make the script executable
chmod +x agent.py
# Run the agent
./agent.py
```
## What It Does
1. Queries for ready work (no blocking dependencies)
2. Claims the highest priority task
3. "Works" on the task (simulated)
4. If the task involves implementation, it discovers a testing task
5. Creates the new testing task and links it with `discovered-from`
6. Completes the original task
7. Repeats until no ready work remains
## Example Output
```
🚀 Beads Agent starting...
============================================================
Iteration 1/10
============================================================
📋 Claiming task: bd-1
🤖 Working on: Implement user authentication (bd-1)
Priority: 1, Type: feature
💡 Discovered: Missing test coverage for this feature
✨ Creating issue: Add tests for Implement user authentication
🔗 Linking bd-2 ← discovered-from ← bd-1
✅ Completing task: bd-1 - Implemented successfully
🔄 New work discovered and linked. Running another cycle...
```
## Integration with Real Agents
To integrate with a real LLM-based agent:
1. Replace `simulate_work()` with actual LLM calls
2. Parse the LLM's response for discovered issues/bugs
3. Use the issue ID to track context across conversations
4. Export/import JSONL to share state across agent sessions
## Advanced Usage
```python
# Create an agent with custom behavior
agent = BeadsAgent()
# Find specific types of work
ready = agent.run_bd("ready", "--priority", "1", "--assignee", "bot")
# Create issues with labels
agent.run_bd("create", "New task", "-l", "urgent,backend")
# Query dependency tree
tree = agent.run_bd("dep", "tree", "bd-1")
```
## See Also
- [AGENT_MAIL_EXAMPLE.md](AGENT_MAIL_EXAMPLE.md) - Multi-agent coordination with Agent Mail
- [../bash-agent/](../bash-agent/) - Bash version of this example
- [../claude-desktop-mcp/](../claude-desktop-mcp/) - MCP server for Claude Desktop
```
--------------------------------------------------------------------------------
/examples/README.md:
--------------------------------------------------------------------------------
```markdown
# Beads Examples
This directory contains examples of how to integrate bd with AI agents and workflows.
## Examples
### Agent Integration
- **[python-agent/](python-agent/)** - Simple Python agent that discovers ready work and completes tasks
- **[AGENT_MAIL_EXAMPLE.md](python-agent/AGENT_MAIL_EXAMPLE.md)** - Multi-agent coordination with Agent Mail
- **[bash-agent/](bash-agent/)** - Bash script showing the full agent workflow
- **[startup-hooks/](startup-hooks/)** - Session startup scripts for automatic bd upgrade detection
- **[claude-desktop-mcp/](claude-desktop-mcp/)** - MCP server for Claude Desktop integration
- **[claude-code-skill/](claude-code-skill/)** - Claude Code skill for effective beads usage patterns
### Tools & Utilities
- **[monitor-webui/](monitor-webui/)** - Standalone web interface for real-time issue monitoring and visualization
- **[markdown-to-jsonl/](markdown-to-jsonl/)** - Convert markdown planning docs to bd issues
- **[github-import/](github-import/)** - Import issues from GitHub repositories
- **[git-hooks/](git-hooks/)** - Pre-configured git hooks for automatic export/import
<!-- REMOVED (bd-4c74): branch-merge example - collision resolution no longer needed with hash IDs -->
### Workflow Patterns
- **[contributor-workflow/](contributor-workflow/)** - OSS contributor setup with separate planning repo
- **[team-workflow/](team-workflow/)** - Team collaboration with shared repositories
- **[multi-phase-development/](multi-phase-development/)** - Organize large projects by phases (planning, MVP, iteration, polish)
- **[multiple-personas/](multiple-personas/)** - Architect/implementer/reviewer role separation
- **[protected-branch/](protected-branch/)** - Protected branch workflow for team collaboration
## Quick Start
```bash
# Try the Python agent example
cd python-agent
python agent.py
# Try the bash agent example
cd bash-agent
./agent.sh
# Install git hooks
cd git-hooks
./install.sh
# REMOVED (bd-4c74): branch-merge demo - hash IDs eliminate collision resolution
```
## Creating Your Own Agent
The basic agent workflow:
1. **Find ready work**: `bd ready --json --limit 1`
2. **Claim the task**: `bd update <id> --status in_progress --json`
3. **Do the work**: Execute the task
4. **Discover new issues**: `bd create "Found bug" --json`
5. **Link discoveries**: `bd dep add <new-id> <parent-id> --type discovered-from`
6. **Complete the task**: `bd close <id> --reason "Done" --json`
All commands support `--json` for easy parsing.
```
--------------------------------------------------------------------------------
/examples/go-agent/README.md:
--------------------------------------------------------------------------------
```markdown
# Go Agent Example
Example Go agent that uses bd with optional Agent Mail coordination for multi-agent workflows.
## Features
- Uses native Go Agent Mail client (`pkg/agentmail`)
- Graceful degradation when Agent Mail unavailable
- Handles reservation conflicts
- Discovers and links new work
- Environment-based configuration
## Usage
### Git-only mode (no Agent Mail)
```bash
cd examples/go-agent
go run main.go --agent-name agent-alpha --max-iterations 5
```
### With Agent Mail coordination
```bash
# Start Agent Mail server (in separate terminal)
cd integrations/agent-mail
python server.py
# Run agent
cd examples/go-agent
go run main.go \
--agent-name agent-alpha \
--project-id my-project \
--agent-mail-url http://127.0.0.1:8765 \
--max-iterations 10
```
### Environment Variables
```bash
export BEADS_AGENT_NAME=agent-alpha
export BEADS_PROJECT_ID=my-project
export BEADS_AGENT_MAIL_URL=http://127.0.0.1:8765
go run main.go
```
## Multi-Agent Demo
Run multiple agents concurrently with Agent Mail:
```bash
# Terminal 1: Start Agent Mail server
cd integrations/agent-mail
python server.py
# Terminal 2: Agent Alpha
cd examples/go-agent
go run main.go --agent-name agent-alpha --agent-mail-url http://127.0.0.1:8765
# Terminal 3: Agent Beta
go run main.go --agent-name agent-beta --agent-mail-url http://127.0.0.1:8765
```
## How It Works
1. **Initialization**: Creates Agent Mail client with health check
2. **Find work**: Queries `bd ready` for unblocked issues
3. **Claim issue**: Reserves via Agent Mail (if enabled) and updates status to `in_progress`
4. **Work simulation**: Processes the issue (sleeps 1s in this example)
5. **Discover work**: 33% chance to create linked issue via `discovered-from` dependency
6. **Complete**: Closes issue and releases Agent Mail reservation
## Collision Handling
When Agent Mail is enabled:
- Issues are reserved before claiming (prevents race conditions)
- Conflicts return immediately (<100ms latency)
- Agents gracefully skip reserved issues
Without Agent Mail:
- Relies on git-based eventual consistency
- Higher latency (2-5s for sync)
- Collision detection via git merge conflicts
## Comparison with Python Agent
The Go implementation mirrors the Python agent (`examples/python-agent/agent_with_mail.py`):
- ✅ Same API surface (ReserveIssue, ReleaseIssue, Notify, CheckInbox)
- ✅ Same graceful degradation behavior
- ✅ Same environment variable configuration
- ✅ Native Go types and idioms (no shell exec for Agent Mail)
Key differences:
- Go uses `pkg/agentmail.Client` instead of `lib/beads_mail_adapter.py`
- Go struct methods vs Python class methods
- Type safety with Go structs
```
--------------------------------------------------------------------------------
/tests/integration/README.md:
--------------------------------------------------------------------------------
```markdown
# Integration Tests
This directory contains integration tests for bd (beads) that test end-to-end functionality.
## Tests
### test_agent_race.py
Multi-agent race condition test that validates collision prevention with Agent Mail.
**What it tests:**
- Multiple agents simultaneously attempting to claim the same issue
- WITH Agent Mail: Only one agent succeeds (via reservation)
- WITHOUT Agent Mail: Multiple agents may succeed (collision)
- Verification via JSONL that no duplicate claims occur
### test_mail_failures.py
Agent Mail server failure scenarios test that validates graceful degradation.
**What it tests:**
- Server never started (connection refused)
- Server crashes during operation
- Network partition (timeout)
- Server returns 500 errors
- Invalid bearer token (401)
- Malformed JSON responses
- JSONL consistency under multiple failures
**Performance:**
- Uses `--no-daemon` flag for fast tests (~33s total)
- 1s HTTP timeouts for quick failure detection
- Mock HTTP server avoids real network calls
### test_reservation_ttl.py
Reservation TTL and expiration test that validates time-based reservation behavior.
**What it tests:**
- Short TTL reservations (30s)
- Reservation blocking verification (agent2 cannot claim while agent1 holds reservation)
- Auto-release after expiration (expired reservations become available)
- Renewal/heartbeat mechanism (re-reserving extends expiration)
**Performance:**
- Uses `--no-daemon` flag for fast tests
- 30s TTL for expiration tests (includes wait time)
- Total test time: ~57s (includes 30s+ waiting for expiration)
- Mock HTTP server with full TTL management
## Prerequisites
- bd installed: `go install github.com/steveyegge/beads/cmd/bd@latest`
- Agent Mail server running (optional, for full test suite):
```bash
cd ~/src/mcp_agent_mail
source .venv/bin/activate
uv run python -m mcp_agent_mail.cli serve-http
```
## Running Tests
**Run test_agent_race.py:**
```bash
python3 tests/integration/test_agent_race.py
```
**Run test_mail_failures.py:**
```bash
python3 tests/integration/test_mail_failures.py
```
**Run test_reservation_ttl.py:**
```bash
python3 tests/integration/test_reservation_ttl.py
```
**Run all integration tests:**
```bash
python3 tests/integration/test_agent_race.py
python3 tests/integration/test_mail_failures.py
python3 tests/integration/test_reservation_ttl.py
```
## Expected Results
### test_agent_race.py
- **WITH Agent Mail running:** Test 1 passes (only 1 claim), Test 2 shows collision, Test 3 passes
- **WITHOUT Agent Mail running:** All tests demonstrate collision (expected behavior without reservation system)
### test_mail_failures.py
- All 7 tests should pass in ~30-35 seconds
- Each test validates graceful degradation to Beads-only mode
- JSONL remains consistent across all failure scenarios
### test_reservation_ttl.py
- All 4 tests should pass in ~57 seconds
- Tests verify TTL-based reservation expiration and renewal
- Includes 30s+ wait time to validate actual expiration behavior
## Adding New Tests
Integration tests should:
1. Use temporary workspaces (cleaned up automatically)
2. Test real bd CLI commands, not just internal APIs
3. Use `--no-daemon` flag for fast execution
4. Verify behavior in `.beads/issues.jsonl` when relevant
5. Clean up resources in `finally` blocks
6. Provide clear output showing what's being tested
```
--------------------------------------------------------------------------------
/npm-package/README.md:
--------------------------------------------------------------------------------
```markdown
# @beads/bd - Beads Issue Tracker
[![npm version](https://img.shields.io/npm/v/@beads/bd.svg)](https://www.npmjs.com/package/@beads/bd)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
**Give your coding agent a memory upgrade**
Beads is a lightweight memory system for coding agents, using a graph-based issue tracker. This npm package provides easy installation of the native bd binary for Node.js environments, including Claude Code for Web.
## Installation
```bash
npm install -g @beads/bd
```
Or as a project dependency:
```bash
npm install --save-dev @beads/bd
```
## What is Beads?
Beads is an issue tracker designed specifically for AI coding agents. It provides:
- ✨ **Zero setup** - `bd init` creates project-local database
- 🔗 **Dependency tracking** - Four dependency types (blocks, related, parent-child, discovered-from)
- 📋 **Ready work detection** - Automatically finds issues with no open blockers
- 🤖 **Agent-friendly** - `--json` flags for programmatic integration
- 📦 **Git-versioned** - JSONL records stored in git, synced across machines
- 🌍 **Distributed by design** - Share one logical database via git
## Quick Start
After installation, initialize beads in your project:
```bash
bd init
```
Then tell your AI agent to use bd for task tracking instead of markdown:
```bash
echo "Use 'bd' commands for issue tracking instead of markdown TODOs" >> AGENTS.md
```
Your agent will automatically:
- Create and track issues during work
- Manage dependencies between tasks
- Find ready work with `bd ready`
- Keep long-term context across sessions
## Common Commands
```bash
# Find ready work
bd ready --json
# Create an issue
bd create "Fix bug" -t bug -p 1
# Show issue details
bd show bd-a1b2
# List all issues
bd list --json
# Update status
bd update bd-a1b2 --status in_progress
# Add dependency
bd dep add bd-f14c bd-a1b2
# Close issue
bd close bd-a1b2 --reason "Fixed"
```
## Claude Code for Web Integration
To auto-install bd in Claude Code for Web sessions, add to your SessionStart hook:
```bash
# .claude/hooks/session-start.sh
npm install -g @beads/bd
bd init --quiet
```
This ensures bd is available in every new session without manual setup.
## Platform Support
This package downloads the appropriate native binary for your platform:
- **macOS**: darwin-amd64, darwin-arm64
- **Linux**: linux-amd64, linux-arm64
- **Windows**: windows-amd64
## Full Documentation
For complete documentation, see the [beads GitHub repository](https://github.com/steveyegge/beads):
- [Complete README](https://github.com/steveyegge/beads#readme)
- [Quick Start Guide](https://github.com/steveyegge/beads/blob/main/QUICKSTART.md)
- [Installation Guide](https://github.com/steveyegge/beads/blob/main/INSTALLING.md)
- [FAQ](https://github.com/steveyegge/beads/blob/main/FAQ.md)
- [Troubleshooting](https://github.com/steveyegge/beads/blob/main/TROUBLESHOOTING.md)
## Why npm Package vs WASM?
This npm package wraps the native bd binary rather than using WebAssembly because:
- ✅ Full SQLite support (no custom VFS needed)
- ✅ All features work identically to native bd
- ✅ Better performance (native vs WASM overhead)
- ✅ Simpler maintenance
## License
MIT - See [LICENSE](LICENSE) for details.
## Support
- [GitHub Issues](https://github.com/steveyegge/beads/issues)
- [Documentation](https://github.com/steveyegge/beads)
```
--------------------------------------------------------------------------------
/examples/compaction/README.md:
--------------------------------------------------------------------------------
```markdown
# Compaction Examples
This directory contains example scripts for automating database compaction.
## Scripts
### workflow.sh
Interactive compaction workflow with prompts. Perfect for manual compaction runs.
```bash
chmod +x workflow.sh
export ANTHROPIC_API_KEY="sk-ant-..."
./workflow.sh
```
**Features:**
- Previews candidates before compaction
- Prompts for confirmation at each tier
- Shows final statistics
- Provides next-step guidance
**When to use:** Manual monthly/quarterly compaction
### cron-compact.sh
Fully automated compaction for cron jobs. No interaction required.
```bash
# Configure
export BD_REPO_PATH="/path/to/your/repo"
export BD_LOG_FILE="$HOME/.bd-compact.log"
export ANTHROPIC_API_KEY="sk-ant-..."
# Test manually
./cron-compact.sh
# Install to cron (monthly)
cp cron-compact.sh /etc/cron.monthly/bd-compact
chmod +x /etc/cron.monthly/bd-compact
# Or add to crontab
crontab -e
# Add: 0 2 1 * * /path/to/cron-compact.sh
```
**Features:**
- Pulls latest changes before compacting
- Logs all output
- Auto-commits and pushes results
- Reports counts of compacted issues
**When to use:** Automated monthly compaction for active projects
### auto-compact.sh
Smart auto-compaction with thresholds. Only runs if enough eligible issues exist.
```bash
chmod +x auto-compact.sh
# Compact if 10+ eligible issues
./auto-compact.sh
# Custom threshold
./auto-compact.sh --threshold 50
# Tier 2 ultra-compression
./auto-compact.sh --tier 2 --threshold 20
# Preview without compacting
./auto-compact.sh --dry-run
```
**Features:**
- Configurable eligibility threshold
- Skips compaction if below threshold
- Supports both tiers
- Dry-run mode for testing
**When to use:**
- Pre-commit hooks (if ANTHROPIC_API_KEY set)
- CI/CD pipelines
- Conditional automation
## Configuration
All scripts require:
```bash
export ANTHROPIC_API_KEY="sk-ant-..."
```
Additional environment variables:
- `BD_REPO_PATH`: Repository path (cron-compact.sh)
- `BD_LOG_FILE`: Log file location (cron-compact.sh)
## Recommendations
### Small Projects (<500 issues)
Use `workflow.sh` manually, once or twice per year.
### Medium Projects (500-5000 issues)
Use `cron-compact.sh` quarterly or `auto-compact.sh` in CI.
### Large Projects (5000+ issues)
Use `cron-compact.sh` monthly with both tiers:
```bash
# Modify cron-compact.sh to run both tiers
```
### High-Velocity Teams
Combine approaches:
- `auto-compact.sh --threshold 50` in CI (Tier 1 only)
- `cron-compact.sh` monthly for Tier 2
## Testing
Before deploying to cron, test scripts manually:
```bash
# Test workflow
export ANTHROPIC_API_KEY="sk-ant-..."
./workflow.sh
# Test cron script
export BD_REPO_PATH="$(pwd)"
./cron-compact.sh
# Test auto-compact (dry run)
./auto-compact.sh --dry-run --threshold 1
```
## Troubleshooting
### Script says "bd command not found"
Ensure bd is in PATH:
```bash
which bd
export PATH="$PATH:/usr/local/bin"
```
### "ANTHROPIC_API_KEY not set"
```bash
export ANTHROPIC_API_KEY="sk-ant-..."
# Add to ~/.zshrc or ~/.bashrc for persistence
```
### Cron job not running
Check cron logs:
```bash
# Linux
grep CRON /var/log/syslog
# macOS
log show --predicate 'process == "cron"' --last 1h
```
Verify script is executable:
```bash
chmod +x /etc/cron.monthly/bd-compact
```
## Cost Monitoring
Track compaction costs:
```bash
# Show stats after compaction
bd compact --stats
# Estimate monthly cost
# (issues_compacted / 1000) * $1.00
```
Set up alerts if costs exceed budget (future feature: bd-cost-alert).
## See Also
- [COMPACTION.md](../../COMPACTION.md) - Comprehensive compaction guide
- [README.md](../../README.md) - Main documentation
- [GIT_WORKFLOW.md](../../GIT_WORKFLOW.md) - Multi-machine collaboration
```
--------------------------------------------------------------------------------
/examples/markdown-to-jsonl/README.md:
--------------------------------------------------------------------------------
```markdown
# Markdown to JSONL Converter
Convert markdown planning documents into `bd` issues.
## Overview
This example shows how to bridge the gap between markdown planning docs and tracked issues, without adding complexity to the `bd` core tool.
The converter script (`md2jsonl.py`) parses markdown files and outputs JSONL that can be imported into `bd`.
## Features
- ✅ **YAML Frontmatter** - Extract metadata (priority, type, assignee)
- ✅ **Headings as Issues** - Each H1/H2 becomes an issue
- ✅ **Task Lists** - Markdown checklists become sub-issues
- ✅ **Dependency Parsing** - Extract "blocks: bd-10" references
- ✅ **Customizable** - Modify the script for your conventions
## Usage
### Basic conversion
```bash
python md2jsonl.py feature.md | bd import
```
### Save to file first
```bash
python md2jsonl.py feature.md > issues.jsonl
bd import -i issues.jsonl
```
### Preview before importing
```bash
python md2jsonl.py feature.md | jq .
```
## Markdown Format
### Frontmatter (Optional)
```markdown
---
priority: 1
type: feature
assignee: alice
---
```
### Headings
Each heading becomes an issue:
```markdown
# Main Feature
Description of the feature...
## Sub-task 1
Details about sub-task...
## Sub-task 2
More details...
```
### Task Lists
Task lists are converted to separate issues:
```markdown
## Setup Tasks
- [ ] Install dependencies
- [x] Configure database
- [ ] Set up CI/CD
```
Creates 3 issues (second one marked as closed).
### Dependencies
Reference other issues in the description:
```markdown
## Implement API
This task requires the database schema to be ready first.
Dependencies:
- blocks: bd-5
- related: bd-10, bd-15
```
The script extracts these and creates dependency records.
## Example
See `example-feature.md` for a complete example.
```bash
# Convert the example
python md2jsonl.py example-feature.md > example-issues.jsonl
# View the output
cat example-issues.jsonl | jq .
# Import into bd
bd import -i example-issues.jsonl
```
## Customization
The script is intentionally simple so you can customize it for your needs:
1. **Different heading levels** - Modify which headings become issues (H1 only? H1-H3?)
2. **Custom metadata** - Parse additional frontmatter fields
3. **Labels** - Extract hashtags or keywords as labels
4. **Epic detection** - Top-level headings become epics
5. **Issue templates** - Map different markdown structures to issue types
## Limitations
This is a simple example, not a production tool:
- Basic YAML parsing (no nested structures)
- Simple dependency extraction (regex-based)
- No validation of referenced issue IDs
- Doesn't handle all markdown edge cases
For production use, you might want to:
- Use a proper YAML parser (`pip install pyyaml`)
- Use a markdown parser (`pip install markdown` or `pip install markdown2`)
- Add validation and error handling
- Support more dependency formats
## Philosophy
This example demonstrates the **lightweight extension pattern**:
- ✅ Keep `bd` core focused and minimal
- ✅ Let users customize for their workflows
- ✅ Use existing import infrastructure
- ✅ Easy to understand and modify
Rather than adding markdown support to `bd` core (800+ LOC + dependencies + maintenance), we provide a simple converter that users can adapt.
## Contributing
Have improvements? Found a bug? This is just an example, but contributions are welcome!
Consider:
- Better error messages
- More markdown patterns
- Integration with popular markdown formats
- Support for GFM (GitHub Flavored Markdown) extensions
## See Also
- [bd README](../../README.md) - Main documentation
- [Python Agent Example](../python-agent/) - Full agent workflow
- [JSONL Format](../../TEXT_FORMATS.md) - Understanding bd's JSONL structure
```
--------------------------------------------------------------------------------
/lib/README.md:
--------------------------------------------------------------------------------
```markdown
# Beads Agent Mail Adapter
Lightweight Python library for integrating [MCP Agent Mail](https://github.com/Dicklesworthstone/mcp_agent_mail) with Beads issue tracking.
## Features
- **Collision Prevention**: Reserve issues to prevent duplicate work across agents
- **Real-Time Coordination**: <100ms latency vs 2-5s with git-only sync
- **Graceful Degradation**: Automatically falls back to git-only mode when server unavailable
- **Zero Configuration**: Works without Agent Mail (optional enhancement)
## Installation
No installation required - just copy `beads_mail_adapter.py` to your project:
```bash
cp lib/beads_mail_adapter.py /path/to/your/agent/
```
## Quick Start
```python
from beads_mail_adapter import AgentMailAdapter
# Initialize adapter (automatically detects server availability)
adapter = AgentMailAdapter()
if adapter.enabled:
print("✅ Agent Mail coordination enabled")
else:
print("⚠️ Agent Mail unavailable, using git-only mode")
# Reserve issue before claiming
if adapter.reserve_issue("bd-123"):
# Claim issue in Beads
subprocess.run(["bd", "update", "bd-123", "--status", "in_progress"])
# Do work...
# Notify other agents
adapter.notify("status_changed", {"issue_id": "bd-123", "status": "completed"})
# Release reservation
adapter.release_issue("bd-123")
else:
print("❌ Issue bd-123 already reserved by another agent")
```
## Configuration
Configure via environment variables:
```bash
# Agent Mail server URL (default: http://127.0.0.1:8765)
export AGENT_MAIL_URL=http://localhost:8765
# Authentication token (optional)
export AGENT_MAIL_TOKEN=your-bearer-token
# Agent identifier (default: hostname)
export BEADS_AGENT_NAME=assistant-alpha
# Request timeout in seconds (default: 5)
export AGENT_MAIL_TIMEOUT=5
```
Or pass directly to constructor:
```python
adapter = AgentMailAdapter(
url="http://localhost:8765",
token="your-token",
agent_name="assistant-alpha",
timeout=5
)
```
## API Reference
### `AgentMailAdapter(url=None, token=None, agent_name=None, timeout=5)`
Initialize adapter with optional configuration overrides.
**Attributes:**
- `enabled` (bool): True if server is available, False otherwise
### `reserve_issue(issue_id: str, ttl: int = 3600) -> bool`
Reserve an issue to prevent other agents from claiming it.
**Args:**
- `issue_id`: Issue ID (e.g., "bd-123")
- `ttl`: Reservation time-to-live in seconds (default: 1 hour)
**Returns:** True if reservation successful, False if already reserved
### `release_issue(issue_id: str) -> bool`
Release a previously reserved issue.
**Returns:** True on success
### `notify(event_type: str, data: Dict[str, Any]) -> bool`
Send notification to other agents.
**Args:**
- `event_type`: Event type (e.g., "status_changed", "issue_completed")
- `data`: Event payload
**Returns:** True on success
### `check_inbox() -> List[Dict[str, Any]]`
Check for incoming notifications from other agents.
**Returns:** List of notification messages (empty if none or server unavailable)
### `get_reservations() -> List[Dict[str, Any]]`
Get all active reservations.
**Returns:** List of active reservations
## Testing
Run the test suite:
```bash
cd lib
python3 test_beads_mail_adapter.py -v
```
Coverage includes:
- Server available/unavailable scenarios
- Graceful degradation
- Reservation conflicts
- Environment variable configuration
## Integration Examples
See [examples/python-agent/agent.py](../examples/python-agent/agent.py) for a complete agent implementation.
## Graceful Degradation
The adapter is designed to **never block or fail** your agent:
- If server is unavailable on init → `enabled = False`, all operations no-op
- If server dies mid-operation → methods return success (graceful degradation)
- If network timeout → operations continue (no blocking)
- If 409 conflict on reservation → returns `False` (expected behavior)
This ensures your agent works identically with or without Agent Mail.
## When to Use Agent Mail
**Use Agent Mail when:**
- Running multiple AI agents concurrently
- Need real-time collision prevention
- Want to reduce git commit noise
- Need <100ms coordination latency
**Stick with git-only when:**
- Single agent workflow
- No concurrent work
- Simplicity over speed
- No server infrastructure available
## Resources
- [ADR 002: Agent Mail Integration](../docs/adr/002-agent-mail-integration.md)
- [MCP Agent Mail Repository](https://github.com/Dicklesworthstone/mcp_agent_mail)
- [Latency Benchmark Results](../latency_results.md)
```
--------------------------------------------------------------------------------
/scripts/README.md:
--------------------------------------------------------------------------------
```markdown
# Beads Scripts
Utility scripts for maintaining the beads project.
## release.sh (⭐ The Easy Button)
**One-command release** from version bump to local installation.
### Usage
```bash
# Full release (does everything)
./scripts/release.sh 0.9.3
# Preview what would happen
./scripts/release.sh 0.9.3 --dry-run
```
### What It Does
This master script automates the **entire release process**:
1. ✅ Kills running daemons (avoids version conflicts)
2. ✅ Runs tests and linting
3. ✅ Bumps version in all files
4. ✅ Commits and pushes version bump
5. ✅ Creates and pushes git tag
6. ✅ Updates Homebrew formula
7. ✅ Upgrades local brew installation
8. ✅ Verifies everything works
**After this script completes, your system is running the new version!**
### Examples
```bash
# Release version 0.9.3
./scripts/release.sh 0.9.3
# Preview a release (no changes made)
./scripts/release.sh 1.0.0 --dry-run
```
### Prerequisites
- Clean git working directory
- All changes committed
- golangci-lint installed
- Homebrew installed (for local upgrade)
- Push access to steveyegge/beads and steveyegge/homebrew-beads
### Output
The script provides colorful, step-by-step progress output:
- 🟨 Yellow: Current step
- 🟩 Green: Step completed
- 🟥 Red: Errors
- 🟦 Blue: Section headers
### What Happens Next
After the script finishes:
- GitHub Actions builds binaries for all platforms (~5 minutes)
- PyPI package is published automatically
- Users can `brew upgrade bd` to get the new version
- GitHub Release is created with binaries and changelog
---
## bump-version.sh
Bumps the version number across all beads components in a single command.
### Usage
```bash
# Show usage
./scripts/bump-version.sh
# Update versions (shows diff, no commit)
./scripts/bump-version.sh 0.9.3
# Update versions and auto-commit
./scripts/bump-version.sh 0.9.3 --commit
```
### What It Does
Updates version in all these files:
- `cmd/bd/version.go` - bd CLI version constant
- `.claude-plugin/plugin.json` - Plugin version
- `.claude-plugin/marketplace.json` - Marketplace plugin version
- `integrations/beads-mcp/pyproject.toml` - MCP server version
- `README.md` - Alpha status version
- `PLUGIN.md` - Version requirements
### Features
- **Validates** semantic versioning format (MAJOR.MINOR.PATCH)
- **Verifies** all versions match after update
- **Shows** git diff of changes
- **Auto-commits** with standardized message (optional)
- **Cross-platform** compatible (macOS and Linux)
### Examples
```bash
# Bump to 0.9.3 and review changes
./scripts/bump-version.sh 0.9.3
# Review the diff, then manually commit
# Bump to 1.0.0 and auto-commit
./scripts/bump-version.sh 1.0.0 --commit
git push origin main
```
### Why This Script Exists
Previously, version bumps only updated `cmd/bd/version.go`, leaving other components out of sync. This script ensures all version numbers stay consistent across the project.
### Safety
- Checks for uncommitted changes before proceeding
- Refuses to auto-commit if there are existing uncommitted changes
- Validates version format before making any changes
- Verifies all versions match after update
- Shows diff for review before commit
---
## update-homebrew.sh
Automatically updates the Homebrew formula with GoReleaser release artifacts.
### Usage
```bash
# Update formula after pushing git tag
./scripts/update-homebrew.sh 0.9.3
# Use custom tap directory
TAP_DIR=~/homebrew-beads ./scripts/update-homebrew.sh 0.9.3
```
### What It Does
This script automates the Homebrew formula update process:
1. **Waits** for GitHub Actions release build (~5 minutes, checks every 30s)
2. **Downloads** checksums.txt from the GitHub release
3. **Extracts** SHA256s for all platform-specific binaries:
- macOS ARM64 (Apple Silicon)
- macOS AMD64 (Intel)
- Linux AMD64
- Linux ARM64
4. **Clones/updates** the homebrew-beads tap repository
5. **Updates** Formula/bd.rb with new version and all SHA256s
6. **Commits and pushes** the changes
### Important Notes
- **Run AFTER pushing the git tag** - the script waits for GitHub Actions to finish
- **Uses GoReleaser artifacts**, not source tarballs (fixed in v0.23.0)
- **Automatically waits** up to 7.5 minutes for release build to complete
- **Updates all platforms** in a single operation
### Examples
```bash
# Standard usage (after git tag push)
git tag v0.9.3 && git push origin v0.9.3
./scripts/update-homebrew.sh 0.9.3
# Custom tap directory
TAP_DIR=/path/to/homebrew-beads ./scripts/update-homebrew.sh 0.9.3
```
### Why This Script Exists
Previously, the Homebrew formula update was manual and error-prone:
- Used source tarball SHA256 instead of GoReleaser artifacts (wrong!)
- Required manually computing 4 separate SHA256s
- Easy to forget updating all platforms
- No automation for waiting on GitHub Actions
This script fixes all those issues and is now used by `release.sh`.
---
## Future Scripts
Additional maintenance scripts may be added here as needed.
```
--------------------------------------------------------------------------------
/examples/contributor-workflow/README.md:
--------------------------------------------------------------------------------
```markdown
# OSS Contributor Workflow Example
This example demonstrates how to use beads' contributor workflow to keep your planning issues separate from upstream PRs when contributing to open-source projects.
## Problem
When contributing to OSS projects, you want to:
- Track your planning, todos, and design notes
- Keep experimental work organized
- **NOT** pollute upstream PRs with your personal planning issues
## Solution
Use `bd init --contributor` to set up a separate planning repository that never gets committed to the upstream project.
## Setup
### Step 1: Fork and Clone
```bash
# Fork the project on GitHub, then clone your fork
git clone https://github.com/YOUR_USERNAME/project.git
cd project
# Add upstream remote (important for fork detection!)
git remote add upstream https://github.com/ORIGINAL_OWNER/project.git
```
### Step 2: Initialize Beads with Contributor Wizard
```bash
# Run the contributor setup wizard
bd init --contributor
```
The wizard will:
1. ✅ Detect that you're in a fork (checks for 'upstream' remote)
2. ✅ Prompt you to create a planning repo (`~/.beads-planning` by default)
3. ✅ Configure auto-routing so your planning stays separate
4. ✅ Initialize the planning repo with git
### Step 3: Start Working
```bash
# Create a planning issue
bd create "Plan how to fix bug X" -p 2
# This issue goes to ~/.beads-planning automatically!
```
## How It Works
### Auto-Routing
When you create issues as a contributor:
```bash
bd create "Fix authentication bug" -p 1
```
Beads automatically routes this to your planning repo (`~/.beads-planning/.beads/beads.jsonl`), not the current repo.
### Viewing Issues
```bash
# See all issues (from both repos)
bd list
# See only current repo issues
bd list --source-repo .
# See only planning issues
bd list --source-repo ~/.beads-planning
```
### Discovered Work
When you discover work while implementing:
```bash
# The new issue inherits source_repo from parent
bd create "Found edge case in auth" -p 1 --deps discovered-from:bd-42
```
### Committing Code (Not Planning)
Your code changes get committed to the fork, but planning issues stay separate:
```bash
# Only commits to fork (not planning repo)
git add src/auth.go
git commit -m "Fix: authentication bug"
git push origin my-feature-branch
```
Your planning issues in `~/.beads-planning` **never appear in PRs**.
## Example Workflow
```bash
# 1. Create fork and clone
git clone https://github.com/you/upstream-project.git
cd upstream-project
git remote add upstream https://github.com/upstream/upstream-project.git
# 2. Run contributor setup
bd init --contributor
# Wizard detects fork ✓
# Creates ~/.beads-planning ✓
# Configures auto-routing ✓
# 3. Plan your work (routes to planning repo)
bd create "Research how auth module works" -p 2
bd create "Design fix for bug #123" -p 1
bd ready # Shows planning issues
# 4. Implement (commit code only)
git checkout -b fix-auth-bug
# ... make changes ...
git add . && git commit -m "Fix: auth bug"
# 5. Track discovered work (stays in planning repo)
bd create "Found related issue in logout" -p 2 --deps discovered-from:bd-abc
# 6. Push code (planning never included)
git push origin fix-auth-bug
# Create PR on GitHub - zero planning pollution!
# 7. Clean up after PR merges
bd close bd-abc --reason "PR merged"
```
## Configuration
The wizard stores these settings in your beads configuration (`.beads/config.yaml`):
```yaml
contributor:
planning_repo: ~/.beads-planning
auto_route: true
```
### Manual Configuration
If you prefer manual setup:
```bash
# Initialize beads normally
bd init
# Configure planning repo
bd config set contributor.planning_repo ~/.beads-planning
bd config set contributor.auto_route true
```
## Multi-Repository View
Beads aggregates issues from multiple repos:
```bash
# List issues from all configured repos
bd list
# Filter by source repository
bd list --source-repo . # Current repo only
bd list --source-repo ~/.beads-planning # Planning repo only
```
## Benefits
✅ **Clean PRs** - No personal todos in upstream contributions
✅ **Private planning** - Experimental work stays local
✅ **Git ledger** - Everything is version controlled
✅ **Unified view** - See all issues with `bd list`
✅ **Auto-routing** - No manual sorting needed
## Common Questions
### Q: What if I want some issues in the upstream repo?
A: Override auto-routing with `--source-repo` flag:
```bash
bd create "Document new API" -p 2 --source-repo .
```
### Q: Can I change the planning repo location?
A: Yes, configure it:
```bash
bd config set contributor.planning_repo /path/to/my-planning
```
### Q: What if I have push access to upstream?
A: The wizard will ask if you want a planning repo anyway. You can say "no" to store everything in the current repo.
### Q: How do I disable auto-routing?
A: Turn it off:
```bash
bd config set contributor.auto_route false
```
## See Also
- [Multi-Repo Migration Guide](../../docs/MULTI_REPO_MIGRATION.md)
- [Team Workflow Example](../team-workflow/)
- [Protected Branch Setup](../protected-branch/)
```
--------------------------------------------------------------------------------
/examples/library-usage/README.md:
--------------------------------------------------------------------------------
```markdown
# Beads Library Usage Example
This example demonstrates using Beads as a Go library in external projects (like VC).
## Why Use Beads as a Library?
Instead of spawning `bd` CLI processes:
- ✅ **Direct API access** - Call functions directly instead of parsing JSON output
- ✅ **Type safety** - Compile-time checking of types and interfaces
- ✅ **Performance** - No process spawn overhead
- ✅ **Transactions** - Access to database transactions for complex operations
- ✅ **Shared database** - Multiple components can use same database connection
- ✅ **Error handling** - Proper Go error types instead of parsing stderr
## Installation
In your Go project:
```bash
go get github.com/steveyegge/beads@latest
```
## Basic Usage
```go
package main
import (
"context"
"log"
"github.com/steveyegge/beads"
)
func main() {
ctx := context.Background()
// Find and open database
dbPath := beads.FindDatabasePath()
store, err := beads.NewSQLiteStorage(dbPath)
if err != nil {
log.Fatal(err)
}
defer store.Close()
// Get ready work
ready, err := store.GetReadyWork(ctx, beads.WorkFilter{
Status: beads.StatusOpen,
Limit: 10,
})
if err != nil {
log.Fatal(err)
}
// Process ready issues...
}
```
## Running This Example
```bash
# From this directory
cd examples/library-usage
# Make sure there's a Beads database
bd init --prefix demo
# Run the example
go run main.go
```
## Available Operations
The `beads.Storage` interface provides:
### Issues
- `CreateIssue(ctx, issue, actor)` - Create a new issue
- `CreateIssues(ctx, issues, actor)` - Batch create issues
- `GetIssue(ctx, id)` - Get issue by ID
- `UpdateIssue(ctx, id, updates, actor)` - Update issue fields
- `CloseIssue(ctx, id, reason, actor)` - Close an issue
- `SearchIssues(ctx, query, filter)` - Search with filters
### Dependencies
- `AddDependency(ctx, dep, actor)` - Add dependency between issues
- `RemoveDependency(ctx, issueID, dependsOnID, actor)` - Remove dependency
- `GetDependencies(ctx, issueID)` - Get what this issue depends on
- `GetDependents(ctx, issueID)` - Get what depends on this issue
- `GetDependencyTree(ctx, issueID, maxDepth, showAllPaths)` - Visualize tree
### Labels
- `AddLabel(ctx, issueID, label, actor)` - Add label to issue
- `RemoveLabel(ctx, issueID, label, actor)` - Remove label
- `GetLabels(ctx, issueID)` - Get all labels for an issue
- `GetIssuesByLabel(ctx, label)` - Find issues with label
### Ready Work & Blocking
- `GetReadyWork(ctx, filter)` - Find issues with no blockers
- `GetBlockedIssues(ctx)` - Find blocked issues with blocker info
- `GetEpicsEligibleForClosure(ctx)` - Find completable epics
### Comments & Events
- `AddIssueComment(ctx, issueID, author, text)` - Add comment
- `GetIssueComments(ctx, issueID)` - Get all comments
- `GetEvents(ctx, issueID, limit)` - Get audit trail
### Statistics
- `GetStatistics(ctx)` - Get aggregate metrics
## Types
All types are exported via the `beads` package:
```go
// Core types
beads.Issue
beads.Status (Open, InProgress, Closed, Blocked)
beads.IssueType (Bug, Feature, Task, Epic, Chore)
beads.Priority (0-4)
// Relationships
beads.Dependency
beads.DependencyType (Blocks, Related, ParentChild, DiscoveredFrom)
// Metadata
beads.Label
beads.Comment
beads.Event
// Queries
beads.IssueFilter
beads.WorkFilter
beads.BlockedIssue
beads.EpicStatus
beads.Statistics
```
## VC Integration Example
For VC (VibeCoder), the integration would look like:
```go
// In VC's storage layer
type VCStorage struct {
beads beads.Storage
}
func NewVCStorage(dbPath string) (*VCStorage, error) {
store, err := beads.NewSQLiteStorage(dbPath)
if err != nil {
return nil, err
}
return &VCStorage{beads: store}, nil
}
// Claim ready work for executor
func (s *VCStorage) ClaimWork(ctx context.Context, executorID string) (*beads.Issue, error) {
ready, err := s.beads.GetReadyWork(ctx, beads.WorkFilter{
Status: beads.StatusOpen,
Limit: 1,
})
if err != nil {
return nil, err
}
if len(ready) == 0 {
return nil, nil // No work available
}
issue := ready[0]
// Claim it
updates := map[string]interface{}{
"status": beads.StatusInProgress,
"assignee": executorID,
}
if err := s.beads.UpdateIssue(ctx, issue.ID, updates, executorID); err != nil {
return nil, err
}
return issue, nil
}
```
## Best Practices
1. **Context** - Always pass `context.Context` for cancellation support
2. **Actor** - Provide meaningful actor strings for audit trail
3. **Error handling** - Check all errors; database operations can fail
4. **Close** - Always `defer store.Close()` after opening
5. **Transactions** - For complex multi-step operations, consider using the underlying database connection directly
## See Also
- [EXTENDING.md](../../EXTENDING.md) - Detailed extension guide
- [beads.go](../../beads.go) - Public API source
- [internal/storage/storage.go](../../internal/storage/storage.go) - Storage interface
```
--------------------------------------------------------------------------------
/examples/git-hooks/README.md:
--------------------------------------------------------------------------------
```markdown
# bd Git Hooks
This directory contains git hooks that integrate bd (beads) with your git workflow, preventing stale JSONL from being pushed to remote.
## The Problem
Two race conditions can occur:
1. **Between operations and commits**: Daemon auto-flush (5s debounce) may fire after commit
- User closes issue via MCP → daemon schedules flush (5 sec delay)
- User commits code changes → JSONL appears clean
- Daemon flush fires → JSONL modified after commit
- Result: dirty working tree showing JSONL changes
2. **Between commits and pushes**: Changes made after commit but before push (bd-my64)
- User commits → pre-commit hook flushes JSONL
- User adds comments or updates issues
- User pushes → outdated JSONL is pushed
- Result: remote has stale JSONL
## The Solution
These git hooks ensure bd changes are always synchronized with your commits and pushes:
- **pre-commit** - Flushes pending bd changes to JSONL before commit and stages it
- **pre-push** - Blocks push if JSONL has uncommitted changes (bd-my64)
- **post-merge** - Imports updated JSONL after git pull/merge
## Installation
### Quick Install (Recommended)
Use `bd hooks install` to install hooks automatically:
```bash
bd hooks install
```
Alternatively, use `bd init --quiet` which installs hooks during initialization.
**Hook Chaining (New in v0.23):** If you already have git hooks installed (e.g., pre-commit framework), bd will:
- Detect existing hooks
- Offer to chain with them (recommended)
- Preserve your existing hooks while adding bd functionality
- Back up hooks if you choose to overwrite
This prevents bd from silently overwriting workflows like pre-commit framework, which previously caused test failures to slip through.
The installer will:
- Copy hooks to `.git/hooks/`
- Make them executable
- Detect and preserve existing hooks
### Shared Hooks for Teams (New in v0.24.3)
For teams that need to share hooks across members (especially when using pre-built containers or CI/CD):
```bash
bd hooks install --shared
```
This installs hooks to `.beads-hooks/` (a versioned directory) instead of `.git/hooks/`, and configures git to use them via `git config core.hooksPath .beads-hooks`.
**Benefits:**
- ✅ Hooks are versioned and can be committed to your repository
- ✅ Team members get hooks automatically when they clone/pull
- ✅ Security teams can scan and audit hook contents before deployment
- ✅ Works with pre-built containers (hooks are already in the repo)
- ✅ Hooks stay in sync when you run `bd hooks install --shared` after upgrades
**Use cases:**
- Teams building containers in CI that need hooks pre-installed
- Organizations requiring security scanning of all code (including hooks)
- Projects where consistent tooling across team members is critical
- Devcontainer workflows where bd is installed during container build
After running `bd hooks install --shared`, commit `.beads-hooks/` to your repository:
```bash
git add .beads-hooks/
git commit -m "Add bd git hooks for team"
```
### Manual Install
```bash
cp examples/git-hooks/pre-commit .git/hooks/pre-commit
cp examples/git-hooks/pre-push .git/hooks/pre-push
cp examples/git-hooks/post-merge .git/hooks/post-merge
chmod +x .git/hooks/pre-commit .git/hooks/pre-push .git/hooks/post-merge
```
## How It Works
### pre-commit
Before each commit, the hook runs:
```bash
bd sync --flush-only
```
This:
1. Exports any pending database changes to `.beads/issues.jsonl`
2. Stages the JSONL file if modified
3. Allows the commit to proceed with clean state
The hook is silent on success, fast (no git operations), and safe (fails commit if flush fails).
### pre-push
Before each push, the hook:
```bash
bd sync --flush-only # Flush pending changes (if bd available)
git status --porcelain .beads/*.jsonl # Check for uncommitted changes
```
This prevents pushing stale JSONL by:
1. Flushing pending in-memory changes from daemon's 5s debounce
2. Checking for uncommitted changes (staged, unstaged, untracked, deleted)
3. Failing the push with clear error message if changes exist
4. Instructing user to commit JSONL before pushing again
This solves bd-my64: changes made between commit and push (or pending debounced flushes) are caught before reaching remote.
### post-merge
After a git pull or merge, the hook runs:
```bash
bd import -i .beads/beads.jsonl
```
This ensures your local database reflects the merged state. The hook:
- Only runs if `.beads/beads.jsonl` exists (also checks `issues.jsonl` for backward compat)
- Imports any new issues or updates from the merge
- Warns on failure but doesn't block the merge
**Note:** With hash-based IDs (v0.20.1+), ID collisions don't occur - different issues get different hash IDs.
## Compatibility
- **Auto-sync**: Works alongside bd's automatic 5-second debounce
- **Direct mode**: Hooks work in both daemon and `--no-daemon` mode
- **Worktrees**: Safe to use with git worktrees
## Benefits
✅ No more dirty working tree after commits
✅ Database always in sync with git
✅ Automatic collision resolution on merge
✅ Fast and silent operation
✅ Optional - manual `bd sync` still works
## Uninstall
Remove the hooks:
```bash
rm .git/hooks/pre-commit .git/hooks/pre-push .git/hooks/post-merge
```
Your backed-up hooks (if any) are in `.git/hooks/*.backup-*`.
## Related
- See [bd-51](../../.beads/bd-51) for the race condition bug report
- See [AGENTS.md](../../AGENTS.md) for the full git workflow
- See [examples/](../) for other integrations
```
--------------------------------------------------------------------------------
/examples/claude-code-skill/README.md:
--------------------------------------------------------------------------------
```markdown
# Claude Code Skill for Beads
A comprehensive Claude Code skill that teaches Claude how to use beads effectively for issue tracking in multi-session coding workflows.
## What is This?
This is a [Claude Code](https://claude.com/claude-code) skill - a markdown-based instruction set that teaches Claude AI how to use beads. While the [beads plugin](../../.claude-plugin/) provides slash commands and MCP tools for basic operations, this skill complements it by teaching the **philosophy and patterns** of effective beads usage.
## What Does It Provide?
**Main skill file:**
- Core workflow patterns (discovery, execution, planning phases)
- Decision criteria for when to use bd vs TodoWrite/markdown
- Session start protocols and ready work checks
- Compaction survival patterns (critical for Claude Code context limits)
- Issue lifecycle management with self-check checklists
- Integration patterns with other tools
**Reference documentation:**
- `references/BOUNDARIES.md` - Detailed decision criteria for bd vs TodoWrite with examples
- `references/CLI_REFERENCE.md` - Complete command reference with all flags
- `references/DEPENDENCIES.md` - Deep dive into dependency types and relationship patterns
- `references/WORKFLOWS.md` - Step-by-step workflows with checklists
- `references/ISSUE_CREATION.md` - When to ask vs create issues, quality guidelines
- `references/RESUMABILITY.md` - Making issues resumable across sessions with working code examples
- `references/STATIC_DATA.md` - Using bd for reference databases and glossaries
## Why is This Useful?
The skill helps Claude understand:
1. **When to use beads** - Not every task needs bd. The skill teaches when bd helps vs when markdown/TodoWrite is better (per Steve Yegge's insight about markdown "losing its way in the middle")
2. **How to structure issues** - Proper use of dependency types, issue metadata, and relationship patterns
3. **Workflow patterns** - Proactive issue creation during discovery, status maintenance during execution, dependency graphs during planning
4. **Integration with other tools** - How bd and TodoWrite can coexist, each serving its purpose
## Installation
### Prerequisites
1. Install beads CLI:
```bash
curl -sSL https://raw.githubusercontent.com/steveyegge/beads/main/install.sh | bash
```
2. Have [Claude Code](https://claude.com/claude-code) installed
### Install the Skill
You can install this skill in two ways:
#### Option 1: Copy to Claude Code Skills Directory
```bash
# Clone this repo (if you haven't already)
git clone https://github.com/steveyegge/beads.git
cd beads/examples/claude-code-skill
# Create a symlink in your Claude Code skills directory
ln -s "$(pwd)" ~/.claude/skills/bd-issue-tracking
```
#### Option 2: Copy Files Directly
```bash
# Create the skill directory
mkdir -p ~/.claude/skills/bd-issue-tracking
# Copy the skill files
cp -r beads/examples/claude-code-skill/* ~/.claude/skills/bd-issue-tracking/
```
### Verify Installation
Restart Claude Code, then in a new session, ask:
```
Do you have the bd skill installed?
```
Claude should confirm it has access to the bd skill and can help with beads issue tracking.
## How It Works
Claude Code automatically loads skills from `~/.claude/skills/`. When this skill is installed:
1. Claude gets the core workflow from `SKILL.md` immediately
2. Claude can read reference docs when it needs detailed information
3. The skill uses progressive disclosure - quick reference in SKILL.md, details in references/
## Usage Examples
Once installed, Claude will automatically:
- Check for ready work at session start (if `.beads/` exists)
- Suggest creating bd issues for multi-session work
- Use appropriate dependency types when linking issues
- Maintain proper issue lifecycle (create → in_progress → close)
- Know when to use bd vs TodoWrite
You can also explicitly ask Claude to use beads:
```
Let's track this work in bd since it spans multiple sessions
```
```
Create a bd issue for this bug we discovered
```
```
Show me what's ready to work on in bd
```
## Relationship to Beads Plugin
This skill complements the [beads plugin](../../.claude-plugin/):
- **Plugin** (`.claude-plugin/`): Provides slash commands (`/bd-create`, `/bd-ready`) and MCP tools for basic operations
- **Skill** (this directory): Teaches Claude the patterns, philosophy, and decision-making for effective beads usage
You can use both together for the best experience:
- Plugin for quick operations
- Skill for intelligent workflow decisions
### Why CLI Instead of MCP?
This skill teaches Claude to use the bd CLI directly (via Bash commands like `bd ready`, `bd create`, etc.) rather than relying on MCP tools. This approach has several benefits:
- **Lower context usage** - No MCP server prompt loaded into every session, saving tokens
- **Works everywhere** - Only requires bd binary installed, no MCP server setup needed
- **Explicit operations** - All bd commands visible in conversation history for transparency
- **Full functionality** - CLI supports `--json` flag for programmatic parsing just like MCP
The MCP server is excellent for interactive use, but for autonomous agent workflows where context efficiency matters, direct CLI usage is more practical. The skill provides the guidance Claude needs to use the CLI effectively.
## Contributing
Found ways to improve the skill? Contributions welcome! See [CONTRIBUTING.md](../../CONTRIBUTING.md) for guidelines.
## License
Same as beads - MIT License. See [LICENSE](../../LICENSE).
```
--------------------------------------------------------------------------------
/examples/claude-desktop-mcp/README.md:
--------------------------------------------------------------------------------
```markdown
# Claude Desktop MCP Server for Beads
> **Note**: The beads MCP server is now fully implemented! See [integrations/beads-mcp](../../integrations/beads-mcp/) for the production implementation.
> **Recommendation**: For environments with shell access (Claude Code, Cursor, Windsurf), use **CLI + hooks** instead of MCP. It uses ~1-2k tokens vs 10-50k for MCP schemas, resulting in lower compute cost and latency. **Use MCP only for MCP-only environments** like Claude Desktop where CLI is unavailable.
## What This Provides
An MCP server that exposes bd functionality to Claude Desktop and other MCP clients, allowing Claude to:
- Query ready work
- Create and update issues
- Manage dependencies
- Track discovered work
## Quick Start
Install the beads MCP server:
```bash
# Using uv (recommended)
uv tool install beads-mcp
# Or using pip
pip install beads-mcp
```
Add to your Claude Desktop config (`~/Library/Application Support/Claude/claude_desktop_config.json` on macOS):
```json
{
"mcpServers": {
"beads": {
"command": "beads-mcp"
}
}
}
```
Restart Claude Desktop and you're done! Claude can now manage your beads issues.
## Full Documentation
See the [beads-mcp README](../../integrations/beads-mcp/README.md) for:
- Installation instructions
- Configuration options
- Environment variables
- Development guide
---
## Original Design Documentation (Historical)
## Planned Features
```typescript
// MCP server will expose these tools to Claude:
// Find ready work
{
"name": "beads_ready_work",
"description": "Find issues with no blocking dependencies",
"parameters": {
"limit": "number",
"priority": "number (0-4)",
"assignee": "string"
}
}
// Create issue
{
"name": "beads_create_issue",
"description": "Create a new issue",
"parameters": {
"title": "string",
"description": "string",
"priority": "number (0-4)",
"type": "bug|feature|task|epic|chore"
}
}
// Update issue
{
"name": "beads_update_issue",
"description": "Update issue status or fields",
"parameters": {
"id": "string",
"status": "open|in_progress|blocked|closed",
"priority": "number",
"assignee": "string"
}
}
// Add dependency
{
"name": "beads_add_dependency",
"description": "Link issues with dependencies",
"parameters": {
"from": "string (issue ID)",
"to": "string (issue ID)",
"type": "blocks|related|parent-child|discovered-from"
}
}
```
## How It Would Work
1. Install the MCP server for Claude Desktop
2. Claude gains access to bd commands
3. During coding sessions, Claude can:
- Check for ready work: "What should I work on next?"
- Create issues: "I found a bug, let me track it"
- Link discoveries: "This is related to the issue we're working on"
- Update status: "Marking bd-5 as completed"
## Example Conversation
```
User: What's ready to work on?
Claude: Let me check the ready work queue...
[Calls beads_ready_work()]
I found 3 ready issues:
1. bd-12: Fix authentication bug (priority 0, bug)
2. bd-15: Add dark mode toggle (priority 1, feature)
3. bd-18: Update documentation (priority 2, task)
Which would you like to work on?
User: Let's do bd-12
Claude: Great! I'll mark bd-12 as in progress.
[Calls beads_update_issue("bd-12", status="in_progress")]
Let me investigate the authentication code...
[Works on the issue]
I found the bug in auth.js:45 - there's a race condition. While fixing this, I noticed we're also missing rate limiting. Should I track that as a separate issue?
User: Yes please
Claude: [Calls beads_create_issue("Add rate limiting to auth endpoints", type="task", priority=1)]
Created bd-23: Add rate limiting to auth endpoints
[Calls beads_add_dependency("bd-23", "bd-12", type="discovered-from")]
Linked bd-23 as discovered from bd-12
Now I'll complete the auth bug fix...
```
## Implementation Guide
To implement this MCP server:
1. Create a Node.js/TypeScript project
2. Use the MCP SDK: `npm install @anthropic/mcp-sdk`
3. Implement tool handlers that call `bd` commands via child_process
4. Register with Claude Desktop's config
Example skeleton:
```typescript
import { Server } from "@anthropic/mcp-sdk";
import { exec } from "child_process";
import { promisify } from "util";
const execAsync = promisify(exec);
const server = new Server({
name: "beads",
version: "1.0.0"
});
// Register ready work tool
server.tool("beads_ready_work", async (params) => {
const { stdout } = await execAsync(
`bd ready --json --limit ${params.limit || 10}`
);
return JSON.parse(stdout);
});
// Register create issue tool
server.tool("beads_create_issue", async (params) => {
const { stdout } = await execAsync(
`bd create "${params.title}" -d "${params.description}" -p ${params.priority} -t ${params.type} --json`
);
return JSON.parse(stdout);
});
// ... more tools ...
server.start();
```
## Installation (Future)
```bash
# Install the MCP server
npm install -g beads-mcp-server
# Configure Claude Desktop
# Add to ~/Library/Application Support/Claude/claude_desktop_config.json
{
"mcpServers": {
"beads": {
"command": "beads-mcp-server",
"args": []
}
}
}
# Restart Claude Desktop
```
## Alternative: Direct bd Usage
If you prefer not to use the MCP server (or it is unavailable in your environment), you can instruct Claude to use bd directly:
```markdown
# In your CLAUDE.md or project instructions:
We use Beads (bd) for issue tracking. Available commands:
- `bd ready --json` - Find ready work
- `bd create "title" -p 1 -t bug --json` - Create issue
- `bd update bd-1 --status in_progress --json` - Update status
- `bd dep add bd-2 bd-1 --type discovered-from` - Link issues
- `bd close bd-1 --reason "Done" --json` - Complete work
All commands support --json for parsing. Please use bd to track work during our sessions.
```
## Contributing
Interested in building this MCP server? We welcome contributions!
See [CONTRIBUTING.md](../../CONTRIBUTING.md) for guidelines.
## See Also
- [MCP Documentation](https://docs.anthropic.com/claude/docs/model-context-protocol)
- [MCP SDK](https://github.com/anthropics/mcp-sdk)
- [Claude Desktop](https://claude.ai/desktop)
- [../python-agent/](../python-agent/) - Python implementation pattern
```
--------------------------------------------------------------------------------
/examples/bd-example-extension-go/README.md:
--------------------------------------------------------------------------------
```markdown
# BD Extension Example (Go)
This example demonstrates how to extend bd with custom tables for application-specific orchestration, following the patterns described in [EXTENDING.md](../../EXTENDING.md).
## What This Example Shows
1. **Schema Extension**: Adding custom tables (`example_executions`, `example_checkpoints`) to bd's SQLite database
2. **Foreign Key Integration**: Linking extension tables to bd's `issues` table with proper cascading
3. **Dual-Layer Access**: Using bd's Go API for issue management while directly querying extension tables
4. **Complex Queries**: Joining bd's issues with extension tables for powerful insights
5. **Execution Tracking**: Implementing agent assignment, checkpointing, and crash recovery patterns
## Key Patterns Illustrated
### Pattern 1: Namespace Your Tables
All tables are prefixed with `example_` to avoid conflicts:
```sql
CREATE TABLE example_executions (...)
CREATE TABLE example_checkpoints (...)
```
### Pattern 2: Foreign Key Relationships
Extension tables link to bd's issues with cascading deletes:
```sql
FOREIGN KEY (issue_id) REFERENCES issues(id) ON DELETE CASCADE
```
### Pattern 3: Index Common Queries
Indexes are created for frequent query patterns:
```sql
CREATE INDEX idx_executions_status ON example_executions(status);
CREATE INDEX idx_executions_issue ON example_executions(issue_id);
```
### Pattern 4: Layer Separation
- **bd layer**: Issue tracking, dependencies, ready work
- **Extension layer**: Execution state, agent assignments, checkpoints
### Pattern 5: Join Queries
Powerful queries join both layers:
```sql
SELECT i.id, i.title, i.priority, e.status, e.agent_id, COUNT(c.id)
FROM issues i
LEFT JOIN example_executions e ON i.id = e.issue_id
LEFT JOIN example_checkpoints c ON e.id = c.execution_id
GROUP BY i.id, e.id
```
## Building and Running
### Prerequisites
- Go 1.21 or later
- bd initialized in a directory (run `bd init --prefix demo`)
### Install
```bash
# Install from the repository
go install github.com/steveyegge/beads/examples/bd-example-extension-go@latest
# Or install from local source
cd examples/bd-example-extension-go
go install .
```
The binary will be installed as `bd-example-extension-go` in your `$GOPATH/bin` (or `$GOBIN` if set).
### Running
```bash
# Auto-discover database and run
bd-example-extension-go
# Or specify database path
bd-example-extension-go -db .beads/demo.db
```
**Output:**
```
Claiming: demo-5
✓ assess
✓ implement
✓ test
Status:
demo-4: Fix memory leak [closed] agent=agent-demo checkpoints=3
demo-1: Implement auth [in_progress] agent=agent-alice checkpoints=0
demo-5: Test minimized [closed] agent=demo-agent checkpoints=3
```
## Code Structure
**Just 116 lines total** - minimal, focused extension example.
- **main.go** (93 lines): Complete workflow with embedded schema
- **schema.sql** (23 lines): Extension tables (`example_executions`, `example_checkpoints`) with foreign keys and indexes
Demonstrates:
1. Auto-discover database (`beads.FindDatabasePath`)
2. Dual-layer access (bd API + direct SQL)
3. Execution tracking with checkpoints
4. Complex joined queries across layers
## Example Queries
### Find Running Executions with Checkpoint Count
```go
query := `
SELECT i.id, i.title, e.status, e.agent_id, COUNT(c.id) as checkpoints
FROM issues i
INNER JOIN example_executions e ON i.id = e.issue_id
LEFT JOIN example_checkpoints c ON e.id = c.execution_id
WHERE e.status = 'running'
GROUP BY i.id, e.id
`
```
### Find Failed Executions
```go
query := `
SELECT i.id, i.title, e.error, e.completed_at
FROM issues i
INNER JOIN example_executions e ON i.id = e.issue_id
WHERE e.status = 'failed'
ORDER BY e.completed_at DESC
`
```
### Get Latest Checkpoint for Recovery
```go
query := `
SELECT checkpoint_data
FROM example_checkpoints
WHERE execution_id = ?
ORDER BY created_at DESC
LIMIT 1
`
```
## Integration with bd
### Using bd's Go API
```go
// Auto-discover database path
dbPath := beads.FindDatabasePath()
if dbPath == "" {
log.Fatal("No bd database found")
}
// Open bd storage
store, err := beads.NewSQLiteStorage(dbPath)
// Find ready work
readyIssues, err := store.GetReadyWork(ctx, beads.WorkFilter{Limit: 10})
// Update issue status
updates := map[string]interface{}{"status": beads.StatusInProgress}
err = store.UpdateIssue(ctx, issueID, updates, "agent-name")
// Close issue
err = store.CloseIssue(ctx, issueID, "Completed", "agent-name")
// Find corresponding JSONL path (for git hooks, monitoring, etc.)
jsonlPath := beads.FindJSONLPath(dbPath)
```
### Direct Database Access
```go
// Open same database for extension tables
db, err := sql.Open("sqlite3", dbPath)
// Initialize extension schema
_, err = db.Exec(Schema)
// Query extension tables
rows, err := db.Query("SELECT * FROM example_executions WHERE status = ?", "running")
```
## Testing the Example
1. **Initialize bd:**
```bash
bd init --prefix demo
```
2. **Create some test issues:**
```bash
bd create "Implement authentication" -p 1 -t feature
bd create "Add API documentation" -p 1 -t task
bd create "Refactor database layer" -p 2 -t task
```
3. **Run the demo:**
```bash
bd-example-extension-go -cmd demo
```
4. **Check the results:**
```bash
bd list
sqlite3 .beads/demo.db "SELECT * FROM example_executions"
```
## Real-World Usage
This pattern is used in production by:
- **VC (VibeCoder)**: Multi-agent orchestration with state machines
- **CI/CD Systems**: Build tracking and artifact management
- **Task Runners**: Parallel execution with dependency resolution
See [EXTENDING.md](../../EXTENDING.md) for more patterns and the VC implementation example.
## Next Steps
1. **Add Your Own Tables**: Extend the schema with application-specific tables
2. **Implement State Machines**: Use checkpoints for resumable workflows
3. **Add Metrics**: Track execution times, retry counts, success rates
4. **Build Dashboards**: Query joined data for visibility
5. **Integrate with Agents**: Use bd's ready work queue for agent orchestration
## See Also
- [EXTENDING.md](../../EXTENDING.md) - Complete extension guide
- [../../README.md](../../README.md) - bd documentation
- Run `bd quickstart` for an interactive tutorial
```
--------------------------------------------------------------------------------
/examples/monitor-webui/README.md:
--------------------------------------------------------------------------------
```markdown
# Monitor WebUI - Real-time Issue Tracking Dashboard
A standalone web-based monitoring interface for beads that provides real-time issue tracking through a clean, responsive web UI.
## Overview
The Monitor WebUI is a separate runtime that connects to the beads daemon via RPC to provide:
- **Real-time updates** via WebSocket connections
- **Responsive design** with desktop table view and mobile card view
- **Issue filtering** by status and priority
- **Statistics dashboard** showing issue counts by status
- **Detailed issue views** with full metadata
- **Clean, modern UI** styled with Milligram CSS
## Architecture
The Monitor WebUI demonstrates how to build custom interfaces on top of beads using:
- **RPC Protocol**: Connects to the daemon's Unix socket for database operations
- **WebSocket Broadcasting**: Polls mutation events and broadcasts to connected clients
- **Embedded Web Assets**: HTML, CSS, and JavaScript served from the binary
- **Standalone Binary**: Runs independently from the `bd` CLI
## Prerequisites
Before running the monitor, you must have:
1. A beads database initialized (run `bd init` in your project)
2. The beads daemon running (run `bd daemon`)
## Building
From this directory:
```bash
go build
```
Or using bun (if available):
```bash
bun run go build
```
This creates a `monitor-webui` binary in the current directory.
## Usage
### Basic Usage
Start the monitor on default port 8080:
```bash
./monitor-webui
```
Then open your browser to http://localhost:8080
### Custom Port
Start on a different port:
```bash
./monitor-webui -port 3000
```
### Bind to All Interfaces
To access from other machines on your network:
```bash
./monitor-webui -host 0.0.0.0 -port 8080
```
### Custom Database Path
If your database is not in the current directory:
```bash
./monitor-webui -db /path/to/your/beads.db
```
### Custom Socket Path
If you need to specify a custom daemon socket:
```bash
./monitor-webui -socket /path/to/beads.db.sock
```
## Command-Line Flags
- `-port` - Port for web server (default: 8080)
- `-host` - Host to bind to (default: "localhost")
- `-db` - Path to beads database (optional, will auto-detect)
- `-socket` - Path to daemon socket (optional, will auto-detect)
## API Endpoints
The monitor exposes several HTTP endpoints:
### Web UI
- `GET /` - Main HTML interface
- `GET /static/*` - Static assets (CSS, JavaScript)
### REST API
- `GET /api/issues` - List all issues as JSON
- `GET /api/issues/:id` - Get specific issue details
- `GET /api/ready` - Get ready work (no blockers)
- `GET /api/stats` - Get issue statistics
### WebSocket
- `WS /ws` - WebSocket endpoint for real-time updates
## Features
### Real-time Updates
The monitor polls the daemon every 2 seconds for mutation events and broadcasts them to all connected WebSocket clients. This provides instant updates when issues are created, modified, or closed.
### Responsive Design
- **Desktop**: Full table view with sortable columns
- **Mobile**: Card-based view optimized for small screens
- **Tablet**: Adapts to medium screen sizes
### Filtering
- **Status Filter**: Multi-select for Open, In Progress, and Closed
- **Priority Filter**: Single-select for P1, P2, P3, or All
### Statistics
Real-time statistics showing:
- Total issues
- In-progress issues
- Open issues
- Closed issues
## Development
### Project Structure
```
monitor-webui/
├── main.go # Main application with HTTP server and RPC client
├── go.mod # Go module dependencies
├── go.sum # (generated) Dependency checksums
├── README.md # This file
└── web/ # Web assets (embedded in binary)
├── index.html # Main HTML page
└── static/
├── css/
│ └── styles.css # Custom styles
└── js/
└── app.js # JavaScript application logic
```
### Modifying the Web Assets
The HTML, CSS, and JavaScript files are embedded into the binary using Go's `embed` package. After making changes to files in the `web/` directory, rebuild the binary to see your changes.
### Extending the API
To add new API endpoints:
1. Define a new handler function in `main.go`
2. Register it with `http.HandleFunc()` in the `main()` function
3. Use `daemonClient` to make RPC calls to the daemon
4. Return JSON responses using `json.NewEncoder(w).Encode()`
## Deployment
### As a Standalone Service
You can run the monitor as a systemd service. Example service file:
```ini
[Unit]
Description=Beads Monitor WebUI
After=network.target
[Service]
Type=simple
User=youruser
WorkingDirectory=/path/to/your/project
ExecStart=/path/to/monitor-webui -host 0.0.0.0 -port 8080
Restart=always
RestartSec=10
[Install]
WantedBy=multi-user.target
```
Save as `/etc/systemd/system/beads-monitor.service` and enable:
```bash
sudo systemctl enable beads-monitor
sudo systemctl start beads-monitor
```
### Behind a Reverse Proxy
Example nginx configuration:
```nginx
server {
listen 80;
server_name monitor.example.com;
location / {
proxy_pass http://localhost:8080;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
}
}
```
## Troubleshooting
### "No beads database found"
Make sure you've initialized a beads database with `bd init` or specify the database path with `-db`.
### "Daemon is not running"
The monitor requires the daemon to avoid SQLite locking conflicts. Start the daemon first:
```bash
bd daemon
```
### WebSocket disconnects frequently
Check if there's a reverse proxy or firewall between the client and server that might be closing idle connections. Consider adjusting timeout settings.
### Port already in use
If port 8080 is already in use, specify a different port:
```bash
./monitor-webui -port 3001
```
## Security Considerations
### Production Deployment
When deploying to production:
1. **Restrict Origins**: Update the `CheckOrigin` function in `main.go` to validate WebSocket origins
2. **Use HTTPS**: Deploy behind a reverse proxy with TLS (nginx, Caddy, etc.)
3. **Authentication**: Add authentication middleware if exposing publicly
4. **Firewall**: Use firewall rules to restrict access to trusted networks
### Current Security Model
The current implementation:
- Allows WebSocket connections from any origin
- Provides read-only access to issue data
- Does not include authentication
- Connects to local daemon socket only
This is appropriate for local development but requires additional security measures for production use.
## License
Same as the main beads project.
```
--------------------------------------------------------------------------------
/examples/team-workflow/README.md:
--------------------------------------------------------------------------------
```markdown
# Team Workflow Example
This example demonstrates how to use beads for team collaboration with shared repositories.
## Problem
When working as a team on a shared repository, you want to:
- Track issues collaboratively
- Keep everyone in sync via git
- Handle protected main branches
- Maintain clean git history
## Solution
Use `bd init --team` to set up team collaboration with automatic sync and optional protected branch support.
## Setup
### Step 1: Initialize Team Workflow
```bash
# In your shared repository
cd my-project
# Run the team setup wizard
bd init --team
```
The wizard will:
1. ✅ Detect your git configuration
2. ✅ Ask if main branch is protected
3. ✅ Configure sync branch (if needed)
4. ✅ Set up automatic sync
5. ✅ Enable team mode
### Step 2: Protected Branch Configuration
If your main branch is protected (GitHub/GitLab), the wizard will:
- Create a separate `beads-metadata` branch for issue updates
- Configure beads to commit to this branch automatically
- Set up periodic PR workflow for merging to main
### Step 3: Team Members Join
Other team members just need to:
```bash
# Clone the repository
git clone https://github.com/org/project.git
cd project
# Initialize beads (auto-imports existing issues)
bd init
# Start working!
bd ready
```
## How It Works
### Direct Commits (No Protected Branch)
If main isn't protected:
```bash
# Create issue
bd create "Implement feature X" -p 1
# Daemon auto-commits to main
# (or run 'bd sync' manually)
# Pull to see team's issues
git pull
bd list
```
### Protected Branch Workflow
If main is protected:
```bash
# Create issue
bd create "Implement feature X" -p 1
# Daemon commits to beads-metadata branch
# (or run 'bd sync' manually)
# Push beads-metadata
git push origin beads-metadata
# Periodically: merge beads-metadata to main via PR
```
## Configuration
The wizard configures:
```yaml
team:
enabled: true
sync_branch: beads-metadata # or main if not protected
daemon:
auto_commit: true
auto_push: true
```
### Manual Configuration
```bash
# Enable team mode
bd config set team.enabled true
# Set sync branch
bd config set team.sync_branch beads-metadata
# Enable auto-sync
bd config set daemon.auto_commit true
bd config set daemon.auto_push true
```
## Example Workflows
### Scenario 1: Unprotected Main
```bash
# Alice creates an issue
bd create "Fix authentication bug" -p 1
# Daemon commits and pushes to main
# (auto-sync enabled)
# Bob pulls changes
git pull
bd list # Sees Alice's issue
# Bob claims it
bd update bd-abc --status in_progress
# Daemon commits Bob's update
# Alice pulls and sees Bob is working on it
```
### Scenario 2: Protected Main
```bash
# Alice creates an issue
bd create "Add new API endpoint" -p 1
# Daemon commits to beads-metadata
git push origin beads-metadata
# Bob pulls beads-metadata
git pull origin beads-metadata
bd list # Sees Alice's issue
# Later: merge beads-metadata to main via PR
git checkout main
git pull origin main
git merge beads-metadata
# Create PR, get approval, merge
```
## Team Workflows
### Daily Standup
```bash
# See what everyone's working on
bd list --status in_progress
# See what's ready for work
bd ready
# See recently closed issues
bd list --status closed --limit 10
```
### Sprint Planning
```bash
# Create sprint issues
bd create "Implement user auth" -p 1
bd create "Add profile page" -p 1
bd create "Fix responsive layout" -p 2
# Assign to team members
bd update bd-abc --assignee alice
bd update bd-def --assignee bob
# Track dependencies
bd dep add bd-def bd-abc --type blocks
```
### PR Integration
```bash
# Create issue for PR work
bd create "Refactor auth module" -p 1
# Work on it
bd update bd-abc --status in_progress
# Open PR with issue reference
git push origin feature-branch
# PR title: "feat: refactor auth module (bd-abc)"
# Close when PR merges
bd close bd-abc --reason "PR #123 merged"
```
## Sync Strategies
### Auto-Sync (Recommended)
Daemon commits and pushes automatically:
```bash
bd daemon start --auto-commit --auto-push
```
Benefits:
- ✅ Always in sync
- ✅ No manual intervention
- ✅ Real-time collaboration
### Manual Sync
Sync when you want:
```bash
bd sync # Export, commit, pull, import, push
```
Benefits:
- ✅ Full control
- ✅ Batch updates
- ✅ Review before push
## Conflict Resolution
Hash-based IDs prevent most conflicts. If conflicts occur:
```bash
# During git pull/merge
git pull origin beads-metadata
# CONFLICT in .beads/beads.jsonl
# Option 1: Accept remote
git checkout --theirs .beads/beads.jsonl
bd import -i .beads/beads.jsonl
# Option 2: Accept local
git checkout --ours .beads/beads.jsonl
bd import -i .beads/beads.jsonl
# Option 3: Use beads-merge tool (recommended)
# See docs/GIT_INTEGRATION.md for merge conflict resolution
git add .beads/beads.jsonl
git commit
```
## Protected Branch Best Practices
### For Protected Main:
1. **Create beads-metadata branch**
```bash
git checkout -b beads-metadata
git push origin beads-metadata
```
2. **Configure protection rules**
- Allow direct pushes to beads-metadata
- Require PR for main
3. **Periodic PR workflow**
```bash
# Once per day/sprint
git checkout main
git pull origin main
git checkout beads-metadata
git pull origin beads-metadata
git checkout main
git merge beads-metadata
# Create PR, get approval, merge
```
4. **Keep beads-metadata clean**
```bash
# After PR merges
git checkout beads-metadata
git rebase main
git push origin beads-metadata --force-with-lease
```
## Common Questions
### Q: How do team members see each other's issues?
A: Issues are stored in `.beads/beads.jsonl`, which is version-controlled. Pull from git to sync.
```bash
git pull
bd list # See everyone's issues
```
### Q: What if two people create issues at the same time?
A: Hash-based IDs prevent collisions. Even if created simultaneously, they get different IDs.
### Q: How do I disable auto-sync?
A: Turn it off:
```bash
bd config set daemon.auto_commit false
bd config set daemon.auto_push false
# Sync manually
bd sync
```
### Q: Can we use different sync branches per person?
A: Not recommended. Use a single shared branch for consistency. If needed:
```bash
bd config set team.sync_branch my-custom-branch
```
### Q: What about CI/CD integration?
A: Add to your CI pipeline:
```bash
# In .github/workflows/main.yml
- name: Sync beads issues
run: |
bd sync
git push origin beads-metadata
```
## Troubleshooting
### Issue: Daemon not committing
Check daemon status:
```bash
bd daemon status
bd daemons list
```
Verify config:
```bash
bd config get daemon.auto_commit
bd config get daemon.auto_push
```
Restart daemon:
```bash
bd daemon stop
bd daemon start --auto-commit --auto-push
```
### Issue: Merge conflicts in JSONL
Use beads-merge or resolve manually (see [GIT_INTEGRATION.md](../../docs/GIT_INTEGRATION.md)):
```bash
git checkout --theirs .beads/beads.jsonl
bd import -i .beads/beads.jsonl
git add .beads/beads.jsonl
git commit
```
### Issue: Issues not syncing
Manually sync:
```bash
bd sync
git push
```
Check for conflicts:
```bash
git status
bd validate --checks=conflicts
```
## See Also
- [Protected Branch Setup](../protected-branch/)
- [Contributor Workflow](../contributor-workflow/)
- [Multi-Repo Migration Guide](../../docs/MULTI_REPO_MIGRATION.md)
- [Git Integration Guide](../../docs/GIT_INTEGRATION.md)
```
--------------------------------------------------------------------------------
/examples/github-import/README.md:
--------------------------------------------------------------------------------
```markdown
# GitHub Issues to bd Importer
Import issues from GitHub repositories into `bd`.
## Overview
This tool converts GitHub Issues to bd's JSONL format, supporting both:
1. **GitHub API** - Fetch issues directly from a repository
2. **JSON Export** - Parse manually exported GitHub issues
## Features
- ✅ **Fetch from GitHub API** - Direct import from any public/private repo
- ✅ **JSON file import** - Parse exported GitHub issues JSON
- ✅ **Label mapping** - Auto-map GitHub labels to bd priority/type
- ✅ **Preserve metadata** - Keep assignees, timestamps, descriptions
- ✅ **Cross-references** - Convert `#123` references to dependencies
- ✅ **External links** - Preserve URLs back to original GitHub issues
- ✅ **Filter PRs** - Automatically excludes pull requests
## Installation
No dependencies required! Uses Python 3 standard library.
For API access, set up a GitHub token:
```bash
# Create token at: https://github.com/settings/tokens
# Permissions needed: public_repo (or repo for private repos)
export GITHUB_TOKEN=ghp_your_token_here
```
**Security Note:** Use the `GITHUB_TOKEN` environment variable instead of `--token` flag when possible. The `--token` flag may appear in shell history and process listings.
## Usage
### From GitHub API
```bash
# Fetch all issues from a repository
python gh2jsonl.py --repo owner/repo | bd import
# Save to file first (recommended)
python gh2jsonl.py --repo owner/repo > issues.jsonl
bd import -i issues.jsonl --dry-run # Preview
bd import -i issues.jsonl # Import
# Fetch only open issues
python gh2jsonl.py --repo owner/repo --state open
# Fetch only closed issues
python gh2jsonl.py --repo owner/repo --state closed
```
### From JSON File
Export issues from GitHub (via API or manually), then:
```bash
# Single issue
curl -H "Authorization: token $GITHUB_TOKEN" \
https://api.github.com/repos/owner/repo/issues/123 > issue.json
python gh2jsonl.py --file issue.json | bd import
# Multiple issues
curl -H "Authorization: token $GITHUB_TOKEN" \
https://api.github.com/repos/owner/repo/issues > issues.json
python gh2jsonl.py --file issues.json | bd import
```
### Custom Options
```bash
# Use custom prefix (instead of 'bd')
python gh2jsonl.py --repo owner/repo --prefix myproject
# Start numbering from specific ID
python gh2jsonl.py --repo owner/repo --start-id 100
# Pass token directly (instead of env var)
python gh2jsonl.py --repo owner/repo --token ghp_...
```
## Label Mapping
The script maps GitHub labels to bd fields:
### Priority Mapping
| GitHub Labels | bd Priority |
|--------------|-------------|
| `critical`, `p0`, `urgent` | 0 (Critical) |
| `high`, `p1`, `important` | 1 (High) |
| (default) | 2 (Medium) |
| `low`, `p3`, `minor` | 3 (Low) |
| `backlog`, `p4`, `someday` | 4 (Backlog) |
### Type Mapping
| GitHub Labels | bd Type |
|--------------|---------|
| `bug`, `defect` | bug |
| `feature`, `enhancement` | feature |
| `epic`, `milestone` | epic |
| `chore`, `maintenance`, `dependencies` | chore |
| (default) | task |
### Status Mapping
| GitHub State | GitHub Labels | bd Status |
|-------------|---------------|-----------|
| closed | (any) | closed |
| open | `in progress`, `in-progress`, `wip` | in_progress |
| open | `blocked` | blocked |
| open | (default) | open |
### Labels
All other labels are preserved in the `labels` field. Labels used for mapping (priority, type, status) are filtered out to avoid duplication.
## Field Mapping
| GitHub Field | bd Field | Notes |
|--------------|----------|-------|
| `number` | (internal mapping) | GH#123 → bd-1, etc. |
| `title` | `title` | Direct copy |
| `body` | `description` | Direct copy |
| `state` | `status` | See status mapping |
| `labels` | `priority`, `issue_type`, `labels` | See label mapping |
| `assignee.login` | `assignee` | First assignee only |
| `created_at` | `created_at` | ISO 8601 timestamp |
| `updated_at` | `updated_at` | ISO 8601 timestamp |
| `closed_at` | `closed_at` | ISO 8601 timestamp |
| `html_url` | `external_ref` | Link back to GitHub |
## Cross-References
Issue references in the body text are converted to dependencies:
**GitHub:**
```markdown
This depends on #123 and fixes #456.
See also owner/other-repo#789.
```
**Result:**
- If GH#123 was imported, creates `related` dependency to its bd ID
- If GH#456 was imported, creates `related` dependency to its bd ID
- Cross-repo references (e.g. `owner/other-repo#789`) are ignored, unless the referenced issues were also imported into the same bd database
**Note:** Dependency records use `"issue_id": ""` format, which the bd importer automatically fills. This matches the behavior of the markdown-to-jsonl converter.
## Examples
### Example 1: Import Active Issues
```bash
# Import only open issues for active work
export GITHUB_TOKEN=ghp_...
python gh2jsonl.py --repo mycompany/myapp --state open > open-issues.jsonl
# Preview
cat open-issues.jsonl | jq .
# Import
bd import -i open-issues.jsonl
bd ready # See what's ready to work on
```
### Example 2: Full Repository Migration
```bash
# Import all issues (open and closed)
python gh2jsonl.py --repo mycompany/myapp > all-issues.jsonl
# Preview import (check for new issues and updates)
bd import -i all-issues.jsonl --dry-run
# Import issues
bd import -i all-issues.jsonl
# View stats
bd stats
```
### Example 3: Partial Import from JSON
```bash
# Manually export specific issues via GitHub API
gh api repos/owner/repo/issues?labels=p1,bug > high-priority-bugs.json
# Import
python gh2jsonl.py --file high-priority-bugs.json | bd import
```
## Customization
The script is intentionally simple to customize for your workflow:
### 1. Adjust Label Mappings
Edit `map_priority()`, `map_issue_type()`, and `map_status()` to match your label conventions:
```python
def map_priority(self, labels: List[str]) -> int:
label_names = [label.get("name", "").lower() if isinstance(label, dict) else label.lower() for label in labels]
# Add your custom mappings
if any(l in label_names for l in ["sev1", "emergency"]):
return 0
# ... etc
```
### 2. Add Custom Fields
Map additional GitHub fields to bd:
```python
def convert_issue(self, gh_issue: Dict[str, Any]) -> Dict[str, Any]:
# ... existing code ...
# Add milestone to design field
if gh_issue.get("milestone"):
issue["design"] = f"Milestone: {gh_issue['milestone']['title']}"
return issue
```
### 3. Enhanced Dependency Detection
Parse more dependency patterns from body text:
```python
def extract_dependencies_from_body(self, body: str) -> List[str]:
# ... existing code ...
# Add: "Blocks: #123, #456"
blocks_pattern = r'Blocks:\s*((?:#\d+(?:\s*,\s*)?)+)'
# ... etc
```
## Limitations
- **Single assignee**: GitHub supports multiple assignees; bd supports one
- **No milestones**: GitHub milestones aren't mapped (consider using design field)
- **Simple cross-refs**: Only basic `#123` patterns detected
- **No comments**: Issue comments aren't imported (only the body)
- **No reactions**: GitHub reactions/emoji aren't imported
- **No projects**: GitHub project board info isn't imported
## API Rate Limits
GitHub API has rate limits:
- **Authenticated**: 5,000 requests/hour
- **Unauthenticated**: 60 requests/hour
This script uses 1 request per 100 issues (pagination), so:
- Can fetch ~500,000 issues/hour (authenticated)
- Can fetch ~6,000 issues/hour (unauthenticated)
For large repositories (>1000 issues), authentication is recommended.
**Note:** The script automatically includes a `User-Agent` header (required by GitHub) and provides actionable error messages when rate limits are exceeded, including the reset timestamp.
## Troubleshooting
### "GitHub token required"
Set the `GITHUB_TOKEN` environment variable:
```bash
export GITHUB_TOKEN=ghp_your_token_here
```
Or pass directly:
```bash
python gh2jsonl.py --repo owner/repo --token ghp_...
```
### "GitHub API error: 404"
- Check repository name format: `owner/repo`
- Check repository exists and is accessible
- For private repos, ensure token has `repo` scope
### "GitHub API error: 403"
- Rate limit exceeded (wait or use authentication)
- Token doesn't have required permissions
- Repository requires different permissions
### Issue numbers don't match
This is expected! GitHub issue numbers (e.g., #123) are mapped to bd IDs (e.g., bd-1) based on import order. The original GitHub URL is preserved in `external_ref`.
## See Also
- [bd README](../../README.md) - Main documentation
- [Markdown Import Example](../markdown-to-jsonl/) - Import from markdown
- [TEXT_FORMATS.md](../../TEXT_FORMATS.md) - Understanding bd's JSONL format
- [JSONL Import Guide](../../README.md#import) - Import collision handling
```
--------------------------------------------------------------------------------
/integrations/beads-mcp/README.md:
--------------------------------------------------------------------------------
```markdown
# beads-mcp
MCP server for [beads](https://github.com/steveyegge/beads) issue tracker and agentic memory system.
Enables AI agents to manage tasks using bd CLI through Model Context Protocol.
> **Note:** For environments with shell access (Claude Code, Cursor, Windsurf), the **CLI + hooks approach is recommended** over MCP. It uses ~1-2k tokens vs 10-50k for MCP schemas, resulting in lower compute cost and latency. See the [main README](../../README.md) for CLI setup.
>
> **Use this MCP server** for MCP-only environments like Claude Desktop where CLI access is unavailable.
## Installing
Install from PyPI:
```bash
# Using uv (recommended)
uv tool install beads-mcp
# Or using pip
pip install beads-mcp
```
Add to your Claude Desktop config:
```json
{
"mcpServers": {
"beads": {
"command": "beads-mcp"
}
}
}
```
### Development Installation
For development, clone the repository:
```bash
git clone https://github.com/steveyegge/beads
cd beads/integrations/beads-mcp
uv sync
```
Then use in Claude Desktop config:
```json
{
"mcpServers": {
"beads": {
"command": "uv",
"args": [
"--directory",
"/path/to/beads-mcp",
"run",
"beads-mcp"
]
}
}
}
```
**Environment Variables** (all optional):
- `BEADS_USE_DAEMON` - Use daemon RPC instead of CLI (default: `1`, set to `0` to disable)
- `BEADS_PATH` - Path to bd executable (default: `~/.local/bin/bd`)
- `BEADS_DB` - Path to beads database file (default: auto-discover from cwd)
- `BEADS_WORKING_DIR` - Working directory for bd commands (default: `$PWD` or current directory). Used for multi-repo setups - see below
- `BEADS_ACTOR` - Actor name for audit trail (default: `$USER`)
- `BEADS_NO_AUTO_FLUSH` - Disable automatic JSONL sync (default: `false`)
- `BEADS_NO_AUTO_IMPORT` - Disable automatic JSONL import (default: `false`)
## Multi-Repository Setup
**Recommended:** Use a single MCP server instance for all beads projects - it automatically routes to per-project local daemons.
### Single MCP Server (Recommended)
**Simple config - works for all projects:**
```json
{
"mcpServers": {
"beads": {
"command": "beads-mcp"
}
}
}
```
**How it works (LSP model):**
1. MCP server checks for local daemon socket (`.beads/bd.sock`) in your current workspace
2. Routes requests to the **per-project daemon** based on working directory
3. Auto-starts the local daemon if not running
4. **Each project gets its own isolated daemon** serving only its database
**Architecture:**
```
MCP Server (one instance)
↓
Per-Project Daemons (one per workspace)
↓
SQLite Databases (complete isolation)
```
**Why per-project daemons?**
- ✅ Complete database isolation between projects
- ✅ No cross-project pollution or git worktree conflicts
- ✅ Simpler mental model: one project = one database = one daemon
- ✅ Follows LSP (Language Server Protocol) architecture
- ✅ One MCP config works for unlimited projects
**Note:** Global daemon support was removed in v0.16.0 to prevent cross-project database pollution.
### Alternative: Per-Project MCP Instances (Not Recommended)
Configure separate MCP servers for specific projects using `BEADS_WORKING_DIR`:
```json
{
"mcpServers": {
"beads-webapp": {
"command": "beads-mcp",
"env": {
"BEADS_WORKING_DIR": "/Users/yourname/projects/webapp"
}
},
"beads-api": {
"command": "beads-mcp",
"env": {
"BEADS_WORKING_DIR": "/Users/yourname/projects/api"
}
}
}
}
```
⚠️ **Problem**: AI may select the wrong MCP server for your workspace, causing commands to operate on the wrong database. Use single MCP server instead.
## Multi-Project Support
The MCP server supports managing multiple beads projects in a single session using per-request workspace routing.
### Using `workspace_root` Parameter
Every tool accepts an optional `workspace_root` parameter for explicit project targeting:
```python
# Query issues from different projects concurrently
results = await asyncio.gather(
beads_ready_work(workspace_root="/Users/you/project-a"),
beads_ready_work(workspace_root="/Users/you/project-b"),
)
# Create issue in specific project
await beads_create_issue(
title="Fix auth bug",
priority=1,
workspace_root="/Users/you/project-a"
)
```
### Architecture
**Connection Pool**: The MCP server maintains a connection pool keyed by canonical workspace path:
- Each workspace gets its own daemon socket connection
- Paths are canonicalized (symlinks resolved, git toplevel detected)
- Concurrent requests use `asyncio.Lock` to prevent race conditions
- No LRU eviction (keeps all connections open for session)
**ContextVar Routing**: Per-request workspace context is managed via Python's `ContextVar`:
- Each tool call sets the workspace for its duration
- Properly isolated for concurrent calls (no cross-contamination)
- Falls back to `BEADS_WORKING_DIR` if `workspace_root` not provided
**Path Canonicalization**:
- Symlinks are resolved to physical paths (prevents duplicate connections)
- Git submodules with `.beads` directories use local context
- Git toplevel is used for non-initialized directories
- Results are cached for performance
### Backward Compatibility
The `set_context()` tool still works and sets a default workspace:
```python
# Old way (still supported)
await set_context(workspace_root="/Users/you/project-a")
await beads_ready_work() # Uses project-a
# New way (more flexible)
await beads_ready_work(workspace_root="/Users/you/project-a")
```
### Concurrency Gotchas
⚠️ **IMPORTANT**: Tool implementations must NOT spawn background tasks using `asyncio.create_task()`.
**Why?** ContextVar doesn't propagate to spawned tasks, which can cause cross-project data leakage.
**Solution**: Keep all tool logic synchronous or use sequential `await` calls.
### Troubleshooting
**Symlink aliasing**: Different paths to same project are deduplicated automatically via `realpath`.
**Submodule handling**: Submodules with their own `.beads` directory are treated as separate projects.
**Stale sockets**: Currently no health checks. Phase 2 will add retry-on-failure if monitoring shows need.
**Version mismatches**: Daemon version is auto-checked since v0.16.0. Mismatched daemons are automatically restarted.
## Features
**Resource:**
- `beads://quickstart` - Quickstart guide for using beads
**Tools (all support `workspace_root` parameter):**
- `init` - Initialize bd in current directory
- `create` - Create new issue (bug, feature, task, epic, chore)
- `list` - List issues with filters (status, priority, type, assignee)
- `ready` - Find tasks with no blockers ready to work on
- `show` - Show detailed issue info including dependencies
- `update` - Update issue (status, priority, design, notes, etc). Note: `status="closed"` or `status="open"` automatically route to `close` or `reopen` tools to respect approval workflows
- `close` - Close completed issue
- `dep` - Add dependency (blocks, related, parent-child, discovered-from)
- `blocked` - Get blocked issues
- `stats` - Get project statistics
- `reopen` - Reopen a closed issue with optional reason
- `set_context` - Set default workspace for subsequent calls (backward compatibility)
## Known Issues
### ~~MCP Tools Not Loading in Claude Code~~ (Issue [#346](https://github.com/steveyegge/beads/issues/346)) - RESOLVED
**Status:** ✅ Fixed in v0.24.0+
This issue affected versions prior to v0.24.0. The problem was caused by self-referential Pydantic models (`Issue` with `dependencies: list["Issue"]`) generating invalid MCP schemas with `$ref` at root level.
**Solution:** The issue was fixed in commit f3a678f by refactoring the data models:
- Created `IssueBase` with common fields
- Created `LinkedIssue(IssueBase)` for dependency references
- Changed `Issue` to use `list[LinkedIssue]` instead of `list["Issue"]`
This breaks the circular reference and ensures all tool outputSchemas have `type: object` at root level.
**Upgrade:** If you're running beads-mcp < 0.24.0:
```bash
pip install --upgrade beads-mcp
```
All MCP tools now load correctly in Claude Code with v0.24.0+.
## Development
Run MCP inspector:
```bash
# inside beads-mcp dir
uv run fastmcp dev src/beads_mcp/server.py
```
Type checking:
```bash
uv run mypy src/beads_mcp
```
Linting and formatting:
```bash
uv run ruff check src/beads_mcp
uv run ruff format src/beads_mcp
```
## Testing
Run all tests:
```bash
uv run pytest
```
With coverage:
```bash
uv run pytest --cov=beads_mcp tests/
```
Test suite includes both mocked unit tests and integration tests with real `bd` CLI.
### Multi-Repo Integration Test
Test daemon RPC with multiple repositories:
```bash
# Start the daemon first
cd /path/to/beads
./bd daemon start
# Run multi-repo test
cd integrations/beads-mcp
uv run python test_multi_repo.py
```
This test verifies that the daemon can handle operations across multiple repositories simultaneously using per-request context routing.
```
--------------------------------------------------------------------------------
/examples/multi-phase-development/README.md:
--------------------------------------------------------------------------------
```markdown
# Multi-Phase Development Workflow Example
This example demonstrates how to use beads for large projects with multiple development phases (planning, MVP, iteration, polish).
## Problem
When building complex features, you want to:
- **Phase 1:** Research and planning
- **Phase 2:** Build MVP quickly
- **Phase 3:** Iterate based on feedback
- **Phase 4:** Polish and production-ready
- Track discovered work at each phase
- Keep priorities clear across phases
## Solution
Use beads epics and hierarchical issues to organize work by phase, with priority-based focus.
## Setup
```bash
# Initialize beads in your project
cd my-project
bd init
# Start daemon for auto-sync (optional)
bd daemon start --auto-commit --auto-push
```
## Phase 1: Research & Planning
Create the epic and initial planning issues:
```bash
# Create the main epic
bd create "Build real-time collaboration system" -t epic -p 1
# Returns: bd-a1b2c3
# Plan the phases (hierarchical children)
bd create "Phase 1: Research WebSocket libraries" -p 1
# Auto-assigned: bd-a1b2c3.1
bd create "Phase 2: Build MVP (basic sync)" -p 1
# Auto-assigned: bd-a1b2c3.2
bd create "Phase 3: Add conflict resolution" -p 2
# Auto-assigned: bd-a1b2c3.3
bd create "Phase 4: Production hardening" -p 3
# Auto-assigned: bd-a1b2c3.4
# Add blocking dependencies (phases must happen in order)
bd dep add bd-a1b2c3.2 bd-a1b2c3.1 --type blocks
bd dep add bd-a1b2c3.3 bd-a1b2c3.2 --type blocks
bd dep add bd-a1b2c3.4 bd-a1b2c3.3 --type blocks
```
### Research Phase Tasks
```bash
# Add research tasks for Phase 1
bd create "Evaluate Socket.IO vs native WebSockets" -p 1 \
--deps discovered-from:bd-a1b2c3.1
bd create "Research operational transform vs CRDT" -p 1 \
--deps discovered-from:bd-a1b2c3.1
bd create "Document technical decisions" -p 2 \
--deps discovered-from:bd-a1b2c3.1
# See what's ready to work on
bd ready
# Shows only Phase 1 tasks (nothing blocks them)
```
## Phase 2: Build MVP
After completing Phase 1 research:
```bash
# Close Phase 1
bd close bd-a1b2c3.1 --reason "Research complete, chose Socket.IO + CRDT"
# Phase 2 is now unblocked
bd ready
# Shows Phase 2 and its tasks
# Break down MVP work
bd create "Set up Socket.IO server" -p 1 \
--deps discovered-from:bd-a1b2c3.2
bd create "Implement basic CRDT for text" -p 1 \
--deps discovered-from:bd-a1b2c3.2
bd create "Build simple UI for testing" -p 2 \
--deps discovered-from:bd-a1b2c3.2
# Start implementing
bd update bd-xyz --status in_progress
```
### Discovered Work During MVP
You'll discover issues during implementation:
```bash
# Found a bug while implementing
bd create "Socket.IO disconnects on network change" -t bug -p 1 \
--deps discovered-from:bd-xyz
# Found missing feature
bd create "Need reconnection logic" -p 1 \
--deps discovered-from:bd-xyz
# Technical debt to address later
bd create "Refactor CRDT code for performance" -p 3 \
--deps discovered-from:bd-xyz
```
## Phase 3: Iteration
After MVP is working:
```bash
# Close Phase 2
bd close bd-a1b2c3.2 --reason "MVP working, tested with 2 users"
# Phase 3 is now unblocked
bd ready
# Add iteration tasks
bd create "Handle concurrent edits properly" -p 1 \
--deps discovered-from:bd-a1b2c3.3
bd create "Add conflict indicators in UI" -p 2 \
--deps discovered-from:bd-a1b2c3.3
bd create "Test with 10+ concurrent users" -p 1 \
--deps discovered-from:bd-a1b2c3.3
```
### Feedback-Driven Discovery
```bash
# User testing reveals issues
bd create "Cursor positions get out of sync" -t bug -p 0 \
--deps discovered-from:bd-a1b2c3.3
bd create "Large documents cause lag" -t bug -p 1 \
--deps discovered-from:bd-a1b2c3.3
# Feature requests
bd create "Add presence awareness (who's online)" -p 2 \
--deps discovered-from:bd-a1b2c3.3
```
## Phase 4: Production Hardening
Final polish before production:
```bash
# Close Phase 3
bd close bd-a1b2c3.3 --reason "Conflict resolution working well"
# Phase 4 is now unblocked
bd ready
# Add hardening tasks
bd create "Add error monitoring (Sentry)" -p 1 \
--deps discovered-from:bd-a1b2c3.4
bd create "Load test with 100 users" -p 1 \
--deps discovered-from:bd-a1b2c3.4
bd create "Security audit: XSS, injection" -p 0 \
--deps discovered-from:bd-a1b2c3.4
bd create "Write deployment runbook" -p 2 \
--deps discovered-from:bd-a1b2c3.4
bd create "Add metrics and dashboards" -p 2 \
--deps discovered-from:bd-a1b2c3.4
```
## Viewing Progress
### See All Phases
```bash
# View the entire dependency tree
bd dep tree bd-a1b2c3
# Example output:
# bd-a1b2c3 (epic) - Build real-time collaboration system
# ├─ bd-a1b2c3.1 [CLOSED] - Phase 1: Research
# │ ├─ bd-abc [CLOSED] - Evaluate Socket.IO
# │ ├─ bd-def [CLOSED] - Research CRDT
# │ └─ bd-ghi [CLOSED] - Document decisions
# ├─ bd-a1b2c3.2 [CLOSED] - Phase 2: MVP
# │ ├─ bd-jkl [CLOSED] - Socket.IO server
# │ ├─ bd-mno [CLOSED] - Basic CRDT
# │ └─ bd-pqr [IN_PROGRESS] - Testing UI
# ├─ bd-a1b2c3.3 [OPEN] - Phase 3: Iteration
# │ └─ (blocked by bd-a1b2c3.2)
# └─ bd-a1b2c3.4 [OPEN] - Phase 4: Hardening
# └─ (blocked by bd-a1b2c3.3)
```
### Current Phase Status
```bash
# See only open issues
bd list --status open
# See current phase's ready work
bd ready
# See high-priority issues across all phases
bd list --priority 0 --status open
bd list --priority 1 --status open
```
### Progress Metrics
```bash
# Overall stats
bd stats
# Issues by phase
bd list | grep "Phase 1"
bd list | grep "Phase 2"
```
## Priority Management Across Phases
### Dynamic Priority Adjustment
As you learn more, priorities change:
```bash
# Started as P2, but user feedback made it critical
bd update bd-xyz --priority 0
# Started as P1, but can wait until later phase
bd update bd-abc --priority 3
```
### Focus on Current Phase
```bash
# See only P0-P1 issues (urgent work)
bd ready | grep -E "P0|P1"
# See backlog for future phases (P3-P4)
bd list --priority 3 --status open
bd list --priority 4 --status open
```
## Example: Full Workflow
```bash
# Day 1: Planning
bd create "Build auth system" -t epic -p 1 # bd-a1b2
bd create "Phase 1: Research OAuth providers" -p 1 # bd-a1b2.1
bd create "Phase 2: Implement OAuth flow" -p 1 # bd-a1b2.2
bd create "Phase 3: Add session management" -p 2 # bd-a1b2.3
bd create "Phase 4: Security audit" -p 1 # bd-a1b2.4
bd dep add bd-a1b2.2 bd-a1b2.1 --type blocks
bd dep add bd-a1b2.3 bd-a1b2.2 --type blocks
bd dep add bd-a1b2.4 bd-a1b2.3 --type blocks
# Week 1: Phase 1 (Research)
bd ready # Shows Phase 1 tasks
bd create "Compare Auth0 vs Firebase" -p 1 --deps discovered-from:bd-a1b2.1
bd update bd-xyz --status in_progress
# ... research complete ...
bd close bd-a1b2.1 --reason "Chose Auth0"
# Week 2-3: Phase 2 (Implementation)
bd ready # Now shows Phase 2 tasks
bd create "Set up Auth0 tenant" -p 1 --deps discovered-from:bd-a1b2.2
bd create "Implement login callback" -p 1 --deps discovered-from:bd-a1b2.2
bd create "Handle token refresh" -p 1 --deps discovered-from:bd-a1b2.2
# ... discovered bugs ...
bd create "Callback fails on Safari" -t bug -p 0 --deps discovered-from:bd-abc
bd close bd-a1b2.2 --reason "OAuth flow working"
# Week 4: Phase 3 (Sessions)
bd ready # Shows Phase 3 tasks
bd create "Implement Redis session store" -p 1 --deps discovered-from:bd-a1b2.3
bd create "Add session timeout handling" -p 2 --deps discovered-from:bd-a1b2.3
bd close bd-a1b2.3 --reason "Sessions working"
# Week 5: Phase 4 (Security)
bd ready # Shows Phase 4 tasks
bd create "Review OWASP top 10" -p 1 --deps discovered-from:bd-a1b2.4
bd create "Add CSRF protection" -p 0 --deps discovered-from:bd-a1b2.4
bd create "Pen test with security team" -p 1 --deps discovered-from:bd-a1b2.4
bd close bd-a1b2.4 --reason "Security audit passed"
# Epic complete!
bd close bd-a1b2 --reason "Auth system in production"
```
## Best Practices
### 1. Keep Phases Focused
Each phase should have clear exit criteria:
```bash
# Good: Specific, measurable
bd create "Phase 1: Research (exit: chosen solution + ADR doc)" -p 1
# Bad: Vague
bd create "Phase 1: Look at stuff" -p 1
```
### 2. Use Priorities Within Phases
Not everything in a phase is equally urgent:
```bash
# Critical path
bd create "Implement core sync algorithm" -p 0 --deps discovered-from:bd-a1b2.2
# Nice to have, can wait
bd create "Add dark mode to test UI" -p 3 --deps discovered-from:bd-a1b2.2
```
### 3. Link Discovered Work
Always link to parent issue/phase:
```bash
# Maintains context
bd create "Bug found during testing" -t bug -p 1 \
--deps discovered-from:bd-a1b2.3
# Can trace back to which phase/feature it came from
bd dep tree bd-a1b2
```
### 4. Don't Block on Low-Priority Work
If a phase has P3-P4 issues, don't let them block the next phase:
```bash
# Move nice-to-haves to backlog, unblock Phase 2
bd update bd-xyz --priority 4
bd close bd-a1b2.1 --reason "Core research done, polish can wait"
```
### 5. Regular Review
Check progress weekly:
```bash
# What's done?
bd list --status closed --limit 20
# What's stuck?
bd list --status blocked
# What's ready?
bd ready
```
## Common Patterns
### MVP → Iteration Loop
```bash
# MVP phase
bd create "Phase 2: MVP (basic features)" -p 1
bd create "Phase 3: Iteration (feedback loop)" -p 2
bd dep add bd-phase3 bd-phase2 --type blocks
# After MVP, discover improvements
bd create "Add feature X (user requested)" -p 1 \
--deps discovered-from:bd-phase3
bd create "Fix UX issue Y" -p 2 \
--deps discovered-from:bd-phase3
```
### Parallel Workstreams
Not all phases must be sequential:
```bash
# Frontend and backend can happen in parallel
bd create "Frontend: Build UI mockups" -p 1
bd create "Backend: API design" -p 1
# No blocking dependency between them
# Both show up in 'bd ready'
```
### Rollback Planning
Plan for failure:
```bash
# Phase 3: Launch
bd create "Phase 3: Deploy to production" -p 1
# Contingency plan (related, not blocking)
bd create "Rollback plan if deploy fails" -p 1
bd dep add bd-rollback bd-phase3 --type related
```
## See Also
- [Team Workflow](../team-workflow/) - Collaborate across phases
- [Contributor Workflow](../contributor-workflow/) - External contributions
- [Multiple Personas Example](../multiple-personas/) - Architect/implementer split
```
--------------------------------------------------------------------------------
/examples/jira-import/README.md:
--------------------------------------------------------------------------------
```markdown
# Jira Integration for bd
Two-way synchronization between Jira and bd (beads).
## Scripts
| Script | Purpose |
|--------|---------|
| `jira2jsonl.py` | **Import** - Fetch Jira issues into bd |
| `jsonl2jira.py` | **Export** - Push bd issues to Jira |
## Overview
These tools enable bidirectional sync between Jira and bd:
**Import (Jira → bd):**
1. **Jira REST API** - Fetch issues directly from any Jira instance
2. **JSON Export** - Parse exported Jira issues JSON
3. **bd config integration** - Read credentials and mappings from `bd config`
**Export (bd → Jira):**
1. **Create issues** - Push new bd issues to Jira
2. **Update issues** - Sync changes to existing Jira issues
3. **Status transitions** - Handle Jira workflow transitions automatically
## Features
### Import (jira2jsonl.py)
- Fetch from Jira Cloud or Server/Data Center
- JQL query support for flexible filtering
- Configurable field mappings (status, priority, type)
- Preserve timestamps, assignees, labels
- Extract issue links as dependencies
- Set `external_ref` for re-sync capability
- Hash-based or sequential ID generation
### Export (jsonl2jira.py)
- Create new Jira issues from bd issues
- Update existing Jira issues (matched by `external_ref`)
- Handle Jira workflow transitions for status changes
- Reverse field mappings (bd → Jira)
- Dry-run mode for previewing changes
- Auto-update `external_ref` after creation
## Installation
No dependencies required! Uses Python 3 standard library.
## Quick Start
### Option 1: Using bd config (Recommended)
Set up your Jira credentials once:
```bash
# Required settings
bd config set jira.url "https://company.atlassian.net"
bd config set jira.project "PROJ"
bd config set jira.api_token "YOUR_API_TOKEN"
# For Jira Cloud, also set username (your email)
bd config set jira.username "[email protected]"
```
Then import:
```bash
python jira2jsonl.py --from-config | bd import
```
### Option 2: Using environment variables
```bash
export JIRA_API_TOKEN=your_token
export [email protected] # For Jira Cloud
python jira2jsonl.py \
--url https://company.atlassian.net \
--project PROJ \
| bd import
```
### Option 3: Command-line arguments
```bash
python jira2jsonl.py \
--url https://company.atlassian.net \
--project PROJ \
--username [email protected] \
--api-token YOUR_TOKEN \
| bd import
```
## Authentication
### Jira Cloud
Jira Cloud requires:
1. **Username**: Your email address
2. **API Token**: Create at https://id.atlassian.com/manage-profile/security/api-tokens
```bash
bd config set jira.username "[email protected]"
bd config set jira.api_token "your_api_token"
```
### Jira Server/Data Center
Jira Server/DC can use:
- **Personal Access Token (PAT)** - Just set the token, no username needed
- **Username + Password** - Set your username, and supply your password as the API token value
```bash
# Using PAT (recommended)
bd config set jira.api_token "your_pat_token"
# Using username/password
bd config set jira.username "your_username"
bd config set jira.api_token "your_password"
```
## Usage
### Basic Usage
```bash
# Fetch all issues from a project
python jira2jsonl.py --from-config | bd import
# Save to file first (recommended for large projects)
python jira2jsonl.py --from-config > issues.jsonl
bd import -i issues.jsonl --dry-run # Preview
bd import -i issues.jsonl # Import
```
### Filtering Issues
```bash
# Only open issues
python jira2jsonl.py --from-config --state open
# Only closed issues
python jira2jsonl.py --from-config --state closed
# Custom JQL query
python jira2jsonl.py --url https://company.atlassian.net \
--jql "project = PROJ AND priority = High AND status != Done"
```
### ID Generation Modes
```bash
# Sequential IDs (bd-1, bd-2, ...) - default
python jira2jsonl.py --from-config
# Hash-based IDs (bd-a3f2dd, ...) - matches bd create
python jira2jsonl.py --from-config --id-mode hash
# Custom hash length (3-8 chars)
python jira2jsonl.py --from-config --id-mode hash --hash-length 4
# Custom prefix
python jira2jsonl.py --from-config --prefix myproject
```
### From JSON File
If you have an exported JSON file:
```bash
python jira2jsonl.py --file issues.json | bd import
```
## Field Mapping
### Default Mappings
| Jira Field | bd Field | Notes |
|------------|----------|-------|
| `key` | (internal) | Used for dependency resolution |
| `summary` | `title` | Direct copy |
| `description` | `description` | Direct copy |
| `status.name` | `status` | Mapped via status_map |
| `priority.name` | `priority` | Mapped via priority_map |
| `issuetype.name` | `issue_type` | Mapped via type_map |
| `assignee` | `assignee` | Display name or username |
| `labels` | `labels` | Direct copy |
| `created` | `created_at` | ISO 8601 timestamp |
| `updated` | `updated_at` | ISO 8601 timestamp |
| `resolutiondate` | `closed_at` | ISO 8601 timestamp |
| (computed) | `external_ref` | URL to Jira issue |
| `issuelinks` | `dependencies` | Mapped to blocks/related |
| `parent` | `dependencies` | Mapped to parent-child |
### Status Mapping
Default status mappings (Jira status -> bd status):
| Jira Status | bd Status |
|-------------|-----------|
| To Do, Open, Backlog, New | `open` |
| In Progress, In Development, In Review | `in_progress` |
| Blocked, On Hold | `blocked` |
| Done, Closed, Resolved, Complete | `closed` |
Custom mappings via bd config:
```bash
bd config set jira.status_map.backlog "open"
bd config set jira.status_map.in_review "in_progress"
bd config set jira.status_map.on_hold "blocked"
```
### Priority Mapping
Default priority mappings (Jira priority -> bd priority 0-4):
| Jira Priority | bd Priority |
|---------------|-------------|
| Highest, Critical, Blocker | 0 (Critical) |
| High, Major | 1 (High) |
| Medium, Normal | 2 (Medium) |
| Low, Minor | 3 (Low) |
| Lowest, Trivial | 4 (Backlog) |
Custom mappings:
```bash
bd config set jira.priority_map.urgent "0"
bd config set jira.priority_map.nice_to_have "4"
```
### Issue Type Mapping
Default type mappings (Jira type -> bd type):
| Jira Type | bd Type |
|-----------|---------|
| Bug, Defect | `bug` |
| Story, Feature, Enhancement | `feature` |
| Task, Sub-task | `task` |
| Epic, Initiative | `epic` |
| Technical Task, Maintenance | `chore` |
Custom mappings:
```bash
bd config set jira.type_map.story "feature"
bd config set jira.type_map.spike "task"
bd config set jira.type_map.tech_debt "chore"
```
## Issue Links & Dependencies
Jira issue links are converted to bd dependencies:
| Jira Link Type | bd Dependency Type |
|----------------|-------------------|
| Blocks/Is blocked by | `blocks` |
| Parent (Epic/Story) | `parent-child` |
| All others | `related` |
**Note:** Only links to issues included in the import are preserved. Links to issues outside the query results are ignored.
## Re-syncing from Jira
Each imported issue has an `external_ref` field containing the Jira issue URL. On subsequent imports:
1. Issues are matched by `external_ref` first
2. If matched, the existing bd issue is updated (if Jira is newer)
3. If not matched, a new bd issue is created
This enables incremental sync:
```bash
# Initial import
python jira2jsonl.py --from-config | bd import
# Later: import only recent changes
python jira2jsonl.py --from-config \
--jql "project = PROJ AND updated >= -7d" \
| bd import
```
## Examples
### Example 1: Import Active Sprint
```bash
python jira2jsonl.py --url https://company.atlassian.net \
--jql "project = PROJ AND sprint in openSprints()" \
| bd import
bd ready # See what's ready to work on
```
### Example 2: Full Project Migration
```bash
# Export all issues
python jira2jsonl.py --from-config > all-issues.jsonl
# Preview import
bd import -i all-issues.jsonl --dry-run
# Import
bd import -i all-issues.jsonl
# View stats
bd stats
```
### Example 3: Sync High Priority Bugs
```bash
python jira2jsonl.py --from-config \
--jql "project = PROJ AND type = Bug AND priority in (Highest, High)" \
| bd import
```
### Example 4: Import with Hash IDs
```bash
# Use hash IDs for collision-free distributed work
python jira2jsonl.py --from-config --id-mode hash | bd import
```
## Limitations
- **Single assignee**: Jira tracks additional participants (e.g., watchers) beyond its assignee field; bd supports one assignee and does not import watchers
- **Custom fields**: Only standard fields are mapped; custom fields are ignored
- **Attachments**: Not imported
- **Comments**: Not imported (only description)
- **Worklogs**: Not imported
- **Sprints**: Sprint metadata not preserved (use labels or JQL filtering)
- **Components/Versions**: Not mapped to bd (consider using labels)
## Troubleshooting
### "Authentication failed"
**Jira Cloud:**
- Verify you're using your email as username
- Create a fresh API token at https://id.atlassian.com/manage-profile/security/api-tokens
- Ensure the token has access to the project
**Jira Server/DC:**
- Try using a Personal Access Token instead of password
- Check that your account has permission to access the project
### "403 Forbidden"
- Check project permissions in Jira
- Verify API token has correct scopes
- Some Jira instances restrict API access by IP
### "400 Bad Request"
- Check JQL syntax
- Verify project key exists
- Check for special characters in JQL (escape with backslash)
### Rate Limits
Jira Cloud has rate limits. For large imports:
- Pause between runs and retry (automatic request throttling is not implemented yet)
- Import in batches using JQL date ranges
- Use the `--file` option with a manual export
## API Rate Limits
- **Jira Cloud**: ~100 requests/minute (varies by plan)
- **Jira Server/DC**: Depends on configuration
This script fetches 100 issues per request, so a 1000-issue project requires ~10 API calls.
---
# Export: jsonl2jira.py
Push bd issues to Jira.
## Export Quick Start
```bash
# Export all issues (create new, update existing)
bd export | python jsonl2jira.py --from-config
# Create only (don't update existing Jira issues)
bd export | python jsonl2jira.py --from-config --create-only
# Dry run (preview what would happen)
bd export | python jsonl2jira.py --from-config --dry-run
# Auto-update bd with new external_refs
bd export | python jsonl2jira.py --from-config --update-refs
```
## Export Modes
### Create Only
Only create new Jira issues for bd issues that don't have an `external_ref`:
```bash
bd export | python jsonl2jira.py --from-config --create-only
```
### Create and Update
Create new issues AND update existing ones (matched by `external_ref`):
```bash
bd export | python jsonl2jira.py --from-config
```
### Dry Run
Preview what would happen without making any changes:
```bash
bd export | python jsonl2jira.py --from-config --dry-run
```
## Workflow Transitions
Jira often requires workflow transitions to change issue status (you can't just set `status=Done`). The export script automatically:
1. Fetches available transitions for each issue
2. Finds a transition that leads to the target status
3. Executes the transition
If no valid transition is found, the status change is skipped with a warning.
## Reverse Field Mappings
For export, you need mappings from bd → Jira (reverse of import):
```bash
# Status: bd status -> Jira status name
bd config set jira.reverse_status_map.open "To Do"
bd config set jira.reverse_status_map.in_progress "In Progress"
bd config set jira.reverse_status_map.blocked "Blocked"
bd config set jira.reverse_status_map.closed "Done"
# Type: bd type -> Jira issue type name
bd config set jira.reverse_type_map.bug "Bug"
bd config set jira.reverse_type_map.feature "Story"
bd config set jira.reverse_type_map.task "Task"
bd config set jira.reverse_type_map.epic "Epic"
bd config set jira.reverse_type_map.chore "Task"
# Priority: bd priority (0-4) -> Jira priority name
bd config set jira.reverse_priority_map.0 "Highest"
bd config set jira.reverse_priority_map.1 "High"
bd config set jira.reverse_priority_map.2 "Medium"
bd config set jira.reverse_priority_map.3 "Low"
bd config set jira.reverse_priority_map.4 "Lowest"
```
If not configured, sensible defaults are used.
## Updating external_ref
After creating a Jira issue, you'll want to link it back to the bd issue:
```bash
# Option 1: Auto-update with --update-refs flag
bd export | python jsonl2jira.py --from-config --update-refs
# Option 2: Manual update from script output
bd export | python jsonl2jira.py --from-config | while read line; do
bd_id=$(echo "$line" | jq -r '.bd_id')
ext_ref=$(echo "$line" | jq -r '.external_ref')
bd update "$bd_id" --external-ref="$ext_ref"
done
```
## Export Examples
### Example 1: Initial Export to Jira
```bash
# First, export all open issues
bd list --status open --json | python jsonl2jira.py --from-config --update-refs
# Now those issues have external_ref set
bd list --status open
```
### Example 2: Sync Changes Back to Jira
```bash
# Export issues (existing Jira issues are matched and updated by external_ref)
bd list --json | python jsonl2jira.py --from-config
```
### Example 3: Preview Before Export
```bash
# See what would happen
bd export | python jsonl2jira.py --from-config --dry-run
# If it looks good, run for real
bd export | python jsonl2jira.py --from-config --update-refs
```
## Export Limitations
- **Assignee**: Not set (requires Jira account ID lookup)
- **Dependencies**: Not synced to Jira issue links
- **Comments**: Not exported
- **Custom fields**: design, acceptance_criteria, notes not exported
- **Attachments**: Not exported
## Bidirectional Sync Workflow
For ongoing synchronization between Jira and bd:
```bash
# 1. Pull changes from Jira
python jira2jsonl.py --from-config --jql "project=PROJ AND updated >= -1d" | bd import
# 2. Do local work in bd
bd update bd-xxx --status in_progress
# ... work ...
bd close bd-xxx
# 3. Push changes to Jira
bd export | python jsonl2jira.py --from-config
# 4. Repeat daily/weekly
```
## See Also
- [bd README](../../README.md) - Main documentation
- [GitHub Import Example](../github-import/) - Similar import for GitHub Issues
- [CONFIG.md](../../docs/CONFIG.md) - Configuration documentation
- [Jira REST API docs](https://developer.atlassian.com/cloud/jira/platform/rest/v2/)
```
--------------------------------------------------------------------------------
/examples/multiple-personas/README.md:
--------------------------------------------------------------------------------
```markdown
# Multiple Personas Workflow Example
This example demonstrates how to use beads when different roles work on the same project (architect, implementer, reviewer, etc.).
## Problem
Complex projects involve different personas with different concerns:
- **Architect:** System design, technical decisions, high-level planning
- **Implementer:** Write code, fix bugs, implement features
- **Reviewer:** Code review, quality gates, testing
- **Product:** Requirements, priorities, user stories
Each persona needs:
- Different views of the same work
- Clear handoffs between roles
- A way to track discovered work in context
## Solution
Use beads labels, priorities, and dependencies to organize work by persona, with clear ownership and handoffs.
## Setup
```bash
# Initialize beads
cd my-project
bd init
# Start daemon for auto-sync (optional for teams)
bd daemon start --auto-commit --auto-push
```
## Persona: Architect
The architect creates high-level design and makes technical decisions.
### Create Architecture Epic
```bash
# Main epic
bd create "Design new caching layer" -t epic -p 1
# Returns: bd-a1b2c3
# Add architecture label
bd label add bd-a1b2c3 architecture
# Architecture tasks
bd create "Research caching strategies (Redis vs Memcached)" -p 1 \
--deps discovered-from:bd-a1b2c3
bd label add bd-xyz architecture
bd create "Write ADR: Caching layer design" -p 1 \
--deps discovered-from:bd-a1b2c3
bd label add bd-abc architecture
bd create "Design cache invalidation strategy" -p 1 \
--deps discovered-from:bd-a1b2c3
bd label add bd-def architecture
```
### View Architect Work
```bash
# See only architecture issues
bd list --label architecture
# See architecture issues that are ready
bd list --label architecture --status open | grep -v blocked
# High-priority architecture decisions
bd list --label architecture --priority 0
bd list --label architecture --priority 1
```
### Handoff to Implementer
When design is complete, create implementation tasks:
```bash
# Close architecture tasks
bd close bd-xyz --reason "Decided on Redis with write-through"
bd close bd-abc --reason "ADR-007 published"
# Create implementation tasks with labels
bd create "Implement Redis connection pool" -p 1 \
--deps discovered-from:bd-a1b2c3
bd label add bd-impl1 implementation
bd create "Add cache middleware to API routes" -p 1 \
--deps discovered-from:bd-a1b2c3
bd label add bd-impl2 implementation
# Link implementation to architecture
bd dep add bd-impl1 bd-abc --type related # Based on ADR
bd dep add bd-impl2 bd-abc --type related
```
## Persona: Implementer
The implementer writes code based on architecture decisions.
### View Implementation Work
```bash
# See only implementation tasks
bd list --label implementation --status open
# See what's ready to implement
bd ready | grep implementation
# High-priority bugs to fix
bd list --label implementation --type bug --priority 0
bd list --label implementation --type bug --priority 1
```
### Claim and Implement
```bash
# Claim a task
bd update bd-impl1 --status in_progress
# During implementation, discover issues
bd create "Need connection retry logic" -t bug -p 1 \
--deps discovered-from:bd-impl1
bd label add bd-bug1 implementation bug
bd create "Add metrics for cache hit rate" -p 2 \
--deps discovered-from:bd-impl1
bd label add bd-metric1 implementation observability
# Complete implementation
bd close bd-impl1 --reason "Redis pool working, tested locally"
```
### Handoff to Reviewer
```bash
# Mark ready for review
bd create "Code review: Redis caching layer" -p 1
bd label add bd-review1 review
# Link to implementation
bd dep add bd-review1 bd-impl1 --type related
bd dep add bd-review1 bd-impl2 --type related
```
## Persona: Reviewer
The reviewer checks code quality, tests, and approvals.
### View Review Work
```bash
# See all review tasks
bd list --label review --status open
# See what's ready for review
bd ready | grep review
# High-priority reviews
bd list --label review --priority 0
bd list --label review --priority 1
```
### Perform Review
```bash
# Claim review
bd update bd-review1 --status in_progress
# Found issues during review
bd create "Add unit tests for retry logic" -t task -p 1 \
--deps discovered-from:bd-review1
bd label add bd-test1 implementation testing
bd create "Fix: connection leak on timeout" -t bug -p 0 \
--deps discovered-from:bd-review1
bd label add bd-bug2 implementation bug critical
bd create "Document Redis config options" -p 2 \
--deps discovered-from:bd-review1
bd label add bd-doc1 documentation
# Block review until issues fixed
bd dep add bd-review1 bd-test1 --type blocks
bd dep add bd-review1 bd-bug2 --type blocks
```
### Approve or Request Changes
```bash
# After fixes, approve
bd close bd-review1 --reason "LGTM, all tests pass"
# Or request changes
bd update bd-review1 --status blocked
# (blockers will show up in the dependency tree)
```
## Persona: Product Owner
The product owner manages priorities and requirements.
### View Product Work
```bash
# See all features
bd list --type feature
# See high-priority work
bd list --priority 0
bd list --priority 1
# See what's in progress
bd list --status in_progress
# See what's blocked
bd list --status blocked
```
### Prioritize Work
```bash
# Bump priority based on customer feedback
bd update bd-impl2 --priority 0
# Lower priority for nice-to-haves
bd update bd-metric1 --priority 3
# Add product label to track customer-facing work
bd label add bd-impl2 customer-facing
```
### Create User Stories
```bash
# User story
bd create "As a user, I want faster page loads" -t feature -p 1
bd label add bd-story1 user-story customer-facing
# Link technical work to user story
bd dep add bd-impl1 bd-story1 --type related
bd dep add bd-impl2 bd-story1 --type related
```
## Multi-Persona Workflow Example
### Week 1: Architecture Phase
**Architect:**
```bash
# Create epic
bd create "Implement rate limiting" -t epic -p 1 # bd-epic1
bd label add bd-epic1 architecture
# Research
bd create "Research rate limiting algorithms" -p 1 \
--deps discovered-from:bd-epic1
bd label add bd-research1 architecture research
bd update bd-research1 --status in_progress
# ... research done ...
bd close bd-research1 --reason "Chose token bucket algorithm"
# Design
bd create "Write ADR: Rate limiting design" -p 1 \
--deps discovered-from:bd-epic1
bd label add bd-adr1 architecture documentation
bd close bd-adr1 --reason "ADR-012 approved"
```
### Week 2: Implementation Phase
**Implementer:**
```bash
# See what's ready to implement
bd ready | grep implementation
# Create implementation tasks based on architecture
bd create "Implement token bucket algorithm" -p 1 \
--deps discovered-from:bd-epic1
bd label add bd-impl1 implementation
bd dep add bd-impl1 bd-adr1 --type related
bd create "Add rate limit middleware" -p 1 \
--deps discovered-from:bd-epic1
bd label add bd-impl2 implementation
# Claim and start
bd update bd-impl1 --status in_progress
# Discover issues
bd create "Need distributed rate limiting (Redis)" -t bug -p 1 \
--deps discovered-from:bd-impl1
bd label add bd-bug1 implementation bug
```
**Architect (consulted):**
```bash
# Architect reviews discovered issue
bd show bd-bug1
bd update bd-bug1 --priority 0 # Escalate to critical
bd label add bd-bug1 architecture # Architect will handle
# Make decision
bd create "Design: Distributed rate limiting" -p 0 \
--deps discovered-from:bd-bug1
bd label add bd-design1 architecture
bd close bd-design1 --reason "Use Redis with sliding window"
```
**Implementer (continues):**
```bash
# Implement based on architecture decision
bd create "Add Redis sliding window for rate limits" -p 0 \
--deps discovered-from:bd-design1
bd label add bd-impl3 implementation
bd close bd-impl1 --reason "Token bucket working"
bd close bd-impl3 --reason "Redis rate limiting working"
```
### Week 3: Review Phase
**Reviewer:**
```bash
# See what's ready for review
bd list --label review
# Create review task
bd create "Code review: Rate limiting" -p 1
bd label add bd-review1 review
bd dep add bd-review1 bd-impl1 --type related
bd dep add bd-review1 bd-impl3 --type related
bd update bd-review1 --status in_progress
# Found issues
bd create "Add integration tests for Redis" -t task -p 1 \
--deps discovered-from:bd-review1
bd label add bd-test1 testing implementation
bd create "Missing error handling for Redis down" -t bug -p 0 \
--deps discovered-from:bd-review1
bd label add bd-bug2 implementation bug critical
# Block review
bd dep add bd-review1 bd-test1 --type blocks
bd dep add bd-review1 bd-bug2 --type blocks
```
**Implementer (fixes):**
```bash
# Fix review findings
bd update bd-bug2 --status in_progress
bd close bd-bug2 --reason "Added circuit breaker for Redis"
bd update bd-test1 --status in_progress
bd close bd-test1 --reason "Integration tests passing"
```
**Reviewer (approves):**
```bash
# Review unblocked
bd close bd-review1 --reason "Approved, merging PR"
```
**Product Owner (closes epic):**
```bash
# Feature shipped!
bd close bd-epic1 --reason "Rate limiting in production"
```
## Label Organization
### Recommended Labels
```bash
# Role labels
architecture, implementation, review, product
# Type labels
bug, feature, task, chore, documentation
# Status labels
critical, blocked, waiting-feedback, needs-design
# Domain labels
frontend, backend, infrastructure, database
# Quality labels
testing, security, performance, accessibility
# Customer labels
customer-facing, user-story, feedback
```
### View by Label Combination
```bash
# Critical bugs for implementers
bd list --label implementation --label bug --label critical
# Architecture issues needing review
bd list --label architecture --label review
# Customer-facing features
bd list --label customer-facing --type feature
# Backend implementation work
bd list --label backend --label implementation --status open
```
## Filtering by Persona
### Architect View
```bash
# My work
bd list --label architecture --status open
# Design decisions to make
bd list --label architecture --label needs-design
# High-priority architecture
bd list --label architecture --priority 0
bd list --label architecture --priority 1
```
### Implementer View
```bash
# My work
bd list --label implementation --status open
# Ready to implement
bd ready | grep implementation
# Bugs to fix
bd list --label implementation --type bug --priority 0
bd list --label implementation --type bug --priority 1
# Blocked work
bd list --label implementation --status blocked
```
### Reviewer View
```bash
# Reviews waiting
bd list --label review --status open
# Critical reviews
bd list --label review --priority 0
# Blocked reviews
bd list --label review --status blocked
```
### Product Owner View
```bash
# All customer-facing work
bd list --label customer-facing
# Features in progress
bd list --type feature --status in_progress
# Blocked work (needs attention)
bd list --status blocked
# High-priority items across all personas
bd list --priority 0
```
## Handoff Patterns
### Architecture → Implementation
```bash
# Architect creates spec
bd create "Design: New payment API" -p 1
bd label add bd-design1 architecture documentation
# When done, create implementation tasks
bd create "Implement Stripe integration" -p 1
bd label add bd-impl1 implementation
bd dep add bd-impl1 bd-design1 --type related
bd close bd-design1 --reason "Spec complete, ready for implementation"
```
### Implementation → Review
```bash
# Implementer finishes
bd close bd-impl1 --reason "Stripe working, PR ready"
# Create review task
bd create "Code review: Stripe integration" -p 1
bd label add bd-review1 review
bd dep add bd-review1 bd-impl1 --type related
```
### Review → Product
```bash
# Reviewer approves
bd close bd-review1 --reason "Approved, deployed to staging"
# Product tests in staging
bd create "UAT: Test Stripe in staging" -p 1
bd label add bd-uat1 product testing
bd dep add bd-uat1 bd-review1 --type related
# Product approves for production
bd close bd-uat1 --reason "UAT passed, deploying to prod"
```
## Best Practices
### 1. Use Labels Consistently
```bash
# Good: Clear role separation
bd label add bd-123 architecture
bd label add bd-456 implementation
bd label add bd-789 review
# Bad: Mixing concerns
# (same issue shouldn't be both architecture and implementation)
```
### 2. Link Related Work
```bash
# Always link implementation to architecture
bd dep add bd-impl bd-arch --type related
# Link bugs to features
bd dep add bd-bug bd-feature --type discovered-from
```
### 3. Clear Handoffs
```bash
# Document why closing
bd close bd-arch --reason "Design complete, created bd-impl1 and bd-impl2 for implementation"
# Not: "done" (too vague)
```
### 4. Escalate When Needed
```bash
# Implementer discovers architectural issue
bd create "Current design doesn't handle edge case X" -t bug -p 0
bd label add bd-issue architecture # Tag for architect
bd label add bd-issue needs-design # Flag as needing design
```
### 5. Regular Syncs
```bash
# Daily: Each persona checks their work
bd list --label architecture --status open # Architect
bd list --label implementation --status open # Implementer
bd list --label review --status open # Reviewer
# Weekly: Team reviews together
bd stats # Overall progress
bd list --status blocked # What's stuck?
bd ready # What's ready to work on?
```
## Common Patterns
### Spike Then Implement
```bash
# Architect creates research spike
bd create "Spike: Evaluate GraphQL vs REST" -p 1
bd label add bd-spike1 architecture research
bd close bd-spike1 --reason "Chose GraphQL, created implementation tasks"
# Implementation follows
bd create "Implement GraphQL API" -p 1
bd label add bd-impl1 implementation
bd dep add bd-impl1 bd-spike1 --type related
```
### Bug Triage
```bash
# Bug reported
bd create "App crashes on large files" -t bug -p 1
# Implementer investigates
bd label add bd-bug1 implementation
bd update bd-bug1 --status in_progress
# Discovers architectural issue
bd create "Need streaming uploads, not buffering" -t bug -p 0
bd label add bd-arch1 architecture
bd dep add bd-arch1 bd-bug1 --type discovered-from
# Architect designs solution
bd update bd-arch1 --status in_progress
bd close bd-arch1 --reason "Designed streaming upload flow"
# Implementer fixes
bd update bd-bug1 --status in_progress
bd close bd-bug1 --reason "Implemented streaming uploads"
```
### Feature Development
```bash
# Product creates user story
bd create "Users want bulk import" -t feature -p 1
bd label add bd-story1 user-story product
# Architect designs
bd create "Design: Bulk import system" -p 1
bd label add bd-design1 architecture
bd dep add bd-design1 bd-story1 --type related
# Implementation tasks
bd create "Implement CSV parser" -p 1
bd label add bd-impl1 implementation
bd dep add bd-impl1 bd-design1 --type related
bd create "Implement batch processor" -p 1
bd label add bd-impl2 implementation
bd dep add bd-impl2 bd-design1 --type related
# Review
bd create "Code review: Bulk import" -p 1
bd label add bd-review1 review
bd dep add bd-review1 bd-impl1 --type blocks
bd dep add bd-review1 bd-impl2 --type blocks
# Product UAT
bd create "UAT: Bulk import" -p 1
bd label add bd-uat1 product testing
bd dep add bd-uat1 bd-review1 --type blocks
```
## See Also
- [Multi-Phase Development](../multi-phase-development/) - Organize work by phase
- [Team Workflow](../team-workflow/) - Collaborate across personas
- [Contributor Workflow](../contributor-workflow/) - External contributions
- [Labels Documentation](../../LABELS.md) - Label management guide
```