This is page 5 of 7. Use http://codebase.md/tiberriver256/azure-devops-mcp?page={x} to view the full context.
# Directory Structure
```
├── .clinerules
├── .env.example
├── .eslintrc.json
├── .github
│ ├── copilot-instructions.md
│ ├── FUNDING.yml
│ ├── release-please-config.json
│ ├── release-please-manifest.json
│ ├── skills
│ │ ├── azure-devops-rest-api
│ │ │ ├── references
│ │ │ │ └── api_areas.md
│ │ │ ├── scripts
│ │ │ │ ├── clone_specs.sh
│ │ │ │ └── find_endpoint.py
│ │ │ └── SKILL.md
│ │ └── skill-creator
│ │ ├── LICENSE.txt
│ │ ├── references
│ │ │ ├── output-patterns.md
│ │ │ └── workflows.md
│ │ ├── scripts
│ │ │ ├── init_skill.py
│ │ │ └── quick_validate.py
│ │ └── SKILL.md
│ └── workflows
│ ├── main.yml
│ ├── release-please.yml
│ └── update-skills.yml
├── .gitignore
├── .husky
│ ├── commit-msg
│ └── pre-commit
├── .kilocode
│ └── mcp.json
├── .prettierrc
├── .vscode
│ └── settings.json
├── CHANGELOG.md
├── commitlint.config.js
├── CONTRIBUTING.md
├── create_branch.sh
├── docs
│ ├── authentication.md
│ ├── azure-identity-authentication.md
│ ├── ci-setup.md
│ ├── examples
│ │ ├── azure-cli-authentication.env
│ │ ├── azure-identity-authentication.env
│ │ ├── pat-authentication.env
│ │ └── README.md
│ ├── testing
│ │ ├── README.md
│ │ └── setup.md
│ └── tools
│ ├── core-navigation.md
│ ├── organizations.md
│ ├── pipelines.md
│ ├── projects.md
│ ├── pull-requests.md
│ ├── README.md
│ ├── repositories.md
│ ├── resources.md
│ ├── search.md
│ ├── user-tools.md
│ ├── wiki.md
│ └── work-items.md
├── finish_task.sh
├── jest.e2e.config.js
├── jest.int.config.js
├── jest.unit.config.js
├── LICENSE
├── memory
│ └── tasks_memory_2025-05-26T16-18-03.json
├── package-lock.json
├── package.json
├── project-management
│ ├── planning
│ │ ├── architecture-guide.md
│ │ ├── azure-identity-authentication-design.md
│ │ ├── project-plan.md
│ │ ├── project-structure.md
│ │ ├── tech-stack.md
│ │ └── the-dream-team.md
│ ├── startup.xml
│ ├── tdd-cycle.xml
│ └── troubleshooter.xml
├── README.md
├── setup_env.sh
├── shrimp-rules.md
├── src
│ ├── clients
│ │ └── azure-devops.ts
│ ├── features
│ │ ├── organizations
│ │ │ ├── __test__
│ │ │ │ └── test-helpers.ts
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── list-organizations
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── schemas.ts
│ │ │ ├── tool-definitions.ts
│ │ │ └── types.ts
│ │ ├── pipelines
│ │ │ ├── artifacts.spec.unit.ts
│ │ │ ├── artifacts.ts
│ │ │ ├── download-pipeline-artifact
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-pipeline
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-pipeline-log
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-pipeline-run
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── helpers.ts
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── list-pipeline-runs
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── list-pipelines
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── pipeline-timeline
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── tool-definitions.ts
│ │ │ ├── trigger-pipeline
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ └── types.ts
│ │ ├── projects
│ │ │ ├── __test__
│ │ │ │ └── test-helpers.ts
│ │ │ ├── get-project
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-project-details
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── list-projects
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── schemas.ts
│ │ │ ├── tool-definitions.ts
│ │ │ └── types.ts
│ │ ├── pull-requests
│ │ │ ├── add-pull-request-comment
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── create-pull-request
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-pull-request-changes
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── get-pull-request-checks
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── get-pull-request-comments
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── list-pull-requests
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── schemas.ts
│ │ │ ├── tool-definitions.ts
│ │ │ ├── types.ts
│ │ │ └── update-pull-request
│ │ │ ├── feature.spec.int.ts
│ │ │ ├── feature.spec.unit.ts
│ │ │ ├── feature.ts
│ │ │ └── index.ts
│ │ ├── repositories
│ │ │ ├── __test__
│ │ │ │ └── test-helpers.ts
│ │ │ ├── create-branch
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── create-commit
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── get-all-repositories-tree
│ │ │ │ ├── __snapshots__
│ │ │ │ │ └── feature.spec.unit.ts.snap
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-file-content
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-repository
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-repository-details
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-repository-tree
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── list-commits
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── list-repositories
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── schemas.ts
│ │ │ ├── tool-definitions.ts
│ │ │ └── types.ts
│ │ ├── search
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── schemas.ts
│ │ │ ├── search-code
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── search-wiki
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── search-work-items
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── tool-definitions.ts
│ │ │ └── types.ts
│ │ ├── users
│ │ │ ├── get-me
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── schemas.ts
│ │ │ ├── tool-definitions.ts
│ │ │ └── types.ts
│ │ ├── wikis
│ │ │ ├── create-wiki
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── create-wiki-page
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-wiki-page
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-wikis
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── list-wiki-pages
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── tool-definitions.ts
│ │ │ └── update-wiki-page
│ │ │ ├── feature.spec.int.ts
│ │ │ ├── feature.ts
│ │ │ ├── index.ts
│ │ │ └── schema.ts
│ │ └── work-items
│ │ ├── __test__
│ │ │ ├── fixtures.ts
│ │ │ ├── test-helpers.ts
│ │ │ └── test-utils.ts
│ │ ├── create-work-item
│ │ │ ├── feature.spec.int.ts
│ │ │ ├── feature.spec.unit.ts
│ │ │ ├── feature.ts
│ │ │ ├── index.ts
│ │ │ └── schema.ts
│ │ ├── get-work-item
│ │ │ ├── feature.spec.int.ts
│ │ │ ├── feature.spec.unit.ts
│ │ │ ├── feature.ts
│ │ │ ├── index.ts
│ │ │ └── schema.ts
│ │ ├── index.spec.unit.ts
│ │ ├── index.ts
│ │ ├── list-work-items
│ │ │ ├── feature.spec.int.ts
│ │ │ ├── feature.spec.unit.ts
│ │ │ ├── feature.ts
│ │ │ ├── index.ts
│ │ │ └── schema.ts
│ │ ├── manage-work-item-link
│ │ │ ├── feature.spec.int.ts
│ │ │ ├── feature.spec.unit.ts
│ │ │ ├── feature.ts
│ │ │ ├── index.ts
│ │ │ └── schema.ts
│ │ ├── schemas.ts
│ │ ├── tool-definitions.ts
│ │ ├── types.ts
│ │ └── update-work-item
│ │ ├── feature.spec.int.ts
│ │ ├── feature.spec.unit.ts
│ │ ├── feature.ts
│ │ ├── index.ts
│ │ └── schema.ts
│ ├── index.spec.unit.ts
│ ├── index.ts
│ ├── server.spec.e2e.ts
│ ├── server.ts
│ ├── shared
│ │ ├── api
│ │ │ ├── client.ts
│ │ │ └── index.ts
│ │ ├── auth
│ │ │ ├── auth-factory.ts
│ │ │ ├── client-factory.ts
│ │ │ └── index.ts
│ │ ├── config
│ │ │ ├── index.ts
│ │ │ └── version.ts
│ │ ├── enums
│ │ │ ├── index.spec.unit.ts
│ │ │ └── index.ts
│ │ ├── errors
│ │ │ ├── azure-devops-errors.ts
│ │ │ ├── handle-request-error.ts
│ │ │ └── index.ts
│ │ ├── test
│ │ │ └── test-helpers.ts
│ │ └── types
│ │ ├── config.ts
│ │ ├── index.ts
│ │ ├── request-handler.ts
│ │ └── tool-definition.ts
│ ├── types
│ │ └── diff.d.ts
│ └── utils
│ ├── environment.spec.unit.ts
│ └── environment.ts
├── tasks.json
├── tests
│ └── setup.ts
└── tsconfig.json
```
# Files
--------------------------------------------------------------------------------
/.github/copilot-instructions.md:
--------------------------------------------------------------------------------
```markdown
# Azure DevOps MCP Server - Development Instructions
Always reference these instructions first and fall back to search or bash commands only when you encounter unexpected information that does not match the info here.
## Prerequisites
You need to have `Node.js` and `npm` installed. Node 20 (LTS) or later is recommended for development.
## Building and Running
### 1. Install Dependencies
```bash
npm install # Takes ~25 seconds. NEVER CANCEL. Set timeout to 60+ seconds.
```
### 2. Build the Server
```bash
npm run build # Takes ~5 seconds. Compiles TypeScript to `dist/` directory.
```
### 3. Run Tests
```bash
# Unit tests (no Azure DevOps credentials required)
npm run test:unit # Takes ~19 seconds. NEVER CANCEL. Set timeout to 60+ seconds.
# Integration tests (requires Azure DevOps credentials)
npm run test:int # Takes ~18 seconds. NEVER CANCEL. Set timeout to 60+ seconds.
# E2E tests (requires Azure DevOps credentials)
npm run test:e2e # Takes ~6 seconds. NEVER CANCEL. Set timeout to 30+ seconds.
# All tests
npm test # Takes ~45 seconds total. NEVER CANCEL. Set timeout to 90+ seconds.
```
### 4. Code Quality
```bash
npm run lint # Takes ~3 seconds. Runs ESLint for code quality.
npm run lint:fix # Auto-fix linting issues.
npm run format # Takes ~3 seconds. Runs Prettier for code formatting.
```
### 5. Run the Server
```bash
# IMPORTANT: Always configure environment first using `.env` file (copy from `.env.example`)
npm run dev # Development mode with auto-restart using ts-node-dev
npm run start # Production mode - runs compiled version from `dist/index.js`
npm run inspector # Debug mode with MCP Inspector tool
```
## Environment Setup
Copy `.env.example` to `.env` and configure Azure DevOps credentials:
```bash
cp .env.example .env
# Edit .env with your Azure DevOps credentials
```
**Required environment variables:**
```
AZURE_DEVOPS_ORG_URL=https://dev.azure.com/your-organization
AZURE_DEVOPS_AUTH_METHOD=pat # Options: pat, azure-identity, azure-cli
AZURE_DEVOPS_PAT=your-personal-access-token # For PAT auth
AZURE_DEVOPS_DEFAULT_PROJECT=your-project-name # Optional
```
**Alternative setup methods:**
- Use `./setup_env.sh` script for interactive environment setup with Azure CLI
- See `docs/authentication.md` for comprehensive authentication guides
- Copy from `docs/examples/` directory for ready-to-use configurations (PAT, Azure Identity, Azure CLI)
- For CI/CD environments: Reference `docs/ci-setup.md` for secrets configuration
**Note:** The application will fail gracefully with clear error messages if credentials are missing.
## Submitting Changes
Before submitting a PR, ensure:
1. **Linting and formatting pass:**
```bash
npm run lint:fix && npm run format
```
2. **Build succeeds:**
```bash
npm run build
```
3. **Unit tests pass:**
```bash
npm run test:unit
```
4. **Manual testing complete:**
- Configure `.env` file (copy from `.env.example` and update values)
- Test server startup: `npm run dev` (should start or fail gracefully with clear error messages)
- Test MCP protocol using `npm run inspector` (requires working Azure DevOps credentials)
5. **Integration/E2E tests pass** (if you have Azure DevOps credentials):
```bash
npm run test:int && npm run test:e2e
```
6. **Conventional commits:**
```bash
npm run commit # Use this for guided commit message creation
```
**Note:** Unit tests must pass even without Azure DevOps credentials - they use mocks for all external dependencies.
## Project Architecture
- **TypeScript** project with strict configuration (`tsconfig.json`)
- **Feature-based architecture:** Each Azure DevOps feature area is a separate module in `src/features/`
- Example: `src/features/work-items/`, `src/features/projects/`, `src/features/repositories/`
- **MCP Protocol** implementation for AI assistant integration using `@modelcontextprotocol/sdk`
- **Test Strategy:** Testing Trophy approach (see `docs/testing/README.md`)
- Unit tests: `.spec.unit.ts` (mock all external dependencies, focus on logic)
- Integration tests: `.spec.int.ts` (test with real Azure DevOps APIs, requires credentials)
- E2E tests: `.spec.e2e.ts` (test complete MCP server functionality)
- Tests are co-located with feature code
- **Path aliases:** Use `@/` instead of relative imports (e.g., `import { something } from '@/shared/utils'`)
### Key Directories
- `src/index.ts` and `src/server.ts` - Main server entry points
- `src/features/[feature-name]/` - Feature modules
- `src/shared/` - Shared utilities (auth, errors, types, config)
- `src/clients/azure-devops.ts` - Azure DevOps client
- `tests/setup.ts` - Test configuration
- `docs/` - Comprehensive documentation (authentication, testing, tools)
## Adding a New Feature
Follow the Feature Module pattern used throughout the codebase:
1. **Create feature module directory:**
```
src/features/[feature-name]/
```
2. **Add required files:**
- `feature.ts` - Core feature logic
- `schema.ts` - Zod schemas for input/output validation
- `tool-definitions.ts` - MCP tool definitions
- `index.ts` - Exports and request handlers
3. **Add test files:**
- `feature.spec.unit.ts` - Unit tests (required)
- `feature.spec.int.ts` - Integration tests (if applicable)
4. **Register the feature:**
- Add to `src/server.ts` following existing patterns
5. **Reference existing modules:**
- See `src/features/work-items/` or `src/features/projects/` for complete examples
- Follow the same structure and naming conventions
## Modifying Existing Features
When updating existing features:
1. **Update core logic:** `src/features/[feature-name]/feature.ts`
2. **Update schemas:** `schemas.ts` (if input/output changes)
3. **Update tool definitions:** `tool-definitions.ts` (if MCP interface changes)
4. **Update or add tests:** Always update existing tests or add new ones
5. **Run validation:** Execute the full validation workflow before committing
## Dependencies
**Core libraries:**
- `@modelcontextprotocol/sdk` - MCP protocol implementation
- `azure-devops-node-api` - Azure DevOps REST APIs
- `@azure/identity` - Azure authentication
- `zod` - Schema validation and type safety
**Development tools:**
- `jest` - Testing framework (with separate configs for unit/int/e2e tests)
- `ts-node-dev` - Development server with auto-restart
- `eslint` + `prettier` - Code quality and formatting
- `husky` - Git hooks for commit validation
**CI/CD:**
- GitHub Actions workflow: `.github/workflows/main.yml`
- Runs on PRs to `main`: Install → Lint → Build → Unit Tests → Integration Tests → E2E Tests
- Integration and E2E tests require Azure DevOps secrets in CI
- Release automation with `release-please`
## Documentation
The repository has extensive documentation. Reference these for specific scenarios:
### Authentication & Configuration
- `docs/authentication.md` - Complete authentication guide (PAT, Azure Identity, Azure CLI)
- `docs/azure-identity-authentication.md` - Detailed Azure Identity setup and troubleshooting
- `docs/ci-setup.md` - CI/CD environment setup and secrets configuration
- `docs/examples/` - Ready-to-use environment configuration examples
### Testing & Development
- `docs/testing/README.md` - Testing Trophy approach, test types, and testing philosophy
- `docs/testing/setup.md` - Test environment setup, import patterns, VSCode integration
- `CONTRIBUTING.md` - Development practices, commit guidelines, and workflow
### Tool & API Documentation
- `docs/tools/README.md` - Complete tool catalog with examples and response formats
- `docs/tools/[feature].md` - Detailed docs for each feature area (work-items, projects, etc.)
- `docs/tools/resources.md` - Resource URI patterns for accessing repository content
### When to Reference
- **Starting new features:** Review `CONTRIBUTING.md` and `docs/testing/README.md`
- **Authentication issues:** Check `docs/authentication.md` first
- **Available tools:** Browse `docs/tools/README.md`
- **CI/CD problems:** Reference `docs/ci-setup.md`
- **Testing patterns:** Use `docs/testing/setup.md`
- **Environment setup:** Copy from `docs/examples/`
## Troubleshooting
**Build fails:**
- Check TypeScript errors
- Ensure all imports are valid
- Verify `tsconfig.json` paths configuration
**Tests fail:**
- Unit tests: Should pass without Azure DevOps credentials (they use mocks)
- Integration/E2E tests: Check `.env` file has valid Azure DevOps credentials
- See `docs/testing/setup.md` for environment variables and patterns
**Lint errors:**
- Run `npm run lint:fix` to auto-fix common issues
- Check ESLint rules in `.eslintrc.json`
**Server won't start:**
- Verify `.env` file configuration
- Check error messages for missing environment variables
- See `docs/authentication.md` for comprehensive setup guides
**Authentication issues:**
- See `docs/authentication.md` for comprehensive troubleshooting
- For CI/CD: Reference `docs/ci-setup.md` for proper secrets configuration
**Import errors:**
- Use `@/` path aliases instead of relative imports
- Verify `tsconfig.json` paths configuration
**Unknown tool capabilities:**
- Browse `docs/tools/README.md` for complete tool documentation
## Available Skills
Skills are modular, self-contained packages that extend capabilities with specialized knowledge, workflows, and tools. Reference these skills when working on related tasks.
| Skill Name | Use When... | Path |
|------------|-------------|------|
| skill-creator | Creating a new skill or updating an existing skill that extends capabilities with specialized knowledge, workflows, or tool integrations | [.github/skills/skill-creator/SKILL.md](.github/skills/skill-creator/SKILL.md) |
| azure-devops-rest-api | Implementing new Azure DevOps API integrations, exploring API capabilities, understanding request/response formats, or referencing the official OpenAPI specifications from the vsts-rest-api-specs repository | [.github/skills/azure-devops-rest-api/SKILL.md](.github/skills/azure-devops-rest-api/SKILL.md) |
```
--------------------------------------------------------------------------------
/src/features/projects/get-project-details/feature.ts:
--------------------------------------------------------------------------------
```typescript
import { WebApi } from 'azure-devops-node-api';
import {
AzureDevOpsResourceNotFoundError,
AzureDevOpsError,
} from '../../../shared/errors';
import {
TeamProject,
WebApiTeam,
} from 'azure-devops-node-api/interfaces/CoreInterfaces';
import { WorkItemField } from 'azure-devops-node-api/interfaces/WorkItemTrackingInterfaces';
// Type for work item type field with additional properties.
// NOTE(review): these flags are read off the objects returned by
// getWorkItemTypeFieldsWithReferences but are not declared on the base
// WorkItemField type — confirm against the azure-devops-node-api typings in use.
interface WorkItemTypeField extends WorkItemField {
  isRequired?: boolean;
  isIdentity?: boolean;
  isPicklist?: boolean;
}
/**
 * Options for getting project details.
 */
export interface GetProjectDetailsOptions {
  /** ID or name of the project to fetch. */
  projectId: string;
  /** Include process template information on the result. */
  includeProcess?: boolean;
  /** Include work item types (only honored when includeProcess is true). */
  includeWorkItemTypes?: boolean;
  /** Include per-type field definitions (only honored when includeWorkItemTypes is true). */
  includeFields?: boolean;
  /** Include the project's teams on the result. */
  includeTeams?: boolean;
  /** Expand identity information when fetching teams (only used with includeTeams). */
  expandTeamIdentity?: boolean;
}
/**
 * Process information with work item types.
 */
interface ProcessInfo {
  /** Process template type ID (from project capabilities). */
  id: string;
  /** Process template display name. */
  name: string;
  description?: string;
  isDefault: boolean;
  type: string;
  /** Work item types belonging to this process (populated when requested). */
  workItemTypes?: WorkItemTypeInfo[];
  /** Simplified backlog-level grouping of work item type names. */
  hierarchyInfo?: {
    portfolioBacklogs?: {
      name: string;
      workItemTypes: string[];
    }[];
    requirementBacklog?: {
      name: string;
      workItemTypes: string[];
    };
    taskBacklog?: {
      name: string;
      workItemTypes: string[];
    };
  };
}
/**
 * Work item type information with states and fields.
 */
interface WorkItemTypeInfo {
  name: string;
  referenceName: string;
  description?: string;
  isDisabled: boolean;
  /** Workflow states for this type (a simplified default set in this implementation). */
  states?: {
    name: string;
    color?: string;
    stateCategory: string;
  }[];
  /** Field definitions for this type (populated when includeFields is requested). */
  fields?: {
    name: string;
    referenceName: string;
    type: string;
    required?: boolean;
    isIdentity?: boolean;
    isPicklist?: boolean;
    description?: string;
  }[];
}
/**
 * Project details response: the core TeamProject record, optionally enriched
 * with process and team information.
 */
interface ProjectDetails extends TeamProject {
  /** Present when includeProcess was requested. */
  process?: ProcessInfo;
  /** Present when includeTeams was requested. */
  teams?: WebApiTeam[];
}
/** Fallback field definitions used when type-specific field lookup fails. */
function defaultWorkItemFields(): NonNullable<WorkItemTypeInfo['fields']> {
  return [
    {
      name: 'Title',
      referenceName: 'System.Title',
      type: 'string',
      required: true,
    },
    {
      name: 'Description',
      referenceName: 'System.Description',
      type: 'html',
      required: false,
    },
  ];
}

/**
 * Build simplified backlog hierarchy info by matching well-known work item
 * type names. A real implementation would read the process backlog
 * configuration instead of hard-coding these Agile-style names.
 */
function buildHierarchyInfo(
  workItemTypes: WorkItemTypeInfo[],
): NonNullable<ProcessInfo['hierarchyInfo']> {
  // Names of work item types whose lowercased name matches any candidate.
  const namesMatching = (...candidates: string[]): string[] =>
    workItemTypes
      .filter((wit) => candidates.includes(wit.name.toLowerCase()))
      .map((wit) => wit.name);

  return {
    portfolioBacklogs: [
      { name: 'Epics', workItemTypes: namesMatching('epic') },
      { name: 'Features', workItemTypes: namesMatching('feature') },
    ],
    requirementBacklog: {
      name: 'Stories',
      workItemTypes: namesMatching('user story', 'bug'),
    },
    taskBacklog: {
      name: 'Tasks',
      workItemTypes: namesMatching('task'),
    },
  };
}

/**
 * Get detailed information about a project.
 *
 * Fetches the core project record and, depending on the options, enriches it
 * with teams, process template info, work item types, and field definitions.
 * Process states and backlog hierarchy are simplified/synthesized values, not
 * live process configuration (the dedicated Process API is not used here).
 *
 * @param connection The Azure DevOps WebApi connection
 * @param options Options controlling which detail sections are included
 * @returns The project details
 * @throws {AzureDevOpsResourceNotFoundError} If the project is not found
 */
export async function getProjectDetails(
  connection: WebApi,
  options: GetProjectDetailsOptions,
): Promise<ProjectDetails> {
  try {
    const {
      projectId,
      includeProcess = false,
      includeWorkItemTypes = false,
      includeFields = false,
      includeTeams = false,
      expandTeamIdentity = false,
    } = options;

    const coreApi = await connection.getCoreApi();

    const project = await coreApi.getProject(projectId);
    if (!project) {
      throw new AzureDevOpsResourceNotFoundError(
        `Project '${projectId}' not found`,
      );
    }

    // Initialize the result with the project information; capabilities is
    // always defined so downstream consumers can rely on it.
    const result: ProjectDetails = {
      ...project,
      capabilities: project.capabilities || {
        versioncontrol: { sourceControlType: 'Git' },
        processTemplate: { templateName: 'Unknown', templateTypeId: 'unknown' },
      },
    };

    if (includeTeams) {
      // BUG FIX: getTeams is (projectId, mine?, top?, skip?, expandIdentity?).
      // expandTeamIdentity was previously passed as the second argument, i.e.
      // as `mine`, which filtered to the caller's teams instead of expanding
      // identity information.
      result.teams = await coreApi.getTeams(
        projectId,
        undefined,
        undefined,
        undefined,
        expandTeamIdentity,
      );
    }

    if (includeProcess) {
      // Derive the process template from the project capabilities rather than
      // the Process API (not directly available on the WebApi type here).
      const processTemplateId =
        project.capabilities?.processTemplate?.templateTypeId || 'unknown';

      const processInfo: ProcessInfo = {
        id: processTemplateId,
        name: project.capabilities?.processTemplate?.templateName || 'Unknown',
        description: 'Process template for the project',
        isDefault: true,
        type: 'system',
      };

      if (includeWorkItemTypes) {
        const workItemTrackingApi = await connection.getWorkItemTrackingApi();
        const workItemTypes =
          await workItemTrackingApi.getWorkItemTypes(projectId);

        // Map API work item types into our simplified shape. The state list
        // is a hard-coded default set, not fetched per type.
        const processWorkItemTypes: WorkItemTypeInfo[] = workItemTypes.map(
          (wit) => ({
            name: wit.name || 'Unknown',
            referenceName: wit.referenceName || `System.Unknown.${Date.now()}`,
            description: wit.description,
            isDisabled: false,
            states: [
              { name: 'New', stateCategory: 'Proposed' },
              { name: 'Active', stateCategory: 'InProgress' },
              { name: 'Resolved', stateCategory: 'InProgress' },
              { name: 'Closed', stateCategory: 'Completed' },
            ],
          }),
        );

        if (includeFields) {
          try {
            // Fetch the fields specific to each work item type rather than
            // applying the global field list to every type.
            for (const wit of processWorkItemTypes) {
              try {
                const typeSpecificFields =
                  await workItemTrackingApi.getWorkItemTypeFieldsWithReferences(
                    projectId,
                    wit.name,
                  );
                wit.fields = typeSpecificFields.map(
                  (field: WorkItemTypeField) => ({
                    name: field.name || 'Unknown',
                    referenceName: field.referenceName || 'Unknown',
                    type: field.type?.toString().toLowerCase() || 'string',
                    required: field.isRequired || false,
                    isIdentity: field.isIdentity || false,
                    isPicklist: field.isPicklist || false,
                    description: field.description,
                  }),
                );
              } catch (typeFieldError) {
                console.error(
                  `Error fetching fields for work item type ${wit.name}:`,
                  typeFieldError,
                );
                // Best-effort: fall back to a minimal field set for this type.
                wit.fields = defaultWorkItemFields();
              }
            }
          } catch (fieldError) {
            console.error('Error in field processing:', fieldError);
            // Fall back to the default field set if field processing fails wholesale.
            for (const wit of processWorkItemTypes) {
              wit.fields = defaultWorkItemFields();
            }
          }
        }

        processInfo.workItemTypes = processWorkItemTypes;
        processInfo.hierarchyInfo = buildHierarchyInfo(processWorkItemTypes);
      }

      // Always set the process on the result when requested.
      result.process = processInfo;
    }

    return result;
  } catch (error) {
    // Preserve domain errors; wrap everything else with context.
    if (error instanceof AzureDevOpsError) {
      throw error;
    }
    throw new Error(
      `Failed to get project details: ${error instanceof Error ? error.message : String(error)}`,
    );
  }
}
```
--------------------------------------------------------------------------------
/docs/authentication.md:
--------------------------------------------------------------------------------
```markdown
# Authentication Guide for Azure DevOps MCP Server
This guide provides detailed information about the authentication methods supported by the Azure DevOps MCP Server, including setup instructions, configuration examples, and troubleshooting tips.
## Supported Authentication Methods
The Azure DevOps MCP Server supports three authentication methods:
1. **Personal Access Token (PAT)** - Simple token-based authentication
2. **Azure Identity (DefaultAzureCredential)** - Flexible authentication using the Azure Identity SDK
3. **Azure CLI** - Authentication using your Azure CLI login
## Method 1: Personal Access Token (PAT) Authentication
PAT authentication is the simplest method and works well for personal use or testing.
### Setup Instructions
1. **Generate a PAT in Azure DevOps**:
- Go to https://dev.azure.com/{your-organization}/_usersSettings/tokens
- Or click on your profile picture > Personal access tokens
- Select "+ New Token"
- Name your token (e.g., "MCP Server Access")
- Set an expiration date
- Select the following scopes:
- **Code**: Read & Write
- **Work Items**: Read & Write
- **Build**: Read & Execute
- **Project and Team**: Read
- **Graph**: Read
- **Release**: Read & Execute
- Click "Create" and copy the generated token
2. **Configure your `.env` file**:
```
AZURE_DEVOPS_AUTH_METHOD=pat
AZURE_DEVOPS_ORG_URL=https://dev.azure.com/your-organization
AZURE_DEVOPS_PAT=your-personal-access-token
AZURE_DEVOPS_DEFAULT_PROJECT=your-default-project
```
### Security Considerations
- PATs have an expiration date and will need to be renewed
- Store your PAT securely and never commit it to source control
- Consider using environment variables or a secrets manager in production
- Scope your PAT to only the permissions needed for your use case
## Method 2: Azure Identity Authentication (DefaultAzureCredential)
Azure Identity authentication uses the `DefaultAzureCredential` class from the `@azure/identity` package, which provides a simplified authentication experience by trying multiple credential types in sequence.
### How DefaultAzureCredential Works
`DefaultAzureCredential` tries the following credential types in order:
1. Environment variables (EnvironmentCredential)
2. Managed Identity (ManagedIdentityCredential)
3. Azure CLI (AzureCliCredential)
4. Visual Studio Code (VisualStudioCodeCredential)
5. Azure PowerShell (AzurePowerShellCredential)
6. Interactive Browser (InteractiveBrowserCredential) - optional, disabled by default
This makes it ideal for applications that need to work in different environments (local development, Azure-hosted) without code changes.
### Setup Instructions
1. **Install the Azure Identity SDK**:
The SDK is already included as a dependency in the Azure DevOps MCP Server.
2. **Configure your `.env` file**:
```
AZURE_DEVOPS_AUTH_METHOD=azure-identity
AZURE_DEVOPS_ORG_URL=https://dev.azure.com/your-organization
AZURE_DEVOPS_DEFAULT_PROJECT=your-default-project
```
3. **Set up credentials based on your environment**:
a. **For service principals (client credentials)**:
```
AZURE_TENANT_ID=your-tenant-id
AZURE_CLIENT_ID=your-client-id
AZURE_CLIENT_SECRET=your-client-secret
```
b. **For managed identities in Azure**:
No additional configuration needed if running in Azure with a managed identity.
c. **For local development**:
- Log in with Azure CLI: `az login`
- Or use Visual Studio Code Azure Account extension
### Security Considerations
- Use managed identities in Azure for improved security
- For service principals, rotate client secrets regularly
- Store credentials securely using Azure Key Vault or environment variables
- Apply the principle of least privilege when assigning roles
## Method 3: Azure CLI Authentication
Azure CLI authentication uses the `AzureCliCredential` class from the `@azure/identity` package, which authenticates using the Azure CLI's logged-in account.
### Setup Instructions
1. **Install the Azure CLI**:
- Follow the instructions at https://learn.microsoft.com/cli/azure/install-azure-cli
2. **Log in to Azure**:
```bash
az login
```
3. **Configure your `.env` file**:
```
AZURE_DEVOPS_AUTH_METHOD=azure-cli
AZURE_DEVOPS_ORG_URL=https://dev.azure.com/your-organization
AZURE_DEVOPS_DEFAULT_PROJECT=your-default-project
```
### Security Considerations
- Azure CLI authentication is best for local development
- Ensure your Azure CLI session is kept secure
- Log out when not in use: `az logout`
## Configuration Reference
| Variable | Description | Required | Default |
| ------------------------------ | ---------------------------------------------------------------------------------- | ---------------------------- | ---------------- |
| `AZURE_DEVOPS_AUTH_METHOD` | Authentication method (`pat`, `azure-identity`, or `azure-cli`) - case-insensitive | No | `azure-identity` |
| `AZURE_DEVOPS_ORG_URL` | Full URL to your Azure DevOps organization | Yes | - |
| `AZURE_DEVOPS_PAT` | Personal Access Token (for PAT auth) | Only with PAT auth | - |
| `AZURE_DEVOPS_DEFAULT_PROJECT` | Default project if none specified | No | - |
| `AZURE_DEVOPS_API_VERSION` | API version to use | No | Latest |
| `AZURE_TENANT_ID` | Azure AD tenant ID (for service principals) | Only with service principals | - |
| `AZURE_CLIENT_ID` | Azure AD application ID (for service principals) | Only with service principals | - |
| `AZURE_CLIENT_SECRET` | Azure AD client secret (for service principals) | Only with service principals | - |
| `LOG_LEVEL` | Logging level (debug, info, warn, error) | No | info |
## Troubleshooting Authentication Issues
### PAT Authentication Issues
1. **Invalid PAT**: Ensure your PAT hasn't expired and has the required scopes
- Error: `TF400813: The user 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx' is not authorized to access this resource.`
- Solution: Generate a new PAT with the correct scopes
2. **Scope issues**: If receiving 403 errors, check if your PAT has the necessary permissions
- Error: `TF401027: You need the Git 'Read' permission to perform this action.`
- Solution: Update your PAT with the required scopes
3. **Organization access**: Verify your PAT has access to the organization specified in the URL
- Error: `TF400813: Resource not found for anonymous request.`
- Solution: Ensure your PAT has access to the specified organization
### Azure Identity Authentication Issues
1. **Missing credentials**: Ensure you have the necessary credentials configured
- Error: `CredentialUnavailableError: DefaultAzureCredential failed to retrieve a token`
- Solution: Check that you're logged in with Azure CLI or have environment variables set
2. **Permission issues**: Verify your identity has the necessary permissions
- Error: `AuthorizationFailed: The client does not have authorization to perform action`
- Solution: Assign the appropriate roles to your identity
3. **Token acquisition errors**: Check network connectivity and Azure AD endpoint availability
- Error: `ClientAuthError: Interaction required`
- Solution: Check network connectivity or use a different credential type
### Azure CLI Authentication Issues
1. **CLI not installed**: Ensure Azure CLI is installed and in your PATH
- Error: `AzureCliCredential authentication failed: Azure CLI not found`
- Solution: Install Azure CLI
2. **Not logged in**: Verify you're logged in to Azure CLI
- Error: `AzureCliCredential authentication failed: Please run 'az login'`
- Solution: Run `az login`
3. **Permission issues**: Check if your Azure CLI account has access to Azure DevOps
- Error: `TF400813: The user is not authorized to access this resource`
- Solution: Log in with an account that has access to Azure DevOps
## Best Practices
1. **Choose the right authentication method for your environment**:
- For local development: Azure CLI or PAT
- For CI/CD pipelines: PAT or service principal
- For Azure-hosted applications: Managed Identity
2. **Follow the principle of least privilege**:
- Only grant the permissions needed for your use case
- Regularly review and rotate credentials
3. **Secure your credentials**:
- Use environment variables or a secrets manager
- Never commit credentials to source control
- Set appropriate expiration dates for PATs
4. **Monitor and audit authentication**:
- Review Azure DevOps access logs
- Set up alerts for suspicious activity
## Examples
### Example 1: Local Development with PAT
```bash
# .env file
AZURE_DEVOPS_AUTH_METHOD=pat
AZURE_DEVOPS_ORG_URL=https://dev.azure.com/mycompany
AZURE_DEVOPS_PAT=abcdefghijklmnopqrstuvwxyz0123456789
AZURE_DEVOPS_DEFAULT_PROJECT=MyProject
```
### Example 2: Azure-hosted Application with Managed Identity
```bash
# .env file
AZURE_DEVOPS_AUTH_METHOD=azure-identity
AZURE_DEVOPS_ORG_URL=https://dev.azure.com/mycompany
AZURE_DEVOPS_DEFAULT_PROJECT=MyProject
```
### Example 3: CI/CD Pipeline with Service Principal
```bash
# .env file
AZURE_DEVOPS_AUTH_METHOD=azure-identity
AZURE_DEVOPS_ORG_URL=https://dev.azure.com/mycompany
AZURE_DEVOPS_DEFAULT_PROJECT=MyProject
AZURE_TENANT_ID=00000000-0000-0000-0000-000000000000
AZURE_CLIENT_ID=11111111-1111-1111-1111-111111111111
AZURE_CLIENT_SECRET=your-client-secret
```
### Example 4: Local Development with Azure CLI
```bash
# .env file
AZURE_DEVOPS_AUTH_METHOD=azure-cli
AZURE_DEVOPS_ORG_URL=https://dev.azure.com/mycompany
AZURE_DEVOPS_DEFAULT_PROJECT=MyProject
```
```
--------------------------------------------------------------------------------
/src/features/pipelines/download-pipeline-artifact/feature.ts:
--------------------------------------------------------------------------------
```typescript
import axios from 'axios';
import JSZip from 'jszip';
import { WebApi } from 'azure-devops-node-api';
import { BuildArtifact } from 'azure-devops-node-api/interfaces/BuildInterfaces';
import { GetArtifactExpandOptions } from 'azure-devops-node-api/interfaces/PipelinesInterfaces';
import {
AzureDevOpsAuthenticationError,
AzureDevOpsError,
AzureDevOpsResourceNotFoundError,
} from '../../../shared/errors';
import { defaultProject } from '../../../utils/environment';
import { parseArtifactContainer } from '../artifacts';
import { resolvePipelineId } from '../helpers';
import {
DownloadPipelineArtifactOptions,
PipelineArtifactContent,
} from '../types';
/**
 * Splits a combined artifact path ("<artifact>/<path/to/file>") into the
 * artifact name and the file path relative to that artifact. Accepts both
 * forward and back slashes and ignores leading/trailing separators.
 *
 * @throws AzureDevOpsResourceNotFoundError when the artifact name or the
 *         file path portion is missing.
 */
function normalizeArtifactPath(artifactPath: string): {
  artifactName: string;
  relativePath: string;
} {
  const trimmedInput = artifactPath.trim();
  if (!trimmedInput) {
    throw new AzureDevOpsResourceNotFoundError(
      'Artifact path must include the artifact name and file path.',
    );
  }
  // Drop leading/trailing separators, then break the remainder into segments.
  const [artifactName, ...fileSegments] = trimmedInput
    .replace(/^[\\/]+/, '')
    .replace(/[\\/]+$/, '')
    .split(/[\\/]+/)
    .filter((segment) => segment.length > 0);
  if (!artifactName) {
    throw new AzureDevOpsResourceNotFoundError(
      'Artifact path must include the artifact name and file path.',
    );
  }
  if (fileSegments.length === 0) {
    throw new AzureDevOpsResourceNotFoundError(
      'Please specify a file path inside the artifact (e.g. <artifact>/<path/to/file>).',
    );
  }
  return {
    artifactName,
    relativePath: fileSegments.join('/'),
  };
}
/**
 * Joins path fragments with single forward slashes, skipping undefined or
 * empty fragments and trimming slashes/backslashes from each fragment's ends.
 */
function joinPaths(...parts: Array<string | undefined>): string {
  const cleaned: string[] = [];
  for (const part of parts) {
    if (typeof part !== 'string' || part.length === 0) {
      continue;
    }
    const trimmed = part.replace(/^[\\/]+|[\\/]+$/g, '');
    if (trimmed.length > 0) {
      cleaned.push(trimmed);
    }
  }
  return cleaned.join('/');
}
/**
 * Produces the distinct container paths under which a file might be stored,
 * in priority order: bare relative path, then combinations prefixed with the
 * container root path and/or the artifact name.
 */
function buildContainerPathCandidates(
  artifactName: string,
  rootPath: string | undefined,
  relativePath: string,
): string[] {
  const relative = relativePath.replace(/^[\\/]+/, '');
  // A Set preserves insertion order while removing duplicate layouts.
  const seen = new Set<string>([
    relative,
    joinPaths(rootPath, relative),
    joinPaths(artifactName, relative),
    joinPaths(rootPath, artifactName, relative),
    joinPaths(artifactName, rootPath, relative),
  ]);
  return [...seen].filter((candidate) => candidate.length > 0);
}
/**
 * Buffers an entire readable stream into memory, resolving with the
 * concatenated bytes when the stream ends and rejecting on stream error.
 * Non-Buffer chunks (e.g. strings) are converted before concatenation.
 */
function streamToBuffer(stream: NodeJS.ReadableStream): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    const collected: Buffer[] = [];
    stream.on('error', reject);
    stream.on('end', () => resolve(Buffer.concat(collected)));
    stream.on('data', (chunk) => {
      collected.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
    });
  });
}
/**
 * Tries to open a download stream for a file stored in a build file
 * container, probing each candidate path under each scope (project-scoped
 * first, then unscoped).
 *
 * @returns the stream plus the path that matched, or null when the container
 *          API is unavailable or no candidate resolved to a file.
 * @throws AzureDevOpsAuthenticationError when the container API reports a
 *         403/forbidden/access failure.
 */
async function getContainerItemStream(
  connection: WebApi,
  containerId: number,
  projectId: string,
  candidatePaths: string[],
): Promise<{ stream: NodeJS.ReadableStream; path: string } | null> {
  // Some WebApi versions/mocks may not expose the file-container API at all.
  if (typeof connection.getFileContainerApi !== 'function') {
    return null;
  }
  const fileContainerApi = await connection.getFileContainerApi();
  if (!fileContainerApi || typeof fileContainerApi.getItem !== 'function') {
    return null;
  }
  // Deduplicate so we don't probe the same scope twice.
  const scopeCandidates = [projectId, undefined].filter(
    (scope, index, array) => array.indexOf(scope) === index,
  );
  for (const candidatePath of candidatePaths) {
    for (const scope of scopeCandidates) {
      try {
        const response = await fileContainerApi.getItem(
          containerId,
          scope,
          candidatePath,
        );
        // 404 means this path/scope combination doesn't exist; keep probing.
        if (response.statusCode === 404) {
          continue;
        }
        if (response.result) {
          return { stream: response.result, path: candidatePath };
        }
      } catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        // Permission failures are fatal — retrying other paths won't help.
        if (/\b403\b|forbidden|access/i.test(message)) {
          throw new AzureDevOpsAuthenticationError(
            `Failed to access container ${containerId}: ${message}`,
          );
        }
        // Ignore other errors and try the next variation; the container API
        // returns 400 for invalid paths, which we treat as a miss.
      }
    }
  }
  return null;
}
/** Escapes regex metacharacters so `value` can be embedded in a RegExp literally. */
function escapeRegExp(value: string): string {
  let escaped = '';
  for (const ch of value) {
    escaped += '.*+?^${}()|[]\\'.includes(ch) ? `\\${ch}` : ch;
  }
  return escaped;
}
/** Strips slashes/backslashes from both ends, then normalizes backslashes to '/'. */
function normalizeZipPath(path: string): string {
  const trimmedEnds = path.replace(/^[\\/]+/, '').replace(/[\\/]+$/, '');
  return trimmedEnds.replace(/\\+/g, '/');
}
/**
 * Locates a file entry inside a downloaded artifact zip.
 *
 * Exact candidate paths are tried first (the bare relative path, then
 * variants prefixed with the artifact name and/or container root path). If
 * none match, falls back to a suffix search and returns the least-nested
 * (shortest-named) non-directory match.
 *
 * @returns the matching file entry, or null when no entry matches.
 */
function selectZipEntry(
  zip: JSZip,
  relativePath: string,
  artifactName: string,
  rootPath?: string,
): JSZip.JSZipObject | null {
  const normalized = normalizeZipPath(relativePath);
  const candidates = [normalized];
  if (artifactName) {
    candidates.push(`${artifactName}/${normalized}`);
  }
  if (rootPath) {
    candidates.push(`${rootPath}/${normalized}`);
    if (artifactName) {
      candidates.push(`${artifactName}/${rootPath}/${normalized}`);
    }
  }
  for (const candidate of candidates) {
    const match = zip.file(candidate);
    if (!match) {
      continue;
    }
    const files = Array.isArray(match) ? match : [match];
    const file = files.find((entry) => !entry.dir);
    if (file) {
      return file;
    }
  }
  // Fallback: suffix search anchored at a path-segment boundary so that e.g.
  // "a.txt" cannot accidentally match "data.txt" (an unanchored `$`-only
  // pattern would match any entry merely ending with the text).
  const fallbackMatches = zip
    .file(new RegExp(`(?:^|/)${escapeRegExp(normalized)}$`))
    ?.filter((entry) => !entry.dir);
  if (fallbackMatches && fallbackMatches.length > 0) {
    // Prefer the least-nested match.
    fallbackMatches.sort((a, b) => a.name.length - b.name.length);
    return fallbackMatches[0] ?? null;
  }
  return null;
}
/**
 * Attempts to read a file from a build artifact backed by a file container.
 *
 * @returns the decoded file content, or null when the artifact has no
 *          container id (so the pipeline-artifact download path should be
 *          tried instead).
 * @throws AzureDevOpsResourceNotFoundError when the container exists but the
 *         requested file cannot be located under any candidate path.
 */
async function downloadFromContainer(
  connection: WebApi,
  projectId: string,
  artifactName: string,
  artifact: BuildArtifact,
  relativePath: string,
): Promise<PipelineArtifactContent | null> {
  const containerInfo = parseArtifactContainer(artifact.resource);
  if (typeof containerInfo.containerId !== 'number') {
    // Not a container-backed artifact; let the caller fall back.
    return null;
  }
  const resolved = await getContainerItemStream(
    connection,
    containerInfo.containerId,
    projectId,
    buildContainerPathCandidates(
      artifactName,
      containerInfo.rootPath,
      relativePath,
    ),
  );
  if (!resolved) {
    throw new AzureDevOpsResourceNotFoundError(
      `File ${relativePath} not found in artifact ${artifactName}.`,
    );
  }
  const fileBuffer = await streamToBuffer(resolved.stream);
  return {
    artifact: artifactName,
    path: resolved.path,
    content: fileBuffer.toString('utf8'),
  };
}
/**
 * Downloads a file from a pipeline (non-container) artifact by fetching the
 * artifact's zip archive and extracting the requested entry.
 *
 * Download URL preference: signed content URL from the Pipelines API, then
 * the build artifact's own downloadUrl, then the artifact details URL.
 *
 * @throws AzureDevOpsResourceNotFoundError when the pipeline id cannot be
 *         resolved, no download URL is available, or the file is not in the
 *         archive.
 */
async function downloadFromPipelineArtifact(
  connection: WebApi,
  projectId: string,
  runId: number,
  artifactName: string,
  artifact: BuildArtifact,
  relativePath: string,
  pipelineId?: number,
): Promise<PipelineArtifactContent> {
  const resolvedPipelineId = await resolvePipelineId(
    connection,
    projectId,
    runId,
    pipelineId,
  );
  if (typeof resolvedPipelineId !== 'number') {
    throw new AzureDevOpsResourceNotFoundError(
      `Unable to resolve pipeline identifier for artifact ${artifactName}.`,
    );
  }
  const pipelinesApi = await connection.getPipelinesApi();
  // SignedContent yields a pre-authorized URL we can fetch without extra auth.
  const artifactDetails = await pipelinesApi.getArtifact(
    projectId,
    resolvedPipelineId,
    runId,
    artifactName,
    GetArtifactExpandOptions.SignedContent,
  );
  const downloadUrl =
    artifactDetails?.signedContent?.url ||
    artifact.resource?.downloadUrl ||
    artifactDetails?.url;
  if (!downloadUrl) {
    throw new AzureDevOpsResourceNotFoundError(
      `Artifact ${artifactName} does not expose downloadable content.`,
    );
  }
  // The artifact is delivered as a zip archive; load it fully into memory.
  const response = await axios.get<ArrayBuffer>(downloadUrl, {
    responseType: 'arraybuffer',
  });
  const zip = await JSZip.loadAsync(response.data);
  const file = selectZipEntry(
    zip,
    relativePath,
    artifactName,
    parseArtifactContainer(artifact.resource).rootPath,
  );
  if (!file) {
    throw new AzureDevOpsResourceNotFoundError(
      `File ${relativePath} not found in artifact ${artifactName}.`,
    );
  }
  const content = await file.async('string');
  return {
    artifact: artifactName,
    path: file.name,
    content,
  };
}
/**
 * Downloads a single file from a pipeline run's artifact.
 *
 * The artifact path in `options.artifactPath` is "<artifact>/<path/to/file>".
 * Container-backed artifacts are read via the file-container API; all other
 * artifacts fall back to downloading and unzipping the artifact archive.
 *
 * @throws AzureDevOpsResourceNotFoundError when the run, artifact, or file
 *         cannot be found.
 * @throws AzureDevOpsAuthenticationError when the underlying error message
 *         indicates an authentication failure.
 * @throws AzureDevOpsError for any other failure.
 */
export async function downloadPipelineArtifact(
  connection: WebApi,
  options: DownloadPipelineArtifactOptions,
): Promise<PipelineArtifactContent> {
  try {
    const projectId = options.projectId ?? defaultProject;
    const runId = options.runId;
    const { artifactName, relativePath } = normalizeArtifactPath(
      options.artifactPath,
    );
    const buildApi = await connection.getBuildApi();
    let artifacts: BuildArtifact[];
    try {
      artifacts = await buildApi.getArtifacts(projectId, runId);
      // NOTE(review): any failure here (including auth errors) is reported as
      // "run not found" — confirm this masking is intentional.
    } catch (error) {
      throw new AzureDevOpsResourceNotFoundError(
        `Pipeline run ${runId} not found in project ${projectId}: ${String(error)}`,
      );
    }
    const artifact = artifacts.find((item) => item.name === artifactName);
    if (!artifact) {
      throw new AzureDevOpsResourceNotFoundError(
        `Artifact ${artifactName} not found for run ${runId} in project ${projectId}.`,
      );
    }
    // Prefer the container API; null means "not container-backed", so fall
    // back to the zip-download path.
    const containerResult = await downloadFromContainer(
      connection,
      projectId,
      artifactName,
      artifact,
      relativePath,
    );
    if (containerResult) {
      return containerResult;
    }
    return await downloadFromPipelineArtifact(
      connection,
      projectId,
      runId,
      artifactName,
      artifact,
      relativePath,
      options.pipelineId,
    );
  } catch (error) {
    // Preserve already-classified errors; otherwise classify by message text.
    if (error instanceof AzureDevOpsError) {
      throw error;
    }
    if (error instanceof Error) {
      const message = error.message.toLowerCase();
      if (
        message.includes('authentication') ||
        message.includes('unauthorized') ||
        message.includes('401')
      ) {
        throw new AzureDevOpsAuthenticationError(
          `Failed to authenticate: ${error.message}`,
        );
      }
      if (
        message.includes('not found') ||
        message.includes('does not exist') ||
        message.includes('404')
      ) {
        throw new AzureDevOpsResourceNotFoundError(
          `Pipeline artifact or project not found: ${error.message}`,
        );
      }
    }
    throw new AzureDevOpsError(
      `Failed to download pipeline artifact: ${
        error instanceof Error ? error.message : String(error)
      }`,
    );
  }
}
```
--------------------------------------------------------------------------------
/src/features/pull-requests/index.spec.unit.ts:
--------------------------------------------------------------------------------
```typescript
import { WebApi } from 'azure-devops-node-api';
import { CallToolRequest } from '@modelcontextprotocol/sdk/types.js';
import { isPullRequestsRequest, handlePullRequestsRequest } from './index';
import { createPullRequest } from './create-pull-request';
import { listPullRequests } from './list-pull-requests';
import { getPullRequestComments } from './get-pull-request-comments';
import { addPullRequestComment } from './add-pull-request-comment';
import { AddPullRequestCommentSchema } from './schemas';
import { getPullRequestChanges } from './get-pull-request-changes';
import { getPullRequestChecks } from './get-pull-request-checks';
// Mock every feature module so handler tests exercise only the routing
// logic in ./index, not the underlying Azure DevOps API calls.
jest.mock('./create-pull-request', () => ({
  createPullRequest: jest.fn(),
}));
jest.mock('./list-pull-requests', () => ({
  listPullRequests: jest.fn(),
}));
jest.mock('./get-pull-request-comments', () => ({
  getPullRequestComments: jest.fn(),
}));
jest.mock('./add-pull-request-comment', () => ({
  addPullRequestComment: jest.fn(),
}));
jest.mock('./get-pull-request-changes', () => ({
  getPullRequestChanges: jest.fn(),
}));
jest.mock('./get-pull-request-checks', () => ({
  getPullRequestChecks: jest.fn(),
}));
describe('Pull Requests Request Handlers', () => {
const mockConnection = {} as WebApi;
describe('isPullRequestsRequest', () => {
it('should return true for pull requests tools', () => {
const validTools = [
'create_pull_request',
'list_pull_requests',
'get_pull_request_comments',
'add_pull_request_comment',
'get_pull_request_changes',
'get_pull_request_checks',
];
validTools.forEach((tool) => {
const request = {
params: { name: tool, arguments: {} },
method: 'tools/call',
} as CallToolRequest;
expect(isPullRequestsRequest(request)).toBe(true);
});
});
it('should return false for non-pull requests tools', () => {
const request = {
params: { name: 'list_projects', arguments: {} },
method: 'tools/call',
} as CallToolRequest;
expect(isPullRequestsRequest(request)).toBe(false);
});
});
describe('handlePullRequestsRequest', () => {
it('should handle create_pull_request request', async () => {
const mockPullRequest = { id: 1, title: 'Test PR' };
(createPullRequest as jest.Mock).mockResolvedValue(mockPullRequest);
const request = {
params: {
name: 'create_pull_request',
arguments: {
repositoryId: 'test-repo',
title: 'Test PR',
sourceRefName: 'refs/heads/feature',
targetRefName: 'refs/heads/main',
tags: ['Tag-One'],
},
},
method: 'tools/call',
} as CallToolRequest;
const response = await handlePullRequestsRequest(mockConnection, request);
expect(response.content).toHaveLength(1);
expect(JSON.parse(response.content[0].text as string)).toEqual(
mockPullRequest,
);
expect(createPullRequest).toHaveBeenCalledWith(
mockConnection,
expect.any(String),
'test-repo',
expect.objectContaining({
title: 'Test PR',
sourceRefName: 'refs/heads/feature',
targetRefName: 'refs/heads/main',
tags: ['Tag-One'],
}),
);
});
it('should handle list_pull_requests request', async () => {
const mockPullRequests = {
count: 2,
value: [
{ id: 1, title: 'PR 1' },
{ id: 2, title: 'PR 2' },
],
hasMoreResults: false,
};
(listPullRequests as jest.Mock).mockResolvedValue(mockPullRequests);
const request = {
params: {
name: 'list_pull_requests',
arguments: {
repositoryId: 'test-repo',
status: 'active',
},
},
method: 'tools/call',
} as CallToolRequest;
const response = await handlePullRequestsRequest(mockConnection, request);
expect(response.content).toHaveLength(1);
expect(JSON.parse(response.content[0].text as string)).toEqual(
mockPullRequests,
);
expect(listPullRequests).toHaveBeenCalledWith(
mockConnection,
expect.any(String),
'test-repo',
expect.objectContaining({
status: 'active',
pullRequestId: undefined,
}),
);
});
it('should pass pullRequestId to list_pull_requests request', async () => {
const mockPullRequests = {
count: 1,
value: [{ id: 42, title: 'PR 42' }],
hasMoreResults: false,
};
(listPullRequests as jest.Mock).mockResolvedValue(mockPullRequests);
const request = {
params: {
name: 'list_pull_requests',
arguments: {
repositoryId: 'test-repo',
pullRequestId: 42,
},
},
method: 'tools/call',
} as CallToolRequest;
const response = await handlePullRequestsRequest(mockConnection, request);
expect(response.content).toHaveLength(1);
expect(JSON.parse(response.content[0].text as string)).toEqual(
mockPullRequests,
);
expect(listPullRequests).toHaveBeenCalledWith(
mockConnection,
expect.any(String),
'test-repo',
expect.objectContaining({
pullRequestId: 42,
}),
);
});
it('should handle get_pull_request_comments request', async () => {
const mockComments = {
threads: [
{
id: 1,
comments: [{ id: 1, content: 'Comment 1' }],
},
],
};
(getPullRequestComments as jest.Mock).mockResolvedValue(mockComments);
const request = {
params: {
name: 'get_pull_request_comments',
arguments: {
repositoryId: 'test-repo',
pullRequestId: 123,
},
},
method: 'tools/call',
} as CallToolRequest;
const response = await handlePullRequestsRequest(mockConnection, request);
expect(response.content).toHaveLength(1);
expect(JSON.parse(response.content[0].text as string)).toEqual(
mockComments,
);
expect(getPullRequestComments).toHaveBeenCalledWith(
mockConnection,
expect.any(String),
'test-repo',
123,
expect.objectContaining({
pullRequestId: 123,
}),
);
});
it('should handle add_pull_request_comment request', async () => {
const mockResult = {
comment: { id: 1, content: 'New comment' },
thread: { id: 1 },
};
(addPullRequestComment as jest.Mock).mockResolvedValue(mockResult);
const request = {
params: {
name: 'add_pull_request_comment',
arguments: {
repositoryId: 'test-repo',
pullRequestId: 123,
content: 'New comment',
status: 'active', // Status is required when creating a new thread
},
},
method: 'tools/call',
} as CallToolRequest;
// Mock the schema parsing
const mockParsedArgs = {
repositoryId: 'test-repo',
pullRequestId: 123,
content: 'New comment',
status: 'active',
};
// Use a different approach for mocking
const originalParse = AddPullRequestCommentSchema.parse;
AddPullRequestCommentSchema.parse = jest
.fn()
.mockReturnValue(mockParsedArgs);
const response = await handlePullRequestsRequest(mockConnection, request);
expect(response.content).toHaveLength(1);
expect(JSON.parse(response.content[0].text as string)).toEqual(
mockResult,
);
expect(addPullRequestComment).toHaveBeenCalledWith(
mockConnection,
expect.any(String),
'test-repo',
123,
expect.objectContaining({
content: 'New comment',
}),
);
// Restore the original parse function
AddPullRequestCommentSchema.parse = originalParse;
});
it('should handle get_pull_request_changes request', async () => {
const mockResult = { changes: { changeEntries: [] }, evaluations: [] };
(getPullRequestChanges as jest.Mock).mockResolvedValue(mockResult);
const request = {
params: {
name: 'get_pull_request_changes',
arguments: { repositoryId: 'test-repo', pullRequestId: 1 },
},
method: 'tools/call',
} as CallToolRequest;
const response = await handlePullRequestsRequest(mockConnection, request);
expect(JSON.parse(response.content[0].text as string)).toEqual(
mockResult,
);
expect(getPullRequestChanges).toHaveBeenCalled();
});
it('should handle get_pull_request_checks request', async () => {
const mockResult = { statuses: [], policyEvaluations: [] };
(getPullRequestChecks as jest.Mock).mockResolvedValue(mockResult);
const request = {
params: {
name: 'get_pull_request_checks',
arguments: { repositoryId: 'test-repo', pullRequestId: 7 },
},
method: 'tools/call',
} as CallToolRequest;
const response = await handlePullRequestsRequest(mockConnection, request);
expect(JSON.parse(response.content[0].text as string)).toEqual(
mockResult,
);
expect(getPullRequestChecks).toHaveBeenCalledWith(
mockConnection,
expect.objectContaining({
repositoryId: 'test-repo',
pullRequestId: 7,
}),
);
});
it('should throw error for unknown tool', async () => {
const request = {
params: {
name: 'unknown_tool',
arguments: {},
},
method: 'tools/call',
} as CallToolRequest;
await expect(
handlePullRequestsRequest(mockConnection, request),
).rejects.toThrow('Unknown pull requests tool');
});
it('should propagate errors from pull request functions', async () => {
const mockError = new Error('Test error');
(listPullRequests as jest.Mock).mockRejectedValue(mockError);
const request = {
params: {
name: 'list_pull_requests',
arguments: {
repositoryId: 'test-repo',
},
},
method: 'tools/call',
} as CallToolRequest;
await expect(
handlePullRequestsRequest(mockConnection, request),
).rejects.toThrow(mockError);
});
});
});
```
--------------------------------------------------------------------------------
/src/features/pipelines/artifacts.ts:
--------------------------------------------------------------------------------
```typescript
import axios from 'axios';
import JSZip from 'jszip';
import { WebApi } from 'azure-devops-node-api';
import {
BuildArtifact,
ArtifactResource,
} from 'azure-devops-node-api/interfaces/BuildInterfaces';
import {
ContainerItemType,
FileContainerItem,
} from 'azure-devops-node-api/interfaces/FileContainerInterfaces';
import { GetArtifactExpandOptions } from 'azure-devops-node-api/interfaces/PipelinesInterfaces';
import { PipelineArtifactItem, PipelineRunArtifact } from './types';
// Container metadata parsed from an artifact resource's `data` field
// (observed format: "#/<containerId>/<rootPath...>").
interface ArtifactContainerInfo {
  containerId?: number;
  rootPath?: string;
}
// Cap on the number of entries reported per artifact listing.
const MAX_ITEMS_PER_ARTIFACT = 200;
/**
 * Parses the container id and root path out of a build artifact resource.
 * Returns an empty object when the `data` field is absent or malformed.
 */
function extractContainerInfo(
  resource?: ArtifactResource,
): ArtifactContainerInfo {
  const data = resource?.data;
  if (typeof data !== 'string' || data.length === 0) {
    return {};
  }
  const parts = data.split('/').filter((part) => part.length > 0);
  if (parts.length < 2) {
    return {};
  }
  // The second segment carries the numeric container id.
  const parsedId = Number.parseInt(parts[1] ?? '', 10);
  if (Number.isNaN(parsedId)) {
    return {};
  }
  const remainder = parts.slice(2).join('/');
  return {
    containerId: parsedId,
    rootPath: remainder.length > 0 ? remainder : undefined,
  };
}
/** Converts a raw BuildArtifact into the flattened shape used by this feature. */
function mapBuildArtifact(artifact: BuildArtifact): PipelineRunArtifact {
  const { containerId, rootPath } = extractContainerInfo(artifact.resource);
  return {
    name: artifact.name ?? 'unknown',
    type: artifact.resource?.type,
    source: artifact.source,
    downloadUrl: artifact.resource?.downloadUrl,
    resourceUrl: artifact.resource?.url,
    containerId,
    rootPath,
  };
}
/** Trims leading/trailing slashes and backslashes from a single segment. */
function normalizePathSegment(segment: string): string {
  return segment.replace(/^[\\/]+/, '').replace(/[\\/]+$/, '');
}
/** Converts backslashes to forward slashes and drops any leading slashes. */
function normalizeFullPath(path: string): string {
  return path.replace(/\\+/g, '/').replace(/^\/+/, '');
}
/**
 * Strips the longest matching prefix (e.g. artifact name or root path) from
 * `path`, yielding a path relative to the artifact. Returns '' when the path
 * is exactly one of the prefixes.
 */
function makeRelativePath(path: string, prefixes: string[]): string {
  const normalized = normalizeFullPath(path);
  // Longest prefix first so a nested prefix wins over its parent.
  const candidates = prefixes
    .map((prefix) => normalizePathSegment(prefix))
    .filter((prefix) => prefix.length > 0)
    .sort((a, b) => b.length - a.length);
  for (const prefix of candidates) {
    if (normalized === prefix) {
      return '';
    }
    if (normalized.startsWith(`${prefix}/`)) {
      return normalized.slice(prefix.length + 1);
    }
  }
  return normalized;
}
function mapContainerItems(
items: FileContainerItem[],
artifact: PipelineRunArtifact,
): { items: PipelineArtifactItem[]; truncated: boolean } {
const basePrefixes = [artifact.rootPath, artifact.name].filter(
(value): value is string => typeof value === 'string' && value.length > 0,
);
const uniquePaths = new Set<string>();
const mapped: PipelineArtifactItem[] = [];
let truncated = false;
for (const item of items) {
const relative = makeRelativePath(item.path, basePrefixes);
if (relative.length === 0) {
continue;
}
if (uniquePaths.has(relative)) {
continue;
}
uniquePaths.add(relative);
mapped.push({
path: relative,
itemType: item.itemType === ContainerItemType.Folder ? 'folder' : 'file',
size: item.fileLength,
});
if (mapped.length >= MAX_ITEMS_PER_ARTIFACT) {
truncated = true;
break;
}
}
mapped.sort((a, b) => a.path.localeCompare(b.path));
return {
items: mapped,
truncated,
};
}
/**
 * Lists the contents of a container-backed artifact via the file-container
 * API, probing scope (project-scoped, then unscoped) and item-path (root
 * path, artifact name, then none) combinations until one yields items.
 *
 * @returns the mapped items plus a truncation flag, or an empty object when
 *          the artifact is not container-backed, the API is unavailable, or
 *          no combination produced results.
 */
async function listContainerItems(
  connection: WebApi,
  projectId: string,
  artifact: PipelineRunArtifact,
): Promise<{ items?: PipelineArtifactItem[]; truncated?: boolean }> {
  if (typeof artifact.containerId !== 'number') {
    return {};
  }
  // Some WebApi versions/mocks may not expose the file-container API.
  const fileContainerApi =
    typeof connection.getFileContainerApi === 'function'
      ? await connection.getFileContainerApi()
      : null;
  if (!fileContainerApi || typeof fileContainerApi.getItems !== 'function') {
    return {};
  }
  // Deduplicate candidate lists so identical combinations aren't retried.
  const scopeCandidates = [projectId, undefined].filter(
    (scope, index, array) => array.indexOf(scope) === index,
  );
  const itemPathCandidates = [
    artifact.rootPath,
    artifact.name,
    undefined,
  ].filter((value, index, arr) => arr.indexOf(value) === index);
  for (const scope of scopeCandidates) {
    for (const itemPath of itemPathCandidates) {
      try {
        const items = await fileContainerApi.getItems(
          artifact.containerId,
          scope,
          typeof itemPath === 'string' && itemPath.length > 0
            ? itemPath
            : undefined,
        );
        if (!Array.isArray(items) || items.length === 0) {
          continue;
        }
        const { items: mapped, truncated } = mapContainerItems(items, artifact);
        // All entries may collapse to the artifact root; treat as a miss.
        if (mapped.length === 0) {
          continue;
        }
        return {
          items: mapped,
          truncated,
        };
      } catch {
        // Swallow and try next combination.
      }
    }
  }
  return {};
}
/**
 * Best-effort listing of a Pipeline Artifact's contents by downloading its
 * zip archive and reading the entry table.
 *
 * Prefers the short-lived signed URL when present, falling back to the
 * regular download/resource URLs. File entries are capped at
 * MAX_ITEMS_PER_ARTIFACT; parent folders are synthesized for files whose
 * directories have no explicit archive entry. Returns {} when no download
 * URL exists or any download/parse step fails.
 */
async function listPipelineArtifactItems(
  artifact: PipelineRunArtifact,
): Promise<{ items?: PipelineArtifactItem[]; truncated?: boolean }> {
  const downloadUrl =
    artifact.signedContentUrl || artifact.downloadUrl || artifact.resourceUrl;
  if (!downloadUrl) {
    return {};
  }
  try {
    const response = await axios.get<ArrayBuffer>(downloadUrl, {
      responseType: 'arraybuffer',
    });
    const zip = await JSZip.loadAsync(response.data);
    const basePrefixes = [artifact.name, artifact.rootPath].filter(
      (value): value is string => typeof value === 'string' && value.length > 0,
    );
    const items: PipelineArtifactItem[] = [];
    const directories = new Set<string>();
    let hitLimit = false;
    zip.forEach((entryPath, entry) => {
      // Once the file cap is hit, ignore the remaining archive entries.
      if (hitLimit) {
        return;
      }
      const relative = makeRelativePath(entryPath, basePrefixes);
      if (relative.length === 0) {
        return;
      }
      if (entry.dir) {
        const folderPath = relative.replace(/\/+$/, '');
        if (folderPath.length > 0) {
          directories.add(folderPath);
        }
        return;
      }
      // Ensure parent folders are recorded even when the archive omits
      // explicit directory entries.
      const segments = relative.split('/');
      if (segments.length > 1) {
        for (let i = 1; i < segments.length; i += 1) {
          const folder = segments.slice(0, i).join('/');
          directories.add(folder);
        }
      }
      items.push({
        path: relative,
        itemType: 'file',
      });
      if (items.length >= MAX_ITEMS_PER_ARTIFACT) {
        hitLimit = true;
      }
    });
    const folderItems: PipelineArtifactItem[] = Array.from(directories)
      .filter((folder) => folder.length > 0)
      .map((folder) => ({ path: folder, itemType: 'folder' }));
    // Deduplicate by path keeping the first occurrence (folders precede
    // files, so a folder wins over a same-named file). A Set lookup keeps
    // this O(n) instead of the previous O(n^2) findIndex-in-filter scan.
    const seenPaths = new Set<string>();
    const combined = [...folderItems, ...items]
      .filter((entry) => {
        if (seenPaths.has(entry.path)) {
          return false;
        }
        seenPaths.add(entry.path);
        return true;
      })
      .sort((a, b) => a.path.localeCompare(b.path));
    // Folders are not counted toward the file cap, so the combined list can
    // still exceed MAX_ITEMS_PER_ARTIFACT and must be sliced.
    const truncated = hitLimit || combined.length > MAX_ITEMS_PER_ARTIFACT;
    return {
      items: truncated ? combined.slice(0, MAX_ITEMS_PER_ARTIFACT) : combined,
      truncated,
    };
  } catch {
    // Listing is best-effort; any download or parse failure yields an empty
    // result rather than failing the caller.
    return {};
  }
}
/**
 * Fetches the artifacts published by a pipeline run and, best-effort,
 * enriches each with a listing of its contents.
 *
 * Steps (each tolerates failure independently):
 *  1. Map the raw build artifacts to summaries.
 *  2. When a pipelineId is given, attach signed download URLs obtained from
 *     the Pipelines API.
 *  3. List contents via the file-container API (container artifacts) and/or
 *     by downloading the artifact zip (pipeline artifacts), merging and
 *     deduplicating, capped at MAX_ITEMS_PER_ARTIFACT.
 *
 * Returns [] when the Build API is unavailable, the run has no artifacts,
 * or any top-level error occurs.
 */
export async function fetchRunArtifacts(
  connection: WebApi,
  projectId: string,
  runId: number,
  pipelineId?: number,
): Promise<PipelineRunArtifact[]> {
  try {
    const buildApi = await connection.getBuildApi();
    // Guarded lookup so partial API surfaces (e.g. test doubles) don't throw.
    if (!buildApi || typeof buildApi.getArtifacts !== 'function') {
      return [];
    }
    const artifacts = await buildApi.getArtifacts(projectId, runId);
    if (!artifacts || artifacts.length === 0) {
      return [];
    }
    const summaries = artifacts.map(mapBuildArtifact);
    if (typeof pipelineId === 'number') {
      const pipelinesApi = await connection.getPipelinesApi();
      // Enrich all summaries with signed URLs in parallel.
      await Promise.all(
        summaries.map(async (summary) => {
          try {
            const artifactDetails = await pipelinesApi.getArtifact(
              projectId,
              pipelineId,
              runId,
              summary.name,
              GetArtifactExpandOptions.SignedContent,
            );
            const signedContentUrl = artifactDetails?.signedContent?.url;
            if (signedContentUrl) {
              summary.signedContentUrl = signedContentUrl;
            }
          } catch {
            // Ignore failures fetching signed content; best-effort enrichment.
          }
        }),
      );
    }
    const enriched = await Promise.all(
      summaries.map(async (artifact) => {
        // Choose listing strategies based on the artifact type; an artifact
        // can match both (container listing and zip listing).
        const collectors: Array<
          Promise<{ items?: PipelineArtifactItem[]; truncated?: boolean }>
        > = [];
        const artifactType = artifact.type?.toLowerCase();
        if (
          artifactType === 'container' ||
          typeof artifact.containerId === 'number'
        ) {
          collectors.push(listContainerItems(connection, projectId, artifact));
        }
        if (artifactType?.includes('pipelineartifact')) {
          collectors.push(listPipelineArtifactItems(artifact));
        }
        if (collectors.length === 0) {
          return artifact;
        }
        let aggregatedItems: PipelineArtifactItem[] | undefined;
        let truncated = false;
        for (const collector of collectors) {
          try {
            const result = await collector;
            if (!result.items || result.items.length === 0) {
              continue;
            }
            aggregatedItems = aggregatedItems
              ? [...aggregatedItems, ...result.items]
              : result.items;
            truncated = truncated || Boolean(result.truncated);
          } catch {
            // Continue to next collector
          }
        }
        if (!aggregatedItems || aggregatedItems.length === 0) {
          return artifact;
        }
        // Deduplicate by path (later collectors win via Map overwrite) and
        // sort for stable output.
        const uniqueItems = Array.from(
          new Map(aggregatedItems.map((item) => [item.path, item])).values(),
        ).sort((a, b) => a.path.localeCompare(b.path));
        return {
          ...artifact,
          items:
            uniqueItems.length > MAX_ITEMS_PER_ARTIFACT
              ? uniqueItems.slice(0, MAX_ITEMS_PER_ARTIFACT)
              : uniqueItems,
          // `undefined` rather than false keeps the field out of serialized
          // output when nothing was truncated.
          itemsTruncated:
            truncated ||
            uniqueItems.length > MAX_ITEMS_PER_ARTIFACT ||
            undefined,
        };
      }),
    );
    return enriched;
  } catch {
    // Artifact fetching is best-effort; any failure is reported as "none".
    return [];
  }
}
/**
 * Extracts just the container coordinates (id and root path) from an
 * artifact summary, for callers that need to query the file container API.
 */
export function getArtifactContainerInfo(
  artifact: PipelineRunArtifact,
): ArtifactContainerInfo {
  const { containerId, rootPath } = artifact;
  return { containerId, rootPath };
}
/**
 * Parses container information out of a raw artifact resource. Thin public
 * wrapper around extractContainerInfo; tolerates a missing resource.
 */
export function parseArtifactContainer(
  resource?: ArtifactResource,
): ArtifactContainerInfo {
  const info = extractContainerInfo(resource);
  return info;
}
```
--------------------------------------------------------------------------------
/.github/skills/skill-creator/scripts/init_skill.py:
--------------------------------------------------------------------------------
```python
#!/usr/bin/env python3
"""
Skill Initializer - Creates a new skill from template
Usage:
init_skill.py <skill-name> --path <path>
Examples:
init_skill.py my-new-skill --path skills/public
init_skill.py my-api-helper --path skills/private
init_skill.py custom-skill --path /custom/location
"""
import sys
from pathlib import Path
# Markdown skeleton written to SKILL.md. Filled via str.format(), so
# {skill_name} and {skill_title} are placeholders; the bracketed
# [TODO: ...] markers are left for the skill author to complete.
SKILL_TEMPLATE = """---
name: {skill_name}
description: [TODO: Complete and informative explanation of what the skill does and when to use it. Include WHEN to use this skill - specific scenarios, file types, or tasks that trigger it.]
---
# {skill_title}
## Overview
[TODO: 1-2 sentences explaining what this skill enables]
## Structuring This Skill
[TODO: Choose the structure that best fits this skill's purpose. Common patterns:
**1. Workflow-Based** (best for sequential processes)
- Works well when there are clear step-by-step procedures
- Example: DOCX skill with "Workflow Decision Tree" → "Reading" → "Creating" → "Editing"
- Structure: ## Overview → ## Workflow Decision Tree → ## Step 1 → ## Step 2...
**2. Task-Based** (best for tool collections)
- Works well when the skill offers different operations/capabilities
- Example: PDF skill with "Quick Start" → "Merge PDFs" → "Split PDFs" → "Extract Text"
- Structure: ## Overview → ## Quick Start → ## Task Category 1 → ## Task Category 2...
**3. Reference/Guidelines** (best for standards or specifications)
- Works well for brand guidelines, coding standards, or requirements
- Example: Brand styling with "Brand Guidelines" → "Colors" → "Typography" → "Features"
- Structure: ## Overview → ## Guidelines → ## Specifications → ## Usage...
**4. Capabilities-Based** (best for integrated systems)
- Works well when the skill provides multiple interrelated features
- Example: Product Management with "Core Capabilities" → numbered capability list
- Structure: ## Overview → ## Core Capabilities → ### 1. Feature → ### 2. Feature...
Patterns can be mixed and matched as needed. Most skills combine patterns (e.g., start with task-based, add workflow for complex operations).
Delete this entire "Structuring This Skill" section when done - it's just guidance.]
## [TODO: Replace with the first main section based on chosen structure]
[TODO: Add content here. See examples in existing skills:
- Code samples for technical skills
- Decision trees for complex workflows
- Concrete examples with realistic user requests
- References to scripts/templates/references as needed]
## Resources
This skill includes example resource directories that demonstrate how to organize different types of bundled resources:
### scripts/
Executable code (Python/Bash/etc.) that can be run directly to perform specific operations.
**Examples from other skills:**
- PDF skill: `fill_fillable_fields.py`, `extract_form_field_info.py` - utilities for PDF manipulation
- DOCX skill: `document.py`, `utilities.py` - Python modules for document processing
**Appropriate for:** Python scripts, shell scripts, or any executable code that performs automation, data processing, or specific operations.
**Note:** Scripts may be executed without loading into context, but can still be read by Claude for patching or environment adjustments.
### references/
Documentation and reference material intended to be loaded into context to inform Claude's process and thinking.
**Examples from other skills:**
- Product management: `communication.md`, `context_building.md` - detailed workflow guides
- BigQuery: API reference documentation and query examples
- Finance: Schema documentation, company policies
**Appropriate for:** In-depth documentation, API references, database schemas, comprehensive guides, or any detailed information that Claude should reference while working.
### assets/
Files not intended to be loaded into context, but rather used within the output Claude produces.
**Examples from other skills:**
- Brand styling: PowerPoint template files (.pptx), logo files
- Frontend builder: HTML/React boilerplate project directories
- Typography: Font files (.ttf, .woff2)
**Appropriate for:** Templates, boilerplate code, document templates, images, icons, fonts, or any files meant to be copied or used in the final output.
---
**Any unneeded directories can be deleted.** Not every skill requires all three types of resources.
"""
# Placeholder executable written to scripts/example.py; {skill_name} is
# substituted via .format() and the file is chmod'ed 0o755 by init_skill().
EXAMPLE_SCRIPT = '''#!/usr/bin/env python3
"""
Example helper script for {skill_name}
This is a placeholder script that can be executed directly.
Replace with actual implementation or delete if not needed.
Example real scripts from other skills:
- pdf/scripts/fill_fillable_fields.py - Fills PDF form fields
- pdf/scripts/convert_pdf_to_images.py - Converts PDF pages to images
"""
def main():
print("This is an example script for {skill_name}")
# TODO: Add actual script logic here
# This could be data processing, file conversion, API calls, etc.
if __name__ == "__main__":
main()
'''
# Placeholder reference doc written to references/api_reference.md;
# {skill_title} is substituted via .format().
EXAMPLE_REFERENCE = """# Reference Documentation for {skill_title}
This is a placeholder for detailed reference documentation.
Replace with actual reference content or delete if not needed.
Example real reference docs from other skills:
- product-management/references/communication.md - Comprehensive guide for status updates
- product-management/references/context_building.md - Deep-dive on gathering context
- bigquery/references/ - API references and query examples
## When Reference Docs Are Useful
Reference docs are ideal for:
- Comprehensive API documentation
- Detailed workflow guides
- Complex multi-step processes
- Information too lengthy for main SKILL.md
- Content that's only needed for specific use cases
## Structure Suggestions
### API Reference Example
- Overview
- Authentication
- Endpoints with examples
- Error codes
- Rate limits
### Workflow Guide Example
- Prerequisites
- Step-by-step instructions
- Common patterns
- Troubleshooting
- Best practices
"""
# Placeholder text written verbatim (no .format call) to
# assets/example_asset.txt.
EXAMPLE_ASSET = """# Example Asset File
This placeholder represents where asset files would be stored.
Replace with actual asset files (templates, images, fonts, etc.) or delete if not needed.
Asset files are NOT intended to be loaded into context, but rather used within
the output Claude produces.
Example asset files from other skills:
- Brand guidelines: logo.png, slides_template.pptx
- Frontend builder: hello-world/ directory with HTML/React boilerplate
- Typography: custom-font.ttf, font-family.woff2
- Data: sample_data.csv, test_dataset.json
## Common Asset Types
- Templates: .pptx, .docx, boilerplate directories
- Images: .png, .jpg, .svg, .gif
- Fonts: .ttf, .otf, .woff, .woff2
- Boilerplate code: Project directories, starter files
- Icons: .ico, .svg
- Data files: .csv, .json, .xml, .yaml
Note: This is a text placeholder. Actual assets can be any file type.
"""
def title_case_skill_name(skill_name):
    """Convert a hyphen-case skill name (e.g. 'data-analyzer') to 'Data Analyzer'."""
    words = []
    for segment in skill_name.split('-'):
        words.append(segment.capitalize())
    return ' '.join(words)
def init_skill(skill_name, path):
    """
    Initialize a new skill directory with template SKILL.md.

    Also creates scripts/, references/, and assets/ subdirectories, each
    seeded with a placeholder file. Progress and errors are printed to
    stdout; on any failure the function returns None (partially created
    files are left in place).

    Args:
        skill_name: Name of the skill
        path: Path where the skill directory should be created
    Returns:
        Path to created skill directory, or None if error
    """
    # Determine skill directory path
    skill_dir = Path(path).resolve() / skill_name
    # Check if directory already exists
    if skill_dir.exists():
        print(f"❌ Error: Skill directory already exists: {skill_dir}")
        return None
    # Create skill directory
    try:
        # exist_ok=False: the exists() check above is racy; fail loudly if
        # something created the directory in between.
        skill_dir.mkdir(parents=True, exist_ok=False)
        print(f"✅ Created skill directory: {skill_dir}")
    except Exception as e:
        print(f"❌ Error creating directory: {e}")
        return None
    # Create SKILL.md from template
    skill_title = title_case_skill_name(skill_name)
    skill_content = SKILL_TEMPLATE.format(
        skill_name=skill_name,
        skill_title=skill_title
    )
    skill_md_path = skill_dir / 'SKILL.md'
    try:
        skill_md_path.write_text(skill_content)
        print("✅ Created SKILL.md")
    except Exception as e:
        print(f"❌ Error creating SKILL.md: {e}")
        return None
    # Create resource directories with example files
    try:
        # Create scripts/ directory with example script
        scripts_dir = skill_dir / 'scripts'
        scripts_dir.mkdir(exist_ok=True)
        example_script = scripts_dir / 'example.py'
        example_script.write_text(EXAMPLE_SCRIPT.format(skill_name=skill_name))
        # Mark the placeholder script executable (rwxr-xr-x).
        example_script.chmod(0o755)
        print("✅ Created scripts/example.py")
        # Create references/ directory with example reference doc
        references_dir = skill_dir / 'references'
        references_dir.mkdir(exist_ok=True)
        example_reference = references_dir / 'api_reference.md'
        example_reference.write_text(EXAMPLE_REFERENCE.format(skill_title=skill_title))
        print("✅ Created references/api_reference.md")
        # Create assets/ directory with example asset placeholder
        assets_dir = skill_dir / 'assets'
        assets_dir.mkdir(exist_ok=True)
        example_asset = assets_dir / 'example_asset.txt'
        example_asset.write_text(EXAMPLE_ASSET)
        print("✅ Created assets/example_asset.txt")
    except Exception as e:
        print(f"❌ Error creating resource directories: {e}")
        return None
    # Print next steps
    print(f"\n✅ Skill '{skill_name}' initialized successfully at {skill_dir}")
    print("\nNext steps:")
    print("1. Edit SKILL.md to complete the TODO items and update the description")
    print("2. Customize or delete the example files in scripts/, references/, and assets/")
    print("3. Run the validator when ready to check the skill structure")
    return skill_dir
def main():
    """CLI entry point: validate ``<skill-name> --path <path>`` argv, then delegate to init_skill()."""
    args = sys.argv
    # `or` short-circuits, so args[2] is only read when at least 4 args exist.
    if len(args) < 4 or args[2] != '--path':
        print("Usage: init_skill.py <skill-name> --path <path>")
        print("\nSkill name requirements:")
        print(" - Hyphen-case identifier (e.g., 'data-analyzer')")
        print(" - Lowercase letters, digits, and hyphens only")
        print(" - Max 40 characters")
        print(" - Must match directory name exactly")
        print("\nExamples:")
        print(" init_skill.py my-new-skill --path skills/public")
        print(" init_skill.py my-api-helper --path skills/private")
        print(" init_skill.py custom-skill --path /custom/location")
        sys.exit(1)
    skill_name = args[1]
    path = args[3]
    print(f"🚀 Initializing skill: {skill_name}")
    print(f" Location: {path}")
    print()
    created = init_skill(skill_name, path)
    # init_skill returns a Path on success, None on failure.
    sys.exit(0 if created else 1)
if __name__ == "__main__":
    main()
```
--------------------------------------------------------------------------------
/src/features/search/search-work-items/feature.spec.unit.ts:
--------------------------------------------------------------------------------
```typescript
import { WebApi } from 'azure-devops-node-api';
import axios from 'axios';
import { searchWorkItems } from './feature';
import {
AzureDevOpsError,
AzureDevOpsResourceNotFoundError,
AzureDevOpsValidationError,
AzureDevOpsPermissionError,
} from '../../../shared/errors';
import { SearchWorkItemsOptions, WorkItemSearchResponse } from '../types';
// Mock axios so no real HTTP calls are made; tests assert on the exact
// request payload/headers that axios.post receives.
jest.mock('axios');
const mockedAxios = axios as jest.Mocked<typeof axios>;
// Mock @azure/identity so the azure-identity auth path yields a fixed token.
jest.mock('@azure/identity', () => ({
  DefaultAzureCredential: jest.fn().mockImplementation(() => ({
    getToken: jest
      .fn()
      .mockResolvedValue({ token: 'mock-azure-identity-token' }),
  })),
  AzureCliCredential: jest.fn(),
}));
// Mock WebApi
jest.mock('azure-devops-node-api');
const MockedWebApi = WebApi as jest.MockedClass<typeof WebApi>;
// Unit tests for searchWorkItems: request construction, auth header
// selection, error mapping, and org-wide search behavior.
describe('searchWorkItems', () => {
  let connection: WebApi;
  let options: SearchWorkItemsOptions;
  let mockResponse: WorkItemSearchResponse;
  beforeEach(() => {
    // Reset mocks
    jest.clearAllMocks();
    // Mock environment variables (PAT auth by default; cleaned in afterEach)
    process.env.AZURE_DEVOPS_AUTH_METHOD = 'pat';
    process.env.AZURE_DEVOPS_PAT = 'mock-pat';
    // Set up connection mock
    // Create a mock auth handler that implements IRequestHandler
    const mockAuthHandler = {
      prepareRequest: jest.fn(),
      canHandleAuthentication: jest.fn().mockReturnValue(true),
      handleAuthentication: jest.fn(),
    };
    connection = new MockedWebApi(
      'https://dev.azure.com/mock-org',
      mockAuthHandler,
    );
    // serverUrl is read by the feature to derive the organization name.
    (connection as any).serverUrl = 'https://dev.azure.com/mock-org';
    (connection.getCoreApi as jest.Mock).mockResolvedValue({
      getProjects: jest.fn().mockResolvedValue([]),
    });
    // Set up options
    options = {
      searchText: 'test query',
      projectId: 'mock-project',
      top: 50,
      skip: 0,
      includeFacets: true,
    };
    // Set up mock response
    mockResponse = {
      count: 2,
      results: [
        {
          project: {
            id: 'project-id-1',
            name: 'mock-project',
          },
          fields: {
            'system.id': '42',
            'system.workitemtype': 'Bug',
            'system.title': 'Test Bug',
            'system.state': 'Active',
            'system.assignedto': 'Test User',
          },
          hits: [
            {
              fieldReferenceName: 'system.title',
              highlights: ['Test <b>Bug</b>'],
            },
          ],
          url: 'https://dev.azure.com/mock-org/mock-project/_workitems/edit/42',
        },
        {
          project: {
            id: 'project-id-1',
            name: 'mock-project',
          },
          fields: {
            'system.id': '43',
            'system.workitemtype': 'Task',
            'system.title': 'Test Task',
            'system.state': 'New',
            'system.assignedto': 'Test User',
          },
          hits: [
            {
              fieldReferenceName: 'system.title',
              highlights: ['Test <b>Task</b>'],
            },
          ],
          url: 'https://dev.azure.com/mock-org/mock-project/_workitems/edit/43',
        },
      ],
      facets: {
        'System.WorkItemType': [
          {
            name: 'Bug',
            id: 'Bug',
            resultCount: 1,
          },
          {
            name: 'Task',
            id: 'Task',
            resultCount: 1,
          },
        ],
      },
    };
    // Mock axios response
    mockedAxios.post.mockResolvedValue({ data: mockResponse });
  });
  afterEach(() => {
    // Clean up environment variables
    delete process.env.AZURE_DEVOPS_AUTH_METHOD;
    delete process.env.AZURE_DEVOPS_PAT;
  });
  it('should search work items with the correct parameters', async () => {
    // Act
    const result = await searchWorkItems(connection, options);
    // Assert: URL, payload (including the implicit project filter), and
    // PAT-based Basic auth header.
    expect(mockedAxios.post).toHaveBeenCalledWith(
      'https://almsearch.dev.azure.com/mock-org/mock-project/_apis/search/workitemsearchresults?api-version=7.1',
      {
        searchText: 'test query',
        $skip: 0,
        $top: 50,
        filters: {
          'System.TeamProject': ['mock-project'],
        },
        includeFacets: true,
      },
      expect.objectContaining({
        headers: expect.objectContaining({
          Authorization: expect.stringContaining('Basic'),
          'Content-Type': 'application/json',
        }),
      }),
    );
    expect(result).toEqual(mockResponse);
  });
  it('should include filters when provided', async () => {
    // Arrange: user filters must be merged with the project filter.
    options.filters = {
      'System.WorkItemType': ['Bug', 'Task'],
      'System.State': ['Active'],
    };
    // Act
    await searchWorkItems(connection, options);
    // Assert
    expect(mockedAxios.post).toHaveBeenCalledWith(
      expect.any(String),
      expect.objectContaining({
        filters: {
          'System.TeamProject': ['mock-project'],
          'System.WorkItemType': ['Bug', 'Task'],
          'System.State': ['Active'],
        },
      }),
      expect.any(Object),
    );
  });
  it('should include orderBy when provided', async () => {
    // Arrange
    options.orderBy = [{ field: 'System.CreatedDate', sortOrder: 'ASC' }];
    // Act
    await searchWorkItems(connection, options);
    // Assert: orderBy is sent under the API's $orderBy key.
    expect(mockedAxios.post).toHaveBeenCalledWith(
      expect.any(String),
      expect.objectContaining({
        $orderBy: [{ field: 'System.CreatedDate', sortOrder: 'ASC' }],
      }),
      expect.any(Object),
    );
  });
  it('should handle 404 errors correctly', async () => {
    // Arrange - Mock the implementation to throw the specific error
    mockedAxios.post.mockImplementation(() => {
      throw new AzureDevOpsResourceNotFoundError(
        'Resource not found: Project not found',
      );
    });
    // Act & Assert
    await expect(searchWorkItems(connection, options)).rejects.toThrow(
      AzureDevOpsResourceNotFoundError,
    );
  });
  it('should handle 400 errors correctly', async () => {
    // Arrange - Mock the implementation to throw the specific error
    mockedAxios.post.mockImplementation(() => {
      throw new AzureDevOpsValidationError('Invalid request: Invalid query');
    });
    // Act & Assert
    await expect(searchWorkItems(connection, options)).rejects.toThrow(
      AzureDevOpsValidationError,
    );
  });
  it('should handle 401/403 errors correctly', async () => {
    // Arrange - Mock the implementation to throw the specific error
    mockedAxios.post.mockImplementation(() => {
      throw new AzureDevOpsPermissionError(
        'Permission denied: Permission denied',
      );
    });
    // Act & Assert
    await expect(searchWorkItems(connection, options)).rejects.toThrow(
      AzureDevOpsPermissionError,
    );
  });
  it('should handle other axios errors correctly', async () => {
    // Arrange - Mock the implementation to throw the specific error
    mockedAxios.post.mockImplementation(() => {
      throw new AzureDevOpsError(
        'Azure DevOps API error: Internal server error',
      );
    });
    // Act & Assert
    await expect(searchWorkItems(connection, options)).rejects.toThrow(
      AzureDevOpsError,
    );
  });
  it('should handle non-axios errors correctly', async () => {
    // Arrange: a generic Error should be wrapped in AzureDevOpsError.
    mockedAxios.post.mockRejectedValue(new Error('Network error'));
    // Act & Assert
    await expect(searchWorkItems(connection, options)).rejects.toThrow(
      AzureDevOpsError,
    );
  });
  it('should throw an error if organization cannot be extracted', async () => {
    // Arrange: a serverUrl with no org segment.
    (connection as any).serverUrl = 'https://invalid-url';
    // Act & Assert
    await expect(searchWorkItems(connection, options)).rejects.toThrow(
      AzureDevOpsValidationError,
    );
  });
  it('should use Azure Identity authentication when AZURE_DEVOPS_AUTH_METHOD is azure-identity', async () => {
    // Mock environment variables (restored at the end of this test)
    const originalEnv = process.env.AZURE_DEVOPS_AUTH_METHOD;
    process.env.AZURE_DEVOPS_AUTH_METHOD = 'azure-identity';
    // Mock the WebApi connection
    const mockConnection = {
      serverUrl: 'https://dev.azure.com/testorg',
      getCoreApi: jest.fn().mockResolvedValue({
        getProjects: jest.fn().mockResolvedValue([]),
      }),
    };
    // Mock axios post
    const mockResponse = {
      data: {
        count: 0,
        results: [],
      },
    };
    (axios.post as jest.Mock).mockResolvedValueOnce(mockResponse);
    // Call the function
    await searchWorkItems(mockConnection as unknown as WebApi, {
      projectId: 'testproject',
      searchText: 'test query',
    });
    // Verify the axios post was called with a Bearer token (token comes from
    // the mocked DefaultAzureCredential above).
    expect(axios.post).toHaveBeenCalledWith(
      expect.any(String),
      expect.any(Object),
      {
        headers: {
          Authorization: 'Bearer mock-azure-identity-token',
          'Content-Type': 'application/json',
        },
      },
    );
    // Cleanup
    process.env.AZURE_DEVOPS_AUTH_METHOD = originalEnv;
  });
  test('should perform organization-wide work item search when projectId is not provided', async () => {
    // Arrange
    const mockSearchResponse = {
      data: {
        count: 2,
        results: [
          {
            id: 1,
            fields: {
              'System.Title': 'Test Bug 1',
              'System.State': 'Active',
              'System.WorkItemType': 'Bug',
              'System.TeamProject': 'Project1',
            },
            project: {
              name: 'Project1',
              id: 'project-id-1',
            },
          },
          {
            id: 2,
            fields: {
              'System.Title': 'Test Bug 2',
              'System.State': 'Active',
              'System.WorkItemType': 'Bug',
              'System.TeamProject': 'Project2',
            },
            project: {
              name: 'Project2',
              id: 'project-id-2',
            },
          },
        ],
      },
    };
    mockedAxios.post.mockResolvedValueOnce(mockSearchResponse);
    // Act
    const result = await searchWorkItems(connection, {
      searchText: 'bug',
    });
    // Assert: org-level URL (no project segment) and no implicit
    // System.TeamProject filter in the payload.
    expect(result).toBeDefined();
    expect(result.count).toBe(2);
    expect(result.results).toHaveLength(2);
    expect(result.results[0].fields['System.TeamProject']).toBe('Project1');
    expect(result.results[1].fields['System.TeamProject']).toBe('Project2');
    expect(mockedAxios.post).toHaveBeenCalledTimes(1);
    expect(mockedAxios.post).toHaveBeenCalledWith(
      expect.stringContaining(
        'https://almsearch.dev.azure.com/mock-org/_apis/search/workitemsearchresults',
      ),
      expect.not.objectContaining({
        filters: expect.objectContaining({
          'System.TeamProject': expect.anything(),
        }),
      }),
      expect.any(Object),
    );
  });
});
```
--------------------------------------------------------------------------------
/src/features/search/search-code/feature.spec.int.ts:
--------------------------------------------------------------------------------
```typescript
import { WebApi } from 'azure-devops-node-api';
import { searchCode } from './feature';
import {
getTestConnection,
shouldSkipIntegrationTest,
} from '@/shared/test/test-helpers';
import { SearchCodeOptions } from '../types';
// Integration tests for searchCode against a live Azure DevOps org.
// Tests are best-effort: they skip when no connection is configured or when
// the optional Code Search extension is unavailable.
describe('searchCode integration', () => {
  let connection: WebApi | null = null;
  let projectName: string;
  beforeAll(async () => {
    // Get a real connection using environment variables
    connection = await getTestConnection();
    projectName = process.env.AZURE_DEVOPS_DEFAULT_PROJECT || 'DefaultProject';
  });
  /**
   * Returns the live connection, or null when the test should be skipped
   * (no credentials configured). Throws if the environment claims a
   * connection should exist but none was established.
   */
  const getConnectionOrSkip = (): WebApi | null => {
    if (shouldSkipIntegrationTest()) {
      console.log('Skipping test: No Azure DevOps connection available');
      return null;
    }
    if (!connection) {
      throw new Error(
        'Connection should be available when test is not skipped',
      );
    }
    return connection;
  };
  /**
   * The Code Search extension (ms.vss-code-search) is optional; treat the
   * known "not installed / not available" error messages as a skip signal.
   */
  const isCodeSearchUnavailable = (error: unknown): boolean =>
    error instanceof Error &&
    (error.message.includes('ms.vss-code-search is not installed') ||
      error.message.includes('Resource not found') ||
      error.message.includes('Failed to search code'));
  const logCodeSearchSkip = (): void => {
    console.log(
      'Skipping test: Code Search extension is not installed or not available in this Azure DevOps organization',
    );
  };
  test('should search code in a project', async () => {
    const conn = getConnectionOrSkip();
    if (!conn) return;
    const options: SearchCodeOptions = {
      searchText: 'function',
      projectId: projectName,
      top: 10,
    };
    try {
      // Act - make an actual API call to Azure DevOps
      const result = await searchCode(conn, options);
      // Assert on the actual response
      expect(result).toBeDefined();
      expect(typeof result.count).toBe('number');
      expect(Array.isArray(result.results)).toBe(true);
      // Check structure of returned items (if any)
      if (result.results.length > 0) {
        const firstResult = result.results[0];
        expect(firstResult.fileName).toBeDefined();
        expect(firstResult.path).toBeDefined();
        expect(firstResult.project).toBeDefined();
        expect(firstResult.repository).toBeDefined();
        if (firstResult.project) {
          expect(firstResult.project.name).toBe(projectName);
        }
      }
    } catch (error) {
      if (isCodeSearchUnavailable(error)) {
        logCodeSearchSkip();
        return;
      }
      throw error;
    }
  });
  test('should include file content when requested', async () => {
    const conn = getConnectionOrSkip();
    if (!conn) return;
    const options: SearchCodeOptions = {
      searchText: 'function',
      projectId: projectName,
      top: 5,
      includeContent: true,
    };
    try {
      // Act - make an actual API call to Azure DevOps
      const result = await searchCode(conn, options);
      // Assert on the actual response
      expect(result).toBeDefined();
      // Check if content is included (if any results)
      if (result.results.length > 0) {
        // At least some results should have content
        // Note: Some files might fail to fetch content, so we don't expect all to have it
        const hasContent = result.results.some((r) => r.content !== undefined);
        expect(hasContent).toBe(true);
      }
    } catch (error) {
      if (isCodeSearchUnavailable(error)) {
        logCodeSearchSkip();
        return;
      }
      throw error;
    }
  });
  test('should filter results when filters are provided', async () => {
    const conn = getConnectionOrSkip();
    if (!conn) return;
    try {
      // First get some results to find a repository name
      const initialOptions: SearchCodeOptions = {
        searchText: 'function',
        projectId: projectName,
        top: 1,
      };
      const initialResult = await searchCode(conn, initialOptions);
      // Skip if no results found
      if (initialResult.results.length === 0) {
        console.log('Skipping filter test: No initial results found');
        return;
      }
      // Use the repository from the first result for filtering
      const repoName = initialResult.results[0].repository.name;
      const filteredOptions: SearchCodeOptions = {
        searchText: 'function',
        projectId: projectName,
        filters: {
          Repository: [repoName],
        },
        top: 5,
      };
      // Act - make an actual API call to Azure DevOps with filters
      const result = await searchCode(conn, filteredOptions);
      // Assert on the actual response
      expect(result).toBeDefined();
      // All results should be from the specified repository
      if (result.results.length > 0) {
        const allFromRepo = result.results.every(
          (r) => r.repository.name === repoName,
        );
        expect(allFromRepo).toBe(true);
      }
    } catch (error) {
      if (isCodeSearchUnavailable(error)) {
        logCodeSearchSkip();
        return;
      }
      throw error;
    }
  });
  test('should handle pagination', async () => {
    const conn = getConnectionOrSkip();
    if (!conn) return;
    try {
      // Get first page
      const firstPageOptions: SearchCodeOptions = {
        searchText: 'function',
        projectId: projectName,
        top: 2,
        skip: 0,
      };
      const firstPageResult = await searchCode(conn, firstPageOptions);
      // Skip if not enough results for pagination test
      if (firstPageResult.count <= 2) {
        console.log('Skipping pagination test: Not enough results');
        return;
      }
      // Get second page
      const secondPageOptions: SearchCodeOptions = {
        searchText: 'function',
        projectId: projectName,
        top: 2,
        skip: 2,
      };
      const secondPageResult = await searchCode(conn, secondPageOptions);
      // Assert on pagination
      expect(secondPageResult).toBeDefined();
      expect(secondPageResult.results.length).toBeGreaterThan(0);
      // First and second page should have different results
      if (
        firstPageResult.results.length > 0 &&
        secondPageResult.results.length > 0
      ) {
        const firstPagePaths = firstPageResult.results.map((r) => r.path);
        const secondPagePaths = secondPageResult.results.map((r) => r.path);
        // Check if there's any overlap between pages
        const hasOverlap = firstPagePaths.some((path) =>
          secondPagePaths.includes(path),
        );
        expect(hasOverlap).toBe(false);
      }
    } catch (error) {
      if (isCodeSearchUnavailable(error)) {
        logCodeSearchSkip();
        return;
      }
      throw error;
    }
  });
  test('should use default project when no projectId is provided', async () => {
    const conn = getConnectionOrSkip();
    if (!conn) return;
    // Store original environment variable
    const originalEnv = process.env.AZURE_DEVOPS_DEFAULT_PROJECT;
    try {
      // Set the default project to the current project name for testing
      process.env.AZURE_DEVOPS_DEFAULT_PROJECT = projectName;
      // Search without specifying a project ID
      const options: SearchCodeOptions = {
        searchText: 'function',
        top: 5,
      };
      // Act - make an actual API call to Azure DevOps
      const result = await searchCode(conn, options);
      // Assert on the actual response
      expect(result).toBeDefined();
      expect(typeof result.count).toBe('number');
      expect(Array.isArray(result.results)).toBe(true);
      // Check structure of returned items (if any)
      if (result.results.length > 0) {
        const firstResult = result.results[0];
        expect(firstResult.fileName).toBeDefined();
        expect(firstResult.path).toBeDefined();
        expect(firstResult.project).toBeDefined();
        expect(firstResult.repository).toBeDefined();
        if (firstResult.project) {
          expect(firstResult.project.name).toBe(projectName);
        }
      }
    } catch (error) {
      if (isCodeSearchUnavailable(error)) {
        logCodeSearchSkip();
        return;
      }
      throw error;
    } finally {
      // Restore the original value. Bug fix: assigning `undefined` to a
      // process.env key stores the literal string "undefined" in Node, so
      // delete the key instead when it was originally unset.
      if (originalEnv === undefined) {
        delete process.env.AZURE_DEVOPS_DEFAULT_PROJECT;
      } else {
        process.env.AZURE_DEVOPS_DEFAULT_PROJECT = originalEnv;
      }
    }
  });
});
```
--------------------------------------------------------------------------------
/src/features/repositories/schemas.ts:
--------------------------------------------------------------------------------
```typescript
import { z } from 'zod';
import { defaultProject, defaultOrg } from '../../utils/environment';
/**
 * Schema for getting a repository
 *
 * projectId and organizationId fall back to the configured defaults when
 * omitted; repositoryId is required and accepts either the GUID or the name.
 */
export const GetRepositorySchema = z.object({
  projectId: z
    .string()
    .optional()
    .describe(`The ID or name of the project (Default: ${defaultProject})`),
  organizationId: z
    .string()
    .optional()
    .describe(`The ID or name of the organization (Default: ${defaultOrg})`),
  repositoryId: z.string().describe('The ID or name of the repository'),
});
/**
 * Schema for getting detailed repository information
 *
 * Optionally augments the base repository payload with branch statistics
 * and/or refs; refFilter and branchName scope those optional extras.
 */
export const GetRepositoryDetailsSchema = z.object({
  projectId: z
    .string()
    .optional()
    .describe(`The ID or name of the project (Default: ${defaultProject})`),
  organizationId: z
    .string()
    .optional()
    .describe(`The ID or name of the organization (Default: ${defaultOrg})`),
  repositoryId: z.string().describe('The ID or name of the repository'),
  includeStatistics: z
    .boolean()
    .optional()
    .default(false)
    .describe('Whether to include branch statistics'),
  includeRefs: z
    .boolean()
    .optional()
    .default(false)
    .describe('Whether to include repository refs'),
  refFilter: z
    .string()
    .optional()
    .describe('Optional filter for refs (e.g., "heads/" or "tags/")'),
  branchName: z
    .string()
    .optional()
    .describe(
      'Name of specific branch to get statistics for (if includeStatistics is true)',
    ),
});
/**
 * Schema for listing repositories
 */
export const ListRepositoriesSchema = z.object({
  projectId: z
    .string()
    .optional()
    .describe(`The ID or name of the project (Default: ${defaultProject})`),
  organizationId: z
    .string()
    .optional()
    .describe(`The ID or name of the organization (Default: ${defaultOrg})`),
  includeLinks: z
    .boolean()
    .optional()
    .describe('Whether to include reference links'),
});
/**
 * Schema for getting file content
 *
 * path defaults to '/' (repository root). version plus versionType select a
 * branch, tag, or commit to read from; when omitted, presumably the
 * repository's default branch is used — confirm in the handler.
 */
export const GetFileContentSchema = z.object({
  projectId: z
    .string()
    .optional()
    .describe(`The ID or name of the project (Default: ${defaultProject})`),
  organizationId: z
    .string()
    .optional()
    .describe(`The ID or name of the organization (Default: ${defaultOrg})`),
  repositoryId: z.string().describe('The ID or name of the repository'),
  path: z
    .string()
    .optional()
    .default('/')
    .describe('Path to the file or folder'),
  version: z
    .string()
    .optional()
    .describe('The version (branch, tag, or commit) to get content from'),
  versionType: z
    .enum(['branch', 'commit', 'tag'])
    .optional()
    .describe('Type of version specified (branch, commit, or tag)'),
});
/**
 * Schema for getting all repositories tree structure
 *
 * Walks every repository in the project (optionally filtered by
 * repositoryPattern), up to `depth` levels, optionally filtering files
 * by `pattern`.
 */
export const GetAllRepositoriesTreeSchema = z.object({
  organizationId: z
    .string()
    .optional()
    .describe(
      `The ID or name of the Azure DevOps organization (Default: ${defaultOrg})`,
    ),
  projectId: z
    .string()
    .optional()
    .describe(`The ID or name of the project (Default: ${defaultProject})`),
  repositoryPattern: z
    .string()
    .optional()
    .describe(
      'Repository name pattern (wildcard characters allowed) to filter which repositories are included',
    ),
  depth: z
    .number()
    .int()
    .min(0)
    .max(10)
    .optional()
    .default(0)
    .describe(
      'Maximum depth to traverse within each repository (0 = unlimited)',
    ),
  pattern: z
    .string()
    .optional()
    .describe(
      'File pattern (wildcard characters allowed) to filter files by within each repository',
    ),
});
/**
 * Schema for getting a tree for a single repository
 *
 * path defaults to '/' (repository root); depth 0 means unlimited.
 */
export const GetRepositoryTreeSchema = z.object({
  projectId: z
    .string()
    .optional()
    .describe(`The ID or name of the project (Default: ${defaultProject})`),
  organizationId: z
    .string()
    .optional()
    .describe(`The ID or name of the organization (Default: ${defaultOrg})`),
  repositoryId: z.string().describe('The ID or name of the repository'),
  path: z
    .string()
    .optional()
    .default('/')
    .describe('Path within the repository to start from'),
  depth: z
    .number()
    .int()
    .min(0)
    .max(10)
    .optional()
    .default(0)
    .describe('Maximum depth to traverse (0 = unlimited)'),
});
/**
 * Schema for creating a new branch
 *
 * Branch names are plain (no "refs/heads/" prefix) on both source and new
 * branch — see the schema-level description below for examples.
 */
export const CreateBranchSchema = z
  .object({
    projectId: z
      .string()
      .optional()
      .describe(`The ID or name of the project (Default: ${defaultProject})`),
    organizationId: z
      .string()
      .optional()
      .describe(`The ID or name of the organization (Default: ${defaultOrg})`),
    repositoryId: z.string().describe('The ID or name of the repository'),
    sourceBranch: z
      .string()
      .describe(
        'Name of the branch to copy from (without "refs/heads/", e.g., "master")',
      ),
    newBranch: z
      .string()
      .describe(
        'Name of the new branch to create (without "refs/heads/", e.g., "feature/my-branch")',
      ),
  })
  .describe(
    'Create a new branch from an existing branch.\n' +
      '- Pass plain branch names (no "refs/heads/"). Example: sourceBranch="master", newBranch="codex/test1".\n' +
      '- When creating pull requests later, use fully-qualified refs (e.g., "refs/heads/codex/test1").',
  );
/**
* Schema for creating a commit with multiple file changes
*/
export const CreateCommitSchema = z
.object({
projectId: z
.string()
.optional()
.describe(`The ID or name of the project (Default: ${defaultProject})`),
organizationId: z
.string()
.optional()
.describe(`The ID or name of the organization (Default: ${defaultOrg})`),
repositoryId: z.string().describe('The ID or name of the repository'),
branchName: z
.string()
.describe(
'The branch to commit to (without "refs/heads/", e.g., "codex/test2-delete-main-py")',
),
commitMessage: z.string().describe('Commit message'),
changes: z
.array(
z
.object({
path: z
.string()
.optional()
.describe(
'File path. Optional for patch format (uses diff header), REQUIRED for search/replace format',
),
patch: z
.string()
.optional()
.describe(
[
'Unified git diff for a single file.',
'MUST include `diff --git`, `--- a/...`, `+++ b/...`, and complete hunk headers.',
'CRITICAL: Every hunk header must have line numbers in format: @@ -oldStart,oldLines +newStart,newLines @@',
'Do NOT use @@ without the line range numbers - this will cause parsing failures.',
'Include 3-5 context lines before and after changes for proper patch application.',
'Use `/dev/null` with `---` for new files, or with `+++` for deleted files.',
'',
'Example modify patch:',
'```diff',
'diff --git a/charts/bcs-mcp-server/templates/service-api.yaml b/charts/bcs-mcp-server/templates/service-api.yaml',
'--- a/charts/bcs-mcp-server/templates/service-api.yaml',
'+++ b/charts/bcs-mcp-server/templates/service-api.yaml',
'@@ -4,7 +4,7 @@ spec:',
' spec:',
' type: {{ .Values.service.type }}',
' ports:',
'- - port: 8080',
'+ - port: 9090',
' targetPort: deployment-port',
' protocol: TCP',
' name: http',
'```',
].join('\n'),
),
search: z
.string()
.optional()
.describe(
[
'Alternative to patch: Exact text to search for in the file.',
'Must be used with "replace" and "path" fields.',
'The server will fetch the file, perform the replacement, and generate the patch automatically.',
'This is MUCH EASIER than creating unified diffs manually - no line counting needed!',
'',
'Example:',
'"search": "return axios.post(apiUrl, payload, requestConfig);"',
'"replace": "return axios.post(apiUrl, payload, requestConfig).then(r => { /* process */ return r; });"',
].join('\n'),
),
replace: z
.string()
.optional()
.describe(
'Alternative to patch: Exact text to replace the "search" string with. Must be used together with "search" and "path".',
),
})
.refine(
(data) => {
const hasPatch = !!data.patch;
const hasSearchReplace = !!data.search && !!data.replace;
return hasPatch || hasSearchReplace;
},
{
message:
'Either "patch" or both "search" and "replace" must be provided',
},
),
)
.describe(
'List of file changes as either unified git diffs OR search/replace pairs',
),
})
.describe(
[
'Create a commit on an existing branch using file changes.',
'- Provide plain branch names (no "refs/heads/").',
'',
'**RECOMMENDED: Use search/replace format (easier, no line counting needed!)**',
'',
'Option 1 - Search/Replace (Easiest):',
'```json',
'{',
' "changes": [{',
' "path": "src/file.ts",',
' "search": "old code here",',
' "replace": "new code here"',
' }]',
'}',
'```',
'',
'Option 2 - Unified Diff (Advanced):',
'- Requires complete hunk headers: @@ -oldStart,oldLines +newStart,newLines @@',
'- Include 3-5 context lines before/after changes',
'- For deletions: --- a/file, +++ /dev/null',
'- For additions: --- /dev/null, +++ b/file',
].join('\n'),
);
/**
 * Schema for listing commits on a branch
 *
 * NOTE(review): the `top` description advertises a default of 10, but no
 * .default(10) is applied in this schema — presumably the handler supplies
 * that fallback; confirm before relying on it.
 */
export const ListCommitsSchema = z.object({
  projectId: z
    .string()
    .optional()
    .describe(`The ID or name of the project (Default: ${defaultProject})`),
  organizationId: z
    .string()
    .optional()
    .describe(`The ID or name of the organization (Default: ${defaultOrg})`),
  repositoryId: z.string().describe('The ID or name of the repository'),
  branchName: z.string().describe('Branch name to list commits from'),
  top: z
    .number()
    .int()
    .min(1)
    .max(100)
    .optional()
    .describe('Maximum number of commits to return (Default: 10)'),
  skip: z
    .number()
    .int()
    .min(0)
    .optional()
    .describe('Number of commits to skip from the newest'),
});
```
--------------------------------------------------------------------------------
/src/features/search/types.ts:
--------------------------------------------------------------------------------
```typescript
/**
 * Options for searching code in Azure DevOps repositories
 */
export interface SearchCodeOptions {
  /** Text to search for */
  searchText: string;
  /** Project to search in; when omitted, a default project is resolved elsewhere (see integration tests) */
  projectId?: string;
  /** Optional facet filters to narrow the search */
  filters?: {
    /** Restrict results to these repository names */
    Repository?: string[];
    /** Restrict results to these paths */
    Path?: string[];
    /** Restrict results to these branches */
    Branch?: string[];
    /** Restrict results to these code element kinds — assumption: values per Search API docs, confirm */
    CodeElement?: string[];
  };
  /** Maximum number of results to return */
  top?: number;
  /** Number of results to skip (pagination offset) */
  skip?: number;
  /** Whether to include highlighted match snippets in results */
  includeSnippet?: boolean;
  /** Whether to include full file content — presumably populates CodeSearchResult.content; confirm in searchCode */
  includeContent?: boolean;
}
/**
 * Request body for the Azure DevOps Search API
 */
export interface CodeSearchRequest {
  /** Text to search for */
  searchText: string;
  /** Number of results to skip (API paging parameter) */
  $skip?: number;
  /** Maximum number of results to return (API paging parameter) */
  $top?: number;
  /** Facet filters; note Project is expressed here rather than as a top-level field */
  filters?: {
    Project?: string[];
    Repository?: string[];
    Path?: string[];
    Branch?: string[];
    CodeElement?: string[];
  };
  /** Whether to include facet counts in the response */
  includeFacets?: boolean;
  /** Whether to include highlighted match snippets */
  includeSnippet?: boolean;
}
/**
 * Match information for search results
 */
export interface CodeSearchMatch {
  /** Character offset of the match within the field */
  charOffset: number;
  /** Length of the matched text in characters */
  length: number;
}
/**
 * Collection information for search results
 */
export interface CodeSearchCollection {
  /** Name of the collection */
  name: string;
}
/**
 * Project information for search results
 */
export interface CodeSearchProject {
  /** Name of the project */
  name: string;
  /** ID of the project */
  id: string;
}
/**
 * Repository information for search results
 */
export interface CodeSearchRepository {
  /** Name of the repository */
  name: string;
  /** ID of the repository */
  id: string;
  /** Repository type — assumption: e.g. "git"; confirm against Search API docs */
  type: string;
}
/**
 * Version information for search results
 */
export interface CodeSearchVersion {
  /** Branch on which the matching version lives */
  branchName: string;
  /** Change (commit) identifier for this version */
  changeId: string;
}
/**
 * Individual code search result
 */
export interface CodeSearchResult {
  /** File name of the matched file */
  fileName: string;
  /** Repository-relative path of the matched file */
  path: string;
  content?: string; // Added to store full file content
  /** Match locations, keyed by the field in which they occurred */
  matches: {
    content?: CodeSearchMatch[];
    fileName?: CodeSearchMatch[];
  };
  /** Collection the result belongs to */
  collection: CodeSearchCollection;
  /** Project the result belongs to */
  project: CodeSearchProject;
  /** Repository the result belongs to */
  repository: CodeSearchRepository;
  /** Branch versions in which the match was found */
  versions: CodeSearchVersion[];
  /** Content ID of the result file */
  contentId: string;
}
/**
 * Facet information for search results
 */
export interface CodeSearchFacet {
  /** Display name of the facet value */
  name: string;
  /** Identifier of the facet value */
  id: string;
  /** Number of results matching this facet value */
  resultCount: number;
}
/**
 * Response from the Azure DevOps Search API
 */
export interface CodeSearchResponse {
  /** Total number of matched files (not just this page) */
  count: number;
  /** The returned page of results */
  results: CodeSearchResult[];
  /** Numeric status code with additional information — assumption: same meaning as WikiSearchResponse.infoCode; confirm */
  infoCode?: number;
  /** Facet counts, present when includeFacets was requested */
  facets?: {
    Project?: CodeSearchFacet[];
    Repository?: CodeSearchFacet[];
    Path?: CodeSearchFacet[];
    Branch?: CodeSearchFacet[];
    CodeElement?: CodeSearchFacet[];
  };
}
/**
* Options for searching wiki pages in Azure DevOps projects
*/
export interface SearchWikiOptions {
/**
* The text to search for within wiki pages
*/
searchText: string;
/**
* The ID or name of the project to search in
* If not provided, search will be performed across the entire organization
*/
projectId?: string;
/**
* Optional filters to narrow search results
*/
filters?: {
/**
* Filter by project names. Useful for cross-project searches.
*/
Project?: string[];
};
/**
* Number of results to return
* @default 100
* @minimum 1
* @maximum 1000
*/
top?: number;
/**
* Number of results to skip for pagination
* @default 0
* @minimum 0
*/
skip?: number;
/**
* Whether to include faceting in results
* @default true
*/
includeFacets?: boolean;
}
/**
* Request body for the Azure DevOps Wiki Search API
*/
export interface WikiSearchRequest {
/**
* The search text to find in wiki pages
*/
searchText: string;
/**
* Number of results to skip for pagination
*/
$skip?: number;
/**
* Number of results to return
*/
$top?: number;
/**
* Filters to be applied. Set to null if no filters are needed.
*/
filters?: {
/**
* Filter by project names
*/
Project?: string[];
};
/**
* Options for sorting search results
* If null, results are sorted by relevance
*/
$orderBy?: SortOption[];
/**
* Whether to include faceting in the result
* @default false
*/
includeFacets?: boolean;
}
/**
* Sort option for search results
*/
export interface SortOption {
/**
* Field to sort by
*/
field: string;
/**
* Sort direction
*/
sortOrder: 'asc' | 'desc' | 'ASC' | 'DESC';
}
/**
* Defines the matched terms in the field of the wiki result
*/
export interface WikiHit {
/**
* Reference name of the highlighted field
*/
fieldReferenceName: string;
/**
* Matched/highlighted snippets of the field
*/
highlights: string[];
}
/**
* Defines the wiki result that matched a wiki search request
*/
export interface WikiResult {
/**
* Name of the result file
*/
fileName: string;
/**
* Path at which result file is present
*/
path: string;
/**
* Collection of the result file
*/
collection: {
/**
* Name of the collection
*/
name: string;
};
/**
* Project details of the wiki document
*/
project: {
/**
* ID of the project
*/
id: string;
/**
* Name of the project
*/
name: string;
/**
* Visibility of the project
*/
visibility?: string;
};
/**
* Wiki information for the result
*/
wiki: {
/**
* ID of the wiki
*/
id: string;
/**
* Mapped path for the wiki
*/
mappedPath: string;
/**
* Name of the wiki
*/
name: string;
/**
* Version for wiki
*/
version: string;
};
/**
* Content ID of the result file
*/
contentId: string;
/**
* Highlighted snippets of fields that match the search request
* The list is sorted by relevance of the snippets
*/
hits: WikiHit[];
}
/**
* Defines a wiki search response item
*/
export interface WikiSearchResponse {
/**
* Total number of matched wiki documents
*/
count: number;
/**
* List of top matched wiki documents
*/
results: WikiResult[];
/**
* Numeric code indicating additional information:
* 0 - Ok
* 1 - Account is being reindexed
* 2 - Account indexing has not started
* 3 - Invalid Request
* ... and others as defined in the API
*/
infoCode?: number;
/**
* A dictionary storing an array of Filter objects against each facet
*/
facets?: {
/**
* Project facets for filtering
*/
Project?: CodeSearchFacet[];
};
}
/**
* Options for searching work items in Azure DevOps projects
*/
export interface SearchWorkItemsOptions {
/**
* The text to search for within work items
*/
searchText: string;
/**
* The ID or name of the project to search in
* If not provided, search will be performed across the entire organization
*/
projectId?: string;
/**
* Optional filters to narrow search results
*/
filters?: {
/**
* Filter by project names. Useful for cross-project searches.
*/
'System.TeamProject'?: string[];
/**
* Filter by work item types (Bug, Task, User Story, etc.)
*/
'System.WorkItemType'?: string[];
/**
* Filter by work item states (New, Active, Closed, etc.)
*/
'System.State'?: string[];
/**
* Filter by assigned users
*/
'System.AssignedTo'?: string[];
/**
* Filter by area paths
*/
'System.AreaPath'?: string[];
};
/**
* Number of results to return
* @default 100
* @minimum 1
* @maximum 1000
*/
top?: number;
/**
* Number of results to skip for pagination
* @default 0
* @minimum 0
*/
skip?: number;
/**
* Whether to include faceting in results
* @default true
*/
includeFacets?: boolean;
/**
* Options for sorting search results
* If null, results are sorted by relevance
*/
orderBy?: SortOption[];
}
/**
* Request body for the Azure DevOps Work Item Search API
*/
export interface WorkItemSearchRequest {
/**
* The search text to find in work items
*/
searchText: string;
/**
* Number of results to skip for pagination
*/
$skip?: number;
/**
* Number of results to return
*/
$top?: number;
/**
* Filters to be applied. Set to null if no filters are needed.
*/
filters?: {
'System.TeamProject'?: string[];
'System.WorkItemType'?: string[];
'System.State'?: string[];
'System.AssignedTo'?: string[];
'System.AreaPath'?: string[];
};
/**
* Options for sorting search results
* If null, results are sorted by relevance
*/
$orderBy?: SortOption[];
/**
* Whether to include faceting in the result
* @default false
*/
includeFacets?: boolean;
}
/**
* Defines the matched terms in the field of the work item result
*/
export interface WorkItemHit {
/**
* Reference name of the highlighted field
*/
fieldReferenceName: string;
/**
* Matched/highlighted snippets of the field
*/
highlights: string[];
}
/**
* Defines the work item result that matched a work item search request
*/
export interface WorkItemResult {
/**
* Project details of the work item
*/
project: {
/**
* ID of the project
*/
id: string;
/**
* Name of the project
*/
name: string;
};
/**
* A standard set of work item fields and their values
*/
fields: {
/**
* ID of the work item
*/
'system.id': string;
/**
* Type of the work item (Bug, Task, User Story, etc.)
*/
'system.workitemtype': string;
/**
* Title of the work item
*/
'system.title': string;
/**
* User assigned to the work item
*/
'system.assignedto'?: string;
/**
* Current state of the work item
*/
'system.state'?: string;
/**
* Tags associated with the work item
*/
'system.tags'?: string;
/**
* Revision number of the work item
*/
'system.rev'?: string;
/**
* Creation date of the work item
*/
'system.createddate'?: string;
/**
* Last modified date of the work item
*/
'system.changeddate'?: string;
/**
* Other fields may be included based on the work item type
*/
[key: string]: string | number | boolean | null | undefined;
};
/**
* Highlighted snippets of fields that match the search request
* The list is sorted by relevance of the snippets
*/
hits: WorkItemHit[];
/**
* URL to the work item
*/
url: string;
}
/**
* Defines a work item search response item
*/
export interface WorkItemSearchResponse {
/**
* Total number of matched work items
*/
count: number;
/**
* List of top matched work items
*/
results: WorkItemResult[];
/**
* Numeric code indicating additional information:
* 0 - Ok
* 1 - Account is being reindexed
* 2 - Account indexing has not started
* 3 - Invalid Request
* ... and others as defined in the API
*/
infoCode?: number;
/**
* A dictionary storing an array of Filter objects against each facet
*/
facets?: {
'System.TeamProject'?: CodeSearchFacet[];
'System.WorkItemType'?: CodeSearchFacet[];
'System.State'?: CodeSearchFacet[];
'System.AssignedTo'?: CodeSearchFacet[];
'System.AreaPath'?: CodeSearchFacet[];
};
}
```
--------------------------------------------------------------------------------
/setup_env.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash
# Global variable to track if an error has occurred
ERROR_OCCURRED=0
# Function to handle errors without exiting the shell when sourced.
# Prints $1 in red, records the failure in ERROR_OCCURRED, then either
# returns 1 (script sourced) or exits 1 (script executed directly).
handle_error() {
  local message=$1
  local reset_colors="\033[0m"
  echo -e "\033[0;31m$message$reset_colors"
  # Set the error flag so should_continue() can short-circuit later steps
  ERROR_OCCURRED=1
  # Sourced detection: in bash, BASH_SOURCE[0] differs from $0 when sourced;
  # in zsh, ZSH_EVAL_CONTEXT contains ":file:" while sourcing a file.
  if [[ "${BASH_SOURCE[0]}" != "${0}" ]] || [[ -n "$ZSH_VERSION" && "$ZSH_EVAL_CONTEXT" == *:file:* ]]; then
    echo "Script terminated with error. Returning to shell."
    # Reset colors to ensure the interactive shell isn't affected
    echo -e "$reset_colors"
    # The return will be caught by the caller
    return 1
  else
    # If script is being executed directly
    exit 1
  fi
}
# Gate helper for the steps below: succeeds (0) while no prior step has
# flagged an error, fails (1) once ERROR_OCCURRED has been set.
should_continue() {
  [ "$ERROR_OCCURRED" -ne 1 ] && return 0
  # A previous step failed; reset terminal colors before bailing out so the
  # interactive shell is left in a clean state.
  echo -e "\033[0m"
  return 1
}
# Ensure script is running with a compatible shell
if [ -z "$BASH_VERSION" ] && [ -z "$ZSH_VERSION" ]; then
handle_error "This script requires bash or zsh to run. Please run it with: bash $(basename "$0") or zsh $(basename "$0")"
return 1 2>/dev/null || exit 1
fi
# Set shell options for compatibility
if [ -n "$ZSH_VERSION" ]; then
# ZSH specific settings
setopt SH_WORD_SPLIT
setopt KSH_ARRAYS
fi
# Colors for better output - ensure they're properly reset after use
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
RED='\033[0;31m'
NC='\033[0m' # No Color
echo -e "${GREEN}Azure DevOps MCP Server - Environment Setup${NC}"
echo "This script will help you set up your .env file with Azure DevOps credentials."
echo
# Clean up any existing create_pat.json file
if [ -f "create_pat.json" ]; then
echo -e "${YELLOW}Cleaning up existing create_pat.json file...${NC}"
rm -f create_pat.json
fi
# Check if Azure CLI is installed
if ! command -v az &> /dev/null; then
handle_error "Error: Azure CLI is not installed.\nPlease install Azure CLI first: https://docs.microsoft.com/en-us/cli/azure/install-azure-cli"
return 1 2>/dev/null || exit 1
fi
should_continue || return 1 2>/dev/null || exit 1
# Check if Azure DevOps extension is installed
echo -e "${YELLOW}Checking for Azure DevOps extension...${NC}"
az devops &> /dev/null
if [ $? -ne 0 ]; then
echo "Azure DevOps extension not found. Installing..."
az extension add --name azure-devops
if [ $? -ne 0 ]; then
handle_error "Failed to install Azure DevOps extension."
return 1 2>/dev/null || exit 1
else
echo -e "${GREEN}Azure DevOps extension installed successfully.${NC}"
fi
else
echo "Azure DevOps extension is already installed."
fi
should_continue || return 1 2>/dev/null || exit 1
# Check if jq is installed
if ! command -v jq &> /dev/null; then
handle_error "Error: jq is not installed.\nPlease install jq first. On Ubuntu/Debian: sudo apt-get install jq\nOn macOS: brew install jq"
return 1 2>/dev/null || exit 1
fi
should_continue || return 1 2>/dev/null || exit 1
# Check if already logged in
echo -e "\n${YELLOW}Step 1: Checking Azure CLI authentication...${NC}"
if ! az account show &> /dev/null; then
echo "Not logged in. Initiating login..."
az login --allow-no-subscriptions
if [ $? -ne 0 ]; then
handle_error "Failed to login to Azure CLI."
return 1 2>/dev/null || exit 1
fi
else
echo -e "${GREEN}Already logged in to Azure CLI.${NC}"
fi
should_continue || return 1 2>/dev/null || exit 1
# Get Azure DevOps Organizations using REST API
echo -e "\n${YELLOW}Step 2: Fetching your Azure DevOps organizations...${NC}"
echo "This may take a moment..."
# First get the user profile
echo "Getting user profile..."
profile_response=$(az rest --method get --uri "https://app.vssps.visualstudio.com/_apis/profile/profiles/me?api-version=6.0" --resource "499b84ac-1321-427f-aa17-267ca6975798" 2>&1)
profile_status=$?
if [ $profile_status -ne 0 ]; then
echo -e "${RED}Error: Failed to get user profile${NC}"
echo -e "${RED}Status code: $profile_status${NC}"
echo -e "${RED}Error response:${NC}"
echo "$profile_response"
echo
echo "Manually provide your organization name instead."
read -p "Enter your Azure DevOps organization name: " org_name
else
echo "Profile API response:"
echo "$profile_response"
echo
public_alias=$(echo "$profile_response" | jq -r '.publicAlias')
if [ "$public_alias" = "null" ] || [ -z "$public_alias" ]; then
echo -e "${RED}Failed to extract publicAlias from response.${NC}"
echo "Full response was:"
echo "$profile_response"
echo
echo "Manually provide your organization name instead."
read -p "Enter your Azure DevOps organization name: " org_name
else
# Get organizations using the publicAlias
echo "Fetching organizations..."
orgs_result=$(az rest --method get --uri "https://app.vssps.visualstudio.com/_apis/accounts?memberId=$public_alias&api-version=6.0" --resource "499b84ac-1321-427f-aa17-267ca6975798")
# Extract organization names from the response using jq
orgs=$(echo "$orgs_result" | jq -r '.value[].accountName')
if [ -z "$orgs" ]; then
echo -e "${RED}No organizations found.${NC}"
echo "Manually provide your organization name instead."
read -p "Enter your Azure DevOps organization name: " org_name
else
# Display organizations for selection
echo -e "\nYour Azure DevOps organizations:"
i=1
OLDIFS=$IFS
IFS=$'\n'
# Create array in a shell-agnostic way
orgs_array=()
while IFS= read -r line; do
[ -n "$line" ] && orgs_array+=("$line")
done <<< "$orgs"
IFS=$OLDIFS
# Check if array is empty
if [ ${#orgs_array[@]} -eq 0 ]; then
echo -e "${RED}Failed to parse organizations list.${NC}"
echo "Manually provide your organization name instead."
read -p "Enter your Azure DevOps organization name: " org_name
else
# Display organizations with explicit indexing
for ((idx=0; idx<${#orgs_array[@]}; idx++)); do
echo "$((idx+1)) ${orgs_array[$idx]}"
done
# Prompt for selection
read -p "Select an organization (1-${#orgs_array[@]}): " org_selection
if [[ "$org_selection" =~ ^[0-9]+$ ]] && [ "$org_selection" -ge 1 ] && [ "$org_selection" -le "${#orgs_array[@]}" ]; then
org_name=${orgs_array[$((org_selection-1))]}
else
handle_error "Invalid selection. Please run the script again."
return 1 2>/dev/null || exit 1
fi
fi
fi
fi
fi
should_continue || return 1 2>/dev/null || exit 1
org_url="https://dev.azure.com/$org_name"
echo -e "${GREEN}Using organization URL: $org_url${NC}"
# Step 3 (optional): pick a default project to bake into the .env file.
echo -e "\n${YELLOW}Step 3: Would you like to set a default project? (y/n)${NC}"
read -p "Select option: " set_default_project
default_project=""
if [[ "$set_default_project" = "y" || "$set_default_project" = "Y" ]]; then
  # Point the az devops extension at the selected organization. Quote the
  # URL so it cannot be word-split or glob-expanded (fix: was unquoted).
  az devops configure --defaults organization="$org_url"
  # List projects in the chosen organization
  echo "Fetching projects from $org_name..."
  projects=$(az devops project list --query "value[].name" -o tsv)
  if [ $? -ne 0 ] || [ -z "$projects" ]; then
    echo -e "${YELLOW}No projects found or unable to list projects.${NC}"
    read -p "Enter a default project name (leave blank to skip): " default_project
  else
    # Display projects for selection
    echo -e "\nAvailable projects in $org_name:"
    OLDIFS=$IFS
    IFS=$'\n'
    # Build the array line-by-line so it works in both bash and zsh
    projects_array=()
    while IFS= read -r line; do
      [ -n "$line" ] && projects_array+=("$line")
    done <<< "$projects"
    IFS=$OLDIFS
    # Check if array is empty
    if [ ${#projects_array[@]} -eq 0 ]; then
      echo -e "${YELLOW}Failed to parse projects list.${NC}"
      read -p "Enter a default project name (leave blank to skip): " default_project
    else
      # Display projects with explicit 1-based numbering
      for ((idx=0; idx<${#projects_array[@]}; idx++)); do
        echo "$((idx+1)) ${projects_array[$idx]}"
      done
      echo "$((${#projects_array[@]}+1)) Skip setting a default project"
      # Prompt for selection; the final menu entry means "skip".
      # NOTE(review): `read -p` is a bashism; under zsh -p has a different
      # meaning — verify interactive behavior when sourced in zsh.
      read -p "Select a default project (1-$((${#projects_array[@]}+1))): " project_selection
      if [[ "$project_selection" =~ ^[0-9]+$ ]] && [ "$project_selection" -ge 1 ] && [ "$project_selection" -lt "$((${#projects_array[@]}+1))" ]; then
        default_project=${projects_array[$((project_selection-1))]}
        echo -e "${GREEN}Using default project: $default_project${NC}"
      else
        echo "No default project selected."
      fi
    fi
  fi
fi
# Step 4: write the .env file from the collected answers.
# (Fix: the label previously jumped from "Step 3" straight to "Step 5".)
echo -e "\n${YELLOW}Step 4: Creating .env file...${NC}"
cat > .env << EOF
# Azure DevOps MCP Server - Environment Variables
# Azure DevOps Organization Name (selected from your available organizations)
AZURE_DEVOPS_ORG=$org_name
# Azure DevOps Organization URL (required)
AZURE_DEVOPS_ORG_URL=$org_url
AZURE_DEVOPS_AUTH_METHOD=azure-identity
EOF
# Add default project if one was selected above
if [ -n "$default_project" ]; then
  cat >> .env << EOF
# Default Project to use when not specified
AZURE_DEVOPS_DEFAULT_PROJECT=$default_project
EOF
else
  cat >> .env << EOF
# Default Project to use when not specified (optional)
# AZURE_DEVOPS_DEFAULT_PROJECT=your-default-project
EOF
fi
# Add remaining configuration
cat >> .env << EOF
# API Version to use (optional, defaults to latest)
# AZURE_DEVOPS_API_VERSION=6.0
# Server Configuration
PORT=3000
HOST=localhost
# Logging Level (debug, info, warn, error)
LOG_LEVEL=info
EOF
echo -e "\n${GREEN}Environment setup completed successfully!${NC}"
echo "Your .env file has been created with the following configuration:"
echo "- Organization: $org_name"
echo "- Organization URL: $org_url"
if [ -n "$default_project" ]; then
  echo "- Default Project: $default_project"
fi
# Fix: the old summary claimed a PAT was created, but this script never
# creates one — it configures azure-identity auth via the Azure CLI login.
echo "- Authentication: azure-identity (uses your Azure CLI login; no PAT created)"
echo
echo "You can now run your Azure DevOps MCP Server with:"
echo "  npm run dev"
echo
echo "You can also run integration tests with:"
echo "  npm run test:integration"
# At the end of the script, ensure colors are reset
echo -e "${NC}"
```
--------------------------------------------------------------------------------
/src/features/repositories/index.spec.unit.ts:
--------------------------------------------------------------------------------
```typescript
import { WebApi } from 'azure-devops-node-api';
import { CallToolRequest } from '@modelcontextprotocol/sdk/types.js';
import { isRepositoriesRequest, handleRepositoriesRequest } from './index';
import { getRepository } from './get-repository';
import { getRepositoryDetails } from './get-repository-details';
import { listRepositories } from './list-repositories';
import { getFileContent } from './get-file-content';
import {
getAllRepositoriesTree,
formatRepositoryTree,
} from './get-all-repositories-tree';
import { getRepositoryTree } from './get-repository-tree';
import { createBranch } from './create-branch';
import { createCommit } from './create-commit';
import { listCommits } from './list-commits';
import { GitVersionType } from 'azure-devops-node-api/interfaces/GitInterfaces';
// Mock the imported modules
jest.mock('./get-repository', () => ({
getRepository: jest.fn(),
}));
jest.mock('./get-repository-details', () => ({
getRepositoryDetails: jest.fn(),
}));
jest.mock('./list-repositories', () => ({
listRepositories: jest.fn(),
}));
jest.mock('./get-file-content', () => ({
getFileContent: jest.fn(),
}));
jest.mock('./get-all-repositories-tree', () => ({
getAllRepositoriesTree: jest.fn(),
formatRepositoryTree: jest.fn(),
}));
jest.mock('./get-repository-tree', () => ({
getRepositoryTree: jest.fn(),
}));
jest.mock('./create-branch', () => ({
createBranch: jest.fn(),
}));
jest.mock('./create-commit', () => ({
createCommit: jest.fn(),
}));
jest.mock('./list-commits', () => ({
listCommits: jest.fn(),
}));
// Unit tests for the repositories feature's request routing:
// - isRepositoriesRequest: recognizes repository tool names
// - handleRepositoriesRequest: dispatches to the (mocked) feature functions
//   and wraps their results in MCP tool-call responses.
describe('Repositories Request Handlers', () => {
  // The handlers only forward the connection to the mocked feature functions,
  // so an empty object cast to WebApi is sufficient.
  const mockConnection = {} as WebApi;
  describe('isRepositoriesRequest', () => {
    it('should return true for repositories requests', () => {
      // Every tool name routed by the repositories feature must be recognized.
      const validTools = [
        'get_repository',
        'get_repository_details',
        'list_repositories',
        'get_file_content',
        'get_all_repositories_tree',
        'get_repository_tree',
        'create_branch',
        'create_commit',
        'list_commits',
      ];
      validTools.forEach((tool) => {
        const request = {
          params: { name: tool, arguments: {} },
          method: 'tools/call',
        } as CallToolRequest;
        expect(isRepositoriesRequest(request)).toBe(true);
      });
    });
    it('should return false for non-repositories requests', () => {
      // 'list_projects' belongs to the projects feature, not repositories.
      const request = {
        params: { name: 'list_projects', arguments: {} },
        method: 'tools/call',
      } as CallToolRequest;
      expect(isRepositoriesRequest(request)).toBe(false);
    });
  });
  describe('handleRepositoriesRequest', () => {
    it('should handle get_repository request', async () => {
      const mockRepository = { id: 'repo1', name: 'Repository 1' };
      (getRepository as jest.Mock).mockResolvedValue(mockRepository);
      const request = {
        params: {
          name: 'get_repository',
          arguments: {
            repositoryId: 'repo1',
          },
        },
        method: 'tools/call',
      } as CallToolRequest;
      const response = await handleRepositoriesRequest(mockConnection, request);
      // Result is serialized as a single JSON text content item.
      expect(response.content).toHaveLength(1);
      expect(JSON.parse(response.content[0].text as string)).toEqual(
        mockRepository,
      );
      // The handler supplies the project itself (second positional argument);
      // this test only asserts it is some string.
      expect(getRepository).toHaveBeenCalledWith(
        mockConnection,
        expect.any(String),
        'repo1',
      );
    });
    it('should handle get_repository_details request', async () => {
      const mockRepositoryDetails = {
        repository: { id: 'repo1', name: 'Repository 1' },
        statistics: { branches: [] },
        refs: { value: [], count: 0 },
      };
      (getRepositoryDetails as jest.Mock).mockResolvedValue(
        mockRepositoryDetails,
      );
      const request = {
        params: {
          name: 'get_repository_details',
          arguments: {
            repositoryId: 'repo1',
            includeStatistics: true,
            includeRefs: true,
          },
        },
        method: 'tools/call',
      } as CallToolRequest;
      const response = await handleRepositoriesRequest(mockConnection, request);
      expect(response.content).toHaveLength(1);
      expect(JSON.parse(response.content[0].text as string)).toEqual(
        mockRepositoryDetails,
      );
      // Options-object style call: the tool arguments are forwarded as-is.
      expect(getRepositoryDetails).toHaveBeenCalledWith(
        mockConnection,
        expect.objectContaining({
          repositoryId: 'repo1',
          includeStatistics: true,
          includeRefs: true,
        }),
      );
    });
    it('should handle list_repositories request', async () => {
      const mockRepositories = [
        { id: 'repo1', name: 'Repository 1' },
        { id: 'repo2', name: 'Repository 2' },
      ];
      (listRepositories as jest.Mock).mockResolvedValue(mockRepositories);
      const request = {
        params: {
          name: 'list_repositories',
          arguments: {
            projectId: 'project1',
            includeLinks: true,
          },
        },
        method: 'tools/call',
      } as CallToolRequest;
      const response = await handleRepositoriesRequest(mockConnection, request);
      expect(response.content).toHaveLength(1);
      expect(JSON.parse(response.content[0].text as string)).toEqual(
        mockRepositories,
      );
      expect(listRepositories).toHaveBeenCalledWith(
        mockConnection,
        expect.objectContaining({
          projectId: 'project1',
          includeLinks: true,
        }),
      );
    });
    it('should handle get_file_content request', async () => {
      const mockFileContent = { content: 'file content', isFolder: false };
      (getFileContent as jest.Mock).mockResolvedValue(mockFileContent);
      const request = {
        params: {
          name: 'get_file_content',
          arguments: {
            repositoryId: 'repo1',
            path: '/path/to/file',
            version: 'main',
            versionType: 'branch',
          },
        },
        method: 'tools/call',
      } as CallToolRequest;
      const response = await handleRepositoriesRequest(mockConnection, request);
      expect(response.content).toHaveLength(1);
      expect(JSON.parse(response.content[0].text as string)).toEqual(
        mockFileContent,
      );
      // The string versionType 'branch' must be translated by the handler into
      // the GitVersionType.Branch enum value before the feature call.
      expect(getFileContent).toHaveBeenCalledWith(
        mockConnection,
        expect.any(String),
        'repo1',
        '/path/to/file',
        { versionType: GitVersionType.Branch, version: 'main' },
      );
    });
    it('should handle get_all_repositories_tree request', async () => {
      const mockTreeResponse = {
        repositories: [
          {
            name: 'repo1',
            tree: [
              { name: 'file1', path: '/file1', isFolder: false, level: 0 },
            ],
            stats: { directories: 0, files: 1 },
          },
        ],
      };
      (getAllRepositoriesTree as jest.Mock).mockResolvedValue(mockTreeResponse);
      (formatRepositoryTree as jest.Mock).mockReturnValue('repo1\n  file1\n');
      const request = {
        params: {
          name: 'get_all_repositories_tree',
          arguments: {
            projectId: 'project1',
            depth: 2,
          },
        },
        method: 'tools/call',
      } as CallToolRequest;
      const response = await handleRepositoriesRequest(mockConnection, request);
      // This tool responds with formatted text rather than JSON.
      expect(response.content).toHaveLength(1);
      expect(response.content[0].text as string).toContain('repo1');
      expect(getAllRepositoriesTree).toHaveBeenCalledWith(
        mockConnection,
        expect.objectContaining({
          projectId: 'project1',
          depth: 2,
        }),
      );
      // The handler formats each repository's tree; the fourth argument
      // (pattern/filter) is not set by this request.
      expect(formatRepositoryTree).toHaveBeenCalledWith(
        'repo1',
        expect.any(Array),
        expect.any(Object),
        undefined,
      );
    });
    it('should handle get_repository_tree request', async () => {
      const mockResponse = {
        name: 'repo',
        tree: [],
        stats: { directories: 0, files: 0 },
      };
      (getRepositoryTree as jest.Mock).mockResolvedValue(mockResponse);
      const request = {
        params: {
          name: 'get_repository_tree',
          arguments: { repositoryId: 'r' },
        },
        method: 'tools/call',
      } as CallToolRequest;
      const response = await handleRepositoriesRequest(mockConnection, request);
      expect(JSON.parse(response.content[0].text as string)).toEqual(
        mockResponse,
      );
      expect(getRepositoryTree).toHaveBeenCalled();
    });
    it('should handle create_branch request', async () => {
      const request = {
        params: {
          name: 'create_branch',
          arguments: {
            repositoryId: 'r',
            sourceBranch: 'main',
            newBranch: 'feature',
          },
        },
        method: 'tools/call',
      } as CallToolRequest;
      const response = await handleRepositoriesRequest(mockConnection, request);
      // Mutation tools respond with a human-readable confirmation message.
      expect(response.content[0].text).toContain('Branch created');
      expect(createBranch).toHaveBeenCalled();
    });
    it('should handle create_commit request', async () => {
      const request = {
        params: {
          name: 'create_commit',
          arguments: {
            repositoryId: 'r',
            branchName: 'main',
            commitMessage: 'msg',
            changes: [],
          },
        },
        method: 'tools/call',
      } as CallToolRequest;
      const response = await handleRepositoriesRequest(mockConnection, request);
      expect(response.content[0].text).toContain('Commit created');
      expect(createCommit).toHaveBeenCalled();
    });
    it('should handle list_commits request', async () => {
      (listCommits as jest.Mock).mockResolvedValue({ commits: [] });
      const request = {
        params: {
          name: 'list_commits',
          arguments: {
            repositoryId: 'r',
            branchName: 'main',
          },
        },
        method: 'tools/call',
      } as CallToolRequest;
      const response = await handleRepositoriesRequest(mockConnection, request);
      expect(JSON.parse(response.content[0].text as string)).toEqual({
        commits: [],
      });
      expect(listCommits).toHaveBeenCalled();
    });
    it('should throw error for unknown tool', async () => {
      const request = {
        params: {
          name: 'unknown_tool',
          arguments: {},
        },
        method: 'tools/call',
      } as CallToolRequest;
      await expect(
        handleRepositoriesRequest(mockConnection, request),
      ).rejects.toThrow('Unknown repositories tool');
    });
    it('should propagate errors from repository functions', async () => {
      // Feature-level failures must bubble up unchanged to the caller.
      const mockError = new Error('Test error');
      (listRepositories as jest.Mock).mockRejectedValue(mockError);
      const request = {
        params: {
          name: 'list_repositories',
          arguments: {
            projectId: 'project1',
          },
        },
        method: 'tools/call',
      } as CallToolRequest;
      await expect(
        handleRepositoriesRequest(mockConnection, request),
      ).rejects.toThrow(mockError);
    });
  });
});
```
--------------------------------------------------------------------------------
/.github/skills/skill-creator/LICENSE.txt:
--------------------------------------------------------------------------------
```
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
```
--------------------------------------------------------------------------------
/src/features/pull-requests/get-pull-request-checks/feature.ts:
--------------------------------------------------------------------------------
```typescript
import { WebApi } from 'azure-devops-node-api';
import {
GitPullRequestStatus,
GitStatusState,
} from 'azure-devops-node-api/interfaces/GitInterfaces';
import {
PolicyEvaluationRecord,
PolicyEvaluationStatus,
} from 'azure-devops-node-api/interfaces/PolicyInterfaces';
import {
AzureDevOpsError,
AzureDevOpsResourceNotFoundError,
} from '../../../shared/errors';
/** Identifies the pull request whose checks should be fetched. */
export interface PullRequestChecksOptions {
  // Project name or GUID; names are resolved to a GUID before use.
  projectId: string;
  repositoryId: string;
  pullRequestId: number;
}
/**
 * Best-effort pipeline identifiers recovered from a status or policy record.
 * All fields are optional because the source data rarely carries every id.
 */
export interface PipelineReference {
  pipelineId?: number;
  definitionId?: number;
  runId?: number;
  buildId?: number;
  displayName?: string;
  targetUrl?: string;
}
/** Simplified view of one external status check on a pull request. */
export interface PullRequestStatusCheck {
  id?: number;
  // camelCased GitStatusState name (e.g. 'succeeded'), or 'unknown'.
  state: string;
  description?: string;
  context?: {
    name?: string;
    genre?: string;
  };
  // ISO-8601 timestamps (see toIsoString below).
  createdDate?: string;
  updatedDate?: string;
  targetUrl?: string;
  pipeline?: PipelineReference;
}
/** Simplified view of one branch-policy evaluation on a pull request. */
export interface PullRequestPolicyCheck {
  evaluationId?: string;
  // camelCased PolicyEvaluationStatus name (e.g. 'approved'), or 'unknown'.
  status: string;
  isBlocking?: boolean;
  isEnabled?: boolean;
  configurationId?: number;
  configurationRevision?: number;
  configurationTypeId?: string;
  configurationTypeDisplayName?: string;
  displayName?: string;
  startedDate?: string;
  completedDate?: string;
  message?: string;
  targetUrl?: string;
  pipeline?: PipelineReference;
}
/** Combined result of getPullRequestChecks. */
export interface PullRequestChecksResult {
  statuses: PullRequestStatusCheck[];
  policyEvaluations: PullRequestPolicyCheck[];
}
/**
* Retrieve status checks and policy evaluations for a pull request.
*/
export async function getPullRequestChecks(
connection: WebApi,
options: PullRequestChecksOptions,
): Promise<PullRequestChecksResult> {
try {
const [gitApi, policyApi, projectId] = await Promise.all([
connection.getGitApi(),
connection.getPolicyApi(),
resolveProjectId(connection, options.projectId),
]);
const [statusRecords, evaluationRecords] = await Promise.all([
gitApi.getPullRequestStatuses(
options.repositoryId,
options.pullRequestId,
projectId,
),
policyApi.getPolicyEvaluations(
projectId,
buildPolicyArtifactId(projectId, options.pullRequestId),
),
]);
return {
statuses: (statusRecords ?? []).map(mapStatusRecord),
policyEvaluations: (evaluationRecords ?? []).map(mapEvaluationRecord),
};
} catch (error) {
if (error instanceof AzureDevOpsError) {
throw error;
}
throw new Error(
`Failed to get pull request checks: ${
error instanceof Error ? error.message : String(error)
}`,
);
}
}
/**
 * Builds the vstfs artifact URI that identifies a pull request for the
 * policy-evaluation API.
 */
const buildPolicyArtifactId = (
  projectId: string,
  pullRequestId: number,
): string =>
  ['vstfs:///CodeReview/CodeReviewId', projectId, pullRequestId].join('/');
// Matches a canonical GUID — the form Azure DevOps uses for project ids.
const projectIdGuidPattern =
  /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
/**
 * Resolves a project name-or-id to the project's GUID.
 *
 * Inputs that already look like a GUID are returned without a lookup;
 * anything else is resolved through the Core API.
 *
 * @throws AzureDevOpsResourceNotFoundError when no matching project exists.
 */
const resolveProjectId = async (
  connection: WebApi,
  projectIdOrName: string,
): Promise<string> => {
  // Already a GUID — no round-trip needed.
  if (projectIdGuidPattern.test(projectIdOrName)) {
    return projectIdOrName;
  }
  const coreApi = await connection.getCoreApi();
  const project = await coreApi.getProject(projectIdOrName);
  const resolvedId = project?.id;
  if (!resolvedId) {
    throw new AzureDevOpsResourceNotFoundError(
      `Project '${projectIdOrName}' not found`,
    );
  }
  return resolvedId;
};
// TypeScript numeric enums are reverse-mapped (value -> name), so casting the
// enum object to Record<number, string> lets formatEnumValue look up the
// display name for a numeric state received from the API.
const gitStatusStateMap = GitStatusState as unknown as Record<number, string>;
const policyStatusMap = PolicyEvaluationStatus as unknown as Record<
  number,
  string
>;
/**
 * Maps a raw GitPullRequestStatus into the simplified PullRequestStatusCheck
 * shape, recovering pipeline details from the target URL, the status context,
 * and the status properties where available.
 */
const mapStatusRecord = (
  status: GitPullRequestStatus,
): PullRequestStatusCheck => {
  // Pipeline info can live in several places; earlier sources win per field.
  const pipelineRef = mergePipelineReferences(
    parsePipelineReferenceFromUrl(status.targetUrl),
    extractPipelineReferenceFromObject(status.context),
    extractPipelineReferenceFromObject(status.properties),
  );
  const contextInfo = {
    name: status.context?.name,
    genre: status.context?.genre,
  };
  return {
    id: status.id,
    state: formatEnumValue(status.state, gitStatusStateMap),
    description: status.description,
    context: contextInfo,
    createdDate: toIsoString(status.creationDate),
    updatedDate: toIsoString(status.updatedDate),
    // Prefer the status's own URL; fall back to one mined from pipeline data.
    targetUrl: status.targetUrl ?? pipelineRef?.targetUrl,
    pipeline: pipelineRef,
  };
};
/**
 * Maps a raw PolicyEvaluationRecord into the simplified
 * PullRequestPolicyCheck shape. Display name, message, target URL and
 * pipeline details are recovered from the policy configuration settings and
 * the evaluation context; the `??` chains below encode the precedence order
 * (first non-undefined source wins) and must not be reordered.
 */
const mapEvaluationRecord = (
  evaluation: PolicyEvaluationRecord,
): PullRequestPolicyCheck => {
  // Settings and context are loosely typed property bags from the API.
  const settings =
    (evaluation.configuration?.settings as Record<string, unknown>) || {};
  const context =
    (evaluation.context as Record<string, unknown> | undefined) ?? {};
  // Pipeline details: settings win over context, which wins over anything
  // recoverable from an embedded target URL.
  const pipeline = mergePipelineReferences(
    extractPipelineReferenceFromObject(settings),
    extractPipelineReferenceFromObject(context),
    parsePipelineReferenceFromUrl(
      extractString(settings.targetUrl) ?? extractString(context.targetUrl),
    ),
  );
  // Display name: settings, then context, then the policy type's name.
  const displayName =
    extractString(settings.displayName) ??
    extractString(context.displayName) ??
    evaluation.configuration?.type?.displayName;
  // Target URL: a URL confirmed in the pipeline reference takes precedence.
  const targetUrl =
    pipeline?.targetUrl ??
    extractString(context.targetUrl) ??
    extractString(settings.targetUrl);
  return {
    evaluationId: evaluation.evaluationId,
    status: formatEnumValue(evaluation.status, policyStatusMap),
    isBlocking: evaluation.configuration?.isBlocking,
    isEnabled: evaluation.configuration?.isEnabled,
    configurationId: evaluation.configuration?.id,
    configurationRevision: evaluation.configuration?.revision,
    configurationTypeId: evaluation.configuration?.type?.id,
    configurationTypeDisplayName: evaluation.configuration?.type?.displayName,
    displayName,
    startedDate: toIsoString(evaluation.startedDate),
    completedDate: toIsoString(evaluation.completedDate),
    message: extractString(context.message) ?? extractString(settings.message),
    targetUrl,
    pipeline,
  };
};
/**
 * Converts a numeric enum value to its camelCased name via the supplied
 * reverse map (e.g. 4 -> 'Succeeded' -> 'succeeded'), or 'unknown' when the
 * value is missing or has no mapping.
 */
const formatEnumValue = (
  value: number | undefined,
  map: Record<number, string>,
): string => {
  if (typeof value !== 'number') {
    return 'unknown';
  }
  const name = map[value];
  if (!name) {
    return 'unknown';
  }
  return `${name.charAt(0).toLowerCase()}${name.slice(1)}`;
};
// Serializes a Date to an ISO-8601 string, passing undefined through.
const toIsoString = (date: Date | undefined): string | undefined => {
  if (!date) {
    return undefined;
  }
  return date.toISOString();
};
// Narrows an unknown value to a string, yielding undefined for anything else.
const extractString = (value: unknown): string | undefined => {
  if (typeof value === 'string') {
    return value;
  }
  return undefined;
};
/**
 * Best-effort conversion of an unknown value to a finite number.
 *
 * Returns undefined for null/undefined, for values that do not coerce to a
 * finite number, and for empty/whitespace-only strings. The string guard is
 * needed because Number('') === 0, which would otherwise turn a valueless
 * query parameter (e.g. '?buildId=') into a bogus id of 0.
 */
const parseNumeric = (value: unknown): number | undefined => {
  if (value === null || value === undefined) {
    return undefined;
  }
  // Number('') and Number('   ') coerce to 0 — treat them as absent instead.
  if (typeof value === 'string' && value.trim() === '') {
    return undefined;
  }
  const numeric = Number(value);
  return Number.isFinite(numeric) ? numeric : undefined;
};
/**
 * Extracts the trailing numeric id from a vstfs/build URI — the last run of
 * digits in the string — or undefined when none is present.
 */
const parseIdFromUri = (uri?: string): number | undefined => {
  // (\d+)(?!.*\d) matches the final run of digits in the string.
  const match = uri?.match(/(\d+)(?!.*\d)/);
  if (!match) {
    return undefined;
  }
  const parsed = Number(match[1]);
  return Number.isFinite(parsed) ? parsed : undefined;
};
/**
 * Best-effort extraction of pipeline identifiers from a check's target URL.
 *
 * Well-known query parameters are read first, then recognizable path shapes:
 * /pipelines/{pipelineId}, .../{pipelineId}/runs/{runId},
 * /build/{definitionId} (optionally /build/definition/{id}), and
 * vstfs-style /Build/Build/{buildId}. The `??` fallbacks mean an
 * already-populated field is never overwritten, so query parameters take
 * precedence over path segments.
 *
 * Returns just { targetUrl } when the URL cannot be parsed, and undefined
 * when no URL was supplied.
 */
const parsePipelineReferenceFromUrl = (
  targetUrl?: string,
): PipelineReference | undefined => {
  if (!targetUrl) {
    return undefined;
  }
  try {
    const url = new URL(targetUrl);
    const result: PipelineReference = { targetUrl };
    // Applies a numeric query parameter via the setter when present/parsable.
    const setParam = (param: string, setter: (value: number) => void) => {
      const raw = url.searchParams.get(param);
      const numeric = parseNumeric(raw);
      if (numeric !== undefined) {
        setter(numeric);
      }
    };
    setParam('pipelineId', (value) => {
      result.pipelineId = value;
    });
    setParam('definitionId', (value) => {
      result.definitionId = value;
    });
    // Legacy parameter name; only used when definitionId was not set above.
    setParam('buildDefinitionId', (value) => {
      result.definitionId = result.definitionId ?? value;
    });
    setParam('runId', (value) => {
      result.runId = value;
    });
    setParam('buildId', (value) => {
      result.buildId = value;
      // A build id doubles as the run id when no explicit runId was given.
      result.runId = result.runId ?? value;
    });
    const segments = url.pathname.split('/').filter(Boolean);
    // Path shape: .../pipelines/{pipelineId}
    const pipelinesIndex = segments.lastIndexOf('pipelines');
    if (pipelinesIndex !== -1 && pipelinesIndex + 1 < segments.length) {
      const pipelineCandidate = parseNumeric(segments[pipelinesIndex + 1]);
      if (pipelineCandidate !== undefined) {
        result.pipelineId = result.pipelineId ?? pipelineCandidate;
      }
    }
    // Path shape: .../{pipelineId}/runs/{runId}
    const runsIndex = segments.lastIndexOf('runs');
    if (runsIndex !== -1 && runsIndex + 1 < segments.length) {
      const runCandidate = parseNumeric(segments[runsIndex + 1]);
      if (runCandidate !== undefined) {
        result.runId = result.runId ?? runCandidate;
      }
      if (runsIndex > 0) {
        const preceding = segments[runsIndex - 1];
        const pipelineCandidate = parseNumeric(preceding);
        if (pipelineCandidate !== undefined) {
          result.pipelineId = result.pipelineId ?? pipelineCandidate;
        }
      }
    }
    // Classic build links: /build/{id} or /build/definition/{id}
    const buildMatch = url.pathname.match(/\/build\/(?:definition\/)?(\d+)/i);
    if (!result.definitionId && buildMatch) {
      const id = parseNumeric(buildMatch[1]);
      if (id !== undefined) {
        result.definitionId = id;
      }
    }
    // vstfs-style build URIs: .../Build/Build/{buildId}
    const buildUriMatch = url.pathname.match(/\/Build\/Build\/(\d+)/i);
    if (buildUriMatch) {
      const buildId = parseNumeric(buildUriMatch[1]);
      if (buildId !== undefined) {
        result.buildId = result.buildId ?? buildId;
        result.runId = result.runId ?? buildId;
      }
    }
    return result;
  } catch {
    // Not a parsable absolute URL — keep it for display purposes only.
    return { targetUrl };
  }
};
/**
 * Extracts pipeline identifiers from a loosely typed property bag (policy
 * settings, status context or properties). Each id is looked up under
 * several alternative key names (the `??` chains encode the precedence),
 * and any embedded URL is also mined for further details.
 *
 * Returns undefined when the input is not an object; may return undefined
 * from mergePipelineReferences when nothing useful was found.
 */
const extractPipelineReferenceFromObject = (
  value: unknown,
): PipelineReference | undefined => {
  if (!value || typeof value !== 'object') {
    return undefined;
  }
  const object = value as Record<string, unknown>;
  const candidate: PipelineReference = {};
  // Pipeline id: direct key or nested { pipeline: { id } }.
  const pipelineId = parseNumeric(
    object.pipelineId ??
      (object.pipeline as Record<string, unknown> | undefined)?.id,
  );
  if (pipelineId !== undefined) {
    candidate.pipelineId = pipelineId;
  }
  // Definition id: several historical key spellings, then nested shape.
  const definitionId = parseNumeric(
    object.definitionId ??
      object.buildDefinitionId ??
      object.pipelineDefinitionId ??
      (object.definition as Record<string, unknown> | undefined)?.id,
  );
  if (definitionId !== undefined) {
    candidate.definitionId = definitionId;
  }
  const runId = parseNumeric(
    object.runId ??
      object.buildId ??
      object.stageRunId ??
      object.jobRunId ??
      object.planId,
  );
  if (runId !== undefined) {
    candidate.runId = runId;
  }
  // Build id: explicit key, nested { build: { id } }, or the trailing number
  // of a vstfs build URI.
  const buildId = parseNumeric(
    object.buildId ??
      (object.build as Record<string, unknown> | undefined)?.id ??
      parseIdFromUri(extractString(object.buildUri)) ??
      parseIdFromUri(extractString(object.uri)),
  );
  if (buildId !== undefined) {
    candidate.buildId = candidate.buildId ?? buildId;
    // A build id also serves as the run id when none was found above.
    if (candidate.runId === undefined) {
      candidate.runId = buildId;
    }
  }
  const displayName =
    extractString(object.displayName) ?? extractString(object.name);
  if (displayName) {
    candidate.displayName = displayName;
  }
  const targetUrl =
    extractString(object.targetUrl) ??
    extractString(object.url) ??
    extractString(object.href);
  // Merge in anything recoverable from the embedded URL; the explicit keys
  // gathered above take precedence per field.
  return mergePipelineReferences(
    candidate,
    parsePipelineReferenceFromUrl(targetUrl),
  );
};
/**
 * Merges any number of partial pipeline references into one, with earlier
 * references taking precedence for each individual field. Returns undefined
 * when no reference contributes any value at all.
 */
const mergePipelineReferences = (
  ...refs: Array<PipelineReference | undefined>
): PipelineReference | undefined => {
  const merged: PipelineReference = {};
  let populated = false;
  // Copies a single field into `merged` unless it is already set.
  const take = <K extends keyof PipelineReference>(
    source: PipelineReference,
    key: K,
  ): void => {
    if (merged[key] === undefined && source[key] !== undefined) {
      merged[key] = source[key];
      populated = true;
    }
  };
  for (const candidate of refs) {
    if (!candidate) {
      continue;
    }
    take(candidate, 'pipelineId');
    take(candidate, 'definitionId');
    take(candidate, 'runId');
    take(candidate, 'buildId');
    take(candidate, 'displayName');
    take(candidate, 'targetUrl');
  }
  return populated ? merged : undefined;
};
```
--------------------------------------------------------------------------------
/src/features/pull-requests/update-pull-request/feature.ts:
--------------------------------------------------------------------------------
```typescript
import { GitPullRequest } from 'azure-devops-node-api/interfaces/GitInterfaces';
import { WebApi } from 'azure-devops-node-api';
import {
WorkItemRelation,
WorkItemExpand,
} from 'azure-devops-node-api/interfaces/WorkItemTrackingInterfaces';
import { AzureDevOpsClient } from '../../../shared/auth/client-factory';
import { AzureDevOpsError } from '../../../shared/errors';
import { UpdatePullRequestOptions } from '../types';
import { AuthenticationMethod } from '../../../shared/auth/auth-factory';
import { pullRequestStatusMapper } from '../../../shared/enums';
function normalizeTags(tags?: string[]): string[] {
if (!tags) {
return [];
}
const seen = new Set<string>();
const normalized: string[] = [];
for (const rawTag of tags) {
const trimmed = rawTag.trim();
if (!trimmed) {
continue;
}
const key = trimmed.toLowerCase();
if (seen.has(key)) {
continue;
}
seen.add(key);
normalized.push(trimmed);
}
return normalized;
}
/**
* Updates an existing pull request in Azure DevOps with the specified changes.
*
* @param options - The options for updating the pull request
* @returns The updated pull request
*/
/**
 * Update an existing pull request.
 *
 * Supports changing title/description/draft state/status, linking and
 * unlinking work items, adding and removing reviewers, and adding and
 * removing tags (labels). Connection settings come from environment
 * variables: AZURE_DEVOPS_AUTH_METHOD, AZURE_DEVOPS_ORG_URL, AZURE_DEVOPS_PAT.
 *
 * @param options Pull request identifiers plus the fields to update.
 * @returns The updated GitPullRequest; `labels` is refreshed when tags changed.
 * @throws AzureDevOpsError if the pull request is not found, the status value
 *   is invalid, or any underlying API call fails.
 */
export const updatePullRequest = async (
  options: UpdatePullRequestOptions,
): Promise<GitPullRequest> => {
  const {
    projectId,
    repositoryId,
    pullRequestId,
    title,
    description,
    status,
    isDraft,
    addWorkItemIds,
    removeWorkItemIds,
    addReviewers,
    removeReviewers,
    addTags,
    removeTags,
    additionalProperties,
  } = options;
  try {
    // Get connection to Azure DevOps (auth method defaults to PAT)
    const client = new AzureDevOpsClient({
      method:
        (process.env.AZURE_DEVOPS_AUTH_METHOD as AuthenticationMethod) ?? 'pat',
      organizationUrl: process.env.AZURE_DEVOPS_ORG_URL ?? '',
      personalAccessToken: process.env.AZURE_DEVOPS_PAT,
    });
    const connection = await client.getWebApiClient();
    // Get the Git API client
    const gitApi = await connection.getGitApi();
    // First, get the current pull request (also validates that it exists)
    const pullRequest = await gitApi.getPullRequestById(
      pullRequestId,
      projectId,
    );
    if (!pullRequest) {
      throw new AzureDevOpsError(
        `Pull request ${pullRequestId} not found in repository ${repositoryId}`,
      );
    }
    // Store the artifactId for work item linking
    // (the vstfs URI used when creating/removing ArtifactLink relations)
    const artifactId = pullRequest.artifactId;
    const effectivePullRequestId = pullRequest.pullRequestId ?? pullRequestId;
    // Create an object with only the properties to update
    const updateObject: Partial<GitPullRequest> = {};
    if (title !== undefined) {
      updateObject.title = title;
    }
    if (description !== undefined) {
      updateObject.description = description;
    }
    if (isDraft !== undefined) {
      updateObject.isDraft = isDraft;
    }
    if (status) {
      // Map the string status onto the GitInterfaces enum value
      const enumStatus = pullRequestStatusMapper.toEnum(status);
      if (enumStatus !== undefined) {
        updateObject.status = enumStatus;
      } else {
        throw new AzureDevOpsError(
          `Invalid status: ${status}. Valid values are: active, abandoned, completed`,
        );
      }
    }
    // Add any additional properties that were specified
    if (additionalProperties) {
      Object.assign(updateObject, additionalProperties);
    }
    // Update the pull request
    const updatedPullRequest = await gitApi.updatePullRequest(
      updateObject,
      repositoryId,
      pullRequestId,
      projectId,
    );
    // Handle work items separately if needed
    const addIds = addWorkItemIds ?? [];
    const removeIds = removeWorkItemIds ?? [];
    if (addIds.length > 0 || removeIds.length > 0) {
      await handleWorkItems({
        connection,
        pullRequestId,
        repositoryId,
        projectId,
        workItemIdsToAdd: addIds,
        workItemIdsToRemove: removeIds,
        artifactId,
      });
    }
    // Handle reviewers separately if needed
    const addReviewerIds = addReviewers ?? [];
    const removeReviewerIds = removeReviewers ?? [];
    if (addReviewerIds.length > 0 || removeReviewerIds.length > 0) {
      await handleReviewers({
        connection,
        pullRequestId,
        repositoryId,
        projectId,
        reviewersToAdd: addReviewerIds,
        reviewersToRemove: removeReviewerIds,
      });
    }
    // Tags (labels): compare case-insensitively against the PR's current labels
    const normalizedTagsToAdd = normalizeTags(addTags);
    const normalizedTagsToRemove = normalizeTags(removeTags);
    if (
      effectivePullRequestId &&
      (normalizedTagsToAdd.length > 0 || normalizedTagsToRemove.length > 0)
    ) {
      let labels =
        (await gitApi.getPullRequestLabels(
          repositoryId,
          effectivePullRequestId,
          projectId,
        )) ?? [];
      const existingNames = new Set(
        labels
          .map((label) => label.name?.toLowerCase())
          .filter((name): name is string => Boolean(name)),
      );
      // Only create labels that are not already present (case-insensitive)
      const tagsToCreate = normalizedTagsToAdd.filter(
        (tag) => !existingNames.has(tag.toLowerCase()),
      );
      for (const tag of tagsToCreate) {
        try {
          const createdLabel = await gitApi.createPullRequestLabel(
            { name: tag },
            repositoryId,
            effectivePullRequestId,
            projectId,
          );
          labels.push(createdLabel);
          existingNames.add(tag.toLowerCase());
        } catch (error) {
          // Rethrown with tag context; wrapped again by the outer catch
          throw new Error(
            `Failed to add tag '${tag}': ${
              error instanceof Error ? error.message : String(error)
            }`,
          );
        }
      }
      for (const tag of normalizedTagsToRemove) {
        try {
          await gitApi.deletePullRequestLabels(
            repositoryId,
            effectivePullRequestId,
            tag,
            projectId,
          );
          // Keep the local label list in sync with what the server now has
          labels = labels.filter((label) => {
            const name = label.name?.toLowerCase();
            return name ? name !== tag.toLowerCase() : true;
          });
          existingNames.delete(tag.toLowerCase());
        } catch (error) {
          // A 404 means the label was already absent; treat as success
          if (
            error &&
            typeof error === 'object' &&
            'statusCode' in error &&
            (error as { statusCode?: number }).statusCode === 404
          ) {
            continue;
          }
          throw new Error(
            `Failed to remove tag '${tag}': ${
              error instanceof Error ? error.message : String(error)
            }`,
          );
        }
      }
      // Reflect the final label set on the returned pull request
      updatedPullRequest.labels = labels;
    }
    return updatedPullRequest;
  } catch (error) {
    // Wrap any failure (including the Errors thrown above) with PR context
    throw new AzureDevOpsError(
      `Failed to update pull request ${pullRequestId} in repository ${repositoryId}: ${error instanceof Error ? error.message : String(error)}`,
    );
  }
};
/**
 * Handle adding or removing work items from a pull request
 */
interface WorkItemHandlingOptions {
  // Authenticated Azure DevOps connection used to obtain the work item API
  connection: WebApi;
  // Pull request being linked/unlinked
  pullRequestId: number;
  // Repository containing the pull request (used for the fallback vstfs URI)
  repositoryId: string;
  // Optional project scope for the fallback vstfs URI
  projectId?: string;
  // Work item IDs to link to the pull request
  workItemIdsToAdd: number[];
  // Work item IDs whose link to the pull request should be removed
  workItemIdsToRemove: number[];
  // Pull request artifact URI; preferred link target when available
  artifactId?: string;
}
/**
 * Add and/or remove ArtifactLink relations between work items and a pull
 * request.
 *
 * Additions create an ArtifactLink relation on each work item pointing at the
 * pull request's artifact URI (or a vstfs fallback URI). Removals look up the
 * matching relation by URL and delete it by index; failures while removing a
 * single link are logged and skipped.
 *
 * @param options Connection, identifiers and the add/remove ID lists.
 * @throws AzureDevOpsError when the batch as a whole fails.
 */
async function handleWorkItems(
  options: WorkItemHandlingOptions,
): Promise<void> {
  const {
    connection,
    pullRequestId,
    repositoryId,
    projectId,
    workItemIdsToAdd,
    workItemIdsToRemove,
    artifactId,
  } = options;
  try {
    // For each work item to add, create a link
    if (workItemIdsToAdd.length > 0) {
      const workItemTrackingApi = await connection.getWorkItemTrackingApi();
      for (const workItemId of workItemIdsToAdd) {
        // Add the relationship between the work item and pull request
        // via a JSON-patch append to /relations
        await workItemTrackingApi.updateWorkItem(
          null,
          [
            {
              op: 'add',
              path: '/relations/-',
              value: {
                rel: 'ArtifactLink',
                // Use the artifactId if available, otherwise fall back to the old format
                url:
                  artifactId ||
                  `vstfs:///Git/PullRequestId/${projectId ?? ''}/${repositoryId}/${pullRequestId}`,
                attributes: {
                  name: 'Pull Request',
                },
              },
            },
          ],
          workItemId,
        );
      }
    }
    // For each work item to remove, remove the link
    if (workItemIdsToRemove.length > 0) {
      const workItemTrackingApi = await connection.getWorkItemTrackingApi();
      for (const workItemId of workItemIdsToRemove) {
        try {
          // First, get the work item with relations expanded
          const workItem = await workItemTrackingApi.getWorkItem(
            workItemId,
            undefined, // fields
            undefined, // asOf
            WorkItemExpand.Relations,
          );
          if (workItem.relations) {
            // Find the relationship to the pull request using the artifactId
            // NOTE(review): if artifactId is undefined this URL comparison
            // never matches and no link is removed — confirm callers pass it.
            const prRelationIndex = workItem.relations.findIndex(
              (rel: WorkItemRelation) =>
                rel.rel === 'ArtifactLink' &&
                rel.attributes &&
                rel.attributes.name === 'Pull Request' &&
                rel.url === artifactId,
            );
            if (prRelationIndex !== -1) {
              // Remove the relationship by its index in the relations array
              await workItemTrackingApi.updateWorkItem(
                null,
                [
                  {
                    op: 'remove',
                    path: `/relations/${prRelationIndex}`,
                  },
                ],
                workItemId,
              );
            }
          }
        } catch (error) {
          // Best-effort: log and continue with the remaining work items
          console.log(
            `Error removing work item ${workItemId} from pull request ${pullRequestId}: ${
              error instanceof Error ? error.message : String(error)
            }`,
          );
        }
      }
    }
  } catch (error) {
    throw new AzureDevOpsError(
      `Failed to update work item links for pull request ${pullRequestId}: ${error instanceof Error ? error.message : String(error)}`,
    );
  }
}
/**
 * Handle adding or removing reviewers from a pull request
 */
interface ReviewerHandlingOptions {
  // Authenticated Azure DevOps connection used to obtain the Git API
  connection: WebApi;
  // Pull request whose reviewer list is being changed
  pullRequestId: number;
  // Repository containing the pull request
  repositoryId: string;
  // Optional project scope passed through to the Git API calls
  projectId?: string;
  // Reviewer identifiers (email or ID) to add as optional reviewers
  reviewersToAdd: string[];
  // Reviewer identifiers (email or ID) to remove
  reviewersToRemove: string[];
}
/**
 * Add and/or remove reviewers on a pull request.
 *
 * Each reviewer is processed sequentially; a failure for one reviewer is
 * logged and skipped so it does not abort the rest of the batch. Added
 * reviewers are created as optional (isRequired: false).
 *
 * @param options Connection, identifiers and the add/remove reviewer lists.
 * @throws AzureDevOpsError when obtaining the Git API (or another
 *   non-per-reviewer step) fails.
 */
async function handleReviewers(
  options: ReviewerHandlingOptions,
): Promise<void> {
  try {
    const gitApi = await options.connection.getGitApi();

    // Add each requested reviewer as an optional reviewer.
    for (const reviewer of options.reviewersToAdd) {
      try {
        const reviewerPayload = {
          id: reviewer, // This can be email or ID
          isRequired: false,
        };
        await gitApi.createPullRequestReviewer(
          reviewerPayload,
          options.repositoryId,
          options.pullRequestId,
          reviewer,
          options.projectId,
        );
      } catch (error) {
        const reason = error instanceof Error ? error.message : String(error);
        console.log(
          `Error adding reviewer ${reviewer} to pull request ${options.pullRequestId}: ${reason}`,
        );
      }
    }

    // Remove each requested reviewer.
    for (const reviewer of options.reviewersToRemove) {
      try {
        await gitApi.deletePullRequestReviewer(
          options.repositoryId,
          options.pullRequestId,
          reviewer,
          options.projectId,
        );
      } catch (error) {
        const reason = error instanceof Error ? error.message : String(error);
        console.log(
          `Error removing reviewer ${reviewer} from pull request ${options.pullRequestId}: ${reason}`,
        );
      }
    }
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    throw new AzureDevOpsError(
      `Failed to update reviewers for pull request ${options.pullRequestId}: ${reason}`,
    );
  }
}
```
--------------------------------------------------------------------------------
/src/features/repositories/get-all-repositories-tree/feature.ts:
--------------------------------------------------------------------------------
```typescript
import { WebApi } from 'azure-devops-node-api';
import { IGitApi } from 'azure-devops-node-api/GitApi';
import {
GitVersionType,
VersionControlRecursionType,
GitItem,
GitObjectType,
} from 'azure-devops-node-api/interfaces/GitInterfaces';
import { minimatch } from 'minimatch';
import { AzureDevOpsError } from '../../../shared/errors';
import {
GetAllRepositoriesTreeOptions,
AllRepositoriesTreeResponse,
RepositoryTreeResponse,
RepositoryTreeItem,
GitRepository,
} from '../types';
/**
 * Get tree view of files/directories across multiple repositories
 *
 * Lists every repository in the project (optionally filtered by a glob on the
 * repository name), then walks each repository's default branch. Depth 0 uses
 * a single fully-recursive server call; a positive depth walks one level at a
 * time. Per-repository failures are reported inline in the result instead of
 * aborting the batch.
 *
 * @param connection The Azure DevOps WebApi connection
 * @param options Options for getting repository tree
 * @returns Tree structure for each repository
 */
export async function getAllRepositoriesTree(
  connection: WebApi,
  options: GetAllRepositoriesTreeOptions,
): Promise<AllRepositoriesTreeResponse> {
  try {
    const gitApi = await connection.getGitApi();
    let repositories: GitRepository[] = [];
    // Get all repositories in the project
    repositories = await gitApi.getRepositories(options.projectId);
    // Filter repositories by name pattern if specified
    if (options.repositoryPattern) {
      repositories = repositories.filter((repo) =>
        minimatch(repo.name || '', options.repositoryPattern || '*'),
      );
    }
    // Initialize results array
    const results: RepositoryTreeResponse[] = [];
    // Process each repository
    for (const repo of repositories) {
      try {
        // Get default branch ref
        const defaultBranch = repo.defaultBranch;
        if (!defaultBranch) {
          // Skip repositories with no default branch (report, don't fail)
          results.push({
            name: repo.name || 'Unknown',
            tree: [],
            stats: { directories: 0, files: 0 },
            error: 'No default branch found',
          });
          continue;
        }
        // Clean the branch name (remove refs/heads/ prefix)
        const branchRef = defaultBranch.replace('refs/heads/', '');
        // Initialize tree items array and counters
        const treeItems: RepositoryTreeItem[] = [];
        const stats = { directories: 0, files: 0 };
        // Determine the recursion level and processing approach
        const depth = options.depth !== undefined ? options.depth : 0; // Default to 0 (max depth)
        if (depth === 0) {
          // For max depth (0), use server-side recursion for better performance
          const allItems = await gitApi.getItems(
            repo.id || '',
            options.projectId,
            '/',
            VersionControlRecursionType.Full, // Use full recursion
            true,
            false,
            false,
            false,
            {
              version: branchRef,
              versionType: GitVersionType.Branch,
            },
          );
          // Filter out the root item itself and bad items
          const itemsToProcess = allItems.filter(
            (item) =>
              item.path !== '/' && item.gitObjectType !== GitObjectType.Bad,
          );
          // Process all items at once (they're already retrieved recursively)
          processItemsNonRecursive(
            itemsToProcess,
            treeItems,
            stats,
            options.pattern,
          );
        } else {
          // For limited depth, use the regular recursive approach
          // Get items at the root level
          const rootItems = await gitApi.getItems(
            repo.id || '',
            options.projectId,
            '/',
            VersionControlRecursionType.OneLevel,
            true,
            false,
            false,
            false,
            {
              version: branchRef,
              versionType: GitVersionType.Branch,
            },
          );
          // Filter out the root item itself and bad items
          const itemsToProcess = rootItems.filter(
            (item) =>
              item.path !== '/' && item.gitObjectType !== GitObjectType.Bad,
          );
          // Process the root items and their children (up to specified depth)
          await processItems(
            gitApi,
            repo.id || '',
            options.projectId,
            itemsToProcess,
            branchRef,
            treeItems,
            stats,
            1,
            depth,
            options.pattern,
          );
        }
        // Add repository tree to results
        results.push({
          name: repo.name || 'Unknown',
          tree: treeItems,
          stats,
        });
      } catch (repoError) {
        // Handle errors for individual repositories without failing the batch
        results.push({
          name: repo.name || 'Unknown',
          tree: [],
          stats: { directories: 0, files: 0 },
          error: `Error processing repository: ${repoError instanceof Error ? repoError.message : String(repoError)}`,
        });
      }
    }
    return { repositories: results };
  } catch (error) {
    // Preserve typed errors; wrap anything else with context
    if (error instanceof AzureDevOpsError) {
      throw error;
    }
    throw new Error(
      `Failed to get repository tree: ${error instanceof Error ? error.message : String(error)}`,
    );
  }
}
/**
 * Flatten a fully-recursive getItems() result into RepositoryTreeItem entries.
 *
 * Items arrive already expanded (VersionControlRecursionType.Full), so no
 * further API calls are needed. Entries are emitted folders-first, then by
 * path order; each entry's `level` is derived from its path depth. The glob
 * `pattern` filters file names only — folders always pass through.
 *
 * @param items Raw Git items (root item should already be excluded upstream)
 * @param result Output array that receives the flattened entries
 * @param stats Mutated in place with directory/file counts
 * @param pattern Optional minimatch glob applied to file names
 */
function processItemsNonRecursive(
  items: GitItem[],
  result: RepositoryTreeItem[],
  stats: { directories: number; files: number },
  pattern?: string,
): void {
  // Folders sort ahead of files; ties break on path order.
  const ordered = [...items].sort((left, right) => {
    if (left.isFolder !== right.isFolder) {
      return left.isFolder ? -1 : 1;
    }
    return (left.path || '').localeCompare(right.path || '');
  });

  for (const entry of ordered) {
    const path = entry.path || '';
    // Never emit the repository root itself.
    if (path === '/') {
      continue;
    }
    const isFolder = Boolean(entry.isFolder);
    const name = path.split('/').pop() || '';
    // Depth = number of segments once the leading '/' is dropped,
    // e.g. /src/index.ts -> ['src', 'index.ts'] -> level 2.
    const level = path.replace(/^\//, '').split('/').length;
    // Glob filter applies to file names only; folders always pass.
    if (!isFolder && pattern && !minimatch(name, pattern)) {
      continue;
    }
    result.push({ name, path, isFolder, level });
    if (isFolder) {
      stats.directories += 1;
    } else {
      stats.files += 1;
    }
  }
}
/**
 * Process items recursively up to the specified depth
 *
 * Walks one directory level at a time via gitApi.getItems, appending each
 * entry to `result` (with `level` set to the current recursion depth) and
 * updating `stats`. The glob `pattern` filters file names only; errors while
 * expanding a folder are logged and skipped so siblings still get processed.
 */
async function processItems(
  gitApi: IGitApi,
  repoId: string,
  projectId: string,
  items: GitItem[],
  branchRef: string,
  result: RepositoryTreeItem[],
  stats: { directories: number; files: number },
  currentDepth: number,
  maxDepth: number,
  pattern?: string,
): Promise<void> {
  // Sort items (directories first, then files by path)
  const sortedItems = [...items].sort((a, b) => {
    if (a.isFolder === b.isFolder) {
      return (a.path || '').localeCompare(b.path || '');
    }
    return a.isFolder ? -1 : 1;
  });
  for (const item of sortedItems) {
    const name = item.path?.split('/').pop() || '';
    const path = item.path || '';
    const isFolder = !!item.isFolder;
    // Filter files based on pattern (if specified); folders always pass
    if (!isFolder && pattern && !minimatch(name, pattern)) {
      continue;
    }
    // Add item to results
    result.push({
      name,
      path,
      isFolder,
      level: currentDepth,
    });
    // Update counters
    if (isFolder) {
      stats.directories++;
    } else {
      stats.files++;
    }
    // Recursively process folders if not yet at max depth
    if (isFolder && currentDepth < maxDepth) {
      try {
        // Fetch just the next level of this folder
        const childItems = await gitApi.getItems(
          repoId,
          projectId,
          path,
          VersionControlRecursionType.OneLevel,
          true,
          false,
          false,
          false,
          {
            version: branchRef,
            versionType: GitVersionType.Branch,
          },
        );
        // Filter out the parent folder itself and bad items
        const itemsToProcess = childItems.filter(
          (child: GitItem) =>
            child.path !== path && child.gitObjectType !== GitObjectType.Bad,
        );
        // Process child items
        await processItems(
          gitApi,
          repoId,
          projectId,
          itemsToProcess,
          branchRef,
          result,
          stats,
          currentDepth + 1,
          maxDepth,
          pattern,
        );
      } catch (error) {
        // Ignore errors in child items and continue with siblings
        console.error(`Error processing folder ${path}: ${error}`);
      }
    }
  }
}
/**
 * Convert the tree items to a formatted ASCII string representation.
 *
 * Emits a header line for the repository, either an inline error/empty note
 * or the rendered tree, and a trailing directory/file summary line.
 *
 * @param repoName Repository name
 * @param items Tree items
 * @param stats Statistics about files and directories
 * @param error Optional error message to render instead of the tree
 * @returns Formatted ASCII string
 */
export function formatRepositoryTree(
  repoName: string,
  items: RepositoryTreeItem[],
  stats: { directories: number; files: number },
  error?: string,
): string {
  // Header line: the repository rendered as a directory.
  let output = `${repoName}/\n`;

  if (error) {
    output += ` (${error})\n`;
  } else if (items.length === 0) {
    output += ' (Repository is empty or default branch not found)\n';
  } else {
    // Deterministic ordering: shallower entries first, folders before files
    // within a level, then alphabetical by path.
    const byLevelThenKind = (
      a: RepositoryTreeItem,
      b: RepositoryTreeItem,
    ): number => {
      if (a.level !== b.level) {
        return a.level - b.level;
      }
      if (a.isFolder !== b.isFolder) {
        return a.isFolder ? -1 : 1;
      }
      return a.path.localeCompare(b.path);
    };
    const ordered = [...items].sort(byLevelThenKind);
    // Build a parent/child hierarchy, then render it as ASCII art.
    output += formatTree(createTreeStructure(ordered), ' ');
  }

  // Trailing summary, e.g. "3 directories, 12 files".
  output += `${stats.directories} directories, ${stats.files} files\n`;
  return output;
}
/**
 * Build a parent/child TreeNode hierarchy from the flat item list.
 *
 * Two passes: first materialize a node per item, then attach each node to its
 * parent by path prefix. Items whose parent path is absent from the list
 * attach directly to the synthetic root.
 */
function createTreeStructure(items: RepositoryTreeItem[]): TreeNode {
  const root: TreeNode = {
    name: '',
    path: '',
    isFolder: true,
    children: [],
  };
  // Lookup of every node by its full path; '' is the synthetic root.
  const nodeMap: Record<string, TreeNode> = { '': root };

  // Pass 1: create a childless node for each item.
  for (const { name, path, isFolder } of items) {
    nodeMap[path] = { name, path, isFolder, children: [] };
  }

  // Pass 2: wire each node under its parent (root when the parent is unknown).
  for (const { path } of items) {
    if (path === '/') continue;
    const cut = path.lastIndexOf('/');
    // Top-level entries ('/foo') have an empty parent path, i.e. the root.
    const parentPath = cut <= 0 ? '' : path.substring(0, cut);
    (nodeMap[parentPath] || root).children.push(nodeMap[path]);
  }
  return root;
}
/**
 * Recursively render a TreeNode's children as ASCII tree lines.
 *
 * Siblings appear folders-first, then alphabetically by name. The final
 * sibling uses a '`--' connector; earlier siblings use '|--' and keep the
 * vertical rail alive for their descendants' indentation.
 */
function formatTree(node: TreeNode, indent: string): string {
  if (!node.children.length) return '';

  // Folders first, then alphabetical by name.
  const ordered = [...node.children].sort((a, b) =>
    a.isFolder !== b.isFolder
      ? a.isFolder
        ? -1
        : 1
      : a.name.localeCompare(b.name),
  );

  const pieces: string[] = [];
  const lastIndex = ordered.length - 1;
  ordered.forEach((child, index) => {
    const isLast = index === lastIndex;
    // '`--' marks the final sibling; '|--' marks the rest.
    const connector = isLast ? '`-- ' : '|-- ';
    // Folders get a trailing slash.
    const suffix = child.isFolder ? '/' : '';
    pieces.push(`${indent}${connector}${child.name}${suffix}\n`);
    if (child.children.length > 0) {
      // Continue the vertical rail only while more siblings follow.
      pieces.push(formatTree(child, indent + (isLast ? ' ' : '| ')));
    }
  });
  return pieces.join('');
}
/**
 * Tree node interface for hierarchical representation
 */
interface TreeNode {
  // Entry name (last path segment); empty string for the synthetic root
  name: string;
  // Full repository path of the entry
  path: string;
  // True for directories (and the root), false for files
  isFolder: boolean;
  // Child nodes; empty for files and leaf directories
  children: TreeNode[];
}
```
--------------------------------------------------------------------------------
/src/features/pull-requests/get-pull-request-comments/feature.spec.unit.ts:
--------------------------------------------------------------------------------
```typescript
import { WebApi } from 'azure-devops-node-api';
import { getPullRequestComments } from './feature';
import { GitPullRequestCommentThread } from 'azure-devops-node-api/interfaces/GitInterfaces';
// Unit tests for getPullRequestComments. All Azure DevOps APIs are mocked;
// the suite verifies that thread-context fields (filePath, left/right file
// positions) are copied onto each comment, that enum statuses/comment types
// are mapped to strings, and that the threadId / top options drive the
// correct API calls and pagination.
describe('getPullRequestComments', () => {
  afterEach(() => {
    jest.resetAllMocks();
  });
  test('should return pull request comment threads with file path and line number', async () => {
    // Mock data for a comment thread
    const mockCommentThreads: GitPullRequestCommentThread[] = [
      {
        id: 1,
        status: 1, // Active
        threadContext: {
          filePath: '/src/app.ts',
          rightFileStart: {
            line: 10,
            offset: 5,
          },
          rightFileEnd: {
            line: 10,
            offset: 15,
          },
        },
        comments: [
          {
            id: 100,
            content: 'This code needs refactoring',
            commentType: 1, // CodeChange
            author: {
              displayName: 'Test User',
              id: 'test-user-id',
            },
            publishedDate: new Date(),
          },
          {
            id: 101,
            parentCommentId: 100,
            content: 'I agree, will update',
            commentType: 1, // CodeChange
            author: {
              displayName: 'Another User',
              id: 'another-user-id',
            },
            publishedDate: new Date(),
          },
        ],
      },
    ];
    // Setup mock connection
    const mockGitApi = {
      getThreads: jest.fn().mockResolvedValue(mockCommentThreads),
      getPullRequestThread: jest.fn(),
    };
    const mockConnection: any = {
      getGitApi: jest.fn().mockResolvedValue(mockGitApi),
    };
    // Call the function with test parameters
    const projectId = 'test-project';
    const repositoryId = 'test-repo';
    const pullRequestId = 123;
    const options = {
      projectId,
      repositoryId,
      pullRequestId,
    };
    const result = await getPullRequestComments(
      mockConnection as WebApi,
      projectId,
      repositoryId,
      pullRequestId,
      options,
    );
    // Verify results
    expect(result).toHaveLength(1);
    expect(result[0].comments).toHaveLength(2);
    // Verify file path and line number are added to each comment
    result[0].comments?.forEach((comment) => {
      expect(comment).toHaveProperty('filePath', '/src/app.ts');
      expect(comment).toHaveProperty('rightFileStart', { line: 10, offset: 5 });
      expect(comment).toHaveProperty('rightFileEnd', { line: 10, offset: 15 });
      expect(comment).toHaveProperty('leftFileStart', undefined);
      expect(comment).toHaveProperty('leftFileEnd', undefined);
    });
    // Without a threadId option, the list endpoint (getThreads) is used
    expect(mockConnection.getGitApi).toHaveBeenCalledTimes(1);
    expect(mockGitApi.getThreads).toHaveBeenCalledTimes(1);
    expect(mockGitApi.getThreads).toHaveBeenCalledWith(
      repositoryId,
      pullRequestId,
      projectId,
      undefined,
      undefined,
    );
    expect(mockGitApi.getPullRequestThread).not.toHaveBeenCalled();
  });
  test('should handle comments without thread context', async () => {
    // Mock data for a comment thread without thread context
    // (e.g. a general PR comment not anchored to a file)
    const mockCommentThreads: GitPullRequestCommentThread[] = [
      {
        id: 1,
        status: 1, // Active
        comments: [
          {
            id: 100,
            content: 'General comment',
            commentType: 1,
            author: {
              displayName: 'Test User',
              id: 'test-user-id',
            },
            publishedDate: new Date(),
          },
        ],
      },
    ];
    // Setup mock connection
    const mockGitApi = {
      getThreads: jest.fn().mockResolvedValue(mockCommentThreads),
      getPullRequestThread: jest.fn(),
    };
    const mockConnection: any = {
      getGitApi: jest.fn().mockResolvedValue(mockGitApi),
    };
    const result = await getPullRequestComments(
      mockConnection as WebApi,
      'test-project',
      'test-repo',
      123,
      {
        projectId: 'test-project',
        repositoryId: 'test-repo',
        pullRequestId: 123,
      },
    );
    // Verify results
    expect(result).toHaveLength(1);
    expect(result[0].comments).toHaveLength(1);
    expect(result[0].status).toBe('active');
    // Verify file path and line number are null for comments without thread context
    const comment = result[0].comments![0];
    expect(comment).toHaveProperty('filePath', undefined);
    expect(comment).toHaveProperty('rightFileStart', undefined);
    expect(comment).toHaveProperty('rightFileEnd', undefined);
    expect(comment).toHaveProperty('leftFileStart', undefined);
    expect(comment).toHaveProperty('leftFileEnd', undefined);
    expect(comment).toHaveProperty('commentType', 'text');
  });
  test('should use leftFileStart when rightFileStart is not available', async () => {
    // Mock data for a comment thread with only leftFileStart
    // (comment anchored to the original/left side of the diff)
    const mockCommentThreads: GitPullRequestCommentThread[] = [
      {
        id: 1,
        status: 1,
        threadContext: {
          filePath: '/src/app.ts',
          leftFileStart: {
            line: 5,
            offset: 1,
          },
        },
        comments: [
          {
            id: 100,
            content: 'Comment on deleted line',
            commentType: 1,
            author: {
              displayName: 'Test User',
              id: 'test-user-id',
            },
            publishedDate: new Date(),
          },
        ],
      },
    ];
    // Setup mock connection
    const mockGitApi = {
      getThreads: jest.fn().mockResolvedValue(mockCommentThreads),
      getPullRequestThread: jest.fn(),
    };
    const mockConnection: any = {
      getGitApi: jest.fn().mockResolvedValue(mockGitApi),
    };
    const result = await getPullRequestComments(
      mockConnection as WebApi,
      'test-project',
      'test-repo',
      123,
      {
        projectId: 'test-project',
        repositoryId: 'test-repo',
        pullRequestId: 123,
      },
    );
    // Verify results
    expect(result).toHaveLength(1);
    expect(result[0].comments).toHaveLength(1);
    // Verify rightFileStart is undefined, leftFileStart is present
    const comment = result[0].comments![0];
    expect(comment).toHaveProperty('filePath', '/src/app.ts');
    expect(comment).toHaveProperty('leftFileStart', { line: 5, offset: 1 });
    expect(comment).toHaveProperty('rightFileStart', undefined);
    expect(comment).toHaveProperty('leftFileEnd', undefined);
    expect(comment).toHaveProperty('rightFileEnd', undefined);
  });
  test('should return a specific comment thread when threadId is provided', async () => {
    // Mock data for a specific comment thread
    const threadId = 42;
    const mockCommentThread: GitPullRequestCommentThread = {
      id: threadId,
      status: 1, // Active
      threadContext: {
        filePath: '/src/utils.ts',
        rightFileStart: {
          line: 15,
          offset: 1,
        },
      },
      comments: [
        {
          id: 100,
          content: 'Specific comment',
          commentType: 1, // CodeChange
          author: {
            displayName: 'Test User',
            id: 'test-user-id',
          },
          publishedDate: new Date(),
        },
      ],
    };
    // Setup mock connection
    const mockGitApi = {
      getThreads: jest.fn(),
      getPullRequestThread: jest.fn().mockResolvedValue(mockCommentThread),
    };
    const mockConnection: any = {
      getGitApi: jest.fn().mockResolvedValue(mockGitApi),
    };
    // Call the function with test parameters
    const projectId = 'test-project';
    const repositoryId = 'test-repo';
    const pullRequestId = 123;
    const options = {
      projectId,
      repositoryId,
      pullRequestId,
      threadId,
    };
    const result = await getPullRequestComments(
      mockConnection as WebApi,
      projectId,
      repositoryId,
      pullRequestId,
      options,
    );
    // Verify results: a single-element array wrapping the requested thread
    expect(result).toHaveLength(1);
    expect(result[0].id).toBe(threadId);
    expect(result[0].comments).toHaveLength(1);
    // Verify file path and line number are added
    const comment = result[0].comments![0];
    expect(comment).toHaveProperty('filePath', '/src/utils.ts');
    expect(comment).toHaveProperty('rightFileStart', { line: 15, offset: 1 });
    expect(comment).toHaveProperty('leftFileStart', undefined);
    expect(comment).toHaveProperty('leftFileEnd', undefined);
    expect(comment).toHaveProperty('rightFileEnd', undefined);
    // With a threadId option, the single-thread endpoint is used instead
    expect(mockConnection.getGitApi).toHaveBeenCalledTimes(1);
    expect(mockGitApi.getPullRequestThread).toHaveBeenCalledTimes(1);
    expect(mockGitApi.getPullRequestThread).toHaveBeenCalledWith(
      repositoryId,
      pullRequestId,
      threadId,
      projectId,
    );
    expect(mockGitApi.getThreads).not.toHaveBeenCalled();
  });
  test('should handle pagination when top parameter is provided', async () => {
    // Mock data for multiple comment threads
    const mockCommentThreads: GitPullRequestCommentThread[] = [
      {
        id: 1,
        status: 1,
        threadContext: {
          filePath: '/src/file1.ts',
          rightFileStart: { line: 1, offset: 1 },
        },
        comments: [{ id: 100, content: 'Comment 1' }],
      },
      {
        id: 2,
        status: 1,
        threadContext: {
          filePath: '/src/file2.ts',
          rightFileStart: { line: 2, offset: 1 },
        },
        comments: [{ id: 101, content: 'Comment 2' }],
      },
      {
        id: 3,
        status: 1,
        threadContext: {
          filePath: '/src/file3.ts',
          rightFileStart: { line: 3, offset: 1 },
        },
        comments: [{ id: 102, content: 'Comment 3' }],
      },
    ];
    // Setup mock connection
    const mockGitApi = {
      getThreads: jest.fn().mockResolvedValue(mockCommentThreads),
      getPullRequestThread: jest.fn(),
    };
    const mockConnection: any = {
      getGitApi: jest.fn().mockResolvedValue(mockGitApi),
    };
    // Call the function with test parameters and top=2
    const projectId = 'test-project';
    const repositoryId = 'test-repo';
    const pullRequestId = 123;
    const options = {
      projectId,
      repositoryId,
      pullRequestId,
      top: 2,
    };
    const result = await getPullRequestComments(
      mockConnection as WebApi,
      projectId,
      repositoryId,
      pullRequestId,
      options,
    );
    // Verify results (should only include first 2 threads)
    expect(result).toHaveLength(2);
    expect(result).toEqual(
      mockCommentThreads.slice(0, 2).map((thread) => ({
        ...thread,
        status: 'active', // Transform enum to string
        comments: thread.comments?.map((comment) => ({
          ...comment,
          commentType: undefined, // Will be undefined since mock doesn't have commentType
          filePath: thread.threadContext?.filePath,
          rightFileStart: thread.threadContext?.rightFileStart ?? undefined,
          rightFileEnd: thread.threadContext?.rightFileEnd ?? undefined,
          leftFileStart: thread.threadContext?.leftFileStart ?? undefined,
          leftFileEnd: thread.threadContext?.leftFileEnd ?? undefined,
        })),
      })),
    );
    expect(mockConnection.getGitApi).toHaveBeenCalledTimes(1);
    expect(mockGitApi.getThreads).toHaveBeenCalledTimes(1);
    expect(result[0].comments![0]).toHaveProperty('rightFileStart', {
      line: 1,
      offset: 1,
    });
    expect(result[1].comments![0]).toHaveProperty('rightFileStart', {
      line: 2,
      offset: 1,
    });
  });
  test('should handle error when API call fails', async () => {
    // Setup mock connection whose getThreads rejects
    const errorMessage = 'API error';
    const mockGitApi = {
      getThreads: jest.fn().mockRejectedValue(new Error(errorMessage)),
    };
    const mockConnection: any = {
      getGitApi: jest.fn().mockResolvedValue(mockGitApi),
    };
    // Call the function with test parameters
    const projectId = 'test-project';
    const repositoryId = 'test-repo';
    const pullRequestId = 123;
    const options = {
      projectId,
      repositoryId,
      pullRequestId,
    };
    // Verify error handling: the API failure is wrapped with context
    await expect(
      getPullRequestComments(
        mockConnection as WebApi,
        projectId,
        repositoryId,
        pullRequestId,
        options,
      ),
    ).rejects.toThrow(`Failed to get pull request comments: ${errorMessage}`);
  });
});
```