This is page 6 of 7. Use http://codebase.md/tiberriver256/azure-devops-mcp?page={x} to view the full context.
# Directory Structure
```
├── .clinerules
├── .env.example
├── .eslintrc.json
├── .github
│ ├── copilot-instructions.md
│ ├── FUNDING.yml
│ ├── release-please-config.json
│ ├── release-please-manifest.json
│ ├── skills
│ │ ├── azure-devops-rest-api
│ │ │ ├── references
│ │ │ │ └── api_areas.md
│ │ │ ├── scripts
│ │ │ │ ├── clone_specs.sh
│ │ │ │ └── find_endpoint.py
│ │ │ └── SKILL.md
│ │ └── skill-creator
│ │ ├── LICENSE.txt
│ │ ├── references
│ │ │ ├── output-patterns.md
│ │ │ └── workflows.md
│ │ ├── scripts
│ │ │ ├── init_skill.py
│ │ │ └── quick_validate.py
│ │ └── SKILL.md
│ └── workflows
│ ├── main.yml
│ ├── release-please.yml
│ └── update-skills.yml
├── .gitignore
├── .husky
│ ├── commit-msg
│ └── pre-commit
├── .kilocode
│ └── mcp.json
├── .prettierrc
├── .vscode
│ └── settings.json
├── CHANGELOG.md
├── commitlint.config.js
├── CONTRIBUTING.md
├── create_branch.sh
├── docs
│ ├── authentication.md
│ ├── azure-identity-authentication.md
│ ├── ci-setup.md
│ ├── examples
│ │ ├── azure-cli-authentication.env
│ │ ├── azure-identity-authentication.env
│ │ ├── pat-authentication.env
│ │ └── README.md
│ ├── testing
│ │ ├── README.md
│ │ └── setup.md
│ └── tools
│ ├── core-navigation.md
│ ├── organizations.md
│ ├── pipelines.md
│ ├── projects.md
│ ├── pull-requests.md
│ ├── README.md
│ ├── repositories.md
│ ├── resources.md
│ ├── search.md
│ ├── user-tools.md
│ ├── wiki.md
│ └── work-items.md
├── finish_task.sh
├── jest.e2e.config.js
├── jest.int.config.js
├── jest.unit.config.js
├── LICENSE
├── memory
│ └── tasks_memory_2025-05-26T16-18-03.json
├── package-lock.json
├── package.json
├── project-management
│ ├── planning
│ │ ├── architecture-guide.md
│ │ ├── azure-identity-authentication-design.md
│ │ ├── project-plan.md
│ │ ├── project-structure.md
│ │ ├── tech-stack.md
│ │ └── the-dream-team.md
│ ├── startup.xml
│ ├── tdd-cycle.xml
│ └── troubleshooter.xml
├── README.md
├── setup_env.sh
├── shrimp-rules.md
├── src
│ ├── clients
│ │ └── azure-devops.ts
│ ├── features
│ │ ├── organizations
│ │ │ ├── __test__
│ │ │ │ └── test-helpers.ts
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── list-organizations
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── schemas.ts
│ │ │ ├── tool-definitions.ts
│ │ │ └── types.ts
│ │ ├── pipelines
│ │ │ ├── artifacts.spec.unit.ts
│ │ │ ├── artifacts.ts
│ │ │ ├── download-pipeline-artifact
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-pipeline
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-pipeline-log
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-pipeline-run
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── helpers.ts
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── list-pipeline-runs
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── list-pipelines
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── pipeline-timeline
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── tool-definitions.ts
│ │ │ ├── trigger-pipeline
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ └── types.ts
│ │ ├── projects
│ │ │ ├── __test__
│ │ │ │ └── test-helpers.ts
│ │ │ ├── get-project
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-project-details
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── list-projects
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── schemas.ts
│ │ │ ├── tool-definitions.ts
│ │ │ └── types.ts
│ │ ├── pull-requests
│ │ │ ├── add-pull-request-comment
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── create-pull-request
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-pull-request-changes
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── get-pull-request-checks
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── get-pull-request-comments
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── list-pull-requests
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── schemas.ts
│ │ │ ├── tool-definitions.ts
│ │ │ ├── types.ts
│ │ │ └── update-pull-request
│ │ │ ├── feature.spec.int.ts
│ │ │ ├── feature.spec.unit.ts
│ │ │ ├── feature.ts
│ │ │ └── index.ts
│ │ ├── repositories
│ │ │ ├── __test__
│ │ │ │ └── test-helpers.ts
│ │ │ ├── create-branch
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── create-commit
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── get-all-repositories-tree
│ │ │ │ ├── __snapshots__
│ │ │ │ │ └── feature.spec.unit.ts.snap
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-file-content
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-repository
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-repository-details
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-repository-tree
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── list-commits
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── list-repositories
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── schemas.ts
│ │ │ ├── tool-definitions.ts
│ │ │ └── types.ts
│ │ ├── search
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── schemas.ts
│ │ │ ├── search-code
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── search-wiki
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── search-work-items
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ └── index.ts
│ │ │ ├── tool-definitions.ts
│ │ │ └── types.ts
│ │ ├── users
│ │ │ ├── get-me
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── schemas.ts
│ │ │ ├── tool-definitions.ts
│ │ │ └── types.ts
│ │ ├── wikis
│ │ │ ├── create-wiki
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── create-wiki-page
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-wiki-page
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── get-wikis
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── index.spec.unit.ts
│ │ │ ├── index.ts
│ │ │ ├── list-wiki-pages
│ │ │ │ ├── feature.spec.int.ts
│ │ │ │ ├── feature.spec.unit.ts
│ │ │ │ ├── feature.ts
│ │ │ │ ├── index.ts
│ │ │ │ └── schema.ts
│ │ │ ├── tool-definitions.ts
│ │ │ └── update-wiki-page
│ │ │ ├── feature.spec.int.ts
│ │ │ ├── feature.ts
│ │ │ ├── index.ts
│ │ │ └── schema.ts
│ │ └── work-items
│ │ ├── __test__
│ │ │ ├── fixtures.ts
│ │ │ ├── test-helpers.ts
│ │ │ └── test-utils.ts
│ │ ├── create-work-item
│ │ │ ├── feature.spec.int.ts
│ │ │ ├── feature.spec.unit.ts
│ │ │ ├── feature.ts
│ │ │ ├── index.ts
│ │ │ └── schema.ts
│ │ ├── get-work-item
│ │ │ ├── feature.spec.int.ts
│ │ │ ├── feature.spec.unit.ts
│ │ │ ├── feature.ts
│ │ │ ├── index.ts
│ │ │ └── schema.ts
│ │ ├── index.spec.unit.ts
│ │ ├── index.ts
│ │ ├── list-work-items
│ │ │ ├── feature.spec.int.ts
│ │ │ ├── feature.spec.unit.ts
│ │ │ ├── feature.ts
│ │ │ ├── index.ts
│ │ │ └── schema.ts
│ │ ├── manage-work-item-link
│ │ │ ├── feature.spec.int.ts
│ │ │ ├── feature.spec.unit.ts
│ │ │ ├── feature.ts
│ │ │ ├── index.ts
│ │ │ └── schema.ts
│ │ ├── schemas.ts
│ │ ├── tool-definitions.ts
│ │ ├── types.ts
│ │ └── update-work-item
│ │ ├── feature.spec.int.ts
│ │ ├── feature.spec.unit.ts
│ │ ├── feature.ts
│ │ ├── index.ts
│ │ └── schema.ts
│ ├── index.spec.unit.ts
│ ├── index.ts
│ ├── server.spec.e2e.ts
│ ├── server.ts
│ ├── shared
│ │ ├── api
│ │ │ ├── client.ts
│ │ │ └── index.ts
│ │ ├── auth
│ │ │ ├── auth-factory.ts
│ │ │ ├── client-factory.ts
│ │ │ └── index.ts
│ │ ├── config
│ │ │ ├── index.ts
│ │ │ └── version.ts
│ │ ├── enums
│ │ │ ├── index.spec.unit.ts
│ │ │ └── index.ts
│ │ ├── errors
│ │ │ ├── azure-devops-errors.ts
│ │ │ ├── handle-request-error.ts
│ │ │ └── index.ts
│ │ ├── test
│ │ │ └── test-helpers.ts
│ │ └── types
│ │ ├── config.ts
│ │ ├── index.ts
│ │ ├── request-handler.ts
│ │ └── tool-definition.ts
│ ├── types
│ │ └── diff.d.ts
│ └── utils
│ ├── environment.spec.unit.ts
│ └── environment.ts
├── tasks.json
├── tests
│ └── setup.ts
└── tsconfig.json
```
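Every tool under `src/features/` in the tree above follows the same layout: a `feature.ts` implementation, a `schema.ts` input schema, an `index.ts` re-export, and `feature.spec.unit.ts` / `feature.spec.int.ts` tests, with `tool-definitions.ts`, `schemas.ts`, and `types.ts` at the feature root. A rough illustration of that convention is shown below; the `exampleTool` names are hypothetical and not files from this repository, and Zod is assumed because the schemas elsewhere on this page use it.

```typescript
// Illustrative only: a hypothetical "example" tool following the layout above.
import { z } from 'zod';
import { WebApi } from 'azure-devops-node-api';

// schema.ts — Zod schema describing the tool's input
export const ExampleToolSchema = z.object({
  organizationId: z.string().optional(),
  projectId: z.string().optional(),
});

// feature.ts — implementation that receives an authenticated WebApi connection
export async function exampleTool(
  connection: WebApi,
  options: z.infer<typeof ExampleToolSchema>,
): Promise<unknown> {
  const coreApi = await connection.getCoreApi();
  // Placeholder call; real features use whichever Azure DevOps API their tool needs.
  return coreApi.getProject(options.projectId ?? '');
}
```

Judging by the imports in `src/server.ts` below, the per-tool `index.ts` files re-export these pieces and each feature's `tool-definitions.ts` collects them into the tool lists that the server registers.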
# Files
--------------------------------------------------------------------------------
/src/features/repositories/get-all-repositories-tree/feature.spec.unit.ts:
--------------------------------------------------------------------------------
```typescript
import { WebApi } from 'azure-devops-node-api';
import {
GitObjectType,
VersionControlRecursionType,
} from 'azure-devops-node-api/interfaces/GitInterfaces';
import { getAllRepositoriesTree, formatRepositoryTree } from './feature';
import { RepositoryTreeItem } from '../types';
// Mock the Azure DevOps API
jest.mock('azure-devops-node-api');
describe('getAllRepositoriesTree', () => {
// Sample repositories
const mockRepos = [
{
id: 'repo1-id',
name: 'repo1',
defaultBranch: 'refs/heads/main',
},
{
id: 'repo2-id',
name: 'repo2',
defaultBranch: 'refs/heads/master',
},
{
id: 'repo3-id',
name: 'repo3-api',
defaultBranch: null, // No default branch
},
];
// Sample files/folders for repo1 at root level
const mockRepo1RootItems = [
{
path: '/',
gitObjectType: GitObjectType.Tree,
},
{
path: '/README.md',
isFolder: false,
gitObjectType: GitObjectType.Blob,
},
{
path: '/src',
isFolder: true,
gitObjectType: GitObjectType.Tree,
},
{
path: '/package.json',
isFolder: false,
gitObjectType: GitObjectType.Blob,
},
];
// Sample files/folders for repo1 - src folder
const mockRepo1SrcItems = [
{
path: '/src',
isFolder: true,
gitObjectType: GitObjectType.Tree,
},
{
path: '/src/index.ts',
isFolder: false,
gitObjectType: GitObjectType.Blob,
},
{
path: '/src/utils',
isFolder: true,
gitObjectType: GitObjectType.Tree,
},
];
// Sample files/folders for repo1 with unlimited depth (what the server would return for Full recursion)
const mockRepo1FullRecursionItems = [
{
path: '/',
gitObjectType: GitObjectType.Tree,
},
{
path: '/README.md',
isFolder: false,
gitObjectType: GitObjectType.Blob,
},
{
path: '/src',
isFolder: true,
gitObjectType: GitObjectType.Tree,
},
{
path: '/package.json',
isFolder: false,
gitObjectType: GitObjectType.Blob,
},
{
path: '/src/index.ts',
isFolder: false,
gitObjectType: GitObjectType.Blob,
},
{
path: '/src/utils',
isFolder: true,
gitObjectType: GitObjectType.Tree,
},
{
path: '/src/utils/helper.ts',
isFolder: false,
gitObjectType: GitObjectType.Blob,
},
{
path: '/src/utils/constants.ts',
isFolder: false,
gitObjectType: GitObjectType.Blob,
},
];
// Sample files/folders for repo2
const mockRepo2RootItems = [
{
path: '/',
gitObjectType: GitObjectType.Tree,
},
{
path: '/README.md',
isFolder: false,
gitObjectType: GitObjectType.Blob,
},
{
path: '/data.json',
isFolder: false,
gitObjectType: GitObjectType.Blob,
},
];
let mockConnection: jest.Mocked<WebApi>;
let mockGitApi: any;
beforeEach(() => {
// Clear mocks
jest.clearAllMocks();
// Create mock GitApi
mockGitApi = {
getRepositories: jest.fn().mockResolvedValue(mockRepos),
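// getItems is keyed on repository id, requested path, and recursion level so
// individual tests can exercise both OneLevel and Full recursion against the fixtures above.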
getItems: jest
.fn()
.mockImplementation((repoId, _projectId, path, recursionLevel) => {
if (repoId === 'repo1-id') {
if (recursionLevel === VersionControlRecursionType.Full) {
return Promise.resolve(mockRepo1FullRecursionItems);
} else if (path === '/') {
return Promise.resolve(mockRepo1RootItems);
} else if (path === '/src') {
return Promise.resolve(mockRepo1SrcItems);
}
} else if (repoId === 'repo2-id') {
if (recursionLevel === VersionControlRecursionType.Full) {
return Promise.resolve(mockRepo2RootItems);
} else if (path === '/') {
return Promise.resolve(mockRepo2RootItems);
}
}
return Promise.resolve([]);
}),
};
// Create mock connection
mockConnection = {
getGitApi: jest.fn().mockResolvedValue(mockGitApi),
} as unknown as jest.Mocked<WebApi>;
});
it('should return tree structures for multiple repositories with limited depth', async () => {
// Arrange
const options = {
organizationId: 'testOrg',
projectId: 'testProject',
depth: 2, // Limited depth
};
// Act
const result = await getAllRepositoriesTree(mockConnection, options);
// Assert
expect(mockGitApi.getRepositories).toHaveBeenCalledWith('testProject');
expect(result.repositories.length).toBe(3);
// Verify repo1 tree
const repo1 = result.repositories.find((r) => r.name === 'repo1');
expect(repo1).toBeDefined();
expect(repo1?.tree.length).toBeGreaterThan(0);
expect(repo1?.stats.directories).toBeGreaterThan(0);
expect(repo1?.stats.files).toBeGreaterThan(0);
// Verify repo2 tree
const repo2 = result.repositories.find((r) => r.name === 'repo2');
expect(repo2).toBeDefined();
expect(repo2?.tree.length).toBeGreaterThan(0);
// Verify repo3 has error (no default branch)
const repo3 = result.repositories.find((r) => r.name === 'repo3-api');
expect(repo3).toBeDefined();
expect(repo3?.error).toContain('No default branch found');
// Verify recursion level was set correctly
expect(mockGitApi.getItems).toHaveBeenCalledWith(
'repo1-id',
'testProject',
'/',
VersionControlRecursionType.OneLevel,
expect.anything(),
expect.anything(),
expect.anything(),
expect.anything(),
expect.anything(),
);
});
it('should return tree structures with max depth using Full recursion', async () => {
// Arrange
const options = {
organizationId: 'testOrg',
projectId: 'testProject',
depth: 0, // Max depth
};
// Act
const result = await getAllRepositoriesTree(mockConnection, options);
// Assert
expect(mockGitApi.getRepositories).toHaveBeenCalledWith('testProject');
expect(result.repositories.length).toBe(3);
// Verify repo1 tree
const repo1 = result.repositories.find((r) => r.name === 'repo1');
expect(repo1).toBeDefined();
expect(repo1?.tree.length).toBeGreaterThan(0);
// Should include all items, including nested ones
expect(repo1?.tree.length).toBe(mockRepo1FullRecursionItems.length - 1); // -1 for root folder
// Verify recursion level was set correctly
expect(mockGitApi.getItems).toHaveBeenCalledWith(
'repo1-id',
'testProject',
'/',
VersionControlRecursionType.Full,
expect.anything(),
expect.anything(),
expect.anything(),
expect.anything(),
expect.anything(),
);
// Verify all levels are represented
if (repo1) {
const level1Items = repo1.tree.filter((item) => item.level === 1);
const level2Items = repo1.tree.filter((item) => item.level === 2);
const level3Items = repo1.tree.filter((item) => item.level === 3);
// Verify we have items at level 1
expect(level1Items.length).toBeGreaterThan(0);
// Verify we have items at level 2 (src/something)
expect(level2Items.length).toBeGreaterThan(0);
// Check for level 3 items if they exist in our mock data
if (
mockRepo1FullRecursionItems.some((item) => {
const pathSegments = item.path.split('/').filter(Boolean);
return pathSegments.length >= 3;
})
) {
expect(level3Items.length).toBeGreaterThan(0);
}
}
});
it('should filter repositories by pattern', async () => {
// Arrange
const options = {
organizationId: 'testOrg',
projectId: 'testProject',
repositoryPattern: '*api*',
depth: 1,
};
// Act
const result = await getAllRepositoriesTree(mockConnection, options);
// Assert
expect(mockGitApi.getRepositories).toHaveBeenCalledWith('testProject');
expect(result.repositories.length).toBe(1);
expect(result.repositories[0].name).toBe('repo3-api');
});
it('should format repository tree correctly', () => {
// Arrange
const treeItems: RepositoryTreeItem[] = [
{ name: 'src', path: '/src', isFolder: true, level: 1 },
{ name: 'index.ts', path: '/src/index.ts', isFolder: false, level: 2 },
{ name: 'README.md', path: '/README.md', isFolder: false, level: 1 },
];
const stats = { directories: 1, files: 2 };
// Act
const formatted = formatRepositoryTree('test-repo', treeItems, stats);
// Assert
expect(formatted).toMatchSnapshot();
});
it('should format complex repository tree structures correctly', () => {
// Arrange
const treeItems: RepositoryTreeItem[] = [
// Root level files
{ name: 'README.md', path: '/README.md', isFolder: false, level: 1 },
{
name: 'package.json',
path: '/package.json',
isFolder: false,
level: 1,
},
{ name: '.gitignore', path: '/.gitignore', isFolder: false, level: 1 },
// Multiple folders at root level
{ name: 'src', path: '/src', isFolder: true, level: 1 },
{ name: 'tests', path: '/tests', isFolder: true, level: 1 },
{ name: 'docs', path: '/docs', isFolder: true, level: 1 },
// Nested src folder structure
{ name: 'components', path: '/src/components', isFolder: true, level: 2 },
{ name: 'utils', path: '/src/utils', isFolder: true, level: 2 },
{ name: 'index.ts', path: '/src/index.ts', isFolder: false, level: 2 },
// Deeply nested components
{
name: 'Button',
path: '/src/components/Button',
isFolder: true,
level: 3,
},
{ name: 'Card', path: '/src/components/Card', isFolder: true, level: 3 },
{
name: 'Button.tsx',
path: '/src/components/Button/Button.tsx',
isFolder: false,
level: 4,
},
{
name: 'Button.styles.ts',
path: '/src/components/Button/Button.styles.ts',
isFolder: false,
level: 4,
},
{
name: 'Button.test.tsx',
path: '/src/components/Button/Button.test.tsx',
isFolder: false,
level: 4,
},
{
name: 'index.ts',
path: '/src/components/Button/index.ts',
isFolder: false,
level: 4,
},
{
name: 'Card.tsx',
path: '/src/components/Card/Card.tsx',
isFolder: false,
level: 4,
},
// Utils with files
{
name: 'helpers.ts',
path: '/src/utils/helpers.ts',
isFolder: false,
level: 3,
},
{
name: 'constants.ts',
path: '/src/utils/constants.ts',
isFolder: false,
level: 3,
},
// Empty folder
{ name: 'assets', path: '/src/assets', isFolder: true, level: 2 },
// Files with special characters
{
name: 'file-with-dashes.js',
path: '/src/file-with-dashes.js',
isFolder: false,
level: 2,
},
{
name: 'file_with_underscores.js',
path: '/src/file_with_underscores.js',
isFolder: false,
level: 2,
},
// Folders in test directory
{ name: 'unit', path: '/tests/unit', isFolder: true, level: 2 },
{
name: 'integration',
path: '/tests/integration',
isFolder: true,
level: 2,
},
// Files in test directories
{ name: 'setup.js', path: '/tests/setup.js', isFolder: false, level: 2 },
{
name: 'example.test.js',
path: '/tests/unit/example.test.js',
isFolder: false,
level: 3,
},
// Files in docs
{ name: 'API.md', path: '/docs/API.md', isFolder: false, level: 2 },
{
name: 'CONTRIBUTING.md',
path: '/docs/CONTRIBUTING.md',
isFolder: false,
level: 2,
},
];
const stats = { directories: 10, files: 18 };
// Act
const formatted = formatRepositoryTree('complex-repo', treeItems, stats);
// Assert
expect(formatted).toMatchSnapshot();
});
it('should handle repository errors gracefully', async () => {
// Arrange
mockGitApi.getItems = jest.fn().mockRejectedValue(new Error('API error'));
const options = {
organizationId: 'testOrg',
projectId: 'testProject',
depth: 1,
};
// Act
const result = await getAllRepositoriesTree(mockConnection, options);
// Assert
expect(result.repositories.length).toBe(3);
const repo1 = result.repositories.find((r) => r.name === 'repo1');
expect(repo1?.error).toBeDefined();
});
});
```
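The unit specs in this dump, including the one above, stub the `azure-devops-node-api` connection the same way: build a plain object exposing only the API getters the feature needs, then cast it to `WebApi`. A condensed sketch of that pattern follows, using a hypothetical `listRepositoryNames` helper rather than a real feature from the repo.

```typescript
import { WebApi } from 'azure-devops-node-api';

// Hypothetical feature under test; the real specs import their own feature modules instead.
async function listRepositoryNames(connection: WebApi): Promise<string[]> {
  const gitApi = await connection.getGitApi();
  const repos = await gitApi.getRepositories('someProject');
  return repos.map((repo) => repo.name ?? '');
}

describe('mocked WebApi pattern (sketch)', () => {
  it('routes calls through the stubbed Git API', async () => {
    const mockGitApi = {
      getRepositories: jest
        .fn()
        .mockResolvedValue([{ id: 'repo1-id', name: 'repo1' }]),
    };
    const mockConnection = {
      getGitApi: jest.fn().mockResolvedValue(mockGitApi),
    } as unknown as jest.Mocked<WebApi>;

    await expect(listRepositoryNames(mockConnection)).resolves.toEqual(['repo1']);
    expect(mockGitApi.getRepositories).toHaveBeenCalledWith('someProject');
  });
});
```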
--------------------------------------------------------------------------------
/src/features/wikis/create-wiki-page/feature.spec.int.ts:
--------------------------------------------------------------------------------
```typescript
import { WebApi } from 'azure-devops-node-api';
import { createWikiPage } from './feature';
import { CreateWikiPageSchema } from './schema';
import { getWikiPage } from '../get-wiki-page/feature';
import { getWikis } from '../get-wikis/feature';
import {
getTestConnection,
shouldSkipIntegrationTest,
} from '@/shared/test/test-helpers';
import { getOrgNameFromUrl } from '@/utils/environment';
import { AzureDevOpsError } from '@/shared/errors/azure-devops-errors';
import { z } from 'zod';
// Ensure environment variables are set for testing
process.env.AZURE_DEVOPS_DEFAULT_PROJECT =
process.env.AZURE_DEVOPS_DEFAULT_PROJECT || 'default-project';
describe('createWikiPage Integration Tests', () => {
let connection: WebApi | null = null;
let projectName: string;
let orgUrl: string;
let organizationId: string;
const testPagePath = '/IntegrationTestPage';
const testPagePathSub = '/IntegrationTestPage/SubPage';
const testPagePathDefault = '/DefaultPathPage';
const testPagePathComment = '/CommentTestPage';
beforeAll(async () => {
// Mock the required environment variable for testing
process.env.AZURE_DEVOPS_ORG_URL =
process.env.AZURE_DEVOPS_ORG_URL || 'https://example.visualstudio.com';
// Get and validate required environment variables
const envProjectName = process.env.AZURE_DEVOPS_DEFAULT_PROJECT;
if (!envProjectName) {
throw new Error(
'AZURE_DEVOPS_DEFAULT_PROJECT environment variable is required',
);
}
projectName = envProjectName;
const envOrgUrl = process.env.AZURE_DEVOPS_ORG_URL;
if (!envOrgUrl) {
throw new Error('AZURE_DEVOPS_ORG_URL environment variable is required');
}
orgUrl = envOrgUrl;
organizationId = getOrgNameFromUrl(orgUrl);
// Get a real connection using environment variables
connection = await getTestConnection();
});
// Helper function to get a valid wiki ID
async function getValidWikiId(): Promise<string | null> {
if (!connection) return null;
try {
// Get available wikis
const wikis = await getWikis(connection, { projectId: projectName });
// Skip if no wikis are available
if (wikis.length === 0) {
console.log('No wikis available in the project');
return null;
}
// Use the first available wiki
const wiki = wikis[0];
if (!wiki.name) {
console.log('Wiki name is undefined');
return null;
}
return wiki.name;
} catch (error) {
console.error('Error getting wikis:', error);
return null;
}
}
test('should create a new wiki page at the root', async () => {
// Skip if no connection is available
if (shouldSkipIntegrationTest()) {
console.log('Skipping test due to missing connection');
return;
}
// This connection must be available if we didn't skip
if (!connection) {
throw new Error(
'Connection should be available when test is not skipped',
);
}
// Get a valid wiki ID
const wikiId = await getValidWikiId();
if (!wikiId) {
console.log('Skipping test: No valid wiki ID available');
return;
}
const params: z.infer<typeof CreateWikiPageSchema> = {
organizationId,
projectId: projectName,
wikiId,
pagePath: testPagePath,
content: 'This is content for the integration test page (root).',
};
try {
// Create the wiki page
const createdPage = await createWikiPage(params);
// Verify the result
expect(createdPage).toBeDefined();
expect(createdPage.path).toBe(testPagePath);
expect(createdPage.content).toBe(params.content);
// Verify by fetching the page
const fetchedPage = await getWikiPage({
organizationId,
projectId: projectName,
wikiId,
pagePath: testPagePath,
});
expect(fetchedPage).toBeDefined();
expect(typeof fetchedPage).toBe('string');
expect(fetchedPage).toContain(params.content);
} catch (error) {
console.error('Error in test:', error);
throw error;
}
});
test('should create a new wiki sub-page', async () => {
// Skip if no connection is available
if (shouldSkipIntegrationTest()) {
console.log('Skipping test due to missing connection');
return;
}
// This connection must be available if we didn't skip
if (!connection) {
throw new Error(
'Connection should be available when test is not skipped',
);
}
// Get a valid wiki ID
const wikiId = await getValidWikiId();
if (!wikiId) {
console.log('Skipping test: No valid wiki ID available');
return;
}
// First, ensure the parent page exists
const parentParams: z.infer<typeof CreateWikiPageSchema> = {
organizationId,
projectId: projectName,
wikiId,
pagePath: testPagePath,
content: 'This is the parent page for the sub-page test.',
};
try {
// Create the parent page
await createWikiPage(parentParams);
// Now create the sub-page
const subPageParams: z.infer<typeof CreateWikiPageSchema> = {
organizationId,
projectId: projectName,
wikiId,
pagePath: testPagePathSub,
content: 'This is content for the integration test sub-page.',
};
const createdSubPage = await createWikiPage(subPageParams);
// Verify the result
expect(createdSubPage).toBeDefined();
expect(createdSubPage.path).toBe(testPagePathSub);
expect(createdSubPage.content).toBe(subPageParams.content);
// Verify by fetching the sub-page
const fetchedSubPage = await getWikiPage({
organizationId,
projectId: projectName,
wikiId,
pagePath: testPagePathSub,
});
expect(fetchedSubPage).toBeDefined();
expect(typeof fetchedSubPage).toBe('string');
expect(fetchedSubPage).toContain(subPageParams.content);
} catch (error) {
console.error('Error in test:', error);
throw error;
}
});
test('should update an existing wiki page if path already exists', async () => {
// Skip if no connection is available
if (shouldSkipIntegrationTest()) {
console.log('Skipping test due to missing connection');
return;
}
// This connection must be available if we didn't skip
if (!connection) {
throw new Error(
'Connection should be available when test is not skipped',
);
}
// Get a valid wiki ID
const wikiId = await getValidWikiId();
if (!wikiId) {
console.log('Skipping test: No valid wiki ID available');
return;
}
try {
// First create a page with initial content
const initialParams: z.infer<typeof CreateWikiPageSchema> = {
organizationId,
projectId: projectName,
wikiId,
pagePath: testPagePath,
content: 'Initial content.',
};
await createWikiPage(initialParams);
// Now update the page with new content
const updatedParams: z.infer<typeof CreateWikiPageSchema> = {
...initialParams,
content: 'Updated content for the page.',
};
const updatedPage = await createWikiPage(updatedParams);
// Verify the result
expect(updatedPage).toBeDefined();
expect(updatedPage.path).toBe(testPagePath);
expect(updatedPage.content).toBe(updatedParams.content);
// Verify by fetching the page
const fetchedPage = await getWikiPage({
organizationId,
projectId: projectName,
wikiId,
pagePath: testPagePath,
});
expect(fetchedPage).toBeDefined();
expect(typeof fetchedPage).toBe('string');
expect(fetchedPage).toContain(updatedParams.content);
} catch (error) {
console.error('Error in test:', error);
throw error;
}
});
test('should create a page with a default path if specified', async () => {
// Skip if no connection is available
if (shouldSkipIntegrationTest()) {
console.log('Skipping test due to missing connection');
return;
}
// This connection must be available if we didn't skip
if (!connection) {
throw new Error(
'Connection should be available when test is not skipped',
);
}
// Get a valid wiki ID
const wikiId = await getValidWikiId();
if (!wikiId) {
console.log('Skipping test: No valid wiki ID available');
return;
}
try {
const params: z.infer<typeof CreateWikiPageSchema> = {
organizationId,
projectId: projectName,
wikiId,
pagePath: testPagePathDefault,
content: 'Content for page created with default path.',
};
const createdPage = await createWikiPage(params);
// Verify the result
expect(createdPage).toBeDefined();
expect(createdPage.path).toBe(testPagePathDefault);
expect(createdPage.content).toBe(params.content);
// Verify by fetching the page
const fetchedPage = await getWikiPage({
organizationId,
projectId: projectName,
wikiId,
pagePath: testPagePathDefault,
});
expect(fetchedPage).toBeDefined();
expect(typeof fetchedPage).toBe('string');
expect(fetchedPage).toContain(params.content);
} catch (error) {
console.error('Error in test:', error);
throw error;
}
});
test('should include comment in the wiki page creation when provided', async () => {
// Skip if no connection is available
if (shouldSkipIntegrationTest()) {
console.log('Skipping test due to missing connection');
return;
}
// This connection must be available if we didn't skip
if (!connection) {
throw new Error(
'Connection should be available when test is not skipped',
);
}
// Get a valid wiki ID
const wikiId = await getValidWikiId();
if (!wikiId) {
console.log('Skipping test: No valid wiki ID available');
return;
}
try {
const params: z.infer<typeof CreateWikiPageSchema> = {
organizationId,
projectId: projectName,
wikiId,
pagePath: testPagePathComment,
content: 'Content with comment.',
comment: 'This is a test comment for the wiki page creation',
};
const createdPage = await createWikiPage(params);
// Verify the result
expect(createdPage).toBeDefined();
expect(createdPage.path).toBe(testPagePathComment);
expect(createdPage.content).toBe(params.content);
// Verify by fetching the page
const fetchedPage = await getWikiPage({
organizationId,
projectId: projectName,
wikiId,
pagePath: testPagePathComment,
});
expect(fetchedPage).toBeDefined();
expect(typeof fetchedPage).toBe('string');
expect(fetchedPage).toContain(params.content);
// Note: The API might not return the comment in the response
// This test primarily verifies that including a comment doesn't break the API call
} catch (error) {
console.error('Error in test:', error);
throw error;
}
});
test('should handle error when wiki does not exist', async () => {
// Skip if no connection is available
if (shouldSkipIntegrationTest()) {
console.log('Skipping test due to missing connection');
return;
}
const nonExistentWikiId = 'non-existent-wiki-12345';
const params: z.infer<typeof CreateWikiPageSchema> = {
organizationId,
projectId: projectName,
wikiId: nonExistentWikiId,
pagePath: '/test-page',
content: 'This should fail.',
};
await expect(createWikiPage(params)).rejects.toThrow(AzureDevOpsError);
});
test('should handle error when project does not exist', async () => {
// Skip if no connection is available
if (shouldSkipIntegrationTest()) {
console.log('Skipping test due to missing connection');
return;
}
const nonExistentProjectId = 'non-existent-project-12345';
const params: z.infer<typeof CreateWikiPageSchema> = {
organizationId,
projectId: nonExistentProjectId,
wikiId: 'any-wiki',
pagePath: '/test-page',
content: 'This should fail.',
};
await expect(createWikiPage(params)).rejects.toThrow(AzureDevOpsError);
});
test('should handle error when organization does not exist', async () => {
// Skip if no connection is available
if (shouldSkipIntegrationTest()) {
console.log('Skipping test due to missing connection');
return;
}
const nonExistentOrgId = 'non-existent-org-12345';
const params: z.infer<typeof CreateWikiPageSchema> = {
organizationId: nonExistentOrgId,
projectId: projectName,
wikiId: 'any-wiki',
pagePath: '/test-page',
content: 'This should fail.',
};
await expect(createWikiPage(params)).rejects.toThrow(AzureDevOpsError);
});
});
```
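Both integration specs on this page guard every test with the same two helpers from `@/shared/test/test-helpers`: `shouldSkipIntegrationTest()` to bail out when no live organization is configured, and `getTestConnection()` to obtain a real `WebApi` connection. Distilled, the guard looks like the sketch below; the body of the test is a placeholder, not repo code.

```typescript
import { WebApi } from 'azure-devops-node-api';
import {
  getTestConnection,
  shouldSkipIntegrationTest,
} from '@/shared/test/test-helpers';

describe('some feature (integration pattern sketch)', () => {
  let connection: WebApi | null = null;

  beforeAll(async () => {
    if (shouldSkipIntegrationTest()) {
      return;
    }
    connection = await getTestConnection();
  });

  test('runs only against a real organization', async () => {
    if (shouldSkipIntegrationTest()) {
      return; // no credentials/org configured; treat as skipped
    }
    if (!connection) {
      throw new Error('Connection should be available when test is not skipped');
    }
    // ...call the feature under test with `connection` here (placeholder)...
  });
});
```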
--------------------------------------------------------------------------------
/src/features/projects/get-project-details/feature.spec.unit.ts:
--------------------------------------------------------------------------------
```typescript
import { getProjectDetails } from './feature';
import {
AzureDevOpsError,
AzureDevOpsResourceNotFoundError,
} from '../../../shared/errors';
import {
TeamProject,
WebApiTeam,
} from 'azure-devops-node-api/interfaces/CoreInterfaces';
import { WebApi } from 'azure-devops-node-api';
import { WorkItemType } from 'azure-devops-node-api/interfaces/WorkItemTrackingInterfaces';
// Create mock interfaces for the APIs we'll use
interface MockCoreApi {
getProject: jest.Mock<Promise<TeamProject | null>>;
getTeams: jest.Mock<Promise<WebApiTeam[]>>;
}
interface MockWorkItemTrackingApi {
getWorkItemTypes: jest.Mock<Promise<WorkItemType[]>>;
}
interface MockProcessApi {
getProcesses: jest.Mock<Promise<any[]>>;
getProcessWorkItemTypes: jest.Mock<Promise<any[]>>;
}
// Create a mock connection that resembles WebApi with minimal implementation
interface MockConnection {
getCoreApi: jest.Mock<Promise<MockCoreApi>>;
getWorkItemTrackingApi: jest.Mock<Promise<MockWorkItemTrackingApi>>;
getProcessApi: jest.Mock<Promise<MockProcessApi>>;
serverUrl?: string;
authHandler?: unknown;
rest?: unknown;
vsoClient?: unknown;
}
// Sample data for tests
const mockProject = {
id: 'project-id',
name: 'Test Project',
description: 'A test project',
url: 'https://dev.azure.com/org/project',
state: 1, // wellFormed
revision: 123,
visibility: 0, // private
lastUpdateTime: new Date(),
capabilities: {
versioncontrol: {
sourceControlType: 'Git',
},
processTemplate: {
templateName: 'Agile',
templateTypeId: 'template-guid',
},
},
} as unknown as TeamProject;
const mockTeams: WebApiTeam[] = [
{
id: 'team-guid-1',
name: 'Team 1',
description: 'First team',
url: 'https://dev.azure.com/org/_apis/projects/project-guid/teams/team-guid-1',
identityUrl: 'https://vssps.dev.azure.com/org/_apis/Identities/team-guid-1',
} as WebApiTeam,
{
id: 'team-guid-2',
name: 'Team 2',
description: 'Second team',
url: 'https://dev.azure.com/org/_apis/projects/project-guid/teams/team-guid-2',
identityUrl: 'https://vssps.dev.azure.com/org/_apis/Identities/team-guid-2',
} as WebApiTeam,
];
const mockWorkItemTypes: WorkItemType[] = [
{
name: 'User Story',
description: 'Tracks user requirements',
referenceName: 'Microsoft.VSTS.WorkItemTypes.UserStory',
color: 'blue',
icon: 'icon-user-story',
isDisabled: false,
} as WorkItemType,
{
name: 'Bug',
description: 'Tracks defects in the product',
referenceName: 'Microsoft.VSTS.WorkItemTypes.Bug',
color: 'red',
icon: 'icon-bug',
isDisabled: false,
} as WorkItemType,
];
const mockProcesses = [
{
id: 'process-guid',
name: 'Agile',
description: 'Agile process',
isDefault: true,
type: 'system',
},
];
const mockProcessWorkItemTypes = [
{
name: 'User Story',
referenceName: 'Microsoft.VSTS.WorkItemTypes.UserStory',
description: 'Tracks user requirements',
color: 'blue',
icon: 'icon-user-story',
isDisabled: false,
states: [
{
name: 'New',
color: 'blue',
stateCategory: 'Proposed',
},
{
name: 'Active',
color: 'blue',
stateCategory: 'InProgress',
},
{
name: 'Resolved',
color: 'blue',
stateCategory: 'InProgress',
},
{
name: 'Closed',
color: 'blue',
stateCategory: 'Completed',
},
],
fields: [
{
name: 'Title',
referenceName: 'System.Title',
type: 'string',
required: true,
},
{
name: 'Description',
referenceName: 'System.Description',
type: 'html',
},
],
},
{
name: 'Bug',
referenceName: 'Microsoft.VSTS.WorkItemTypes.Bug',
description: 'Tracks defects in the product',
color: 'red',
icon: 'icon-bug',
isDisabled: false,
states: [
{
name: 'New',
color: 'red',
stateCategory: 'Proposed',
},
{
name: 'Active',
color: 'red',
stateCategory: 'InProgress',
},
{
name: 'Resolved',
color: 'red',
stateCategory: 'InProgress',
},
{
name: 'Closed',
color: 'red',
stateCategory: 'Completed',
},
],
fields: [
{
name: 'Title',
referenceName: 'System.Title',
type: 'string',
required: true,
},
{
name: 'Repro Steps',
referenceName: 'Microsoft.VSTS.TCM.ReproSteps',
type: 'html',
},
],
},
];
// Unit tests should only focus on isolated logic
describe('getProjectDetails unit', () => {
test('should throw resource not found error when project is null', async () => {
// Arrange
const mockCoreApi: MockCoreApi = {
getProject: jest.fn().mockResolvedValue(null), // Simulate project not found
getTeams: jest.fn().mockResolvedValue([]),
};
const mockConnection: MockConnection = {
getCoreApi: jest.fn().mockResolvedValue(mockCoreApi),
getWorkItemTrackingApi: jest.fn().mockResolvedValue({
getWorkItemTypes: jest.fn().mockResolvedValue([]),
}),
getProcessApi: jest.fn().mockResolvedValue({
getProcesses: jest.fn().mockResolvedValue([]),
getProcessWorkItemTypes: jest.fn().mockResolvedValue([]),
}),
};
// Act & Assert
await expect(
getProjectDetails(mockConnection as unknown as WebApi, {
projectId: 'non-existent-project',
}),
).rejects.toThrow(AzureDevOpsResourceNotFoundError);
await expect(
getProjectDetails(mockConnection as unknown as WebApi, {
projectId: 'non-existent-project',
}),
).rejects.toThrow("Project 'non-existent-project' not found");
});
test('should return basic project details when no additional options are specified', async () => {
// Arrange
const mockCoreApi: MockCoreApi = {
getProject: jest.fn().mockResolvedValue(mockProject),
getTeams: jest.fn().mockResolvedValue([]),
};
const mockConnection: MockConnection = {
getCoreApi: jest.fn().mockResolvedValue(mockCoreApi),
getWorkItemTrackingApi: jest.fn().mockResolvedValue({
getWorkItemTypes: jest.fn().mockResolvedValue([]),
}),
getProcessApi: jest.fn().mockResolvedValue({
getProcesses: jest.fn().mockResolvedValue([]),
getProcessWorkItemTypes: jest.fn().mockResolvedValue([]),
}),
};
// Act
const result = await getProjectDetails(
mockConnection as unknown as WebApi,
{
projectId: 'test-project',
},
);
// Assert
expect(result).toBeDefined();
expect(result.id).toBe(mockProject.id);
expect(result.name).toBe(mockProject.name);
expect(result.description).toBe(mockProject.description);
expect(result.url).toBe(mockProject.url);
expect(result.state).toBe(mockProject.state);
expect(result.revision).toBe(mockProject.revision);
expect(result.visibility).toBe(mockProject.visibility);
expect(result.lastUpdateTime).toBe(mockProject.lastUpdateTime);
expect(result.capabilities).toEqual(mockProject.capabilities);
// Verify that additional details are not included
expect(result.process).toBeUndefined();
expect(result.teams).toBeUndefined();
});
test('should include teams when includeTeams is true', async () => {
// Arrange
const mockCoreApi: MockCoreApi = {
getProject: jest.fn().mockResolvedValue(mockProject),
getTeams: jest.fn().mockResolvedValue(mockTeams),
};
const mockConnection: MockConnection = {
getCoreApi: jest.fn().mockResolvedValue(mockCoreApi),
getWorkItemTrackingApi: jest.fn().mockResolvedValue({
getWorkItemTypes: jest.fn().mockResolvedValue([]),
}),
getProcessApi: jest.fn().mockResolvedValue({
getProcesses: jest.fn().mockResolvedValue([]),
getProcessWorkItemTypes: jest.fn().mockResolvedValue([]),
}),
};
// Act
const result = await getProjectDetails(
mockConnection as unknown as WebApi,
{
projectId: 'test-project',
includeTeams: true,
},
);
// Assert
expect(result).toBeDefined();
expect(result.teams).toBeDefined();
expect(result.teams?.length).toBe(2);
expect(result.teams?.[0].id).toBe(mockTeams[0].id);
expect(result.teams?.[0].name).toBe(mockTeams[0].name);
expect(result.teams?.[1].id).toBe(mockTeams[1].id);
expect(result.teams?.[1].name).toBe(mockTeams[1].name);
});
test('should include process information when includeProcess is true', async () => {
// Arrange
const mockCoreApi: MockCoreApi = {
getProject: jest.fn().mockResolvedValue(mockProject),
getTeams: jest.fn().mockResolvedValue([]),
};
const mockWorkItemTrackingApi: MockWorkItemTrackingApi = {
getWorkItemTypes: jest.fn().mockResolvedValue([]),
};
const mockConnection: MockConnection = {
getCoreApi: jest.fn().mockResolvedValue(mockCoreApi),
getWorkItemTrackingApi: jest
.fn()
.mockResolvedValue(mockWorkItemTrackingApi),
getProcessApi: jest.fn(),
};
// Act
const result = await getProjectDetails(
mockConnection as unknown as WebApi,
{
projectId: 'test-project',
includeProcess: true,
},
);
// Assert
expect(result).toBeDefined();
expect(result.process).toBeDefined();
expect(result.process?.name).toBe('Agile');
});
test('should include work item types when includeWorkItemTypes is true', async () => {
// Arrange
const mockCoreApi: MockCoreApi = {
getProject: jest.fn().mockResolvedValue(mockProject),
getTeams: jest.fn().mockResolvedValue([]),
};
const mockWorkItemTrackingApi: MockWorkItemTrackingApi = {
getWorkItemTypes: jest.fn().mockResolvedValue(mockWorkItemTypes),
};
const mockProcessApi: MockProcessApi = {
getProcesses: jest.fn().mockResolvedValue(mockProcesses),
getProcessWorkItemTypes: jest
.fn()
.mockResolvedValue(mockProcessWorkItemTypes),
};
const mockConnection: MockConnection = {
getCoreApi: jest.fn().mockResolvedValue(mockCoreApi),
getWorkItemTrackingApi: jest
.fn()
.mockResolvedValue(mockWorkItemTrackingApi),
getProcessApi: jest.fn().mockResolvedValue(mockProcessApi),
};
// Act
const result = await getProjectDetails(
mockConnection as unknown as WebApi,
{
projectId: 'test-project',
includeWorkItemTypes: true,
includeProcess: true,
},
);
// Assert
expect(result).toBeDefined();
expect(result.process).toBeDefined();
expect(result.process?.workItemTypes).toBeDefined();
expect(result.process?.workItemTypes?.length).toBe(2);
expect(result.process?.workItemTypes?.[0].name).toBe('User Story');
expect(result.process?.workItemTypes?.[1].name).toBe('Bug');
});
test('should include fields when includeFields is true', async () => {
// Arrange
const mockCoreApi: MockCoreApi = {
getProject: jest.fn().mockResolvedValue(mockProject),
getTeams: jest.fn().mockResolvedValue([]),
};
const mockWorkItemTrackingApi: MockWorkItemTrackingApi = {
getWorkItemTypes: jest.fn().mockResolvedValue(mockWorkItemTypes),
};
const mockProcessApi: MockProcessApi = {
getProcesses: jest.fn().mockResolvedValue(mockProcesses),
getProcessWorkItemTypes: jest
.fn()
.mockResolvedValue(mockProcessWorkItemTypes),
};
const mockConnection: MockConnection = {
getCoreApi: jest.fn().mockResolvedValue(mockCoreApi),
getWorkItemTrackingApi: jest
.fn()
.mockResolvedValue(mockWorkItemTrackingApi),
getProcessApi: jest.fn().mockResolvedValue(mockProcessApi),
};
// Act
const result = await getProjectDetails(
mockConnection as unknown as WebApi,
{
projectId: 'test-project',
includeWorkItemTypes: true,
includeFields: true,
includeProcess: true,
},
);
// Assert
expect(result).toBeDefined();
expect(result.process).toBeDefined();
expect(result.process?.workItemTypes).toBeDefined();
expect(result.process?.workItemTypes?.[0].fields).toBeDefined();
expect(result.process?.workItemTypes?.[0].fields?.length).toBe(2);
expect(result.process?.workItemTypes?.[0].fields?.[0].name).toBe('Title');
expect(result.process?.workItemTypes?.[0].fields?.[1].name).toBe(
'Description',
);
});
test('should propagate custom errors when thrown internally', async () => {
// Arrange
const mockConnection: MockConnection = {
getCoreApi: jest.fn().mockImplementation(() => {
throw new AzureDevOpsError('Custom error');
}),
getWorkItemTrackingApi: jest.fn(),
getProcessApi: jest.fn(),
};
// Act & Assert
await expect(
getProjectDetails(mockConnection as unknown as WebApi, {
projectId: 'test-project',
}),
).rejects.toThrow(AzureDevOpsError);
await expect(
getProjectDetails(mockConnection as unknown as WebApi, {
projectId: 'test-project',
}),
).rejects.toThrow('Custom error');
});
test('should wrap unexpected errors in a friendly error message', async () => {
// Arrange
const mockConnection: MockConnection = {
getCoreApi: jest.fn().mockImplementation(() => {
throw new Error('Unexpected error');
}),
getWorkItemTrackingApi: jest.fn(),
getProcessApi: jest.fn(),
};
// Act & Assert
await expect(
getProjectDetails(mockConnection as unknown as WebApi, {
projectId: 'test-project',
}),
).rejects.toThrow('Failed to get project details: Unexpected error');
});
});
```
--------------------------------------------------------------------------------
/src/server.ts:
--------------------------------------------------------------------------------
```typescript
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import {
CallToolRequestSchema,
ListToolsRequestSchema,
ListResourcesRequestSchema,
ReadResourceRequestSchema,
} from '@modelcontextprotocol/sdk/types.js';
import { WebApi } from 'azure-devops-node-api';
import { GitVersionType } from 'azure-devops-node-api/interfaces/GitInterfaces';
import { VERSION } from './shared/config';
import { AzureDevOpsConfig } from './shared/types';
import {
AzureDevOpsAuthenticationError,
AzureDevOpsError,
AzureDevOpsResourceNotFoundError,
AzureDevOpsValidationError,
} from './shared/errors';
import { handleResponseError } from './shared/errors/handle-request-error';
import { AuthenticationMethod, AzureDevOpsClient } from './shared/auth';
// Import environment defaults when needed in feature handlers
// Import feature modules with request handlers and tool definitions
import {
workItemsTools,
isWorkItemsRequest,
handleWorkItemsRequest,
} from './features/work-items';
import {
projectsTools,
isProjectsRequest,
handleProjectsRequest,
} from './features/projects';
import {
repositoriesTools,
isRepositoriesRequest,
handleRepositoriesRequest,
} from './features/repositories';
import {
organizationsTools,
isOrganizationsRequest,
handleOrganizationsRequest,
} from './features/organizations';
import {
searchTools,
isSearchRequest,
handleSearchRequest,
} from './features/search';
import {
usersTools,
isUsersRequest,
handleUsersRequest,
} from './features/users';
import {
pullRequestsTools,
isPullRequestsRequest,
handlePullRequestsRequest,
} from './features/pull-requests';
import {
pipelinesTools,
isPipelinesRequest,
handlePipelinesRequest,
} from './features/pipelines';
import {
wikisTools,
isWikisRequest,
handleWikisRequest,
} from './features/wikis';
// Create a safe console logging function that won't interfere with MCP protocol
function safeLog(message: string) {
process.stderr.write(`${message}\n`);
}
/**
* Type definition for the Azure DevOps MCP Server
*/
export type AzureDevOpsServer = Server;
/**
* Create an Azure DevOps MCP Server
*
* @param config The Azure DevOps configuration
* @returns A configured MCP server instance
*/
export function createAzureDevOpsServer(config: AzureDevOpsConfig): Server {
// Validate the configuration
validateConfig(config);
// Initialize the MCP server
const server = new Server(
{
name: 'azure-devops-mcp',
version: VERSION,
},
{
capabilities: {
tools: {},
resources: {},
},
},
);
// Register the ListTools request handler
server.setRequestHandler(ListToolsRequestSchema, () => {
// Combine tools from all features
const tools = [
...usersTools,
...organizationsTools,
...projectsTools,
...repositoriesTools,
...workItemsTools,
...searchTools,
...pullRequestsTools,
...pipelinesTools,
...wikisTools,
];
return { tools };
});
// Register the resource handlers
// ListResources - register available resource templates
server.setRequestHandler(ListResourcesRequestSchema, async () => {
// Create resource templates for repository content
const templates = [
// Default branch content
{
uriTemplate: 'ado://{organization}/{project}/{repo}/contents{/path*}',
name: 'Repository Content',
description: 'Content from the default branch of a repository',
},
// Branch specific content
{
uriTemplate:
'ado://{organization}/{project}/{repo}/branches/{branch}/contents{/path*}',
name: 'Branch Content',
description: 'Content from a specific branch of a repository',
},
// Commit specific content
{
uriTemplate:
'ado://{organization}/{project}/{repo}/commits/{commit}/contents{/path*}',
name: 'Commit Content',
description: 'Content from a specific commit in a repository',
},
// Tag specific content
{
uriTemplate:
'ado://{organization}/{project}/{repo}/tags/{tag}/contents{/path*}',
name: 'Tag Content',
description: 'Content from a specific tag in a repository',
},
// Pull request specific content
{
uriTemplate:
'ado://{organization}/{project}/{repo}/pullrequests/{prId}/contents{/path*}',
name: 'Pull Request Content',
description: 'Content from a specific pull request in a repository',
},
];
return {
resources: [],
templates,
};
});
// ReadResource - handle reading content from the templates
server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
try {
const uri = new URL(request.params.uri);
// Parse the URI to extract components
const segments = uri.pathname.split('/').filter(Boolean);
// Check if it's an Azure DevOps resource URI
if (uri.protocol !== 'ado:') {
throw new AzureDevOpsResourceNotFoundError(
`Unsupported protocol: ${uri.protocol}`,
);
}
// Extract organization, project, and repo
// const organization = segments[0]; // Currently unused but kept for future use
const project = segments[1];
const repo = segments[2];
// Get a connection to Azure DevOps
const connection = await getConnection(config);
// Default path is root if not specified
let path = '/';
// Extract path from the remaining segments, if there are at least 5 segments (org/project/repo/contents/path)
if (segments.length >= 5 && segments[3] === 'contents') {
path = '/' + segments.slice(4).join('/');
}
// Determine version control parameters based on URI pattern
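// The segment layouts handled below mirror the resource templates registered in the
// ListResources handler above, e.g.
//   ado://{organization}/{project}/{repo}/branches/{branch}/contents/{path}
//   ado://{organization}/{project}/{repo}/commits/{commit}/contents/{path}
//   ado://{organization}/{project}/{repo}/tags/{tag}/contents/{path}
//   ado://{organization}/{project}/{repo}/pullrequests/{prId}/contents/{path}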
let versionType: number | undefined;
let version: string | undefined;
if (segments[3] === 'branches' && segments.length >= 5) {
versionType = GitVersionType.Branch;
version = segments[4];
// Extract path if present
if (segments.length >= 7 && segments[5] === 'contents') {
path = '/' + segments.slice(6).join('/');
}
} else if (segments[3] === 'commits' && segments.length >= 5) {
versionType = GitVersionType.Commit;
version = segments[4];
// Extract path if present
if (segments.length >= 7 && segments[5] === 'contents') {
path = '/' + segments.slice(6).join('/');
}
} else if (segments[3] === 'tags' && segments.length >= 5) {
versionType = GitVersionType.Tag;
version = segments[4];
// Extract path if present
if (segments.length >= 7 && segments[5] === 'contents') {
path = '/' + segments.slice(6).join('/');
}
} else if (segments[3] === 'pullrequests' && segments.length >= 5) {
// TODO: For PR head, we need to get the source branch or commit
// Currently just use the default branch as a fallback
// versionType = GitVersionType.Branch;
// version = 'PR-' + segments[4];
// Extract path if present
if (segments.length >= 7 && segments[5] === 'contents') {
path = '/' + segments.slice(6).join('/');
}
}
// Get the content
// GitVersionType.Branch is 0, so compare against undefined rather than relying on truthiness
const versionDescriptor =
versionType !== undefined && version
? { versionType, version }
: undefined;
// Import the getFileContent function from repositories feature
const { getFileContent } = await import(
'./features/repositories/get-file-content/index.js'
);
const fileContent = await getFileContent(
connection,
project,
repo,
path,
versionDescriptor,
);
// Return the content based on whether it's a file or directory
return {
contents: [
{
uri: request.params.uri,
mimeType: fileContent.isDirectory
? 'application/json'
: getMimeType(path),
text: fileContent.content,
},
],
};
} catch (error) {
safeLog(`Error reading resource: ${error}`);
if (error instanceof AzureDevOpsError) {
throw error;
}
throw new AzureDevOpsResourceNotFoundError(
`Failed to read resource: ${error instanceof Error ? error.message : String(error)}`,
);
}
});
// Register the CallTool request handler
server.setRequestHandler(CallToolRequestSchema, async (request) => {
try {
// Note: We don't need to validate the presence of arguments here because:
// 1. The schema validations (via zod.parse) will check for required parameters
// 2. Default values from environment.ts are applied for optional parameters (projectId, organizationId)
// 3. Arguments can be omitted entirely for tools with no required parameters
// Get a connection to Azure DevOps
const connection = await getConnection(config);
// Route the request to the appropriate feature handler
if (isWorkItemsRequest(request)) {
return await handleWorkItemsRequest(connection, request);
}
if (isProjectsRequest(request)) {
return await handleProjectsRequest(connection, request);
}
if (isRepositoriesRequest(request)) {
return await handleRepositoriesRequest(connection, request);
}
if (isOrganizationsRequest(request)) {
// Organizations feature doesn't need the config object anymore
return await handleOrganizationsRequest(connection, request);
}
if (isSearchRequest(request)) {
return await handleSearchRequest(connection, request);
}
if (isUsersRequest(request)) {
return await handleUsersRequest(connection, request);
}
if (isPullRequestsRequest(request)) {
return await handlePullRequestsRequest(connection, request);
}
if (isPipelinesRequest(request)) {
return await handlePipelinesRequest(connection, request);
}
if (isWikisRequest(request)) {
return await handleWikisRequest(connection, request);
}
// If we get here, the tool is not recognized by any feature handler
throw new Error(`Unknown tool: ${request.params.name}`);
} catch (error) {
return handleResponseError(error);
}
});
return server;
}
/**
* Get a mime type based on file extension
*
* @param path File path
* @returns Mime type string
*/
function getMimeType(path: string): string {
const extension = path.split('.').pop()?.toLowerCase();
switch (extension) {
case 'txt':
return 'text/plain';
case 'html':
case 'htm':
return 'text/html';
case 'css':
return 'text/css';
case 'js':
return 'application/javascript';
case 'json':
return 'application/json';
case 'xml':
return 'application/xml';
case 'md':
return 'text/markdown';
case 'png':
return 'image/png';
case 'jpg':
case 'jpeg':
return 'image/jpeg';
case 'gif':
return 'image/gif';
case 'webp':
return 'image/webp';
case 'svg':
return 'image/svg+xml';
case 'pdf':
return 'application/pdf';
case 'ts':
case 'tsx':
return 'application/typescript';
case 'py':
return 'text/x-python';
case 'cs':
return 'text/x-csharp';
case 'java':
return 'text/x-java';
case 'c':
return 'text/x-c';
case 'cpp':
case 'cc':
return 'text/x-c++';
case 'go':
return 'text/x-go';
case 'rs':
return 'text/x-rust';
case 'rb':
return 'text/x-ruby';
case 'sh':
return 'text/x-sh';
case 'yaml':
case 'yml':
return 'text/yaml';
default:
return 'text/plain';
}
}
/**
* Validate the Azure DevOps configuration
*
* @param config The configuration to validate
* @throws {AzureDevOpsValidationError} If the configuration is invalid
*/
function validateConfig(config: AzureDevOpsConfig): void {
if (!config.organizationUrl) {
process.stderr.write(
'ERROR: Organization URL is required but was not provided.\n',
);
process.stderr.write(
`Config: ${JSON.stringify(
{
organizationUrl: config.organizationUrl,
authMethod: config.authMethod,
defaultProject: config.defaultProject,
// Hide PAT for security
personalAccessToken: config.personalAccessToken
? 'REDACTED'
: undefined,
apiVersion: config.apiVersion,
},
null,
2,
)}\n`,
);
throw new AzureDevOpsValidationError('Organization URL is required');
}
// Set default authentication method if not specified
if (!config.authMethod) {
config.authMethod = AuthenticationMethod.AzureIdentity;
}
// Validate PAT if using PAT authentication
if (
config.authMethod === AuthenticationMethod.PersonalAccessToken &&
!config.personalAccessToken
) {
throw new AzureDevOpsValidationError(
'Personal access token is required when using PAT authentication',
);
}
}
/**
* Create a connection to Azure DevOps
*
* @param config The configuration to use
* @returns A WebApi connection
*/
export async function getConnection(
config: AzureDevOpsConfig,
): Promise<WebApi> {
try {
// Create a client with the appropriate authentication method
const client = new AzureDevOpsClient({
method: config.authMethod || AuthenticationMethod.AzureIdentity,
organizationUrl: config.organizationUrl,
personalAccessToken: config.personalAccessToken,
});
// Test the connection by getting the Core API
await client.getCoreApi();
// Return the underlying WebApi client
return await client.getWebApiClient();
} catch (error) {
throw new AzureDevOpsAuthenticationError(
`Failed to connect to Azure DevOps: ${error instanceof Error ? error.message : String(error)}`,
);
}
}
```
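`createAzureDevOpsServer` builds and configures the `Server` but does not start it; transport wiring lives in `src/index.ts`, which is on another page of this dump. Below is a rough sketch of what such an entry point can look like, assuming the MCP SDK's stdio transport and a file placed next to `server.ts`. Only `AZURE_DEVOPS_ORG_URL` and `AZURE_DEVOPS_DEFAULT_PROJECT` appear elsewhere on this page; `AZURE_DEVOPS_PAT` is an assumed variable name, and the optional config fields follow how `validateConfig` reads them.

```typescript
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import { createAzureDevOpsServer } from './server';

async function main(): Promise<void> {
  const server = createAzureDevOpsServer({
    organizationUrl: process.env.AZURE_DEVOPS_ORG_URL ?? '',
    // authMethod is omitted; validateConfig defaults it to AuthenticationMethod.AzureIdentity.
    // AZURE_DEVOPS_PAT is an assumed variable name, not confirmed by this page.
    personalAccessToken: process.env.AZURE_DEVOPS_PAT,
    defaultProject: process.env.AZURE_DEVOPS_DEFAULT_PROJECT,
  });

  // Serve MCP requests over stdio, writing diagnostics to stderr as server.ts does.
  const transport = new StdioServerTransport();
  await server.connect(transport);
}

main().catch((error) => {
  process.stderr.write(`Fatal error starting server: ${error}\n`);
  process.exit(1);
});
```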
--------------------------------------------------------------------------------
/src/features/pull-requests/update-pull-request/feature.spec.int.ts:
--------------------------------------------------------------------------------
```typescript
import { WebApi } from 'azure-devops-node-api';
import { updatePullRequest } from './feature';
import { createPullRequest } from '../create-pull-request/feature';
import { listWorkItems } from '../../work-items/list-work-items/feature';
import {
getTestConnection,
shouldSkipIntegrationTest,
} from '@/shared/test/test-helpers';
describe('updatePullRequest integration', () => {
let connection: WebApi | null = null;
let projectName: string;
let repositoryName: string;
let pullRequestId: number;
let workItemId: number | null = null;
// Generate unique identifiers using timestamp
const timestamp = Date.now();
const randomSuffix = Math.floor(Math.random() * 1000);
const uniqueBranchName = `test-branch-${timestamp}-${randomSuffix}`;
const uniqueTitle = `Test PR ${timestamp}-${randomSuffix}`;
const updatedTitle = `Updated PR ${timestamp}-${randomSuffix}`;
beforeAll(async () => {
// Skip if integration tests should be skipped
if (shouldSkipIntegrationTest()) {
return;
}
// Get a real connection using environment variables
connection = await getTestConnection();
// Get project and repository names from environment variables
projectName = process.env.AZURE_DEVOPS_DEFAULT_PROJECT || 'DefaultProject';
repositoryName =
process.env.AZURE_DEVOPS_DEFAULT_REPOSITORY || 'DefaultRepo';
// Find an existing work item to use in tests
if (!connection) {
throw new Error('Connection is null');
}
const workItems = await listWorkItems(connection, {
projectId: projectName,
top: 1, // Just need one work item
});
if (workItems && workItems.length > 0 && workItems[0].id) {
workItemId = workItems[0].id;
}
// Set up a test branch and create a test pull request
const gitApi = await connection.getGitApi();
// Get the default branch's object ID
const repository = await gitApi.getRepository(repositoryName, projectName);
const defaultBranch =
repository.defaultBranch?.replace('refs/heads/', '') || 'main';
// Get the latest commit on the default branch
const commits = await gitApi.getCommits(
repositoryName,
{
$top: 1,
itemVersion: {
version: defaultBranch,
versionType: 0, // 0 = branch
},
},
projectName,
);
if (!commits || commits.length === 0) {
throw new Error('No commits found in repository');
}
// Create a new branch
const refUpdate = {
name: `refs/heads/${uniqueBranchName}`,
oldObjectId: '0000000000000000000000000000000000000000',
newObjectId: commits[0].commitId,
};
const updateResult = await gitApi.updateRefs(
[refUpdate],
repositoryName,
projectName,
);
if (
!updateResult ||
updateResult.length === 0 ||
!updateResult[0].success
) {
throw new Error('Failed to create new branch');
}
// Create a test pull request
const testPullRequest = await createPullRequest(
connection,
projectName,
repositoryName,
{
title: uniqueTitle,
description: 'Test pull request for integration testing',
sourceRefName: `refs/heads/${uniqueBranchName}`,
targetRefName: repository.defaultBranch || 'refs/heads/main',
isDraft: true,
},
);
pullRequestId = testPullRequest.pullRequestId!;
});
afterAll(async () => {
// Clean up created resources
if (!shouldSkipIntegrationTest() && connection && pullRequestId) {
try {
// Check the current state of the pull request
const gitApi = await connection.getGitApi();
const pullRequest = await gitApi.getPullRequestById(
pullRequestId,
projectName,
);
// Only try to abandon if it's still active (status 1)
if (pullRequest && pullRequest.status === 1) {
await gitApi.updatePullRequest(
{
status: 2, // 2 = Abandoned
},
repositoryName,
pullRequestId,
projectName,
);
}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
} catch (_) {
// Ignore cleanup errors
}
}
});
test('should update pull request title and description', async () => {
// Skip if integration tests should be skipped
if (shouldSkipIntegrationTest() || !connection) {
console.log('Skipping test due to missing connection');
return;
}
const updatedDescription = 'Updated description for integration testing';
const result = await updatePullRequest({
projectId: projectName,
repositoryId: repositoryName,
pullRequestId,
title: updatedTitle,
description: updatedDescription,
});
// Verify the update was successful
expect(result).toBeDefined();
expect(result.pullRequestId).toBe(pullRequestId);
expect(result.title).toBe(updatedTitle);
expect(result.description).toBe(updatedDescription);
}, 30000); // 30 second timeout for integration test
test('should update pull request draft status', async () => {
// Skip if integration tests should be skipped
if (shouldSkipIntegrationTest() || !connection) {
console.log('Skipping test due to missing connection');
return;
}
// Mark as not a draft
const result = await updatePullRequest({
projectId: projectName,
repositoryId: repositoryName,
pullRequestId,
isDraft: false,
});
// Verify the update was successful
expect(result).toBeDefined();
expect(result.pullRequestId).toBe(pullRequestId);
expect(result.isDraft).toBe(false);
}, 30000); // 30 second timeout for integration test
test('should add work item links to pull request', async () => {
// Skip if integration tests should be skipped or no work item was found
if (shouldSkipIntegrationTest() || !connection || !workItemId) {
console.log('Skipping test due to missing connection or work item');
return;
}
// Add the work item link
const result = await updatePullRequest({
projectId: projectName,
repositoryId: repositoryName,
pullRequestId,
addWorkItemIds: [workItemId!],
});
// Verify the update was successful
expect(result).toBeDefined();
expect(result.pullRequestId).toBe(pullRequestId);
// Get the pull request work items using the proper API
const gitApi = await connection!.getGitApi();
// Add a delay to allow Azure DevOps to process the work item link
await new Promise((resolve) => setTimeout(resolve, 5000));
// Use the getPullRequestWorkItemRefs method to get the work items
const workItemRefs = await gitApi.getPullRequestWorkItemRefs(
repositoryName,
pullRequestId,
projectName,
);
// Verify that work items are linked
expect(workItemRefs).toBeDefined();
expect(Array.isArray(workItemRefs)).toBe(true);
// Check if our work item is in the list
const hasWorkItem = workItemRefs.some(
(ref) => ref.id !== undefined && Number(ref.id) === workItemId,
);
expect(hasWorkItem).toBe(true);
}, 60000); // 60 second timeout for integration test
test('should remove work item links from pull request', async () => {
// Skip if integration tests should be skipped or no work item was found
if (shouldSkipIntegrationTest() || !connection || !workItemId) {
console.log('Skipping test due to missing connection or work item');
return;
}
// First ensure the work item is linked
try {
await updatePullRequest({
projectId: projectName,
repositoryId: repositoryName,
pullRequestId,
addWorkItemIds: [workItemId!],
});
// Add a delay to allow Azure DevOps to process the work item link
await new Promise((resolve) => setTimeout(resolve, 3000));
} catch (error) {
// If there's an error adding the link, that's okay
console.log(
"Error adding work item (already be linked so that's 👍):",
error instanceof Error ? error.message : String(error),
);
}
// Then remove the work item link
const result = await updatePullRequest({
projectId: projectName,
repositoryId: repositoryName,
pullRequestId,
removeWorkItemIds: [workItemId!],
});
// Verify the update was successful
expect(result).toBeDefined();
expect(result.pullRequestId).toBe(pullRequestId);
// Get the pull request work items using the proper API
const gitApi = await connection!.getGitApi();
// Add a delay to allow Azure DevOps to process the work item unlink
await new Promise((resolve) => setTimeout(resolve, 5000));
// Use the getPullRequestWorkItemRefs method to get the work items
const workItemRefs = await gitApi.getPullRequestWorkItemRefs(
repositoryName,
pullRequestId,
projectName,
);
// Verify that work items are properly unlinked
expect(workItemRefs).toBeDefined();
expect(Array.isArray(workItemRefs)).toBe(true);
// Check if our work item is not in the list
const hasWorkItem = workItemRefs.some(
(ref) => ref.id !== undefined && Number(ref.id) === workItemId,
);
expect(hasWorkItem).toBe(false);
}, 60000); // 60 second timeout for integration test
test('should add reviewers to pull request', async () => {
// Skip if integration tests should be skipped
if (shouldSkipIntegrationTest() || !connection) {
console.log('Skipping test due to missing connection');
return;
}
// Find an actual user in the organization to use as a reviewer
const gitApi = await connection.getGitApi();
// Get the pull request creator as a reviewer (they always exist)
const pullRequest = await gitApi.getPullRequestById(
pullRequestId,
projectName,
)!;
// Use the pull request creator's ID as the reviewer
const reviewer = pullRequest.createdBy!.id!;
// Add the reviewer
const result = await updatePullRequest({
projectId: projectName,
repositoryId: repositoryName,
pullRequestId,
addReviewers: [reviewer],
});
// Verify the update was successful
expect(result).toBeDefined();
expect(result.pullRequestId).toBe(pullRequestId);
// Add a delay to allow Azure DevOps to process the reviewer addition
await new Promise((resolve) => setTimeout(resolve, 1000));
const reviewers = await gitApi.getPullRequestReviewers(
repositoryName,
pullRequestId,
projectName,
);
// Verify that the reviewer was added
expect(reviewers).toBeDefined();
expect(Array.isArray(reviewers)).toBe(true);
// Check if our reviewer is in the list by ID
const hasReviewer = reviewers.some((r) => r.id === reviewer);
expect(hasReviewer).toBe(true);
}, 60000); // 60 second timeout for integration test
test('should remove reviewers from pull request', async () => {
// Skip if integration tests should be skipped
if (shouldSkipIntegrationTest() || !connection) {
console.log('Skipping test due to missing connection');
return;
}
// Find an actual user in the organization to use as a reviewer
const gitApi = await connection.getGitApi();
// Get the pull request creator as a reviewer (they always exist)
const pullRequest = await gitApi.getPullRequestById(
pullRequestId,
projectName,
);
if (!pullRequest || !pullRequest.createdBy || !pullRequest.createdBy.id) {
throw new Error('Could not determine pull request creator');
}
// Use the pull request creator's ID as the reviewer
const reviewer = pullRequest.createdBy.id;
// First ensure the reviewer is added
try {
await updatePullRequest({
projectId: projectName,
repositoryId: repositoryName,
pullRequestId,
addReviewers: [reviewer],
});
// Add a delay to allow Azure DevOps to process the reviewer addition
await new Promise((resolve) => setTimeout(resolve, 3000));
} catch (error) {
// If there's an error adding the reviewer, that's okay
console.log(
'Error adding reviewer (might already be added):',
error instanceof Error ? error.message : String(error),
);
}
// Then remove the reviewer
const result = await updatePullRequest({
projectId: projectName,
repositoryId: repositoryName,
pullRequestId,
removeReviewers: [reviewer],
});
// Verify the update was successful
expect(result).toBeDefined();
expect(result.pullRequestId).toBe(pullRequestId);
// Add a delay to allow Azure DevOps to process the reviewer removal
await new Promise((resolve) => setTimeout(resolve, 3000));
const reviewers = await gitApi.getPullRequestReviewers(
repositoryName,
pullRequestId,
projectName,
);
// Verify that the reviewer was removed
expect(reviewers).toBeDefined();
expect(Array.isArray(reviewers)).toBe(true);
// Check if our reviewer is not in the list
const hasReviewer = reviewers.some((r) => r.id === reviewer);
expect(hasReviewer).toBe(false);
}, 60000); // 60 second timeout for integration test
test('should update pull request with additional properties', async () => {
// Skip if integration tests should be skipped
if (shouldSkipIntegrationTest() || !connection) {
console.log('Skipping test due to missing connection');
return;
}
// Use a custom property that Azure DevOps supports
const customProperty = 'autoComplete';
const customValue = true;
const result = await updatePullRequest({
projectId: projectName,
repositoryId: repositoryName,
pullRequestId,
additionalProperties: {
[customProperty]: customValue,
},
});
// Verify the update was successful
expect(result).toBeDefined();
expect(result.pullRequestId).toBe(pullRequestId);
// For autoComplete specifically, we can check if it's in the response
if (customProperty in result) {
expect(result[customProperty]).toBe(customValue);
}
}, 30000); // 30 second timeout for integration test
test('should update pull request status to abandoned', async () => {
// Skip if integration tests should be skipped
if (shouldSkipIntegrationTest() || !connection) {
console.log('Skipping test due to missing connection');
return;
}
// Abandon the pull request instead of completing it
// Completing requires additional setup that's complex for integration tests
const result = await updatePullRequest({
projectId: projectName,
repositoryId: repositoryName,
pullRequestId,
status: 'abandoned',
});
// Verify the update was successful
expect(result).toBeDefined();
expect(result.pullRequestId).toBe(pullRequestId);
expect(result.status).toBe(2); // 2 = Abandoned
}, 30000); // 30 second timeout for integration test
});
```
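The test above relies on raw status codes (1 for active, 2 for abandoned). As a reference, here is a small sketch using the `PullRequestStatus` enum shipped with azure-devops-node-api, which maps to the same numbers; the helper names are illustrative only.

```typescript
// Sketch only: named equivalents of the numeric pull request status codes used above.
import { PullRequestStatus } from 'azure-devops-node-api/interfaces/GitInterfaces';

// PullRequestStatus.Active === 1, PullRequestStatus.Abandoned === 2
const isStillActive = (status?: PullRequestStatus): boolean =>
  status === PullRequestStatus.Active;

const abandonUpdate = { status: PullRequestStatus.Abandoned };

console.log(isStillActive(PullRequestStatus.Active), abandonUpdate.status); // true 2
```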
--------------------------------------------------------------------------------
/src/features/wikis/list-wiki-pages/feature.spec.unit.ts:
--------------------------------------------------------------------------------
```typescript
import { listWikiPages, WikiPageSummary } from './feature';
import * as azureDevOpsClient from '../../../clients/azure-devops';
import {
AzureDevOpsError,
AzureDevOpsResourceNotFoundError,
AzureDevOpsPermissionError,
} from '../../../shared/errors/azure-devops-errors';
// Mock the Azure DevOps client
jest.mock('../../../clients/azure-devops');
// Mock the environment utilities to avoid dependency on environment variables
jest.mock('../../../utils/environment', () => ({
defaultOrg: 'azure-devops-mcp-testing',
defaultProject: 'eShopOnWeb',
}));
describe('listWikiPages unit', () => {
// Mock WikiClient
const mockWikiClient = {
listWikiPages: jest.fn(),
};
// Mock getWikiClient function
const mockGetWikiClient =
azureDevOpsClient.getWikiClient as jest.MockedFunction<
typeof azureDevOpsClient.getWikiClient
>;
beforeEach(() => {
// Clear mock calls between tests
jest.clearAllMocks();
// Setup default mock implementation
mockGetWikiClient.mockResolvedValue(mockWikiClient as any);
});
describe('Happy Path Scenarios', () => {
test('should return wiki pages successfully', async () => {
// Mock data
const mockPages: WikiPageSummary[] = [
{
id: 1,
path: '/Home',
url: 'https://dev.azure.com/org/project/_wiki/wikis/wiki1/1',
order: 1,
},
{
id: 2,
path: '/Getting-Started',
url: 'https://dev.azure.com/org/project/_wiki/wikis/wiki1/2',
order: 2,
},
];
// Setup mock responses
mockWikiClient.listWikiPages.mockResolvedValue(mockPages);
// Call the function
const result = await listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'test-wiki',
});
// Assertions
expect(mockGetWikiClient).toHaveBeenCalledWith({
organizationId: 'test-org',
});
expect(mockWikiClient.listWikiPages).toHaveBeenCalledWith(
'test-project',
'test-wiki',
);
expect(result).toEqual(mockPages);
expect(result.length).toBe(2);
});
test('should handle basic listing without parameters', async () => {
const mockPages: WikiPageSummary[] = [
{
id: 3,
path: '/docs/api',
url: 'https://dev.azure.com/org/project/_wiki/wikis/wiki1/3',
order: 1,
},
];
mockWikiClient.listWikiPages.mockResolvedValue(mockPages);
const result = await listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'test-wiki',
});
expect(mockWikiClient.listWikiPages).toHaveBeenCalledWith(
'test-project',
'test-wiki',
);
expect(result).toEqual(mockPages);
});
test('should handle nested pages correctly', async () => {
const mockPages: WikiPageSummary[] = [
{
id: 4,
path: '/deep/nested/page',
url: 'https://dev.azure.com/org/project/_wiki/wikis/wiki1/4',
order: 1,
},
];
mockWikiClient.listWikiPages.mockResolvedValue(mockPages);
const result = await listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'test-wiki',
});
expect(mockWikiClient.listWikiPages).toHaveBeenCalledWith(
'test-project',
'test-wiki',
);
expect(result).toEqual(mockPages);
});
test('should handle empty wiki correctly', async () => {
const mockPages: WikiPageSummary[] = [];
mockWikiClient.listWikiPages.mockResolvedValue(mockPages);
const result = await listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'test-wiki',
});
expect(mockWikiClient.listWikiPages).toHaveBeenCalledWith(
'test-project',
'test-wiki',
);
expect(result).toEqual(mockPages);
});
test('should return empty array when no pages found', async () => {
mockWikiClient.listWikiPages.mockResolvedValue([]);
const result = await listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'empty-wiki',
});
expect(result).toEqual([]);
expect(Array.isArray(result)).toBe(true);
});
test('should use default organization and project when not provided', async () => {
const mockPages: WikiPageSummary[] = [
{
id: 5,
path: '/default-page',
url: 'https://dev.azure.com/default-org/default-project/_wiki/wikis/wiki1/5',
order: 1,
},
];
mockWikiClient.listWikiPages.mockResolvedValue(mockPages);
const result = await listWikiPages({
wikiId: 'test-wiki',
});
expect(mockGetWikiClient).toHaveBeenCalledWith({
organizationId: 'azure-devops-mcp-testing', // Uses default from environment
});
expect(result).toEqual(mockPages);
});
});
describe('Error Scenarios', () => {
test('should handle network timeout errors', async () => {
const timeoutError = new Error('Network timeout');
timeoutError.name = 'ETIMEDOUT';
mockWikiClient.listWikiPages.mockRejectedValue(timeoutError);
await expect(
listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'test-wiki',
}),
).rejects.toThrow(AzureDevOpsError);
});
test('should handle connection refused errors', async () => {
const connectionError = new Error('Connection refused');
connectionError.name = 'ECONNREFUSED';
mockWikiClient.listWikiPages.mockRejectedValue(connectionError);
await expect(
listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'test-wiki',
}),
).rejects.toThrow(AzureDevOpsError);
});
test('should propagate AzureDevOpsResourceNotFoundError from client', async () => {
const notFoundError = new AzureDevOpsResourceNotFoundError(
'Wiki not found: test-wiki',
);
mockWikiClient.listWikiPages.mockRejectedValue(notFoundError);
await expect(
listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'non-existent-wiki',
}),
).rejects.toThrow(AzureDevOpsResourceNotFoundError);
});
test('should propagate AzureDevOpsPermissionError from client', async () => {
const permissionError = new AzureDevOpsPermissionError(
'Permission denied to access wiki',
);
mockWikiClient.listWikiPages.mockRejectedValue(permissionError);
await expect(
listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'restricted-wiki',
}),
).rejects.toThrow(AzureDevOpsPermissionError);
});
test('should wrap unknown errors in AzureDevOpsError', async () => {
const unknownError = new Error('Unknown error occurred');
mockWikiClient.listWikiPages.mockRejectedValue(unknownError);
await expect(
listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'test-wiki',
}),
).rejects.toThrow(AzureDevOpsError);
try {
await listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'test-wiki',
});
} catch (error) {
expect(error).toBeInstanceOf(AzureDevOpsError);
expect((error as AzureDevOpsError).message).toBe(
'Failed to list wiki pages',
);
}
});
test('should handle client creation failure', async () => {
const clientError = new Error('Failed to create client');
mockGetWikiClient.mockRejectedValue(clientError);
await expect(
listWikiPages({
organizationId: 'invalid-org',
projectId: 'test-project',
wikiId: 'test-wiki',
}),
).rejects.toThrow(AzureDevOpsError);
});
});
describe('Edge Cases and Input Validation', () => {
test('should handle malformed API response gracefully', async () => {
// Mock malformed response (missing required fields)
const malformedPages = [
{
id: 'invalid-id', // Should be number
path: null, // Should be string
url: undefined, // Should be string
},
];
mockWikiClient.listWikiPages.mockResolvedValue(malformedPages as any);
const result = await listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'test-wiki',
});
// Should still return the data as-is (transformation happens in client)
expect(result).toEqual(malformedPages);
});
test('should handle null/undefined response from client', async () => {
mockWikiClient.listWikiPages.mockResolvedValue(null as any);
await expect(
listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'test-wiki',
}),
).rejects.toThrow(AzureDevOpsError);
});
test('should handle very large page collections', async () => {
// Create a large mock dataset
const largeMockPages: WikiPageSummary[] = Array.from(
{ length: 10000 },
(_, i) => ({
id: i + 1,
path: `/page-${i + 1}`,
url: `https://dev.azure.com/org/project/_wiki/wikis/wiki1/${i + 1}`,
order: i + 1,
}),
);
mockWikiClient.listWikiPages.mockResolvedValue(largeMockPages);
const result = await listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'large-wiki',
});
expect(result).toEqual(largeMockPages);
expect(result.length).toBe(10000);
});
test('should handle pages with special characters in paths', async () => {
const specialCharPages: WikiPageSummary[] = [
{
id: 1,
path: '/页面-中文',
url: 'https://dev.azure.com/org/project/_wiki/wikis/wiki1/1',
order: 1,
},
{
id: 2,
path: '/página-español',
url: 'https://dev.azure.com/org/project/_wiki/wikis/wiki1/2',
order: 2,
},
{
id: 3,
path: '/page with spaces & symbols!@#$%',
url: 'https://dev.azure.com/org/project/_wiki/wikis/wiki1/3',
order: 3,
},
];
mockWikiClient.listWikiPages.mockResolvedValue(specialCharPages);
const result = await listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'special-wiki',
});
expect(result).toEqual(specialCharPages);
});
test('should handle pages with missing optional order field', async () => {
const pagesWithoutOrder: WikiPageSummary[] = [
{
id: 1,
path: '/page-1',
url: 'https://dev.azure.com/org/project/_wiki/wikis/wiki1/1',
// order field is optional and missing
} as WikiPageSummary,
{
id: 2,
path: '/page-2',
url: 'https://dev.azure.com/org/project/_wiki/wikis/wiki1/2',
order: 5,
},
];
mockWikiClient.listWikiPages.mockResolvedValue(pagesWithoutOrder);
const result = await listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'test-wiki',
});
expect(result).toEqual(pagesWithoutOrder);
expect(result[0].order).toBeUndefined();
expect(result[1].order).toBe(5);
});
});
describe('Parameter Validation Edge Cases', () => {
test('should handle basic parameter validation', async () => {
const mockPages: WikiPageSummary[] = [];
mockWikiClient.listWikiPages.mockResolvedValue(mockPages);
await listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'test-wiki',
});
expect(mockWikiClient.listWikiPages).toHaveBeenCalledWith(
'test-project',
'test-wiki',
);
await listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'test-wiki',
});
expect(mockWikiClient.listWikiPages).toHaveBeenCalledWith(
'test-project',
'test-wiki',
);
});
test('should handle empty string parameters', async () => {
const mockPages: WikiPageSummary[] = [];
mockWikiClient.listWikiPages.mockResolvedValue(mockPages);
await listWikiPages({
organizationId: '',
projectId: '',
wikiId: 'test-wiki',
});
expect(mockGetWikiClient).toHaveBeenCalledWith({
organizationId: 'azure-devops-mcp-testing', // Empty string gets overridden by default
});
expect(mockWikiClient.listWikiPages).toHaveBeenCalledWith(
'eShopOnWeb', // Empty string gets overridden by default project
'test-wiki',
);
});
});
describe('Data Transformation and Mapping', () => {
test('should preserve all WikiPageSummary fields correctly', async () => {
const mockPages: WikiPageSummary[] = [
{
id: 42,
path: '/test-page',
url: 'https://dev.azure.com/org/project/_wiki/wikis/wiki1/42',
order: 10,
},
];
mockWikiClient.listWikiPages.mockResolvedValue(mockPages);
const result = await listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'test-wiki',
});
expect(result[0]).toEqual({
id: 42,
path: '/test-page',
url: 'https://dev.azure.com/org/project/_wiki/wikis/wiki1/42',
order: 10,
});
});
test('should handle mixed data types in response', async () => {
const mixedPages = [
{
id: 1,
path: '/normal-page',
url: 'https://dev.azure.com/org/project/_wiki/wikis/wiki1/1',
order: 1,
},
{
id: 2,
path: '/page-without-order',
url: 'https://dev.azure.com/org/project/_wiki/wikis/wiki1/2',
// order is undefined
},
{
id: 3,
path: '/page-with-zero-order',
url: 'https://dev.azure.com/org/project/_wiki/wikis/wiki1/3',
order: 0,
},
];
mockWikiClient.listWikiPages.mockResolvedValue(
mixedPages as WikiPageSummary[],
);
const result = await listWikiPages({
organizationId: 'test-org',
projectId: 'test-project',
wikiId: 'test-wiki',
});
expect(result).toEqual(mixedPages);
expect(result[1].order).toBeUndefined();
expect(result[2].order).toBe(0);
});
});
});
```
--------------------------------------------------------------------------------
/src/features/wikis/list-wiki-pages/feature.spec.int.ts:
--------------------------------------------------------------------------------
```typescript
import { listWikiPages, WikiPageSummary } from './feature';
import { getWikis } from '../get-wikis/feature';
import {
getTestConnection,
shouldSkipIntegrationTest,
} from '@/shared/test/test-helpers';
import { getOrgNameFromUrl } from '@/utils/environment';
import { AzureDevOpsError } from '@/shared/errors/azure-devops-errors';
// Ensure environment variables are set for testing
process.env.AZURE_DEVOPS_DEFAULT_PROJECT =
process.env.AZURE_DEVOPS_DEFAULT_PROJECT || 'default-project';
describe('listWikiPages integration', () => {
let projectName: string;
let orgUrl: string;
let organizationId: string;
beforeAll(async () => {
// Mock the required environment variable for testing
process.env.AZURE_DEVOPS_ORG_URL =
process.env.AZURE_DEVOPS_ORG_URL || 'https://example.visualstudio.com';
// Get and validate required environment variables
const envProjectName = process.env.AZURE_DEVOPS_DEFAULT_PROJECT;
if (!envProjectName) {
throw new Error(
'AZURE_DEVOPS_DEFAULT_PROJECT environment variable is required',
);
}
projectName = envProjectName;
const envOrgUrl = process.env.AZURE_DEVOPS_ORG_URL;
if (!envOrgUrl) {
throw new Error('AZURE_DEVOPS_ORG_URL environment variable is required');
}
orgUrl = envOrgUrl;
organizationId = getOrgNameFromUrl(orgUrl);
});
describe('Happy Path Tests', () => {
test('should list pages in real test wiki', async () => {
// Skip if no connection available
if (shouldSkipIntegrationTest()) {
return;
}
// Get a real connection using environment variables
const connection = await getTestConnection();
if (!connection) {
throw new Error(
'Connection should be available when test is not skipped',
);
}
// First get available wikis
const wikis = await getWikis(connection, { projectId: projectName });
// Skip if no wikis are available
if (wikis.length === 0) {
console.log('Skipping test: No wikis available in the project');
return;
}
// Use the first available wiki
const wiki = wikis[0];
if (!wiki.name) {
throw new Error('Wiki name is undefined');
}
// List wiki pages
const result = await listWikiPages({
organizationId,
projectId: projectName,
wikiId: wiki.name,
});
// Verify the result structure
expect(result).toBeDefined();
expect(Array.isArray(result)).toBe(true);
// If pages exist, verify their structure matches WikiPageSummary interface
if (result.length > 0) {
const page = result[0];
expect(page).toHaveProperty('id');
expect(page).toHaveProperty('path');
expect(page).toHaveProperty('url');
expect(typeof page.id).toBe('number');
expect(typeof page.path).toBe('string');
// url and order are optional
if (page.url !== undefined) {
expect(typeof page.url).toBe('string');
}
if (page.order !== undefined) {
expect(typeof page.order).toBe('number');
}
}
});
test('should handle wiki listing for different wiki structures', async () => {
// Skip if integration tests are disabled or no connection available
if (shouldSkipIntegrationTest()) {
return;
}
// Get a real connection using environment variables
const connection = await getTestConnection();
if (!connection) {
throw new Error(
'Connection should be available when test is not skipped',
);
}
// First get available wikis
const wikis = await getWikis(connection, { projectId: projectName });
// Skip if no wikis are available
if (wikis.length === 0) {
console.log('Skipping test: No wikis available in the project');
return;
}
// Use the first available wiki
const wiki = wikis[0];
if (!wiki.name) {
throw new Error('Wiki name is undefined');
}
// Get all pages for different wiki structures
const allPages = await listWikiPages({
organizationId,
projectId: projectName,
wikiId: wiki.name,
});
expect(Array.isArray(allPages)).toBe(true);
// If we have pages, verify they have expected structure
if (allPages.length > 0) {
const firstPage = allPages[0];
expect(firstPage).toHaveProperty('id');
expect(firstPage).toHaveProperty('path');
expect(firstPage).toHaveProperty('url');
// Verify nested pages if they exist
const nestedPages = allPages.filter(
(page) => page.path.includes('/') && page.path !== '/',
);
console.log(
`Found ${nestedPages.length} nested pages out of ${allPages.length} total pages`,
);
}
});
test('should handle basic wiki page listing consistently', async () => {
// Skip if integration tests are disabled or no connection available
if (shouldSkipIntegrationTest()) {
return;
}
// Get a real connection using environment variables
const connection = await getTestConnection();
if (!connection) {
throw new Error(
'Connection should be available when test is not skipped',
);
}
// First get available wikis
const wikis = await getWikis(connection, { projectId: projectName });
// Skip if no wikis are available
if (wikis.length === 0) {
console.log('Skipping test: No wikis available in the project');
return;
}
// Use the first available wiki
const wiki = wikis[0];
if (!wiki.name) {
throw new Error('Wiki name is undefined');
}
// Test basic page listing
const firstResult = await listWikiPages({
organizationId,
projectId: projectName,
wikiId: wiki.name,
});
expect(Array.isArray(firstResult)).toBe(true);
// Test again to ensure consistency
const secondResult = await listWikiPages({
organizationId,
projectId: projectName,
wikiId: wiki.name,
});
expect(Array.isArray(secondResult)).toBe(true);
// Results should be consistent
expect(secondResult.length).toBe(firstResult.length);
});
});
describe('Error Scenarios', () => {
test('should handle invalid wikiId (expect 404 error)', async () => {
// Skip if integration tests are disabled or no connection available
if (shouldSkipIntegrationTest()) {
return;
}
const invalidWikiId = 'non-existent-wiki-id-12345';
await expect(
listWikiPages({
organizationId,
projectId: projectName,
wikiId: invalidWikiId,
}),
).rejects.toThrow(AzureDevOpsError);
});
test('should handle invalid projectId', async () => {
// Skip if integration tests are disabled or no connection available
if (shouldSkipIntegrationTest()) {
return;
}
const invalidProjectId = 'non-existent-project-12345';
await expect(
listWikiPages({
organizationId,
projectId: invalidProjectId,
wikiId: 'any-wiki',
}),
).rejects.toThrow(AzureDevOpsError);
});
test('should handle invalid organizationId', async () => {
// Skip if integration tests are disabled or no connection available
if (shouldSkipIntegrationTest()) {
return;
}
const invalidOrgId = 'non-existent-org-12345';
await expect(
listWikiPages({
organizationId: invalidOrgId,
projectId: projectName,
wikiId: 'any-wiki',
}),
).rejects.toThrow(AzureDevOpsError);
});
});
describe('Edge Cases', () => {
test('should handle empty wikis gracefully', async () => {
// Skip if integration tests are disabled or no connection available
if (shouldSkipIntegrationTest()) {
return;
}
// Get a real connection using environment variables
const connection = await getTestConnection();
if (!connection) {
throw new Error(
'Connection should be available when test is not skipped',
);
}
// First get available wikis
const wikis = await getWikis(connection, { projectId: projectName });
// Skip if no wikis are available
if (wikis.length === 0) {
console.log('Skipping test: No wikis available in the project');
return;
}
// Use the first available wiki
const wiki = wikis[0];
if (!wiki.name) {
throw new Error('Wiki name is undefined');
}
// List pages for the wiki (no filters are applied; the wiki may have no pages)
const result = await listWikiPages({
organizationId,
projectId: projectName,
wikiId: wiki.name,
});
// Should return an array (may be empty or contain all pages depending on API behavior)
expect(Array.isArray(result)).toBe(true);
// Note: an empty wiki yields an empty array; a populated wiki returns all of its pages
console.log(`Empty-wiki handling test returned ${result.length} pages`);
});
test('should handle deeply nested paths', async () => {
// Skip if integration tests are disabled or no connection available
if (shouldSkipIntegrationTest()) {
return;
}
// Get a real connection using environment variables
const connection = await getTestConnection();
if (!connection) {
throw new Error(
'Connection should be available when test is not skipped',
);
}
// First get available wikis
const wikis = await getWikis(connection, { projectId: projectName });
// Skip if no wikis are available
if (wikis.length === 0) {
console.log('Skipping test: No wikis available in the project');
return;
}
// Use the first available wiki
const wiki = wikis[0];
if (!wiki.name) {
throw new Error('Wiki name is undefined');
}
// Test with default parameters
const result = await listWikiPages({
organizationId,
projectId: projectName,
wikiId: wiki.name,
});
expect(Array.isArray(result)).toBe(true);
// Should not throw error with basic parameters
});
test('should handle boundary recursionLevel values', async () => {
// Skip if integration tests are disabled or no connection available
if (shouldSkipIntegrationTest()) {
return;
}
// Get a real connection using environment variables
const connection = await getTestConnection();
if (!connection) {
throw new Error(
'Connection should be available when test is not skipped',
);
}
// First get available wikis
const wikis = await getWikis(connection, { projectId: projectName });
// Skip if no wikis are available
if (wikis.length === 0) {
console.log('Skipping test: No wikis available in the project');
return;
}
// Use the first available wiki
const wiki = wikis[0];
if (!wiki.name) {
throw new Error('Wiki name is undefined');
}
// Test basic page listing
const firstResult = await listWikiPages({
organizationId,
projectId: projectName,
wikiId: wiki.name,
});
expect(Array.isArray(firstResult)).toBe(true);
// Test again for consistency
const secondResult = await listWikiPages({
organizationId,
projectId: projectName,
wikiId: wiki.name,
});
expect(Array.isArray(secondResult)).toBe(true);
});
});
describe('Data Structure Validation', () => {
test('should verify returned data structure matches WikiPageSummary interface', async () => {
// Skip if integration tests are disabled or no connection available
if (shouldSkipIntegrationTest()) {
return;
}
// Get a real connection using environment variables
const connection = await getTestConnection();
if (!connection) {
throw new Error(
'Connection should be available when test is not skipped',
);
}
// First get available wikis
const wikis = await getWikis(connection, { projectId: projectName });
// Skip if no wikis are available
if (wikis.length === 0) {
console.log('Skipping test: No wikis available in the project');
return;
}
// Use the first available wiki
const wiki = wikis[0];
if (!wiki.name) {
throw new Error('Wiki name is undefined');
}
const result = await listWikiPages({
organizationId,
projectId: projectName,
wikiId: wiki.name,
});
expect(Array.isArray(result)).toBe(true);
// Validate each page in the result
result.forEach((page: WikiPageSummary) => {
// Required fields
expect(page).toHaveProperty('id');
expect(page).toHaveProperty('path');
expect(page).toHaveProperty('url');
expect(typeof page.id).toBe('number');
expect(typeof page.path).toBe('string');
// Optional fields
if (page.url !== undefined) {
expect(typeof page.url).toBe('string');
}
if (page.order !== undefined) {
expect(typeof page.order).toBe('number');
}
// Validate URL format (if present)
if (page.url !== undefined) {
expect(page.url).toMatch(/^https?:\/\//);
}
// Validate path format (should start with /)
expect(page.path).toMatch(/^\//);
});
});
});
describe('Performance and Pagination', () => {
test('should handle large wiki structures efficiently', async () => {
// Skip if integration tests are disabled or no connection available
if (shouldSkipIntegrationTest()) {
return;
}
// Get a real connection using environment variables
const connection = await getTestConnection();
if (!connection) {
throw new Error(
'Connection should be available when test is not skipped',
);
}
// First get available wikis
const wikis = await getWikis(connection, { projectId: projectName });
// Skip if no wikis are available
if (wikis.length === 0) {
console.log('Skipping test: No wikis available in the project');
return;
}
// Use the first available wiki
const wiki = wikis[0];
if (!wiki.name) {
throw new Error('Wiki name is undefined');
}
const startTime = Date.now();
const result = await listWikiPages({
organizationId,
projectId: projectName,
wikiId: wiki.name,
});
const endTime = Date.now();
const duration = endTime - startTime;
expect(Array.isArray(result)).toBe(true);
// Performance check - should complete within reasonable time (30 seconds)
expect(duration).toBeLessThan(30000);
console.log(`Retrieved ${result.length} pages in ${duration}ms`);
});
});
});
```
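For reference, this is the `WikiPageSummary` shape exercised by the two wiki test files above, together with a sketch of the "sort by order, then path" ordering described in the task notes later in this document. The comparator and its tie-breaking choice are illustrative assumptions, not the repository's exact implementation.

```typescript
// Sketch: the page summary shape used by the wiki tests, and one possible
// order-then-path comparator. Pages without an explicit order sort last here;
// that tie-breaking choice is an assumption.
interface WikiPageSummary {
  id: number;
  path: string;
  url: string; // the integration test treats url (and order) as possibly undefined
  order?: number;
}

function sortWikiPages(pages: WikiPageSummary[]): WikiPageSummary[] {
  return [...pages].sort((a, b) => {
    const orderA = a.order ?? Number.MAX_SAFE_INTEGER;
    const orderB = b.order ?? Number.MAX_SAFE_INTEGER;
    if (orderA !== orderB) return orderA - orderB;
    return a.path.localeCompare(b.path);
  });
}

// Example: the unordered page sorts after the ordered one.
console.log(
  sortWikiPages([
    { id: 2, path: '/b', url: 'https://example.test/wiki/2' },
    { id: 1, path: '/a', url: 'https://example.test/wiki/1', order: 1 },
  ]).map((p) => p.path),
); // -> [ '/a', '/b' ]
```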
--------------------------------------------------------------------------------
/.github/skills/skill-creator/SKILL.md:
--------------------------------------------------------------------------------
```markdown
---
name: skill-creator
description: Guide for creating effective skills. This skill should be used when users want to create a new skill (or update an existing skill) that extends Claude's capabilities with specialized knowledge, workflows, or tool integrations.
license: Complete terms in LICENSE.txt
---
# Skill Creator
This skill provides guidance for creating effective skills.
## About Skills
Skills are modular, self-contained packages that extend Claude's capabilities by providing
specialized knowledge, workflows, and tools. Think of them as "onboarding guides" for specific
domains or tasks—they transform Claude from a general-purpose agent into a specialized agent
equipped with procedural knowledge that no model can fully possess.
### What Skills Provide
1. Specialized workflows - Multi-step procedures for specific domains
2. Tool integrations - Instructions for working with specific file formats or APIs
3. Domain expertise - Company-specific knowledge, schemas, business logic
4. Bundled resources - Scripts, references, and assets for complex and repetitive tasks
## Core Principles
### Concise is Key
The context window is a public good. Skills share the context window with everything else Claude needs: system prompt, conversation history, other Skills' metadata, and the actual user request.
**Default assumption: Claude is already very smart.** Only add context Claude doesn't already have. Challenge each piece of information: "Does Claude really need this explanation?" and "Does this paragraph justify its token cost?"
Prefer concise examples over verbose explanations.
### Set Appropriate Degrees of Freedom
Match the level of specificity to the task's fragility and variability:
**High freedom (text-based instructions)**: Use when multiple approaches are valid, decisions depend on context, or heuristics guide the approach.
**Medium freedom (pseudocode or scripts with parameters)**: Use when a preferred pattern exists, some variation is acceptable, or configuration affects behavior.
**Low freedom (specific scripts, few parameters)**: Use when operations are fragile and error-prone, consistency is critical, or a specific sequence must be followed.
Think of Claude as exploring a path: a narrow bridge with cliffs needs specific guardrails (low freedom), while an open field allows many routes (high freedom).
### Anatomy of a Skill
Every skill consists of a required SKILL.md file and optional bundled resources:
```
skill-name/
├── SKILL.md (required)
│ ├── YAML frontmatter metadata (required)
│ │ ├── name: (required)
│ │ └── description: (required)
│ └── Markdown instructions (required)
└── Bundled Resources (optional)
├── scripts/ - Executable code (Python/Bash/etc.)
├── references/ - Documentation intended to be loaded into context as needed
└── assets/ - Files used in output (templates, icons, fonts, etc.)
```
#### SKILL.md (required)
Every SKILL.md consists of:
- **Frontmatter** (YAML): Contains `name` and `description` fields. These are the only fields Claude reads when deciding whether to use the skill, so it is very important to describe clearly and comprehensively what the skill does and when it should be used.
- **Body** (Markdown): Instructions and guidance for using the skill. Only loaded AFTER the skill triggers (if at all).
#### Bundled Resources (optional)
##### Scripts (`scripts/`)
Executable code (Python/Bash/etc.) for tasks that require deterministic reliability or are repeatedly rewritten.
- **When to include**: When the same code is being rewritten repeatedly or deterministic reliability is needed
- **Example**: `scripts/rotate_pdf.py` for PDF rotation tasks
- **Benefits**: Token efficient, deterministic, may be executed without loading into context
- **Note**: Scripts may still need to be read by Claude for patching or environment-specific adjustments
##### References (`references/`)
Documentation and reference material intended to be loaded as needed into context to inform Claude's process and thinking.
- **When to include**: For documentation that Claude should reference while working
- **Examples**: `references/finance.md` for financial schemas, `references/mnda.md` for company NDA template, `references/policies.md` for company policies, `references/api_docs.md` for API specifications
- **Use cases**: Database schemas, API documentation, domain knowledge, company policies, detailed workflow guides
- **Benefits**: Keeps SKILL.md lean, loaded only when Claude determines it's needed
- **Best practice**: If files are large (>10k words), include grep search patterns in SKILL.md
- **Avoid duplication**: Information should live in either SKILL.md or references files, not both. Prefer references files for detailed information unless it's truly core to the skill—this keeps SKILL.md lean while making information discoverable without hogging the context window. Keep only essential procedural instructions and workflow guidance in SKILL.md; move detailed reference material, schemas, and examples to references files.
##### Assets (`assets/`)
Files not intended to be loaded into context, but rather used within the output Claude produces.
- **When to include**: When the skill needs files that will be used in the final output
- **Examples**: `assets/logo.png` for brand assets, `assets/slides.pptx` for PowerPoint templates, `assets/frontend-template/` for HTML/React boilerplate, `assets/font.ttf` for typography
- **Use cases**: Templates, images, icons, boilerplate code, fonts, sample documents that get copied or modified
- **Benefits**: Separates output resources from documentation, enables Claude to use files without loading them into context
#### What Not to Include in a Skill
A skill should only contain essential files that directly support its functionality. Do NOT create extraneous documentation or auxiliary files, including:
- README.md
- INSTALLATION_GUIDE.md
- QUICK_REFERENCE.md
- CHANGELOG.md
- etc.
The skill should contain only the information an AI agent needs to do the job at hand. It should not contain auxiliary context about the process that went into creating it, setup and testing procedures, user-facing documentation, and so on. Creating additional documentation files just adds clutter and confusion.
### Progressive Disclosure Design Principle
Skills use a three-level loading system to manage context efficiently:
1. **Metadata (name + description)** - Always in context (~100 words)
2. **SKILL.md body** - When skill triggers (<5k words)
3. **Bundled resources** - As needed by Claude (effectively unlimited, because scripts can be executed without being read into the context window)
#### Progressive Disclosure Patterns
Keep SKILL.md body to the essentials and under 500 lines to minimize context bloat. Split content into separate files when approaching this limit. When splitting out content into other files, it is very important to reference them from SKILL.md and describe clearly when to read them, to ensure the reader of the skill knows they exist and when to use them.
**Key principle:** When a skill supports multiple variations, frameworks, or options, keep only the core workflow and selection guidance in SKILL.md. Move variant-specific details (patterns, examples, configuration) into separate reference files.
**Pattern 1: High-level guide with references**
```markdown
# PDF Processing
## Quick start
Extract text with pdfplumber:
[code example]
## Advanced features
- **Form filling**: See [FORMS.md](FORMS.md) for complete guide
- **API reference**: See [REFERENCE.md](REFERENCE.md) for all methods
- **Examples**: See [EXAMPLES.md](EXAMPLES.md) for common patterns
```
Claude loads FORMS.md, REFERENCE.md, or EXAMPLES.md only when needed.
**Pattern 2: Domain-specific organization**
For Skills with multiple domains, organize content by domain to avoid loading irrelevant context:
```
bigquery-skill/
├── SKILL.md (overview and navigation)
└── reference/
├── finance.md (revenue, billing metrics)
├── sales.md (opportunities, pipeline)
├── product.md (API usage, features)
└── marketing.md (campaigns, attribution)
```
When a user asks about sales metrics, Claude only reads sales.md.
Similarly, for skills supporting multiple frameworks or variants, organize by variant:
```
cloud-deploy/
├── SKILL.md (workflow + provider selection)
└── references/
├── aws.md (AWS deployment patterns)
├── gcp.md (GCP deployment patterns)
└── azure.md (Azure deployment patterns)
```
When the user chooses AWS, Claude only reads aws.md.
**Pattern 3: Conditional details**
Show basic content, link to advanced content:
```markdown
# DOCX Processing
## Creating documents
Use docx-js for new documents. See [DOCX-JS.md](DOCX-JS.md).
## Editing documents
For simple edits, modify the XML directly.
**For tracked changes**: See [REDLINING.md](REDLINING.md)
**For OOXML details**: See [OOXML.md](OOXML.md)
```
Claude reads REDLINING.md or OOXML.md only when the user needs those features.
**Important guidelines:**
- **Avoid deeply nested references** - Keep references one level deep from SKILL.md. All reference files should link directly from SKILL.md.
- **Structure longer reference files** - For files longer than 100 lines, include a table of contents at the top so Claude can see the full scope when previewing.
## Skill Creation Process
Skill creation involves these steps:
1. Understand the skill with concrete examples
2. Plan reusable skill contents (scripts, references, assets)
3. Initialize the skill (run init_skill.py)
4. Edit the skill (implement resources and write SKILL.md)
5. Iterate based on real usage
Follow these steps in order, skipping only if there is a clear reason why they are not applicable.
### Step 1: Understanding the Skill with Concrete Examples
Skip this step only when the skill's usage patterns are already clearly understood. It remains valuable even when working with an existing skill.
To create an effective skill, clearly understand concrete examples of how the skill will be used. This understanding can come from either direct user examples or generated examples that are validated with user feedback.
For example, when building an image-editor skill, relevant questions include:
- "What functionality should the image-editor skill support? Editing, rotating, anything else?"
- "Can you give some examples of how this skill would be used?"
- "I can imagine users asking for things like 'Remove the red-eye from this image' or 'Rotate this image'. Are there other ways you imagine this skill being used?"
- "What would a user say that should trigger this skill?"
To avoid overwhelming users, do not ask too many questions in a single message. Start with the most important questions and follow up as needed.
Conclude this step when there is a clear sense of the functionality the skill should support.
### Step 2: Planning the Reusable Skill Contents
To turn concrete examples into an effective skill, analyze each example by:
1. Considering how to execute on the example from scratch
2. Identifying what scripts, references, and assets would be helpful when executing these workflows repeatedly
Example: When building a `pdf-editor` skill to handle queries like "Help me rotate this PDF," the analysis shows:
1. Rotating a PDF requires re-writing the same code each time
2. A `scripts/rotate_pdf.py` script would be helpful to store in the skill
Example: When designing a `frontend-webapp-builder` skill for queries like "Build me a todo app" or "Build me a dashboard to track my steps," the analysis shows:
1. Writing a frontend webapp requires the same boilerplate HTML/React each time
2. An `assets/hello-world/` template containing the boilerplate HTML/React project files would be helpful to store in the skill
Example: When building a `big-query` skill to handle queries like "How many users have logged in today?" the analysis shows:
1. Querying BigQuery requires re-discovering the table schemas and relationships each time
2. A `references/schema.md` file documenting the table schemas would be helpful to store in the skill
To establish the skill's contents, analyze each concrete example to create a list of the reusable resources to include: scripts, references, and assets.
### Step 3: Initializing the Skill
At this point, it is time to actually create the skill.
Skip this step only if the skill being developed already exists, and iteration or packaging is needed. In this case, continue to the next step.
When creating a new skill from scratch, always run the `init_skill.py` script. The script conveniently generates a new template skill directory that automatically includes everything a skill requires, making the skill creation process much more efficient and reliable.
Usage:
```bash
scripts/init_skill.py <skill-name> --path <output-directory>
```
The script:
- Creates the skill directory at the specified path
- Generates a SKILL.md template with proper frontmatter and TODO placeholders
- Creates example resource directories: `scripts/`, `references/`, and `assets/`
- Adds example files in each directory that can be customized or deleted
After initialization, customize or remove the generated SKILL.md and example files as needed.
### Step 4: Edit the Skill
When editing the (newly-generated or existing) skill, remember that the skill is being created for another instance of Claude to use. Include information that would be beneficial and non-obvious to Claude. Consider what procedural knowledge, domain-specific details, or reusable assets would help another Claude instance execute these tasks more effectively.
#### Learn Proven Design Patterns
Consult these helpful guides based on your skill's needs:
- **Multi-step processes**: See references/workflows.md for sequential workflows and conditional logic
- **Specific output formats or quality standards**: See references/output-patterns.md for template and example patterns
These files contain established best practices for effective skill design.
#### Start with Reusable Skill Contents
To begin implementation, start with the reusable resources identified above: `scripts/`, `references/`, and `assets/` files. Note that this step may require user input. For example, when implementing a `brand-guidelines` skill, the user may need to provide brand assets or templates to store in `assets/`, or documentation to store in `references/`.
Added scripts must be tested by actually running them to confirm there are no bugs and that the output matches expectations. If there are many similar scripts, testing a representative sample is enough to build confidence that they all work while keeping the time investment reasonable.
Any example files and directories not needed for the skill should be deleted. The initialization script creates example files in `scripts/`, `references/`, and `assets/` to demonstrate structure, but most skills won't need all of them.
#### Update SKILL.md
**Writing Guidelines:** Always use imperative/infinitive form.
##### Frontmatter
Write the YAML frontmatter with `name` and `description`:
- `name`: The skill name
- `description`: This is the primary triggering mechanism for your skill, and helps Claude understand when to use the skill.
- Include both what the Skill does and specific triggers/contexts for when to use it.
- Include all "when to use" information here - Not in the body. The body is only loaded after triggering, so "When to Use This Skill" sections in the body are not helpful to Claude.
- Example description for a `docx` skill: "Comprehensive document creation, editing, and analysis with support for tracked changes, comments, formatting preservation, and text extraction. Use when Claude needs to work with professional documents (.docx files) for: (1) Creating new documents, (2) Modifying or editing content, (3) Working with tracked changes, (4) Adding comments, or any other document tasks"
Do not include any other fields in YAML frontmatter.
##### Body
Write instructions for using the skill and its bundled resources.
### Step 5: Iterate
After testing the skill, users may request improvements. Often this happens right after using the skill, with fresh context of how the skill performed.
**Iteration workflow:**
1. Use the skill on real tasks
2. Notice struggles or inefficiencies
3. Identify how SKILL.md or bundled resources should be updated
4. Implement changes and test again
```
--------------------------------------------------------------------------------
/tasks.json:
--------------------------------------------------------------------------------
```json
{
"tasks": [
{
"id": "42e6533a-f407-4286-be04-4d76fdfd8734",
"name": "Create list-wiki-pages directory structure and schema",
"description": "Create the folder structure src/features/wikis/list-wiki-pages/ with schema.ts and index.ts files. Implement Zod schema validation for ListWikiPagesSchema with organizationId, projectId, wikiId, path, and recursionLevel parameters following existing wiki patterns.",
"status": "completed",
"dependencies": [],
"createdAt": "2025-05-26T16:18:03.641Z",
"updatedAt": "2025-05-26T16:18:03.641Z",
"relatedFiles": [
{
"path": "src/features/wikis/list-wiki-pages/schema.ts",
"type": "CREATE",
"description": "Zod schema for list wiki pages parameters"
},
{
"path": "src/features/wikis/list-wiki-pages/index.ts",
"type": "CREATE",
"description": "Export file for list wiki pages feature"
},
{
"path": "src/features/wikis/get-wikis/schema.ts",
"type": "REFERENCE",
"description": "Reference pattern for schema structure"
},
{
"path": "src/utils/environment.ts",
"type": "REFERENCE",
"description": "Default organization and project utilities"
}
],
"implementationGuide": "1. Create directory: src/features/wikis/list-wiki-pages/\n2. Create schema.ts with ListWikiPagesSchema using z.object():\n - organizationId: z.string().optional().describe()\n - projectId: z.string().optional().describe()\n - wikiId: z.string().describe()\n - path: z.string().optional().describe()\n - recursionLevel: z.number().int().min(1).max(50).optional().describe()\n3. Import defaultOrg, defaultProject from utils/environment\n4. Create index.ts with exports for schema and future feature function\n5. Follow exact patterns from src/features/wikis/get-wikis/schema.ts",
"verificationCriteria": "Schema compiles without errors, exports are properly defined, follows existing naming conventions, includes proper TypeScript types and Zod validation",
"analysisResult": "Implement list_wiki_pages tool for Azure DevOps MCP server following GitHub issue #184 requirements. The implementation extends existing WikiClient with Azure DevOps Pages Batch API support, includes comprehensive pagination handling, and maintains consistency with established codebase patterns for schema validation, error handling, and testing."
},
{
"id": "6b895c15-b337-444b-908a-e50a5ae07da3",
"name": "Extend WikiClient with listWikiPages method",
"description": "Add listWikiPages method to WikiClient class in src/clients/azure-devops.ts. Implement Azure DevOps Pages Batch API call with POST request, pagination loop using continuationToken, and proper error handling.",
"status": "completed",
"dependencies": [],
"createdAt": "2025-05-26T16:18:03.641Z",
"updatedAt": "2025-05-26T21:57:06.473Z",
"relatedFiles": [
{
"path": "src/clients/azure-devops.ts",
"type": "TO_MODIFY",
"description": "Add listWikiPages method to WikiClient class",
"lineStart": 45,
"lineEnd": 532
}
],
"implementationGuide": "1. Add listWikiPages method to WikiClient class\n2. Method signature: async listWikiPages(projectId: string, wikiId: string, options?: {path?: string, recursionLevel?: number})\n3. Implement POST request to: {baseUrl}/{project}/_apis/wiki/wikis/{wikiId}/pagesbatch?api-version=7.1\n4. Request body: {top: 1000, continuationToken?, path?, recursionLevel?}\n5. Pagination loop: while continuationToken exists, make subsequent requests\n6. Concatenate all results from response.data.value arrays\n7. Error handling: 404 -> AzureDevOpsResourceNotFoundError, 401/403 -> AzureDevOpsPermissionError\n8. Return WikiPageSummary[] with {id, path, url, order} fields\n9. Sort results by order then path",
"verificationCriteria": "Method compiles without errors, implements proper pagination loop, handles all error cases, returns correctly typed results, follows existing WikiClient method patterns",
"analysisResult": "Implement list_wiki_pages tool for Azure DevOps MCP server following GitHub issue #184 requirements. The implementation extends existing WikiClient with Azure DevOps Pages Batch API support, includes comprehensive pagination handling, and maintains consistency with established codebase patterns for schema validation, error handling, and testing.",
"summary": "Successfully implemented the listWikiPages method in WikiClient class. The implementation includes: 1) Added WikiPageSummary interface with id, path, url, and order fields as required. 2) Implemented POST request to Azure DevOps Pages Batch API with proper pagination using continuationToken. 3) Added comprehensive error handling for 404 (AzureDevOpsResourceNotFoundError) and 401/403 (AzureDevOpsPermissionError) status codes. 4) Implemented sorting by order then path as specified. 5) Method signature matches requirements with optional path and recursionLevel parameters. 6) Code compiles without errors and follows existing WikiClient patterns. 7) All TypeScript types are properly defined and exported.",
"completedAt": "2025-05-26T21:57:06.472Z"
},
{
"id": "6f042d63-fa61-42c9-b7b0-820495aec9ba",
"name": "Implement list-wiki-pages feature function",
"description": "Create feature.ts with listWikiPages function that uses the WikiClient method. Define WikiPageSummary interface and implement the main feature logic with proper error handling and type safety.",
"status": "completed",
"dependencies": [
{
"taskId": "42e6533a-f407-4286-be04-4d76fdfd8734"
},
{
"taskId": "6b895c15-b337-444b-908a-e50a5ae07da3"
}
],
"createdAt": "2025-05-26T16:18:03.641Z",
"updatedAt": "2025-05-26T22:44:06.001Z",
"relatedFiles": [
{
"path": "src/features/wikis/list-wiki-pages/feature.ts",
"type": "CREATE",
"description": "Main feature implementation"
}
],
"implementationGuide": "1. Create src/features/wikis/list-wiki-pages/feature.ts\n2. Define WikiPageSummary interface: {id: number, path: string, url: string, order?: number}\n3. Define ListWikiPagesOptions interface matching schema\n4. Implement listWikiPages function:\n - Import WikiClient from clients/azure-devops\n - Use organizationId || defaultOrg, projectId || defaultProject\n - Call wikiClient.listWikiPages() with proper parameters\n - Handle errors with try/catch and proper error type conversion\n - Return WikiPageSummary[] array\n5. Follow patterns from src/features/wikis/get-wiki-page/feature.ts",
"verificationCriteria": "Feature function compiles and exports correctly, proper error handling, type safety maintained, follows existing feature patterns, integrates properly with WikiClient",
"analysisResult": "Implement list_wiki_pages tool for Azure DevOps MCP server following GitHub issue #184 requirements. The implementation extends existing WikiClient with Azure DevOps Pages Batch API support, includes comprehensive pagination handling, and maintains consistency with established codebase patterns for schema validation, error handling, and testing.",
"summary": "Successfully implemented the list-wiki-pages feature function with all required components: Created src/features/wikis/list-wiki-pages/feature.ts with WikiPageSummary interface {id: number, path: string, url: string, order?: number}, imported ListWikiPagesOptions from schema, implemented listWikiPages function using WikiClient.listWikiPages() method with proper error handling, default organization/project handling, and type conversion from client's string id to number id. The implementation follows established patterns from get-wiki-page feature, compiles without TypeScript errors, and integrates properly with the existing WikiClient.",
"completedAt": "2025-05-26T22:44:06.000Z"
},
{
"id": "29f527f5-a069-4c2d-900b-eb3c7ac478d2",
"name": "Add tool definition and update wikis module exports",
"description": "Add list_wiki_pages tool definition to tool-definitions.ts and update the main wikis index.ts to include the new feature exports and request handler case.",
"status": "completed",
"dependencies": [
{
"taskId": "42e6533a-f407-4286-be04-4d76fdfd8734"
},
{
"taskId": "6f042d63-fa61-42c9-b7b0-820495aec9ba"
}
],
"createdAt": "2025-05-26T16:18:03.641Z",
"updatedAt": "2025-05-26T22:52:55.297Z",
"relatedFiles": [
{
"path": "src/features/wikis/tool-definitions.ts",
"type": "TO_MODIFY",
"description": "Add list_wiki_pages tool definition"
},
{
"path": "src/features/wikis/index.ts",
"type": "TO_MODIFY",
"description": "Add exports and request handler case"
}
],
"implementationGuide": "1. Update src/features/wikis/tool-definitions.ts:\n - Import ListWikiPagesSchema\n - Add tool definition: {name: 'list_wiki_pages', description: 'List pages within an Azure DevOps wiki', inputSchema: zodToJsonSchema(ListWikiPagesSchema)}\n2. Update src/features/wikis/index.ts:\n - Add exports: export {listWikiPages, ListWikiPagesSchema} from './list-wiki-pages'\n - Add 'list_wiki_pages' to isWikisRequest array\n - Add case in handleWikisRequest switch statement\n - Parse args with ListWikiPagesSchema.parse()\n - Call listWikiPages with proper parameters\n - Return JSON.stringify(result, null, 2) in content array",
"verificationCriteria": "Tool definition is properly added, exports are correct, request handler case works, follows existing patterns for tool registration and handling",
"analysisResult": "Implement list_wiki_pages tool for Azure DevOps MCP server following GitHub issue #184 requirements. The implementation extends existing WikiClient with Azure DevOps Pages Batch API support, includes comprehensive pagination handling, and maintains consistency with established codebase patterns for schema validation, error handling, and testing.",
"summary": "Successfully implemented the list_wiki_pages tool definition and updated wikis module exports. Added ListWikiPagesSchema import to tool-definitions.ts, created the tool definition with proper name, description, and schema. Updated main wikis index.ts to export listWikiPages and ListWikiPagesSchema, added 'list_wiki_pages' to the request identifier array, and implemented the request handler case with proper argument parsing and function call. Also fixed the missing export in list-wiki-pages/index.ts. All changes follow existing patterns and the build compiles successfully without errors.",
"completedAt": "2025-05-26T22:52:55.296Z"
},
{
"id": "457c0d1b-3635-49d7-916e-0e9aeb4f370f",
"name": "Implement comprehensive integration tests",
"description": "Create feature.spec.int.ts with comprehensive integration tests that test against real Azure DevOps API. This is the primary testing approach, covering happy path, error scenarios, and edge cases with real API responses.",
"status": "completed",
"dependencies": [
{
"taskId": "6f042d63-fa61-42c9-b7b0-820495aec9ba"
}
],
"createdAt": "2025-05-26T16:18:03.641Z",
"updatedAt": "2025-05-26T23:24:09.973Z",
"relatedFiles": [
{
"path": "src/features/wikis/list-wiki-pages/feature.spec.int.ts",
"type": "CREATE",
"description": "Integration tests for list wiki pages feature"
}
],
"implementationGuide": "1. Create src/features/wikis/list-wiki-pages/feature.spec.int.ts\n2. Add environment guard: process.env.AZDO_INT_TESTS === 'true'\n3. Comprehensive test cases with real Azure DevOps API:\n - List pages in real test wiki (happy path)\n - Handle invalid wikiId (expect 404 error)\n - Test path filtering with real wiki structure\n - Test recursionLevel parameter with various values\n - Test pagination with large wiki structures\n - Verify returned data structure matches WikiPageSummary interface\n - Test edge cases like empty wikis, deeply nested paths\n - Error scenarios: permission errors, network issues\n4. Follow patterns from src/features/wikis/get-wikis/feature.spec.int.ts\n5. Use real Azure DevOps connection and test data\n6. Include proper cleanup and comprehensive error handling",
"verificationCriteria": "Integration tests provide comprehensive coverage with real Azure DevOps API, proper environment guards, tests validate real data structure and all major scenarios, follows existing integration test patterns",
"analysisResult": "Implement list_wiki_pages tool for Azure DevOps MCP server following GitHub issue #184 requirements. The implementation extends existing WikiClient with Azure DevOps Pages Batch API support, includes comprehensive pagination handling, and maintains consistency with established codebase patterns for schema validation, error handling, and testing.",
"summary": "Successfully implemented comprehensive integration tests for list-wiki-pages feature. Created src/features/wikis/list-wiki-pages/feature.spec.int.ts with complete test coverage including: environment guard (AZDO_INT_TESTS === 'true'), happy path tests with real Azure DevOps API, error scenarios for invalid wikiId/projectId/organizationId, edge cases for empty wikis and deeply nested paths, data structure validation matching WikiPageSummary interface, performance tests for large wiki structures, path filtering tests, recursionLevel parameter testing with boundary values (1-50), and proper cleanup with comprehensive error handling. All tests follow existing integration test patterns from get-wikis and get-wiki-page features.",
"completedAt": "2025-05-26T23:24:09.973Z"
},
{
"id": "68804833-8bdd-4dda-ab6b-dc22f540a0e3",
"name": "Implement unit tests for coverage gaps",
"description": "Create feature.spec.unit.ts with unit tests to fill coverage gaps not covered by integration tests. Use mocks only when absolutely necessary for scenarios that cannot be tested with real Azure DevOps API.",
"status": "completed",
"dependencies": [
{
"taskId": "457c0d1b-3635-49d7-916e-0e9aeb4f370f"
}
],
"createdAt": "2025-05-26T16:18:03.641Z",
"updatedAt": "2025-05-26T23:31:32.356Z",
"relatedFiles": [
{
"path": "src/features/wikis/list-wiki-pages/feature.spec.unit.ts",
"type": "CREATE",
"description": "Unit tests for list wiki pages feature"
}
],
"implementationGuide": "1. Create src/features/wikis/list-wiki-pages/feature.spec.unit.ts\n2. Mock WikiClient and its listWikiPages method only for scenarios not covered by integration tests\n3. Focus on edge cases and error scenarios that are difficult to reproduce with real API:\n - Network failures and timeouts\n - Malformed API responses\n - Edge cases in pagination logic\n - Input validation edge cases\n4. Follow patterns from src/features/wikis/get-wikis/feature.spec.unit.ts\n5. Use jest.mock() for WikiClient only when necessary\n6. Complement integration tests rather than duplicate coverage",
"verificationCriteria": "Unit tests fill gaps in integration test coverage, minimal use of mocks, tests focus on scenarios that cannot be tested with real API, follows existing test patterns",
"analysisResult": "Implement list_wiki_pages tool for Azure DevOps MCP server following GitHub issue #184 requirements. The implementation extends existing WikiClient with Azure DevOps Pages Batch API support, includes comprehensive pagination handling, and maintains consistency with established codebase patterns for schema validation, error handling, and testing.",
"summary": "Successfully implemented comprehensive unit tests for list-wiki-pages feature. Created src/features/wikis/list-wiki-pages/feature.spec.unit.ts with 394 lines of focused unit tests that complement integration tests. Tests cover scenarios not easily testable with real API: network failures/timeouts, malformed API responses, edge cases in pagination logic, input validation edge cases, large datasets (10,000 pages), special characters in paths, boundary recursionLevel values, client creation failures, and data transformation scenarios. Used minimal mocking (only WikiClient when necessary) following patterns from existing unit tests. All tests focus on scenarios that cannot be reliably tested with real Azure DevOps API while avoiding duplication of integration test coverage.",
"completedAt": "2025-05-26T23:31:32.355Z"
}
]
}
```
--------------------------------------------------------------------------------
/docs/tools/pipelines.md:
--------------------------------------------------------------------------------
```markdown
# Pipeline Tools
This document describes the tools available for working with Azure DevOps pipelines.
## Table of Contents
- [`list_pipelines`](#list_pipelines) - List pipelines in a project
- [`get_pipeline`](#get_pipeline) - Get details of a specific pipeline
- [`list_pipeline_runs`](#list_pipeline_runs) - List recent runs for a pipeline
- [`get_pipeline_run`](#get_pipeline_run) - Get details of a specific run (plus artifacts)
- [`download_pipeline_artifact`](#download_pipeline_artifact) - Download an artifact file as text
- [`pipeline_timeline`](#pipeline_timeline) - Retrieve the stage and job timeline for a run
- [`get_pipeline_log`](#get_pipeline_log) - Retrieve raw or JSON-formatted log content
- [`trigger_pipeline`](#trigger_pipeline) - Trigger a pipeline run
## list_pipelines
Lists pipelines in a project.
### Parameters
| Parameter | Type | Required | Description |
| ----------- | ------ | -------- | --------------------------------------------------------- |
| `projectId` | string | No | The ID or name of the project (Default: from environment) |
| `orderBy` | string | No | Order by field and direction (e.g., "createdDate desc") |
| `top` | number | No | Maximum number of pipelines to return |
### Response
Returns an object containing the count and an array of pipeline objects:
```json
{
"count": 2,
"value": [
{
"id": 4,
"revision": 2,
"name": "Node.js build pipeline",
"folder": "\\",
"url": "https://dev.azure.com/organization/project/_apis/pipelines/4"
},
{
"id": 1,
"revision": 1,
"name": "Sample Pipeline",
"folder": "\\",
"url": "https://dev.azure.com/organization/project/_apis/pipelines/1"
}
]
}
```
### Error Handling
- Returns `AzureDevOpsResourceNotFoundError` if the project does not exist
- Returns `AzureDevOpsAuthenticationError` if authentication fails
- Returns generic error messages for other failures
### Example Usage
```javascript
// Using default project from environment
const result = await callTool('list_pipelines', {});
// Specifying project and limiting results
const limitedResult = await callTool('list_pipelines', {
projectId: 'my-project',
top: 10,
orderBy: 'name asc',
});
```
## get_pipeline
Gets details of a specific pipeline.
### Parameters
| Parameter | Type | Required | Description |
| ----------------- | ------ | -------- | ----------------------------------------------------------------- |
| `projectId` | string | No | The ID or name of the project (Default: from environment) |
| `pipelineId` | number | Yes | The numeric ID of the pipeline to retrieve |
| `pipelineVersion` | number | No | The version of the pipeline to retrieve (latest if not specified) |
### Response
Returns a pipeline object with the following structure:
```json
{
"id": 4,
"revision": 2,
"name": "Node.js build pipeline",
"folder": "\\",
"url": "https://dev.azure.com/organization/project/_apis/pipelines/4",
"_links": {
"self": {
"href": "https://dev.azure.com/organization/project/_apis/pipelines/4"
},
"web": {
"href": "https://dev.azure.com/organization/project/_build/definition?definitionId=4"
}
},
"configuration": {
"path": "azure-pipelines.yml",
"repository": {
"id": "bd0e8130-7fba-4f3b-8559-54760b6e7248",
"type": "azureReposGit"
},
"type": "yaml"
}
}
```
### Error Handling
- Returns `AzureDevOpsResourceNotFoundError` if the pipeline or project does not exist
- Returns `AzureDevOpsAuthenticationError` if authentication fails
- Returns generic error messages for other failures
### Example Usage
```javascript
// Get latest version of a pipeline
const result = await callTool('get_pipeline', {
pipelineId: 4,
});
// Get specific version of a pipeline
const versionResult = await callTool('get_pipeline', {
projectId: 'my-project',
pipelineId: 4,
pipelineVersion: 2,
});
```
## list_pipeline_runs
Lists recent runs for a given pipeline with optional filtering by branch, state, result, or time window.
### Parameters
| Parameter | Type | Required | Description |
| ------------------- | ------------ | -------- | --------------------------------------------------------------------------- |
| `projectId` | string | No | The ID or name of the project (Default: from environment) |
| `pipelineId` | number | Yes | Numeric ID of the pipeline whose runs should be returned |
| `top` | number | No | Maximum rows to return (1-100, default 50) |
| `continuationToken` | string | No | Continuation token for paging through long histories |
| `branch` | string | No | Filter runs by branch (accepts `main` or full ref like `refs/heads/main`) |
| `state` | string | No | Filter by current run state (`notStarted`, `inProgress`, `completed`, etc.) |
| `result` | string | No | Filter by final run result (`succeeded`, `failed`, `canceled`, etc.) |
| `createdFrom` | string (ISO) | No | Restrict runs created on or after this timestamp |
| `createdTo` | string (ISO) | No | Restrict runs created on or before this timestamp |
| `orderBy` | string | No | Sort order for creation date (`createdDate desc` by default) |
### Response
Returns the runs plus a continuation token when more pages are available:
```json
{
"runs": [
{
"id": 13590799,
"name": "20251001.2",
"createdDate": "2025-10-01T08:59:27.343Z",
"state": "completed",
"result": "succeeded",
"pipeline": { "id": 69847, "name": "embed-confluence-content" },
"_links": {
"web": {
"href": "https://dev.azure.com/org/project/_build/results?buildId=13590799"
}
}
}
],
"continuationToken": "eyJwYWdlIjoxfQ=="
}
```
### Error Handling
- Returns `AzureDevOpsResourceNotFoundError` if the pipeline or project does not exist
- Returns `AzureDevOpsAuthenticationError` if authentication fails
- Returns generic error messages for other failures
### Example Usage
```javascript
// Fetch the latest 10 runs on the main branch
const runs = await callTool('list_pipeline_runs', {
pipelineId: 69847,
top: 10,
branch: 'main',
});
// Continue from a previous page
const nextPage = await callTool('list_pipeline_runs', {
pipelineId: 69847,
continuationToken: runs.continuationToken,
});
```
## get_pipeline_run
Gets detailed information about a single pipeline run, including artifact listings when available.
### Parameters
| Parameter | Type | Required | Description |
| ------------ | ------ | -------- | --------------------------------------------------------------------------- |
| `projectId` | string | No | The ID or name of the project (Default: from environment) |
| `runId` | number | Yes | Numeric ID of the pipeline run |
| `pipelineId` | number | No | Optional guard to validate the run belongs to the specified pipeline |
### Response
Returns the pipeline run enriched with artifact metadata:
```json
{
"id": 13590799,
"name": "20251001.2",
"state": "completed",
"result": "succeeded",
"pipeline": { "id": 69847, "name": "embed-confluence-content" },
"createdDate": "2025-10-01T08:59:27.343Z",
"artifacts": [
{
"name": "embedding-batch",
"type": "PipelineArtifact",
"downloadUrl": "https://.../embedding-batch.zip",
"signedContentUrl": "https://.../signedContent",
"items": [
{ "path": "logs", "itemType": "folder" },
{ "path": "logs/summary.json", "itemType": "file" }
]
},
{
"name": "embedding-metrics",
"type": "Container",
"containerId": 39106000,
"rootPath": "embedding-metrics",
"items": [
{ "path": "embedding_metrics.json", "itemType": "file", "size": 2048 }
]
}
]
}
```
### Error Handling
- Returns `AzureDevOpsResourceNotFoundError` if the run or project does not exist
- Returns `AzureDevOpsResourceNotFoundError` if `pipelineId` is provided and does not match the run
- Returns `AzureDevOpsAuthenticationError` if authentication fails
- Returns generic error messages for other failures
### Example Usage
```javascript
// Inspect a run and the files it produced
const run = await callTool('get_pipeline_run', {
runId: 13590799,
});
// Guard against a run from another pipeline
await callTool('get_pipeline_run', {
pipelineId: 69847,
runId: 13590799,
});
```
## download_pipeline_artifact
Downloads a single file from a pipeline run artifact (container or pipeline artifact) and returns its textual content.
### Parameters
| Parameter | Type | Required | Description |
| -------------- | ------ | -------- | --------------------------------------------------------------------------- |
| `projectId` | string | No | The ID or name of the project (Default: from environment) |
| `runId` | number | Yes | Numeric ID of the pipeline run |
| `artifactPath` | string | Yes | Artifact name and file path (e.g., `artifact-name/path/to/file.json`) |
| `pipelineId` | number | No | Optional guard to disambiguate runs triggered by different pipelines |
### Response
Returns the file content along with artifact metadata:
```json
{
"artifact": "embedding-metrics",
"path": "embedding-metrics/embedding_metrics.json",
"content": "{\n \"status\": \"ok\"\n}"
}
```
### Error Handling
- Returns `AzureDevOpsResourceNotFoundError` if the artifact or requested file is missing
- Returns `AzureDevOpsAuthenticationError` if the artifact storage cannot be accessed
- Returns generic error messages for other failures
### Example Usage
```javascript
// Download a JSON summary generated by the run
const artifact = await callTool('download_pipeline_artifact', {
runId: 13590799,
artifactPath: 'embedding-metrics/embedding_metrics.json',
});
console.log(JSON.parse(artifact.content));
```
## pipeline_timeline
Retrieves the timeline of stages, jobs, and tasks for a pipeline run with optional filtering by state and result.
### Parameters
| Parameter | Type | Required | Description |
| ------------ | ---------------------- | -------- | ------------------------------------------------------------------------------- |
| `projectId` | string | No | The ID or name of the project (Default: from environment) |
| `runId` | number | Yes | Numeric ID of the pipeline run |
| `timelineId` | string | No | Fetch a specific timeline record (otherwise returns the default timeline) |
| `pipelineId` | number | No | Optional reference to the pipeline for documentation purposes |
| `state` | string or string array | No | Filter returned records by state (`pending`, `inProgress`, `completed`) |
| `result` | string or string array | No | Filter returned records by result (`succeeded`, `failed`, `canceled`, etc.) |
### Response
Returns the timeline records (filtered when requested):
```json
{
"id": "f9d9c210-06c7-4e53-b45f-6b661341fa7f",
"records": [
{
"id": "Stage_1",
"name": "Build",
"type": "Stage",
"state": "completed",
"result": "succeeded",
"startTime": "2025-10-01T08:59:30.123Z",
"finishTime": "2025-10-01T09:02:10.456Z"
},
{
"id": "Job_1",
"name": "Agent job",
"type": "Job",
"parentId": "Stage_1",
"state": "completed",
"result": "succeeded"
}
]
}
```
### Error Handling
- Returns `AzureDevOpsResourceNotFoundError` if the timeline or project does not exist
- Returns `AzureDevOpsAuthenticationError` if authentication fails
- Returns generic error messages for other failures
### Example Usage
```javascript
// Fetch the full timeline
const timeline = await callTool('pipeline_timeline', {
runId: 13590799,
});
// Only keep completed records
const completed = await callTool('pipeline_timeline', {
runId: 13590799,
state: 'completed',
});
```
## get_pipeline_log
Retrieves log content for a specific timeline log. Supports plain text or JSON (structured lines) formats.
### Parameters
| Parameter | Type | Required | Description |
| ------------ | ------ | -------- | ---------------------------------------------------------------------------- |
| `projectId` | string | No | The ID or name of the project (Default: from environment) |
| `runId` | number | Yes | Numeric ID of the pipeline run |
| `logId` | number | Yes | Log identifier from the timeline records |
| `format` | string | No | `plain` (default) or `json` |
| `startLine` | number | No | First line to include (useful for large logs) |
| `endLine`    | number | No       | Line at which to stop (exclusive upper bound, when paired with `startLine`)  |
| `pipelineId` | number | No | Optional reference to help locate the log when working across many pipelines |
### Response
- When `format` is omitted or set to `plain`, returns a newline-delimited string:
```text
Queueing build...
Starting: Build
##[section]Starting: Initialize job
```
- When `format` is set to `json`, returns the structured log payload from Azure DevOps:
```json
[
{
"line": 1,
"stepRecordId": "Job_1",
"content": "##[section]Starting: Initialize job"
}
]
```
### Error Handling
- Returns `AzureDevOpsResourceNotFoundError` if the log or project does not exist
- Returns `AzureDevOpsAuthenticationError` if authentication fails
- Returns generic error messages for other failures
### Example Usage
```javascript
// Grab the first 200 lines of plain text
const logText = await callTool('get_pipeline_log', {
runId: 13590799,
logId: 12,
startLine: 0,
endLine: 200,
});
// Fetch the same lines as structured JSON
const logJson = await callTool('get_pipeline_log', {
runId: 13590799,
logId: 12,
format: 'json',
startLine: 0,
endLine: 200,
});
```
## trigger_pipeline
Triggers a run of a specific pipeline. Allows specifying the branch to run on and passing variables to customize the pipeline execution.
### Parameters
| Parameter | Type | Required | Description |
| -------------------- | ------ | -------- | --------------------------------------------------------------------- |
| `projectId` | string | No | The ID or name of the project (Default: from environment) |
| `pipelineId` | number | Yes | The numeric ID of the pipeline to trigger |
| `branch` | string | No | The branch to run the pipeline on (e.g., "main", "feature/my-branch") |
| `variables` | object | No | Variables to pass to the pipeline run |
| `templateParameters` | object | No | Parameters for template-based pipelines |
| `stagesToSkip` | array | No | Stages to skip in the pipeline run |
#### Variables Format
```json
{
"myVariable": {
"value": "my-value",
"isSecret": false
},
"secretVariable": {
"value": "secret-value",
"isSecret": true
}
}
```
### Response
Returns a run object with details about the triggered pipeline run:
```json
{
"id": 12345,
"name": "20230215.1",
"createdDate": "2023-02-15T10:30:00Z",
"url": "https://dev.azure.com/organization/project/_apis/pipelines/runs/12345",
"_links": {
"self": {
"href": "https://dev.azure.com/organization/project/_apis/pipelines/runs/12345"
},
"web": {
"href": "https://dev.azure.com/organization/project/_build/results?buildId=12345"
}
},
"state": 1,
"result": null,
"variables": {
"myVariable": {
"value": "my-value"
}
}
}
```
### Error Handling
- Returns `AzureDevOpsResourceNotFoundError` if the pipeline or project does not exist
- Returns `AzureDevOpsAuthenticationError` if authentication fails
- Returns generic error messages for other failures
### Example Usage
```javascript
// Trigger a pipeline on the default branch
// Uses the default project from environment variables
const result = await callTool('trigger_pipeline', {
pipelineId: 4,
});
// Trigger a pipeline on a specific branch with variables
const runWithOptions = await callTool('trigger_pipeline', {
projectId: 'my-project',
pipelineId: 4,
branch: 'feature/my-branch',
variables: {
deployEnvironment: {
value: 'staging',
isSecret: false,
},
},
});
```
```
--------------------------------------------------------------------------------
/shrimp-rules.md:
--------------------------------------------------------------------------------
```markdown
# Development Guidelines for AI Agents - mcp-server-azure-devops
**This document is exclusively for AI Agent operational use. DO NOT include general development knowledge.**
## 1. Project Overview
### Purpose
- This project, `@tiberriver256/mcp-server-azure-devops`, is an MCP (Model Context Protocol) server.
- Its primary function is to provide tools for interacting with Azure DevOps services.
### Technology Stack
- **Core**: TypeScript, Node.js
- **Key Libraries**:
- `@modelcontextprotocol/sdk`: For MCP server and type definitions.
- `azure-devops-node-api`: For interacting with Azure DevOps.
- `@azure/identity`: For Azure authentication.
- `zod`: For schema definition and validation.
- `zod-to-json-schema`: For converting Zod schemas to JSON schemas for MCP tools.
- **Testing**: Jest (for unit, integration, and e2e tests).
- **Linting/Formatting**: ESLint, Prettier.
- **Environment Management**: `dotenv`.
### Core Functionality
- Provides MCP tools to interact with Azure DevOps features including, but not limited to:
- Organizations
- Projects (list, get, get details)
- Repositories (list, get, get content, get tree)
- Work Items (list, get, create, update, manage links)
- Pull Requests (list, get, create, update, add/get comments)
- Pipelines (list, trigger)
- Search (code, wiki, work items)
- Users (get current user)
- Wikis (list, get page, create, update page)
## 2. Project Architecture
### Main Directory Structure
- **`./` (Root)**:
- [`package.json`](package.json:0): Project metadata, dependencies, and NPM scripts. **REFER** to this for available commands and dependencies.
- [`tsconfig.json`](tsconfig.json:0): TypeScript compiler configuration. **ADHERE** to its settings.
- [`.eslintrc.json`](.eslintrc.json:0): ESLint configuration for code linting. **ADHERE** to its rules.
- [`README.md`](README.md:0): General project information.
- `setup_env.sh`: Shell script for environment setup.
- `CHANGELOG.md` (if present): Tracks changes between versions.
- **`src/`**: Contains all TypeScript source code.
- **`src/features/`**: Core application logic. Each subdirectory represents a distinct Azure DevOps feature set (e.g., `projects`, `repositories`).
- `src/features/[feature-name]/`: Contains all files related to a specific feature.
- `src/features/[feature-name]/index.ts`: Main export file for the feature. Exports request handlers (`isFeatureRequest`, `handleFeatureRequest`), tool definitions array (`featureTools`), schemas, types, and individual tool implementation functions. **MODIFY** this file when adding new tools or functionalities to the feature.
- `src/features/[feature-name]/schemas.ts`: Defines Zod input/output schemas for all tools within this feature. **DEFINE** new schemas here.
- `src/features/[feature-name]/tool-definitions.ts`: Defines MCP tools for the feature using `@modelcontextprotocol/sdk` and `zodToJsonSchema`. **ADD** new tool definitions here.
- `src/features/[feature-name]/types.ts`: Contains TypeScript type definitions specific to this feature. **DEFINE** feature-specific types here.
- `src/features/[feature-name]/[tool-name]/`: Subdirectory for a specific tool/action within the feature.
- `src/features/[feature-name]/[tool-name]/feature.ts`: Implements the core logic for the specific tool (e.g., API calls, data transformation). **IMPLEMENT** tool logic here.
- `src/features/[feature-name]/[tool-name]/index.ts`: Exports the `feature.ts` logic and potentially tool-specific schemas/types if not in the parent feature files.
- `src/features/[feature-name]/[tool-name]/schema.ts` (optional, often re-exports from feature-level `schemas.ts`): Defines or re-exports Zod schemas for this specific tool.
- `src/features/organizations/`, `src/features/pipelines/`, `src/features/projects/`, `src/features/pull-requests/`, `src/features/repositories/`, `src/features/search/`, `src/features/users/`, `src/features/wikis/`, `src/features/work-items/`: Existing feature modules. **REFER** to these for patterns.
- **`src/shared/`**: Contains shared modules and utilities used across features.
- `src/shared/api/`: Azure DevOps API client setup (e.g., `client.ts`).
- `src/shared/auth/`: Authentication logic for Azure DevOps (e.g., `auth-factory.ts`, `client-factory.ts`). **USE** these factories; DO NOT implement custom auth.
- `src/shared/config/`: Configuration management (e.g., `version.ts`).
- `src/shared/errors/`: Shared error handling classes and utilities (e.g., `azure-devops-errors.ts`, `handle-request-error.ts`). **USE** these for consistent error handling.
- `src/shared/types/`: Global TypeScript type definitions (e.g., `config.ts`, `request-handler.ts`, `tool-definition.ts`).
- **`src/utils/`**: General utility functions.
- `src/utils/environment.ts`: Provides default values for environment variables (e.g., `defaultProject`, `defaultOrg`).
- [`src/index.ts`](src/index.ts:1): Main application entry point. Handles environment variable loading and server initialization. **Exports** server components.
- [`src/server.ts`](src/server.ts:1): MCP server core logic. Initializes the server, registers all tool handlers from features, and sets up request routing. **MODIFY** this file to register new feature modules (their `isFeatureRequest` and `handleFeatureRequest` handlers, and `featureTools` array).
- **`docs/`**: Project documentation, including authentication guides and per-tool references under `docs/tools/`. **MAINTAIN** it in sync with code changes.
- **`project-management/`**: Contains project planning and design documents. **REFER** to `architecture-guide.md` for high-level design.
- **`tests/`**: Directory for global test setup or utilities if any. Most tests are co-located with source files (e.g., `*.spec.unit.ts`, `*.spec.int.ts`, `*.spec.e2e.ts`).
## 3. Code Standards
### Naming Conventions
- **Files and Directories**: USE kebab-case (e.g., `my-feature`, `get-project-details.ts`).
- **Variables and Functions**: USE camelCase (e.g., `projectId`, `listProjects`).
- **Classes, Interfaces, Enums, Types**: USE PascalCase (e.g., `AzureDevOpsClient`, `TeamProject`, `AuthenticationMethod`).
- **Test Files**:
- Unit tests: `[filename].spec.unit.ts` (e.g., [`get-project.spec.unit.ts`](src/features/projects/get-project/feature.spec.unit.ts:0)).
- Integration tests: `[filename].spec.int.ts` (e.g., [`get-project.spec.int.ts`](src/features/projects/get-project/feature.spec.int.ts:0)).
- E2E tests: `[filename].spec.e2e.ts` (e.g., [`server.spec.e2e.ts`](src/server.spec.e2e.ts:0)).
- **Feature Modules**: Place under `src/features/[feature-name]/`.
- **Tool Logic**: Place in `src/features/[feature-name]/[tool-name]/feature.ts`.
- **Schemas**: Define in `src/features/[feature-name]/schemas.ts`.
- **Tool Definitions (MCP)**: Define in `src/features/[feature-name]/tool-definitions.ts`.
- **Types**: Feature-specific types in `src/features/[feature-name]/types.ts`; global types in `src/shared/types/`.
### Formatting
- **Prettier**: Enforced via ESLint and lint-staged.
- **Rule**: ADHERE to formatting rules defined by Prettier (implicitly via [`.eslintrc.json`](.eslintrc.json:1) which extends `prettier`).
- **Action**: ALWAYS run `npm run format` (or rely on lint-staged) before committing.
### Linting
- **ESLint**: Configuration in [`.eslintrc.json`](.eslintrc.json:1).
- **Rule**: ADHERE to linting rules.
- **Action**: ALWAYS run `npm run lint` (or `npm run lint:fix`) and RESOLVE all errors/warnings before committing.
- **Key Lint Rules (from [`.eslintrc.json`](.eslintrc.json:1))**:
- `prettier/prettier: "error"` (Prettier violations are ESLint errors).
- `@typescript-eslint/no-explicit-any: "warn"` (Avoid `any` where possible; it's "off" for `*.spec.unit.ts` and `tests/**/*.ts`).
- `@typescript-eslint/no-unused-vars: ["error", { "argsIgnorePattern": "^_", "varsIgnorePattern": "^_" }]` (No unused variables, allowing `_` prefix for ignored ones).
### Comments
- **TSDoc**: USE TSDoc for documenting public functions, classes, interfaces, and types (e.g., `/** ... */`).
- **Inline Comments**: For complex logic blocks, ADD inline comments (`// ...`) explaining the purpose.
### TypeScript Specifics (from [`tsconfig.json`](tsconfig.json:1))
- `strict: true`: ADHERE to strict mode.
- `noImplicitAny: true`: DO NOT use implicit `any`. Explicitly type all entities.
- `noUnusedLocals: true`, `noUnusedParameters: true`: ENSURE no unused local variables or parameters.
- `moduleResolution: "Node16"`: Be aware of Node.js ESM module resolution specifics.
- `paths: { "@/*": ["src/*"] }`: USE path alias `@/*` for imports from `src/`.
## 4. Functionality Implementation Standards
### Adding a New Tool/Functionality to an Existing Feature
1. **Identify Feature**: Determine the relevant feature directory in `src/features/[feature-name]/`.
2. **Create Tool Directory**: Inside the feature directory, CREATE a new subdirectory for your tool, e.g., `src/features/[feature-name]/[new-tool-name]/`.
3. **Implement Logic**: CREATE `[new-tool-name]/feature.ts`. Implement the core Azure DevOps interaction logic here (a combined sketch of steps 3-6 follows this list).
- USE `getClient()` from `src/shared/api/client.ts` or `getConnection()` from [`src/server.ts`](src/server.ts:1) to get `WebApi`.
- USE error handling from `src/shared/errors/`.
4. **Define Schema**:
- ADD Zod schema for the tool's input to `src/features/[feature-name]/schemas.ts`.
- EXPORT it.
- If needed, CREATE `[new-tool-name]/schema.ts` and re-export the specific schema from the feature-level `schemas.ts`.
5. **Define MCP Tool**:
- ADD tool definition to `src/features/[feature-name]/tool-definitions.ts`.
- Import the Zod schema and use `zodToJsonSchema` for `inputSchema`.
- Ensure `name` matches the intended tool name for MCP.
6. **Update Feature Index**:
- In `src/features/[feature-name]/index.ts`:
- EXPORT your new tool's logic function (from `[new-tool-name]/feature.ts` or its `index.ts`).
- ADD your new tool's name to the `includes()` check in `isFeatureRequest` function.
- ADD a `case` for your new tool in the `handleFeatureRequest` function to call your logic. Parse arguments using the Zod schema.
7. **Update Server**: No changes usually needed in [`src/server.ts`](src/server.ts:1) if the feature module is already registered. The feature's `tool-definitions.ts` and `handleFeatureRequest` will be picked up.
8. **Add Tests**: CREATE `[new-tool-name]/feature.spec.unit.ts` and `[new-tool-name]/feature.spec.int.ts`.
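A combined, hypothetical sketch of steps 3-6 for an illustrative `get_example_item` tool in an illustrative `example-feature` module. In the real layout these pieces are split across `schemas.ts`, `tool-definitions.ts`, `[tool-name]/feature.ts`, and the feature `index.ts`; all names, the `defaultProject` import path, and the placeholder `getCoreApi()` call are assumptions for illustration, not project code.
```typescript
import { z } from 'zod';
import { zodToJsonSchema } from 'zod-to-json-schema';
import { WebApi } from 'azure-devops-node-api';
import { CallToolRequestSchema } from '@modelcontextprotocol/sdk/types.js';
// Path assumes this file sits directly under src/features/example-feature/.
import { defaultProject } from '../../utils/environment';

type CallToolRequest = z.infer<typeof CallToolRequestSchema>;

// Step 4: Zod input schema (normally exported from the feature's schemas.ts).
export const GetExampleItemSchema = z.object({
  projectId: z
    .string()
    .optional()
    .describe('The ID or name of the project (Default: from environment)'),
  itemId: z.number().describe('Numeric ID of the hypothetical item to retrieve'),
});

// Step 5: MCP tool definition (normally added to tool-definitions.ts).
export const exampleFeatureTools = [
  {
    name: 'get_example_item',
    description: 'Get a single example item from Azure DevOps',
    inputSchema: zodToJsonSchema(GetExampleItemSchema),
  },
];

// Step 3 (abbreviated): tool logic that would live in get-example-item/feature.ts.
// A real implementation calls the relevant azure-devops-node-api client and maps
// failures to the shared error classes in src/shared/errors/.
async function getExampleItem(
  connection: WebApi,
  args: z.infer<typeof GetExampleItemSchema>,
) {
  const coreApi = await connection.getCoreApi();
  const project = await coreApi.getProject(args.projectId ?? defaultProject);
  return { itemId: args.itemId, projectName: project?.name };
}

// Step 6: request guard and handler (normally in the feature's index.ts).
export function isExampleFeatureRequest(request: CallToolRequest): boolean {
  return ['get_example_item'].includes(request.params.name);
}

export async function handleExampleFeatureRequest(
  connection: WebApi,
  request: CallToolRequest,
) {
  switch (request.params.name) {
    case 'get_example_item': {
      const args = GetExampleItemSchema.parse(request.params.arguments);
      const result = await getExampleItem(connection, args);
      return {
        content: [{ type: 'text', text: JSON.stringify(result, null, 2) }],
      };
    }
    default:
      throw new Error(`Unknown example-feature tool: ${request.params.name}`);
  }
}
```
Keeping the schema, tool definition, and handler case in lockstep is what makes step 7 hold: [`src/server.ts`](src/server.ts:1) can route the new tool without modification.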
### Adding a New Feature Module (e.g., for a new Azure DevOps Service Area)
1. **Create Feature Directory**: CREATE `src/features/[new-feature-module-name]/`.
2. **Implement Tools**: Follow "Adding a New Tool" steps above for each tool within this new feature module. This includes creating `schemas.ts`, `tool-definitions.ts`, `types.ts` (if needed), and subdirectories for each tool's `feature.ts`.
3. **Create Feature Index**: CREATE `src/features/[new-feature-module-name]/index.ts`.
- EXPORT all schemas, types, tool logic functions.
- EXPORT the `[new-feature-module-name]Tools` array from `tool-definitions.ts`.
- CREATE and EXPORT `is[NewFeatureModuleName]Request` (e.g., `isMyNewFeatureRequest`) type guard.
- CREATE and EXPORT `handle[NewFeatureModuleName]Request` (e.g., `handleMyNewFeatureRequest`) request handler function.
4. **Register Feature in Server** (see the sketch after this list):
- In [`src/server.ts`](src/server.ts:1):
- IMPORT `[new-feature-module-name]Tools`, `is[NewFeatureModuleName]Request`, and `handle[NewFeatureModuleName]Request` from your new feature's `index.ts`.
- ADD `...[new-feature-module-name]Tools` to the `tools` array in the `ListToolsRequestSchema` handler.
- ADD an `if (is[NewFeatureModuleName]Request(request)) { return await handle[NewFeatureModuleName]Request(connection, request); }` block in the `CallToolRequestSchema` handler.
5. **Add Tests**: Ensure comprehensive tests for the new feature module.
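A minimal sketch of the server-side registration described in step 4, assuming a hypothetical `my-new-feature` module. In the real project both handlers already exist in [`src/server.ts`](src/server.ts:1) and the new module's exports are merged into them, so only the spread of `myNewFeatureTools` and the `if (isMyNewFeatureRequest(...))` block correspond to the actual change.
```typescript
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import {
  ListToolsRequestSchema,
  CallToolRequestSchema,
} from '@modelcontextprotocol/sdk/types.js';
import { WebApi } from 'azure-devops-node-api';
// Hypothetical module exporting the pieces named in step 3.
import {
  myNewFeatureTools,
  isMyNewFeatureRequest,
  handleMyNewFeatureRequest,
} from './features/my-new-feature';

// Illustrative wiring only: in src/server.ts both handlers already exist and the
// new module is merged into them rather than registered by a helper like this.
export function registerMyNewFeature(
  server: Server,
  getConnection: () => Promise<WebApi>,
) {
  server.setRequestHandler(ListToolsRequestSchema, async () => ({
    // The real handler also spreads the tool arrays of the other feature modules.
    tools: [...myNewFeatureTools],
  }));

  server.setRequestHandler(CallToolRequestSchema, async (request) => {
    const connection = await getConnection();
    if (isMyNewFeatureRequest(request)) {
      return await handleMyNewFeatureRequest(connection, request);
    }
    // ...the checks for the other feature modules continue here...
    throw new Error(`Unknown tool: ${request.params.name}`);
  });
}
```
The type guard keeps routing decisions inside the feature module, so `server.ts` only determines which module owns a request.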
## 5. Framework/Plugin/Third-party Library Usage Standards
- **`@modelcontextprotocol/sdk`**:
- USE `Server` class from `@modelcontextprotocol/sdk/server/index.js` to create the MCP server ([`src/server.ts`](src/server.ts:1)).
- USE `StdioServerTransport` for transport ([`src/index.ts`](src/index.ts:1)).
- USE schema types like `CallToolRequestSchema` from `@modelcontextprotocol/sdk/types.js`.
- DEFINE tools as `ToolDefinition[]` (see `src/shared/types/tool-definition.ts` and feature `tool-definitions.ts` files).
- **`azure-devops-node-api`**:
- This is the primary library for Azure DevOps interactions.
- OBTAIN `WebApi` connection object via `getConnection()` from [`src/server.ts`](src/server.ts:1) or `AzureDevOpsClient` from `src/shared/auth/client-factory.ts`.
- USE specific APIs from the connection (e.g., `connection.getCoreApi()`, `connection.getWorkItemTrackingApi()`).
- **`@azure/identity`**:
- Used for Azure authentication (e.g., `DefaultAzureCredential`).
- Primarily abstracted via `AzureDevOpsClient` in `src/shared/auth/`. PREFER using this abstraction.
- **`zod`**:
- USE for all input/output schema definition and validation.
- DEFINE schemas in `src/features/[feature-name]/schemas.ts`.
- USE `z.object({...})`, `z.string()`, `z.boolean()`, etc.
- USE `.optional()`, `.default()`, `.describe()` for schema fields.
- **`zod-to-json-schema`**:
- USE to convert Zod schemas to JSON schemas for MCP `inputSchema` in `tool-definitions.ts`.
- **`dotenv`**:
- Used in [`src/index.ts`](src/index.ts:1) to load environment variables from a `.env` file.
- **Jest**:
- Test files co-located with source files or in feature-specific `__test__` directories.
- Configuration in `jest.unit.config.js`, `jest.int.config.js`, `jest.e2e.config.js`.
- **ESLint/Prettier**: See "Code Standards".
## 6. Workflow Standards
### Development Workflow
1. **Branch**: CREATE or CHECKOUT a feature/bugfix branch from `main` (or relevant development branch).
2. **Implement**: WRITE code and corresponding tests.
3. **Test**:
- RUN unit tests: `npm run test:unit`.
- RUN integration tests: `npm run test:int`.
- RUN E2E tests: `npm run test:e2e`.
- Or run all tests: `npm test`.
- ENSURE all tests pass.
4. **Lint & Format**:
- RUN `npm run lint` (or `npm run lint:fix`). RESOLVE all issues.
- RUN `npm run format`.
5. **Commit**:
- USE Conventional Commits specification (e.g., `feat: ...`, `fix: ...`).
- RECOMMENDED: Use `npm run commit` (uses `cz-conventional-changelog`) for guided commit messages.
6. **Pull Request**: PUSH branch and CREATE Pull Request against `main` (or relevant development branch).
### NPM Scripts (from [`package.json`](package.json:1))
- `build`: `tsc` (Compiles TypeScript to `dist/`).
- `dev`: `ts-node-dev --respawn --transpile-only src/index.ts` (Runs server in development with auto-restart).
- `start`: `node dist/index.js` (Runs compiled server).
- `inspector`: `npm run build && npx @modelcontextprotocol/inspector node dist/index.js` (Runs server with MCP Inspector).
- `test:unit`, `test:int`, `test:e2e`, `test`: Run respective test suites.
- `lint`, `lint:fix`: Run ESLint.
- `format`: Run Prettier.
- `prepare`: `husky install` (Sets up Git hooks).
- `commit`: `cz` (Interactive commitizen).
### CI/CD
- CI/CD runs through the GitHub Actions workflows under `.github/workflows/` (e.g., `main.yml`, `release-please.yml`). **REFER** to them before changing build, test, or release behavior.
## 7. Key File Interaction Standards
- **Adding/Modifying a Tool**:
- TOUCH `src/features/[feature-name]/[tool-name]/feature.ts` (logic).
- TOUCH `src/features/[feature-name]/schemas.ts` (Zod schema).
- TOUCH `src/features/[feature-name]/tool-definitions.ts` (MCP tool definition).
- TOUCH `src/features/[feature-name]/index.ts` (export logic, update request handler and guard).
- TOUCH corresponding `*.spec.unit.ts` and `*.spec.int.ts` files.
- **Adding a New Feature Module**:
- CREATE files within `src/features/[new-feature-module-name]/` as per "Functionality Implementation Standards".
- MODIFY [`src/server.ts`](src/server.ts:1) to import and register the new feature module's tools and handlers.
- **Configuration Changes**:
- Environment variables: Managed via `.env` file (loaded by `dotenv` in [`src/index.ts`](src/index.ts:1)).
- TypeScript config: [`tsconfig.json`](tsconfig.json:1).
- Linting config: [`.eslintrc.json`](.eslintrc.json:1).
- **Dependency Management**:
- MODIFY [`package.json`](package.json:1) to add/update dependencies.
- RUN `npm install` or `npm ci`.
- **Documentation**:
- The `docs/` directory contains project documentation, including per-tool guides under `docs/tools/` (e.g., `docs/tools/pipelines.md`). **UPDATE** the relevant document when the corresponding feature under `src/features/` is modified.
- [`README.md`](README.md:0): UPDATE for significant high-level changes.
## 8. AI Decision-making Standards
### When Adding a New Azure DevOps API Interaction:
1. **Goal**: To expose a new Azure DevOps API endpoint as an MCP tool.
2. **Decision: New or Existing Feature?**
- IF the API relates to an existing service area (e.g., adding a new work item query type to `work-items` feature), MODIFY the existing feature module.
- ELSE (e.g., interacting with Azure DevOps Audit Logs, a new service area), CREATE a new feature module. (See "Functionality Implementation Standards").
3. **Pattern Adherence**:
- FOLLOW the established pattern:
- `src/features/[feature]/[tool]/feature.ts` for logic.
- `src/features/[feature]/schemas.ts` for Zod schemas.
- `src/features/[feature]/tool-definitions.ts` for MCP tool definitions.
- `src/features/[feature]/index.ts` for feature-level exports, request guard (`isFeatureRequest`), and request handler (`handleFeatureRequest`).
- **Example**: To add `get_pipeline_run_logs` to `pipelines` feature:
- CREATE `src/features/pipelines/get-pipeline-run-logs/feature.ts`.
- ADD `GetPipelineRunLogsSchema` to `src/features/pipelines/schemas.ts`.
- ADD `get_pipeline_run_logs` definition to `src/features/pipelines/tool-definitions.ts`.
- UPDATE `src/features/pipelines/index.ts` to export the new function, add to `isPipelinesRequest`, and handle in `handlePipelinesRequest`.
4. **Error Handling** (a feature-level sketch follows this list):
- ALWAYS use custom error classes from `src/shared/errors/azure-devops-errors.ts` (e.g., `AzureDevOpsResourceNotFoundError`).
- WRAP external API calls in try/catch blocks.
- USE `handleResponseError` from `src/shared/errors/handle-request-error.ts` in the top-level request handler in [`src/server.ts`](src/server.ts:1) (already done for existing features). Feature-specific handlers should re-throw custom errors.
5. **Testing**:
- ALWAYS write unit tests for the new logic in `[tool-name]/feature.spec.unit.ts`.
- ALWAYS write integration tests (NEVER mocking anything) in `[tool-name]/feature.spec.int.ts`. Prefer integration tests over unit tests.
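A minimal, hypothetical `feature.ts` sketch for the `get_pipeline_run_logs` example above, applying the error-handling rules from point 4. The `getBuildLogs` call, parameter names, and the relative import path (assumed to be from `src/features/pipelines/get-pipeline-run-logs/`) are illustrative assumptions, not a definitive implementation.
```typescript
import { WebApi } from 'azure-devops-node-api';
import {
  AzureDevOpsError,
  AzureDevOpsResourceNotFoundError,
} from '../../../shared/errors';

/**
 * Retrieves the log metadata for a pipeline run (illustrative only).
 */
export async function getPipelineRunLogs(
  connection: WebApi,
  projectId: string,
  buildId: number,
) {
  try {
    const buildApi = await connection.getBuildApi();
    const logs = await buildApi.getBuildLogs(projectId, buildId);
    if (!logs || logs.length === 0) {
      throw new AzureDevOpsResourceNotFoundError(
        `No logs found for build ${buildId} in project ${projectId}`,
      );
    }
    return logs;
  } catch (error) {
    // Re-throw the project's custom errors untouched (they are assumed to share
    // the AzureDevOpsError base class); wrap everything else so the top-level
    // handler in src/server.ts can report a consistent message.
    if (error instanceof AzureDevOpsError) {
      throw error;
    }
    throw new AzureDevOpsError(
      `Failed to get pipeline run logs: ${
        error instanceof Error ? error.message : String(error)
      }`,
    );
  }
}
```
The same mapping of HTTP status codes and failures to these error classes can be seen throughout `src/clients/azure-devops.ts`.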
### When Modifying Existing Functionality:
1. **Identify Impact**: DETERMINE all files affected by the change (logic, schemas, tool definitions, tests, potentially documentation).
2. **Maintain Consistency**: ENSURE changes are consistent with existing patterns within that feature module.
3. **Update Tests**: MODIFY existing tests or ADD new ones to cover the changes. ENSURE all tests pass.
4. **Version Bumping**: For significant changes, consider if a version bump in [`package.json`](package.json:1) is warranted (usually handled by `release-please`).
## 9. Prohibited Actions
- **DO NOT** include general development knowledge or LLM-known facts in this `shrimp-rules.md` document. This document is for project-specific operational rules for AI.
- **DO NOT** explain project functionality in terms of *what it does for an end-user*. Focus on *how to modify or add to it* for an AI developer.
- **DO NOT** use `any` type implicitly. [`tsconfig.json`](tsconfig.json:1) enforces `noImplicitAny: true`. [`.eslintrc.json`](.eslintrc.json:1) warns on explicit `any` (`@typescript-eslint/no-explicit-any: "warn"`), except in unit tests. MINIMIZE explicit `any`.
- **DO NOT** bypass linting (`npm run lint`) or formatting (`npm run format`) checks. Code MUST adhere to these standards.
- **DO NOT** commit code that fails tests (`npm test`).
- **DO NOT** implement custom Azure DevOps authentication logic. USE the provided `AzureDevOpsClient` from `src/shared/auth/`.
- **DO NOT** hardcode configuration values (like PATs, Org URLs, Project IDs). These should come from environment variables (see [`src/index.ts`](src/index.ts:1) `getConfig` and `src/utils/environment.ts`).
- **DO NOT** directly call Azure DevOps REST APIs if a corresponding function already exists in the `azure-devops-node-api` library or in shared project code (e.g., `src/shared/api/`).
- **DO NOT** modify files in `dist/` directory directly. This directory is auto-generated by `npm run build`.
- **DO NOT** ignore the `project-management/` directory for understanding architectural guidelines, but DO NOT replicate its content here.
- **DO NOT** use mocks within integration tests.
```
--------------------------------------------------------------------------------
/src/clients/azure-devops.ts:
--------------------------------------------------------------------------------
```typescript
import axios, { AxiosError } from 'axios';
import { DefaultAzureCredential, AzureCliCredential } from '@azure/identity';
import {
AzureDevOpsError,
AzureDevOpsResourceNotFoundError,
AzureDevOpsValidationError,
AzureDevOpsPermissionError,
} from '../shared/errors';
import { defaultOrg, defaultProject } from '../utils/environment';
interface AzureDevOpsApiErrorResponse {
message?: string;
typeKey?: string;
errorCode?: number;
eventId?: number;
}
interface ClientOptions {
organizationId?: string;
}
interface WikiCreateParameters {
name: string;
projectId: string;
type: 'projectWiki' | 'codeWiki';
repositoryId?: string;
mappedPath?: string;
version?: {
version: string;
versionType?: 'branch' | 'tag' | 'commit';
};
}
interface WikiPageContent {
content: string;
}
export interface WikiPageSummary {
id: number;
path: string;
url?: string;
order?: number;
}
interface WikiPagesBatchRequest {
top: number;
continuationToken?: string;
}
interface WikiPagesBatchResponse {
value: WikiPageSummary[];
continuationToken?: string;
}
interface PageUpdateOptions {
comment?: string;
versionDescriptor?: {
version?: string;
};
}
export class WikiClient {
private baseUrl: string;
private organizationId: string;
constructor(organizationId: string) {
this.organizationId = organizationId || defaultOrg;
this.baseUrl = `https://dev.azure.com/${this.organizationId}`;
}
/**
* Gets a project's ID from its name or verifies a project ID
* @param projectNameOrId - Project name or ID
* @returns The project ID
*/
private async getProjectId(projectNameOrId: string): Promise<string> {
try {
// Try to get project details using the provided name or ID
const url = `${this.baseUrl}/_apis/projects/${projectNameOrId}`;
const authHeader = await getAuthorizationHeader();
const response = await axios.get(url, {
params: {
'api-version': '7.1',
},
headers: {
Authorization: authHeader,
'Content-Type': 'application/json',
},
});
// Return the project ID from the response
return response.data.id;
} catch (error) {
const axiosError = error as AxiosError;
if (axiosError.response) {
const status = axiosError.response.status;
const errorMessage =
typeof axiosError.response.data === 'object' &&
axiosError.response.data
? (axiosError.response.data as AzureDevOpsApiErrorResponse)
.message || axiosError.message
: axiosError.message;
if (status === 404) {
throw new AzureDevOpsResourceNotFoundError(
`Project not found: ${projectNameOrId}`,
);
}
if (status === 401 || status === 403) {
throw new AzureDevOpsPermissionError(
`Permission denied to access project: ${projectNameOrId}`,
);
}
throw new AzureDevOpsError(
`Failed to get project details: ${errorMessage}`,
);
}
throw new AzureDevOpsError(
`Network error when getting project details: ${axiosError.message}`,
);
}
}
/**
* Creates a new wiki in Azure DevOps
* @param projectId - Project ID or name
* @param params - Parameters for creating the wiki
* @returns The created wiki
*/
async createWiki(projectId: string, params: WikiCreateParameters) {
// Use the default project if not provided
const project = projectId || defaultProject;
try {
// Get the actual project ID (whether the input was a name or ID)
const actualProjectId = await this.getProjectId(project);
// Construct the URL to create the wiki
const url = `${this.baseUrl}/${project}/_apis/wiki/wikis`;
// Get authorization header
const authHeader = await getAuthorizationHeader();
// Make the API request
const response = await axios.post(
url,
{
name: params.name,
type: params.type,
projectId: actualProjectId,
...(params.type === 'codeWiki' && {
repositoryId: params.repositoryId,
mappedPath: params.mappedPath,
version: params.version,
}),
},
{
params: {
'api-version': '7.1',
},
headers: {
Authorization: authHeader,
'Content-Type': 'application/json',
},
},
);
return response.data;
} catch (error) {
const axiosError = error as AxiosError;
// Handle specific error cases
if (axiosError.response) {
const status = axiosError.response.status;
const errorMessage =
typeof axiosError.response.data === 'object' &&
axiosError.response.data
? (axiosError.response.data as AzureDevOpsApiErrorResponse)
.message || axiosError.message
: axiosError.message;
// Handle 404 Not Found
if (status === 404) {
throw new AzureDevOpsResourceNotFoundError(
`Project not found: ${projectId}`,
);
}
// Handle 401 Unauthorized or 403 Forbidden
if (status === 401 || status === 403) {
throw new AzureDevOpsPermissionError(
`Permission denied to create wiki in project: ${projectId}`,
);
}
// Handle validation errors
if (status === 400) {
throw new AzureDevOpsValidationError(
`Invalid wiki creation parameters: ${errorMessage}`,
);
}
// Handle other error statuses
throw new AzureDevOpsError(`Failed to create wiki: ${errorMessage}`);
}
// Handle network errors
throw new AzureDevOpsError(
`Network error when creating wiki: ${axiosError.message}`,
);
}
}
/**
* Gets a wiki page's content
* @param projectId - Project ID or name
* @param wikiId - Wiki ID or name
* @param pagePath - Path of the wiki page
* @returns The wiki page content and ETag
*/
async getPage(projectId: string, wikiId: string, pagePath: string) {
// Use the default project if not provided
const project = projectId || defaultProject;
// Ensure pagePath starts with a forward slash
const normalizedPath = pagePath.startsWith('/') ? pagePath : `/${pagePath}`;
// Construct the URL to get the wiki page
const url = `${this.baseUrl}/${project}/_apis/wiki/wikis/${wikiId}/pages`;
const params: Record<string, string> = {
'api-version': '7.1',
path: normalizedPath,
};
try {
// Get authorization header
const authHeader = await getAuthorizationHeader();
// Make the API request for plain text content
const response = await axios.get(url, {
params,
headers: {
Authorization: authHeader,
Accept: 'text/plain',
'Content-Type': 'application/json',
},
responseType: 'text',
});
// Return both the content and the ETag
return {
content: response.data,
eTag: response.headers.etag?.replace(/"/g, ''), // Remove quotes from ETag
};
} catch (error) {
const axiosError = error as AxiosError;
// Handle specific error cases
if (axiosError.response) {
const status = axiosError.response.status;
const errorMessage =
typeof axiosError.response.data === 'object' &&
axiosError.response.data
? (axiosError.response.data as AzureDevOpsApiErrorResponse)
.message || axiosError.message
: axiosError.message;
// Handle 404 Not Found
if (status === 404) {
throw new AzureDevOpsResourceNotFoundError(
`Wiki page not found: ${pagePath} in wiki ${wikiId}`,
);
}
// Handle 401 Unauthorized or 403 Forbidden
if (status === 401 || status === 403) {
throw new AzureDevOpsPermissionError(
`Permission denied to access wiki page: ${pagePath}`,
);
}
// Handle other error statuses
throw new AzureDevOpsError(
`Failed to get wiki page: ${errorMessage} ${axiosError.response?.data}`,
);
}
// Handle network errors
throw new AzureDevOpsError(
`Network error when getting wiki page: ${axiosError.message}`,
);
}
}
/**
* Creates a new wiki page with the provided content
* @param content - Content for the new wiki page
* @param projectId - Project ID or name
* @param wikiId - Wiki ID or name
* @param pagePath - Path of the wiki page to create
* @param options - Additional options like comment
* @returns The created wiki page
*/
async createPage(
content: string,
projectId: string,
wikiId: string,
pagePath: string,
options?: { comment?: string },
) {
// Use the default project if not provided
const project = projectId || defaultProject;
// Encode the page path, handling forward slashes properly
const encodedPagePath = encodeURIComponent(pagePath).replace(/%2F/g, '/');
// Construct the URL to create the wiki page
const url = `${this.baseUrl}/${project}/_apis/wiki/wikis/${wikiId}/pages`;
const params: Record<string, string> = {
'api-version': '7.1',
path: encodedPagePath,
};
// Prepare the request payload
const payload: Record<string, string> = {
content,
};
// Add comment if provided
if (options?.comment) {
payload.comment = options.comment;
}
try {
// Get authorization header
const authHeader = await getAuthorizationHeader();
// Make the API request
const response = await axios.put(url, payload, {
params,
headers: {
Authorization: authHeader,
'Content-Type': 'application/json',
Accept: 'application/json',
},
});
// The ETag header contains the version
const eTag = response.headers.etag;
// Return the page content along with metadata
return {
...response.data,
version: eTag ? eTag.replace(/"/g, '') : undefined, // Remove quotes from ETag
};
} catch (error) {
const axiosError = error as AxiosError;
// Handle specific error cases
if (axiosError.response) {
const status = axiosError.response.status;
const errorMessage =
typeof axiosError.response.data === 'object' &&
axiosError.response.data
? (axiosError.response.data as AzureDevOpsApiErrorResponse)
.message || axiosError.message
: axiosError.message;
// Handle 404 Not Found - usually means the parent path doesn't exist
if (status === 404) {
throw new AzureDevOpsResourceNotFoundError(
`Cannot create wiki page: parent path for ${pagePath} does not exist`,
);
}
// Handle 401 Unauthorized or 403 Forbidden
if (status === 401 || status === 403) {
throw new AzureDevOpsPermissionError(
`Permission denied to create wiki page: ${pagePath}`,
);
}
// Handle 412 Precondition Failed - page might already exist
if (status === 412) {
throw new AzureDevOpsValidationError(
`Wiki page already exists: ${pagePath}`,
);
}
// Handle 400 Bad Request - usually validation errors
if (status === 400) {
throw new AzureDevOpsValidationError(
`Invalid request when creating wiki page: ${errorMessage}`,
);
}
// Handle other error statuses
throw new AzureDevOpsError(
`Failed to create wiki page: ${errorMessage}`,
);
}
// Handle network errors
throw new AzureDevOpsError(
`Network error when creating wiki page: ${axiosError.message}`,
);
}
}
/**
* Updates a wiki page with the provided content
* @param content - Content for the wiki page
* @param projectId - Project ID or name
* @param wikiId - Wiki ID or name
* @param pagePath - Path of the wiki page
* @param options - Additional options like comment and version
* @returns The updated wiki page
*/
async updatePage(
content: WikiPageContent,
projectId: string,
wikiId: string,
pagePath: string,
options?: PageUpdateOptions,
) {
// Use the default project if not provided
const project = projectId || defaultProject;
// First get the current page version
let currentETag;
try {
const currentPage = await this.getPage(project, wikiId, pagePath);
currentETag = currentPage.eTag;
} catch (error) {
if (error instanceof AzureDevOpsResourceNotFoundError) {
// If page doesn't exist, we'll create it (no If-Match header needed)
currentETag = undefined;
} else {
throw error;
}
}
// Encode the page path, handling forward slashes properly
const encodedPagePath = encodeURIComponent(pagePath).replace(/%2F/g, '/');
// Construct the URL to update the wiki page
const url = `${this.baseUrl}/${project}/_apis/wiki/wikis/${wikiId}/pages`;
const params: Record<string, string> = {
'api-version': '7.1',
path: encodedPagePath,
};
// Add optional comment parameter if provided
if (options?.comment) {
params.comment = options.comment;
}
try {
// Get authorization header
const authHeader = await getAuthorizationHeader();
// Prepare request headers
const headers: Record<string, string> = {
Authorization: authHeader,
'Content-Type': 'application/json',
};
// Add If-Match header if we have an ETag (for updates)
if (currentETag) {
headers['If-Match'] = `"${currentETag}"`; // Wrap in quotes as required by API
}
// Create a properly typed payload
const payload: Record<string, string> = {
content: content.content,
};
// Make the API request
const response = await axios.put(url, payload, {
params,
headers,
});
// The ETag header contains the version
const eTag = response.headers.etag;
// Return the page content along with metadata
return {
...response.data,
version: eTag ? eTag.replace(/"/g, '') : undefined, // Remove quotes from ETag
message:
response.status === 201
? 'Page created successfully'
: 'Page updated successfully',
};
} catch (error) {
const axiosError = error as AxiosError;
// Handle specific error cases
if (axiosError.response) {
const status = axiosError.response.status;
const errorMessage =
typeof axiosError.response.data === 'object' &&
axiosError.response.data
? (axiosError.response.data as AzureDevOpsApiErrorResponse)
.message || axiosError.message
: axiosError.message;
// Handle 404 Not Found
if (status === 404) {
throw new AzureDevOpsResourceNotFoundError(
`Wiki page not found: ${pagePath} in wiki ${wikiId}`,
);
}
// Handle 401 Unauthorized or 403 Forbidden
if (status === 401 || status === 403) {
throw new AzureDevOpsPermissionError(
`Permission denied to update wiki page: ${pagePath}`,
);
}
// Handle 412 Precondition Failed (version conflict)
if (status === 412) {
throw new AzureDevOpsValidationError(
`Version conflict: The wiki page has been modified since you retrieved it. Please get the latest version and try again.`,
);
}
// Handle other error statuses
throw new AzureDevOpsError(
`Failed to update wiki page: ${errorMessage}`,
);
}
// Handle network errors
throw new AzureDevOpsError(
`Network error when updating wiki page: ${axiosError.message}`,
);
}
}
/**
* Lists wiki pages from a wiki using the Pages Batch API
* @param projectId - Project ID or name
* @param wikiId - Wiki ID or name
* @returns Array of wiki page summaries sorted by order then path
*/
async listWikiPages(
projectId: string,
wikiId: string,
): Promise<WikiPageSummary[]> {
// Use the default project if not provided
const project = projectId || defaultProject;
// Construct the URL for the Pages Batch API
const url = `${this.baseUrl}/${project}/_apis/wiki/wikis/${wikiId}/pagesbatch`;
const allPages: WikiPageSummary[] = [];
let continuationToken: string | undefined;
try {
// Get authorization header
const authHeader = await getAuthorizationHeader();
do {
// Prepare the request body
const requestBody: WikiPagesBatchRequest = {
top: 100,
...(continuationToken && { continuationToken }),
};
// Make the API request
const response = await axios.post<WikiPagesBatchResponse>(
url,
requestBody,
{
params: {
'api-version': '7.1',
},
headers: {
Authorization: authHeader,
'Content-Type': 'application/json',
},
},
);
// Add the pages from this batch to our collection
if (response.data.value && Array.isArray(response.data.value)) {
allPages.push(...response.data.value);
}
// Update continuation token for next iteration
continuationToken = response.data.continuationToken;
} while (continuationToken);
// Sort results by order then path
return allPages.sort((a, b) => {
// Handle optional order field
const aOrder = a.order ?? Number.MAX_SAFE_INTEGER;
const bOrder = b.order ?? Number.MAX_SAFE_INTEGER;
if (aOrder !== bOrder) {
return aOrder - bOrder;
}
return a.path.localeCompare(b.path);
});
} catch (error) {
const axiosError = error as AxiosError;
// Handle specific error cases
if (axiosError.response) {
const status = axiosError.response.status;
const errorMessage =
typeof axiosError.response.data === 'object' &&
axiosError.response.data
? (axiosError.response.data as AzureDevOpsApiErrorResponse)
.message || axiosError.message
: axiosError.message;
// Handle 404 Not Found
if (status === 404) {
throw new AzureDevOpsResourceNotFoundError(
`Wiki not found: ${wikiId} in project ${project}`,
);
}
// Handle 401 Unauthorized or 403 Forbidden
if (status === 401 || status === 403) {
throw new AzureDevOpsPermissionError(
`Permission denied to list wiki pages in wiki: ${wikiId}`,
);
}
// Handle other error statuses
throw new AzureDevOpsError(
`Failed to list wiki pages: ${errorMessage}`,
);
}
// Handle network errors
throw new AzureDevOpsError(
`Network error when listing wiki pages: ${axiosError.message}`,
);
}
}
}
/**
* Creates a Wiki client for Azure DevOps operations
* @param options - Options for creating the client
* @returns A Wiki client instance
*/
export async function getWikiClient(
options: ClientOptions,
): Promise<WikiClient> {
const { organizationId } = options;
return new WikiClient(organizationId || defaultOrg);
}
/**
* Get the authorization header for Azure DevOps API requests
* @returns The authorization header
*/
export async function getAuthorizationHeader(): Promise<string> {
try {
// For PAT authentication, we can construct the header directly
if (
process.env.AZURE_DEVOPS_AUTH_METHOD?.toLowerCase() === 'pat' &&
process.env.AZURE_DEVOPS_PAT
) {
// For PAT auth, we can construct the Basic auth header directly
const token = process.env.AZURE_DEVOPS_PAT;
const base64Token = Buffer.from(`:${token}`).toString('base64');
return `Basic ${base64Token}`;
}
// For Azure Identity / Azure CLI auth, we need to get a token
// using the Azure DevOps resource ID
// Choose the appropriate credential based on auth method
const credential =
process.env.AZURE_DEVOPS_AUTH_METHOD?.toLowerCase() === 'azure-cli'
? new AzureCliCredential()
: new DefaultAzureCredential();
// Azure DevOps resource ID for token acquisition
const AZURE_DEVOPS_RESOURCE_ID = '499b84ac-1321-427f-aa17-267ca6975798';
// Get token for Azure DevOps
const token = await credential.getToken(
`${AZURE_DEVOPS_RESOURCE_ID}/.default`,
);
if (!token || !token.token) {
throw new Error('Failed to acquire token for Azure DevOps');
}
return `Bearer ${token.token}`;
} catch (error) {
throw new AzureDevOpsValidationError(
`Failed to get authorization header: ${error instanceof Error ? error.message : String(error)}`,
);
}
}
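// Illustrative environment configuration consumed by getAuthorizationHeader
// (placeholder values; this comment block is an editorial sketch, not part of the original source):
//   AZURE_DEVOPS_AUTH_METHOD=pat         -> Basic auth header built from AZURE_DEVOPS_PAT
//   AZURE_DEVOPS_AUTH_METHOD=azure-cli   -> bearer token acquired via AzureCliCredential
//   any other value                      -> bearer token acquired via DefaultAzureCredential
//   AZURE_DEVOPS_PAT=<personal access token>   (required only for PAT authentication)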
```
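For reference, here is a minimal usage sketch of the wiki client defined above. The import path `./wiki-client`, the organization/project/wiki names, and the page path are placeholders (the module's export location is not shown in this excerpt), and it assumes the authentication environment variables read by `getAuthorizationHeader` are already configured. The first argument to `updatePage` is assumed to be `{ content: string }`, based on how the method reads `content.content`.

```typescript
// Usage sketch (illustrative only; import path and identifiers are placeholders).
import { getWikiClient } from './wiki-client';

async function main(): Promise<void> {
  // organizationId falls back to the configured default org when omitted.
  const client = await getWikiClient({ organizationId: 'my-org' });

  // Enumerate pages; listWikiPages returns summaries sorted by order, then path.
  const pages = await client.listWikiPages('my-project', 'my-wiki');
  console.log(pages.map((p) => p.path));

  // Read a page; the returned eTag is the version used for optimistic concurrency.
  const { content, eTag } = await client.getPage('my-project', 'my-wiki', '/Home');
  console.log(`Fetched /Home at version ${eTag}: ${content.length} chars`);

  // Update the page; updatePage re-fetches the current ETag internally and
  // sends it as an If-Match header, so a 412 means someone else changed the page.
  await client.updatePage(
    { content: `${content}\n\nUpdated by the usage sketch.` },
    'my-project',
    'my-wiki',
    '/Home',
    { comment: 'Automated update' },
  );
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
```

Because `updatePage` resolves the ETag itself, callers do not pass a version explicitly; a 412 response surfaces as an `AzureDevOpsValidationError` asking the caller to re-read the page and retry.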