This is page 2 of 6. Use http://codebase.md/aashari/mcp-server-atlassian-bitbucket?lines=true&page={x} to view the full context. # Directory Structure ``` ├── .env.example ├── .github │ ├── dependabot.yml │ └── workflows │ ├── ci-dependabot-auto-merge.yml │ ├── ci-dependency-check.yml │ └── ci-semantic-release.yml ├── .gitignore ├── .gitkeep ├── .npmignore ├── .npmrc ├── .prettierrc ├── .releaserc.json ├── .trigger-ci ├── CHANGELOG.md ├── eslint.config.mjs ├── jest.setup.js ├── package-lock.json ├── package.json ├── README.md ├── scripts │ ├── ensure-executable.js │ ├── package.json │ └── update-version.js ├── src │ ├── cli │ │ ├── atlassian.diff.cli.ts │ │ ├── atlassian.pullrequests.cli.test.ts │ │ ├── atlassian.pullrequests.cli.ts │ │ ├── atlassian.repositories.cli.test.ts │ │ ├── atlassian.repositories.cli.ts │ │ ├── atlassian.search.cli.test.ts │ │ ├── atlassian.search.cli.ts │ │ ├── atlassian.workspaces.cli.test.ts │ │ ├── atlassian.workspaces.cli.ts │ │ └── index.ts │ ├── controllers │ │ ├── atlassian.diff.controller.ts │ │ ├── atlassian.diff.formatter.ts │ │ ├── atlassian.pullrequests.approve.controller.ts │ │ ├── atlassian.pullrequests.base.controller.ts │ │ ├── atlassian.pullrequests.comments.controller.ts │ │ ├── atlassian.pullrequests.controller.test.ts │ │ ├── atlassian.pullrequests.controller.ts │ │ ├── atlassian.pullrequests.create.controller.ts │ │ ├── atlassian.pullrequests.formatter.ts │ │ ├── atlassian.pullrequests.get.controller.ts │ │ ├── atlassian.pullrequests.list.controller.ts │ │ ├── atlassian.pullrequests.reject.controller.ts │ │ ├── atlassian.pullrequests.update.controller.ts │ │ ├── atlassian.repositories.branch.controller.ts │ │ ├── atlassian.repositories.commit.controller.ts │ │ ├── atlassian.repositories.content.controller.ts │ │ ├── atlassian.repositories.controller.test.ts │ │ ├── atlassian.repositories.details.controller.ts │ │ ├── atlassian.repositories.formatter.ts │ │ ├── atlassian.repositories.list.controller.ts │ │ ├── atlassian.search.code.controller.ts │ │ ├── atlassian.search.content.controller.ts │ │ ├── atlassian.search.controller.test.ts │ │ ├── atlassian.search.controller.ts │ │ ├── atlassian.search.formatter.ts │ │ ├── atlassian.search.pullrequests.controller.ts │ │ ├── atlassian.search.repositories.controller.ts │ │ ├── atlassian.workspaces.controller.test.ts │ │ ├── atlassian.workspaces.controller.ts │ │ └── atlassian.workspaces.formatter.ts │ ├── index.ts │ ├── services │ │ ├── vendor.atlassian.pullrequests.service.ts │ │ ├── vendor.atlassian.pullrequests.test.ts │ │ ├── vendor.atlassian.pullrequests.types.ts │ │ ├── vendor.atlassian.repositories.diff.service.ts │ │ ├── vendor.atlassian.repositories.diff.types.ts │ │ ├── vendor.atlassian.repositories.service.test.ts │ │ ├── vendor.atlassian.repositories.service.ts │ │ ├── vendor.atlassian.repositories.types.ts │ │ ├── vendor.atlassian.search.service.ts │ │ ├── vendor.atlassian.search.types.ts │ │ ├── vendor.atlassian.workspaces.service.ts │ │ ├── vendor.atlassian.workspaces.test.ts │ │ └── vendor.atlassian.workspaces.types.ts │ ├── tools │ │ ├── atlassian.diff.tool.ts │ │ ├── atlassian.diff.types.ts │ │ ├── atlassian.pullrequests.tool.ts │ │ ├── atlassian.pullrequests.types.test.ts │ │ ├── atlassian.pullrequests.types.ts │ │ ├── atlassian.repositories.tool.ts │ │ ├── atlassian.repositories.types.ts │ │ ├── atlassian.search.tool.ts │ │ ├── atlassian.search.types.ts │ │ ├── atlassian.workspaces.tool.ts │ │ └── atlassian.workspaces.types.ts │ ├── types │ │ └── common.types.ts │ └── utils │ ├── 
adf.util.test.ts │ ├── adf.util.ts │ ├── atlassian.util.ts │ ├── bitbucket-error-detection.test.ts │ ├── cli.test.util.ts │ ├── config.util.test.ts │ ├── config.util.ts │ ├── constants.util.ts │ ├── defaults.util.ts │ ├── diff.util.ts │ ├── error-handler.util.test.ts │ ├── error-handler.util.ts │ ├── error.util.test.ts │ ├── error.util.ts │ ├── formatter.util.ts │ ├── logger.util.ts │ ├── markdown.util.test.ts │ ├── markdown.util.ts │ ├── pagination.util.ts │ ├── path.util.test.ts │ ├── path.util.ts │ ├── query.util.ts │ ├── shell.util.ts │ ├── transport.util.test.ts │ ├── transport.util.ts │ └── workspace.util.ts ├── STYLE_GUIDE.md └── tsconfig.json ``` # Files -------------------------------------------------------------------------------- /src/utils/adf.util.test.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { adfToMarkdown } from './adf.util.js'; 2 | 3 | describe('ADF Utility', () => { 4 | describe('adfToMarkdown', () => { 5 | it('should handle empty or undefined input', () => { 6 | expect(adfToMarkdown(null)).toBe(''); 7 | expect(adfToMarkdown(undefined)).toBe(''); 8 | expect(adfToMarkdown('')).toBe(''); 9 | }); 10 | 11 | it('should handle non-ADF string input', () => { 12 | expect(adfToMarkdown('plain text')).toBe('plain text'); 13 | }); 14 | 15 | it('should convert basic paragraph', () => { 16 | const adf = { 17 | type: 'doc', 18 | version: 1, 19 | content: [ 20 | { 21 | type: 'paragraph', 22 | content: [ 23 | { 24 | type: 'text', 25 | text: 'This is a paragraph', 26 | }, 27 | ], 28 | }, 29 | ], 30 | }; 31 | 32 | expect(adfToMarkdown(adf)).toBe('This is a paragraph'); 33 | }); 34 | 35 | it('should convert multiple paragraphs', () => { 36 | const adf = { 37 | type: 'doc', 38 | version: 1, 39 | content: [ 40 | { 41 | type: 'paragraph', 42 | content: [ 43 | { 44 | type: 'text', 45 | text: 'First paragraph', 46 | }, 47 | ], 48 | }, 49 | { 50 | type: 'paragraph', 51 | content: [ 52 | { 53 | type: 'text', 54 | text: 'Second paragraph', 55 | }, 56 | ], 57 | }, 58 | ], 59 | }; 60 | 61 | expect(adfToMarkdown(adf)).toBe( 62 | 'First paragraph\n\nSecond paragraph', 63 | ); 64 | }); 65 | 66 | it('should convert headings', () => { 67 | const adf = { 68 | type: 'doc', 69 | version: 1, 70 | content: [ 71 | { 72 | type: 'heading', 73 | attrs: { level: 1 }, 74 | content: [ 75 | { 76 | type: 'text', 77 | text: 'Heading 1', 78 | }, 79 | ], 80 | }, 81 | { 82 | type: 'heading', 83 | attrs: { level: 2 }, 84 | content: [ 85 | { 86 | type: 'text', 87 | text: 'Heading 2', 88 | }, 89 | ], 90 | }, 91 | ], 92 | }; 93 | 94 | expect(adfToMarkdown(adf)).toBe('# Heading 1\n\n## Heading 2'); 95 | }); 96 | 97 | it('should convert text with marks', () => { 98 | const adf = { 99 | type: 'doc', 100 | version: 1, 101 | content: [ 102 | { 103 | type: 'paragraph', 104 | content: [ 105 | { 106 | type: 'text', 107 | text: 'Bold', 108 | marks: [{ type: 'strong' }], 109 | }, 110 | { 111 | type: 'text', 112 | text: ' and ', 113 | }, 114 | { 115 | type: 'text', 116 | text: 'italic', 117 | marks: [{ type: 'em' }], 118 | }, 119 | { 120 | type: 'text', 121 | text: ' and ', 122 | }, 123 | { 124 | type: 'text', 125 | text: 'code', 126 | marks: [{ type: 'code' }], 127 | }, 128 | ], 129 | }, 130 | ], 131 | }; 132 | 133 | expect(adfToMarkdown(adf)).toBe('**Bold** and *italic* and `code`'); 134 | }); 135 | 136 | it('should convert bullet lists', () => { 137 | const adf = { 138 | type: 'doc', 139 | version: 1, 140 | content: [ 141 | { 142 | type: 'bulletList', 143 | content: 
[ 144 | { 145 | type: 'listItem', 146 | content: [ 147 | { 148 | type: 'paragraph', 149 | content: [ 150 | { 151 | type: 'text', 152 | text: 'Item 1', 153 | }, 154 | ], 155 | }, 156 | ], 157 | }, 158 | { 159 | type: 'listItem', 160 | content: [ 161 | { 162 | type: 'paragraph', 163 | content: [ 164 | { 165 | type: 'text', 166 | text: 'Item 2', 167 | }, 168 | ], 169 | }, 170 | ], 171 | }, 172 | ], 173 | }, 174 | ], 175 | }; 176 | 177 | expect(adfToMarkdown(adf)).toBe('- Item 1\n- Item 2'); 178 | }); 179 | 180 | it('should convert code blocks', () => { 181 | const adf = { 182 | type: 'doc', 183 | version: 1, 184 | content: [ 185 | { 186 | type: 'codeBlock', 187 | attrs: { language: 'javascript' }, 188 | content: [ 189 | { 190 | type: 'text', 191 | text: 'const x = 1;', 192 | }, 193 | ], 194 | }, 195 | ], 196 | }; 197 | 198 | expect(adfToMarkdown(adf)).toBe('```javascript\nconst x = 1;\n```'); 199 | }); 200 | 201 | it('should convert links', () => { 202 | const adf = { 203 | type: 'doc', 204 | version: 1, 205 | content: [ 206 | { 207 | type: 'paragraph', 208 | content: [ 209 | { 210 | type: 'text', 211 | text: 'Visit', 212 | }, 213 | { 214 | type: 'text', 215 | text: ' Atlassian', 216 | marks: [ 217 | { 218 | type: 'link', 219 | attrs: { 220 | href: 'https://atlassian.com', 221 | }, 222 | }, 223 | ], 224 | }, 225 | ], 226 | }, 227 | ], 228 | }; 229 | 230 | expect(adfToMarkdown(adf)).toBe( 231 | 'Visit[ Atlassian](https://atlassian.com)', 232 | ); 233 | }); 234 | }); 235 | }); 236 | ``` -------------------------------------------------------------------------------- /src/controllers/atlassian.pullrequests.base.controller.ts: -------------------------------------------------------------------------------- ```typescript 1 | import atlassianPullRequestsService from '../services/vendor.atlassian.pullrequests.service.js'; 2 | import { Logger } from '../utils/logger.util.js'; 3 | import { handleControllerError } from '../utils/error-handler.util.js'; 4 | import { 5 | extractPaginationInfo, 6 | PaginationType, 7 | } from '../utils/pagination.util.js'; 8 | import { formatPagination } from '../utils/formatter.util.js'; 9 | import { 10 | formatPullRequestsList, 11 | formatPullRequestDetails, 12 | formatPullRequestComments, 13 | } from './atlassian.pullrequests.formatter.js'; 14 | import { 15 | PullRequestComment, 16 | PullRequestCommentsResponse, 17 | } from '../services/vendor.atlassian.pullrequests.types.js'; 18 | import { DEFAULT_PAGE_SIZE, applyDefaults } from '../utils/defaults.util.js'; 19 | import { extractDiffSnippet } from '../utils/diff.util.js'; 20 | import { optimizeBitbucketMarkdown } from '../utils/formatter.util.js'; 21 | import { getDefaultWorkspace } from '../utils/workspace.util.js'; 22 | 23 | /** 24 | * Base controller for managing Bitbucket pull requests. 25 | * Contains shared utilities and types used by the specific PR controller files. 26 | * 27 | * NOTE ON MARKDOWN HANDLING: 28 | * Unlike Jira (which uses ADF) or Confluence (which uses a mix of formats), 29 | * Bitbucket Cloud API natively accepts Markdown for text content in both directions: 30 | * - When sending data TO the API (comments, PR descriptions) 31 | * - When receiving data FROM the API (PR descriptions, comments) 32 | * 33 | * The API expects content in the format: { content: { raw: "markdown-text" } } 34 | * 35 | * We use optimizeBitbucketMarkdown() to address specific rendering quirks in 36 | * Bitbucket's markdown renderer but it does NOT perform format conversion. 
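 *
 * Illustrative payload (a sketch based on the CreateCommentParams type defined later
 * in this file, not a verbatim API capture; the path and numeric IDs are hypothetical):
 * an inline PR comment is sent roughly as
 *   { content: { raw: "**Markdown** text" }, inline: { path: "src/index.ts", to: 42 } }
 * while a threaded reply only needs
 *   { content: { raw: "..." }, parent: { id: 123 } }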
37 | * See formatter.util.ts for details on the specific issues it addresses. 38 | */ 39 | 40 | // Define an extended type for internal use within the controller/formatter 41 | // to include the code snippet. 42 | export interface PullRequestCommentWithSnippet extends PullRequestComment { 43 | codeSnippet?: string; 44 | } 45 | 46 | // Define a service-specific type for listing comments 47 | export type ListCommentsParams = { 48 | workspace: string; 49 | repo_slug: string; 50 | pull_request_id: number; 51 | pagelen?: number; 52 | page?: number; 53 | }; 54 | 55 | // Define a service-specific type for creating comments 56 | export type CreateCommentParams = { 57 | workspace: string; 58 | repo_slug: string; 59 | pull_request_id: number; 60 | content: { 61 | raw: string; 62 | }; 63 | inline?: { 64 | path: string; 65 | to?: number; 66 | }; 67 | parent?: { 68 | id: number; 69 | }; 70 | }; 71 | 72 | // Helper function to enhance comments with code snippets 73 | export async function enhanceCommentsWithSnippets( 74 | commentsData: PullRequestCommentsResponse, 75 | controllerMethodName: string, // To contextualize logs 76 | ): Promise<PullRequestCommentWithSnippet[]> { 77 | const methodLogger = Logger.forContext( 78 | `controllers/atlassian.pullrequests.base.controller.ts`, 79 | controllerMethodName, // Use provided method name for logger context 80 | ); 81 | const commentsWithSnippets: PullRequestCommentWithSnippet[] = []; 82 | 83 | if (!commentsData.values || commentsData.values.length === 0) { 84 | return []; 85 | } 86 | 87 | for (const comment of commentsData.values) { 88 | let snippet = undefined; 89 | if ( 90 | comment.inline && 91 | comment.links?.code?.href && 92 | comment.inline.to !== undefined 93 | ) { 94 | try { 95 | methodLogger.debug( 96 | `Fetching diff for inline comment ${comment.id} from ${comment.links.code.href}`, 97 | ); 98 | const diffContent = 99 | await atlassianPullRequestsService.getDiffForUrl( 100 | comment.links.code.href, 101 | ); 102 | snippet = extractDiffSnippet(diffContent, comment.inline.to); 103 | methodLogger.debug( 104 | `Extracted snippet for comment ${comment.id} (length: ${snippet?.length})`, 105 | ); 106 | } catch (snippetError) { 107 | methodLogger.warn( 108 | `Failed to fetch or parse snippet for comment ${comment.id}:`, 109 | snippetError, 110 | ); 111 | // Continue without snippet if fetching/parsing fails 112 | } 113 | } 114 | commentsWithSnippets.push({ ...comment, codeSnippet: snippet }); 115 | } 116 | return commentsWithSnippets; 117 | } 118 | 119 | export { 120 | atlassianPullRequestsService, 121 | Logger, 122 | handleControllerError, 123 | extractPaginationInfo, 124 | PaginationType, 125 | formatPagination, 126 | formatPullRequestsList, 127 | formatPullRequestDetails, 128 | formatPullRequestComments, 129 | DEFAULT_PAGE_SIZE, 130 | applyDefaults, 131 | optimizeBitbucketMarkdown, 132 | getDefaultWorkspace, 133 | }; 134 | ``` -------------------------------------------------------------------------------- /src/utils/bitbucket-error-detection.test.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { describe, expect, test } from '@jest/globals'; 2 | import { detectErrorType, ErrorCode } from './error-handler.util.js'; 3 | import { createApiError } from './error.util.js'; 4 | 5 | describe('Bitbucket Error Detection', () => { 6 | describe('Classic Bitbucket error structure: { error: { message, detail } }', () => { 7 | test('detects not found errors', () => { 8 | // Create a mock Bitbucket error 
structure 9 | const bitbucketError = { 10 | error: { 11 | message: 'Repository not found', 12 | detail: 'The repository does not exist or you do not have access', 13 | }, 14 | }; 15 | const mcpError = createApiError('API Error', 404, bitbucketError); 16 | 17 | const result = detectErrorType(mcpError); 18 | expect(result).toEqual({ 19 | code: ErrorCode.NOT_FOUND, 20 | statusCode: 404, 21 | }); 22 | }); 23 | 24 | test('detects access denied errors', () => { 25 | const bitbucketError = { 26 | error: { 27 | message: 'Access denied to this repository', 28 | detail: 'You need admin permissions to perform this action', 29 | }, 30 | }; 31 | const mcpError = createApiError('API Error', 403, bitbucketError); 32 | 33 | const result = detectErrorType(mcpError); 34 | expect(result).toEqual({ 35 | code: ErrorCode.ACCESS_DENIED, 36 | statusCode: 403, 37 | }); 38 | }); 39 | 40 | test('detects validation errors', () => { 41 | const bitbucketError = { 42 | error: { 43 | message: 'Invalid parameter: repository name', 44 | detail: 'Repository name can only contain alphanumeric characters', 45 | }, 46 | }; 47 | const mcpError = createApiError('API Error', 400, bitbucketError); 48 | 49 | const result = detectErrorType(mcpError); 50 | expect(result).toEqual({ 51 | code: ErrorCode.VALIDATION_ERROR, 52 | statusCode: 400, 53 | }); 54 | }); 55 | 56 | test('detects rate limit errors', () => { 57 | const bitbucketError = { 58 | error: { 59 | message: 'Too many requests', 60 | detail: 'Rate limit exceeded. Try again later.', 61 | }, 62 | }; 63 | const mcpError = createApiError('API Error', 429, bitbucketError); 64 | 65 | const result = detectErrorType(mcpError); 66 | expect(result).toEqual({ 67 | code: ErrorCode.RATE_LIMIT_ERROR, 68 | statusCode: 429, 69 | }); 70 | }); 71 | }); 72 | 73 | describe('Alternate Bitbucket error structure: { type: "error", ... }', () => { 74 | test('detects not found errors', () => { 75 | const altBitbucketError = { 76 | type: 'error', 77 | status: 404, 78 | message: 'Resource not found', 79 | }; 80 | const mcpError = createApiError( 81 | 'API Error', 82 | 404, 83 | altBitbucketError, 84 | ); 85 | 86 | const result = detectErrorType(mcpError); 87 | expect(result).toEqual({ 88 | code: ErrorCode.NOT_FOUND, 89 | statusCode: 404, 90 | }); 91 | }); 92 | 93 | test('detects access denied errors', () => { 94 | const altBitbucketError = { 95 | type: 'error', 96 | status: 403, 97 | message: 'Forbidden', 98 | }; 99 | const mcpError = createApiError( 100 | 'API Error', 101 | 403, 102 | altBitbucketError, 103 | ); 104 | 105 | const result = detectErrorType(mcpError); 106 | expect(result).toEqual({ 107 | code: ErrorCode.ACCESS_DENIED, 108 | statusCode: 403, 109 | }); 110 | }); 111 | }); 112 | 113 | describe('Bitbucket errors array structure: { errors: [{ ... 
}] }', () => { 114 | test('detects errors from array structure', () => { 115 | const arrayBitbucketError = { 116 | errors: [ 117 | { 118 | status: 400, 119 | code: 'INVALID_REQUEST_PARAMETER', 120 | title: 'Invalid parameter value', 121 | message: 'The parameter is not valid', 122 | }, 123 | ], 124 | }; 125 | const mcpError = createApiError( 126 | 'API Error', 127 | 400, 128 | arrayBitbucketError, 129 | ); 130 | 131 | const result = detectErrorType(mcpError); 132 | expect(result).toEqual({ 133 | code: ErrorCode.VALIDATION_ERROR, 134 | statusCode: 400, 135 | }); 136 | }); 137 | }); 138 | 139 | describe('Network errors in Bitbucket context', () => { 140 | test('detects network errors from TypeError', () => { 141 | const networkError = new TypeError('Failed to fetch'); 142 | const mcpError = createApiError('Network Error', 500, networkError); 143 | 144 | const result = detectErrorType(mcpError); 145 | expect(result).toEqual({ 146 | code: ErrorCode.NETWORK_ERROR, 147 | statusCode: 500, 148 | }); 149 | }); 150 | 151 | test('detects other common network error messages', () => { 152 | const errorMessages = [ 153 | 'network error occurred', 154 | 'ECONNREFUSED', 155 | 'ENOTFOUND', 156 | 'Network request failed', 157 | 'Failed to fetch', 158 | ]; 159 | 160 | errorMessages.forEach((msg) => { 161 | const error = new Error(msg); 162 | const result = detectErrorType(error); 163 | expect(result).toEqual({ 164 | code: ErrorCode.NETWORK_ERROR, 165 | statusCode: 500, 166 | }); 167 | }); 168 | }); 169 | }); 170 | }); 171 | ``` -------------------------------------------------------------------------------- /src/controllers/atlassian.workspaces.formatter.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { 2 | WorkspaceDetailed, 3 | WorkspacePermissionsResponse, 4 | WorkspaceMembership, 5 | } from '../services/vendor.atlassian.workspaces.types.js'; 6 | import { 7 | formatUrl, 8 | formatHeading, 9 | formatBulletList, 10 | formatSeparator, 11 | formatNumberedList, 12 | formatDate, 13 | } from '../utils/formatter.util.js'; 14 | 15 | /** 16 | * Format a list of workspaces for display 17 | * @param workspacesData - Raw workspaces data from the API 18 | * @returns Formatted string with workspaces information in markdown format 19 | */ 20 | export function formatWorkspacesList( 21 | workspacesData: WorkspacePermissionsResponse, 22 | ): string { 23 | const workspaces = workspacesData.values || []; 24 | 25 | if (workspaces.length === 0) { 26 | return 'No workspaces found matching your criteria.'; 27 | } 28 | 29 | const lines: string[] = [formatHeading('Bitbucket Workspaces', 1), '']; 30 | 31 | // Format each workspace with its details 32 | const formattedList = formatNumberedList( 33 | workspaces, 34 | (membership, index) => { 35 | const workspace = membership.workspace; 36 | const itemLines: string[] = []; 37 | itemLines.push(formatHeading(workspace.name, 2)); 38 | 39 | // Basic information 40 | const properties: Record<string, unknown> = { 41 | UUID: workspace.uuid, 42 | Slug: workspace.slug, 43 | 'Permission Level': membership.permission || 'Unknown', 44 | 'Last Accessed': membership.last_accessed 45 | ? formatDate(new Date(membership.last_accessed)) 46 | : 'N/A', 47 | 'Added On': membership.added_on 48 | ? formatDate(new Date(membership.added_on)) 49 | : 'N/A', 50 | 'Web URL': workspace.links?.html?.href 51 | ? 
formatUrl(workspace.links.html.href, workspace.slug) 52 | : formatUrl( 53 | `https://bitbucket.org/${workspace.slug}/`, 54 | workspace.slug, 55 | ), 56 | User: 57 | membership.user?.display_name || 58 | membership.user?.nickname || 59 | 'Unknown', 60 | }; 61 | 62 | // Format as a bullet list 63 | itemLines.push(formatBulletList(properties, (key) => key)); 64 | 65 | // Add separator between workspaces except for the last one 66 | if (index < workspaces.length - 1) { 67 | itemLines.push(''); 68 | itemLines.push(formatSeparator()); 69 | } 70 | 71 | return itemLines.join('\n'); 72 | }, 73 | ); 74 | 75 | lines.push(formattedList); 76 | 77 | // Add standard footer with timestamp 78 | lines.push('\n\n' + formatSeparator()); 79 | lines.push(`*Information retrieved at: ${formatDate(new Date())}*`); 80 | 81 | return lines.join('\n'); 82 | } 83 | 84 | /** 85 | * Format detailed workspace information for display 86 | * @param workspace - Raw workspace data from the API 87 | * @param membership - Optional membership information for the workspace 88 | * @returns Formatted string with workspace details in markdown format 89 | */ 90 | export function formatWorkspaceDetails( 91 | workspace: WorkspaceDetailed, 92 | membership?: WorkspaceMembership, 93 | ): string { 94 | const lines: string[] = [ 95 | formatHeading(`Workspace: ${workspace.name}`, 1), 96 | '', 97 | formatHeading('Basic Information', 2), 98 | ]; 99 | 100 | // Format basic information as a bullet list 101 | const basicProperties: Record<string, unknown> = { 102 | UUID: workspace.uuid, 103 | Slug: workspace.slug, 104 | Type: workspace.type || 'Not specified', 105 | 'Created On': workspace.created_on 106 | ? formatDate(workspace.created_on) 107 | : 'N/A', 108 | }; 109 | 110 | lines.push(formatBulletList(basicProperties, (key) => key)); 111 | 112 | // Add membership information if available 113 | if (membership) { 114 | lines.push(''); 115 | lines.push(formatHeading('Your Membership', 2)); 116 | 117 | const membershipProperties: Record<string, unknown> = { 118 | Permission: membership.permission, 119 | 'Last Accessed': membership.last_accessed 120 | ? formatDate(membership.last_accessed) 121 | : 'N/A', 122 | 'Added On': membership.added_on 123 | ? 
formatDate(membership.added_on) 124 | : 'N/A', 125 | }; 126 | 127 | lines.push(formatBulletList(membershipProperties, (key) => key)); 128 | } 129 | 130 | // Add links 131 | lines.push(''); 132 | lines.push(formatHeading('Links', 2)); 133 | 134 | const links: string[] = []; 135 | 136 | if (workspace.links.html?.href) { 137 | links.push( 138 | `- ${formatUrl(workspace.links.html.href, 'View in Browser')}`, 139 | ); 140 | } 141 | if (workspace.links.repositories?.href) { 142 | links.push( 143 | `- ${formatUrl(workspace.links.repositories.href, 'Repositories')}`, 144 | ); 145 | } 146 | if (workspace.links.projects?.href) { 147 | links.push(`- ${formatUrl(workspace.links.projects.href, 'Projects')}`); 148 | } 149 | if (workspace.links.snippets?.href) { 150 | links.push(`- ${formatUrl(workspace.links.snippets.href, 'Snippets')}`); 151 | } 152 | 153 | lines.push(links.join('\n')); 154 | 155 | // Add standard footer with timestamp 156 | lines.push('\n\n' + formatSeparator()); 157 | lines.push(`*Information retrieved at: ${formatDate(new Date())}*`); 158 | 159 | return lines.join('\n'); 160 | } 161 | ``` -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- ```json 1 | { 2 | "name": "@aashari/mcp-server-atlassian-bitbucket", 3 | "version": "1.45.0", 4 | "description": "Node.js/TypeScript MCP server for Atlassian Bitbucket. Enables AI systems (LLMs) to interact with workspaces, repositories, and pull requests via tools (list, get, comment, search). Connects AI directly to version control workflows through the standard MCP interface.", 5 | "main": "dist/index.js", 6 | "types": "dist/index.d.ts", 7 | "type": "commonjs", 8 | "repository": { 9 | "type": "git", 10 | "url": "https://github.com/aashari/mcp-server-atlassian-bitbucket.git" 11 | }, 12 | "bin": { 13 | "mcp-atlassian-bitbucket": "./dist/index.js" 14 | }, 15 | "scripts": { 16 | "build": "tsc", 17 | "prepare": "npm run build && node scripts/ensure-executable.js", 18 | "postinstall": "node scripts/ensure-executable.js", 19 | "test": "jest", 20 | "test:coverage": "jest --coverage", 21 | "test:cli": "jest src/cli/.*\\.cli\\.test\\.ts --runInBand --testTimeout=60000", 22 | "lint": "eslint src --ext .ts --config eslint.config.mjs", 23 | "format": "prettier --write 'src/**/*.ts' 'scripts/**/*.js'", 24 | "publish:npm": "npm publish", 25 | "update:check": "npx npm-check-updates", 26 | "update:deps": "npx npm-check-updates -u && npm install --legacy-peer-deps", 27 | "update:version": "node scripts/update-version.js", 28 | "mcp:stdio": "TRANSPORT_MODE=stdio npm run build && node dist/index.js", 29 | "mcp:http": "TRANSPORT_MODE=http npm run build && node dist/index.js", 30 | "mcp:inspect": "TRANSPORT_MODE=http npm run build && (node dist/index.js &) && sleep 2 && npx @modelcontextprotocol/inspector http://localhost:3000/mcp", 31 | "dev:stdio": "npm run build && npx @modelcontextprotocol/inspector -e TRANSPORT_MODE=stdio -e DEBUG=true node dist/index.js", 32 | "dev:http": "DEBUG=true TRANSPORT_MODE=http npm run build && node dist/index.js", 33 | "dev:server": "DEBUG=true npm run build && npx @modelcontextprotocol/inspector -e DEBUG=true node dist/index.js", 34 | "dev:cli": "DEBUG=true npm run build && DEBUG=true node dist/index.js", 35 | "start:server": "npm run build && npx @modelcontextprotocol/inspector node dist/index.js", 36 | "start:cli": "npm run build && node dist/index.js" 37 | }, 38 | "keywords": [ 39 | "mcp", 40 | 
"typescript", 41 | "claude", 42 | "anthropic", 43 | "ai", 44 | "atlassian", 45 | "bitbucket", 46 | "repository", 47 | "version-control", 48 | "pull-request", 49 | "server", 50 | "model-context-protocol", 51 | "tools", 52 | "resources", 53 | "tooling", 54 | "ai-integration", 55 | "mcp-server", 56 | "llm", 57 | "ai-connector", 58 | "external-tools", 59 | "cli", 60 | "mcp-inspector" 61 | ], 62 | "author": "", 63 | "license": "ISC", 64 | "devDependencies": { 65 | "@eslint/js": "^9.35.0", 66 | "@semantic-release/changelog": "^6.0.3", 67 | "@semantic-release/exec": "^7.1.0", 68 | "@semantic-release/git": "^10.0.1", 69 | "@semantic-release/github": "^11.0.5", 70 | "@semantic-release/npm": "^12.0.2", 71 | "@types/cors": "^2.8.19", 72 | "@types/express": "^5.0.3", 73 | "@types/jest": "^30.0.0", 74 | "@types/node": "^24.3.1", 75 | "@types/turndown": "^5.0.5", 76 | "@typescript-eslint/eslint-plugin": "^8.43.0", 77 | "@typescript-eslint/parser": "^8.43.0", 78 | "eslint": "^9.35.0", 79 | "eslint-config-prettier": "^10.1.8", 80 | "eslint-plugin-filenames": "^1.3.2", 81 | "eslint-plugin-prettier": "^5.5.4", 82 | "jest": "^30.1.3", 83 | "nodemon": "^3.1.10", 84 | "npm-check-updates": "^18.1.0", 85 | "prettier": "^3.6.2", 86 | "semantic-release": "^24.2.7", 87 | "ts-jest": "^29.4.1", 88 | "ts-node": "^10.9.2", 89 | "typescript": "^5.9.2", 90 | "typescript-eslint": "^8.43.0" 91 | }, 92 | "publishConfig": { 93 | "registry": "https://registry.npmjs.org/", 94 | "access": "public" 95 | }, 96 | "dependencies": { 97 | "@modelcontextprotocol/sdk": "^1.17.5", 98 | "commander": "^14.0.0", 99 | "cors": "^2.8.5", 100 | "dotenv": "^17.2.2", 101 | "express": "^5.1.0", 102 | "turndown": "^7.2.1", 103 | "zod": "^3.25.76" 104 | }, 105 | "directories": { 106 | "example": "examples" 107 | }, 108 | "jest": { 109 | "preset": "ts-jest", 110 | "testEnvironment": "node", 111 | "setupFilesAfterEnv": [ 112 | "<rootDir>/jest.setup.js" 113 | ], 114 | "testMatch": [ 115 | "**/src/**/*.test.ts" 116 | ], 117 | "collectCoverageFrom": [ 118 | "src/**/*.ts", 119 | "!src/**/*.test.ts", 120 | "!src/**/*.spec.ts" 121 | ], 122 | "coveragePathIgnorePatterns": [ 123 | "/node_modules/", 124 | "/dist/", 125 | "/coverage/" 126 | ], 127 | "coverageReporters": [ 128 | "text", 129 | "lcov", 130 | "json-summary" 131 | ], 132 | "transform": { 133 | "^.+\\.tsx?$": [ 134 | "ts-jest", 135 | { 136 | "useESM": true 137 | } 138 | ] 139 | }, 140 | "moduleNameMapper": { 141 | "(.*)\\.(js|jsx)$": "$1" 142 | }, 143 | "extensionsToTreatAsEsm": [ 144 | ".ts" 145 | ], 146 | "moduleFileExtensions": [ 147 | "ts", 148 | "tsx", 149 | "js", 150 | "jsx", 151 | "json", 152 | "node" 153 | ] 154 | }, 155 | "engines": { 156 | "node": ">=18.0.0" 157 | } 158 | } 159 | ``` -------------------------------------------------------------------------------- /src/controllers/atlassian.pullrequests.controller.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { ControllerResponse } from '../types/common.types.js'; 2 | import { 3 | ListPullRequestsToolArgsType, 4 | GetPullRequestToolArgsType, 5 | ListPullRequestCommentsToolArgsType, 6 | CreatePullRequestCommentToolArgsType, 7 | CreatePullRequestToolArgsType, 8 | UpdatePullRequestToolArgsType, 9 | ApprovePullRequestToolArgsType, 10 | RejectPullRequestToolArgsType, 11 | } from '../tools/atlassian.pullrequests.types.js'; 12 | 13 | import listController from './atlassian.pullrequests.list.controller.js'; 14 | import getController from './atlassian.pullrequests.get.controller.js'; 15 
| import commentsController from './atlassian.pullrequests.comments.controller.js'; 16 | import createController from './atlassian.pullrequests.create.controller.js'; 17 | import updateController from './atlassian.pullrequests.update.controller.js'; 18 | import approveController from './atlassian.pullrequests.approve.controller.js'; 19 | import rejectController from './atlassian.pullrequests.reject.controller.js'; 20 | 21 | /** 22 | * Controller for managing Bitbucket pull requests. 23 | * Provides functionality for listing, retrieving, and creating pull requests and comments. 24 | * 25 | * NOTE ON MARKDOWN HANDLING: 26 | * Unlike Jira (which uses ADF) or Confluence (which uses a mix of formats), 27 | * Bitbucket Cloud API natively accepts Markdown for text content in both directions: 28 | * - When sending data TO the API (comments, PR descriptions) 29 | * - When receiving data FROM the API (PR descriptions, comments) 30 | * 31 | * The API expects content in the format: { content: { raw: "markdown-text" } } 32 | * 33 | * We use optimizeBitbucketMarkdown() to address specific rendering quirks in 34 | * Bitbucket's markdown renderer but it does NOT perform format conversion. 35 | * See formatter.util.ts for details on the specific issues it addresses. 36 | */ 37 | 38 | /** 39 | * List Bitbucket pull requests with optional filtering options 40 | * @param options - Options for listing pull requests including workspace slug and repo slug 41 | * @returns Promise with formatted pull requests list content and pagination information 42 | */ 43 | async function list( 44 | options: ListPullRequestsToolArgsType, 45 | ): Promise<ControllerResponse> { 46 | return listController.list(options); 47 | } 48 | 49 | /** 50 | * Get detailed information about a specific Bitbucket pull request 51 | * @param options - Options including workspace slug, repo slug, and pull request ID 52 | * @returns Promise with formatted pull request details as Markdown content 53 | */ 54 | async function get( 55 | options: GetPullRequestToolArgsType, 56 | ): Promise<ControllerResponse> { 57 | return getController.get(options); 58 | } 59 | 60 | /** 61 | * List comments on a Bitbucket pull request 62 | * @param options - Options including workspace slug, repo slug, and pull request ID 63 | * @returns Promise with formatted pull request comments as Markdown content 64 | */ 65 | async function listComments( 66 | options: ListPullRequestCommentsToolArgsType, 67 | ): Promise<ControllerResponse> { 68 | return commentsController.listComments(options); 69 | } 70 | 71 | /** 72 | * Add a comment to a Bitbucket pull request 73 | * @param options - Options including workspace slug, repo slug, PR ID, and comment content 74 | * @returns Promise with a success message as content 75 | */ 76 | async function addComment( 77 | options: CreatePullRequestCommentToolArgsType, 78 | ): Promise<ControllerResponse> { 79 | return commentsController.addComment(options); 80 | } 81 | 82 | /** 83 | * Create a new pull request in Bitbucket 84 | * @param options - Options including workspace slug, repo slug, source branch, target branch, title, etc. 
85 | * @returns Promise with formatted pull request details as Markdown content 86 | */ 87 | async function add( 88 | options: CreatePullRequestToolArgsType, 89 | ): Promise<ControllerResponse> { 90 | return createController.add(options); 91 | } 92 | 93 | /** 94 | * Update an existing pull request in Bitbucket 95 | * @param options - Options including workspace slug, repo slug, pull request ID, title, and description 96 | * @returns Promise with formatted updated pull request details as Markdown content 97 | */ 98 | async function update( 99 | options: UpdatePullRequestToolArgsType, 100 | ): Promise<ControllerResponse> { 101 | return updateController.update(options); 102 | } 103 | 104 | /** 105 | * Approve a pull request in Bitbucket 106 | * @param options - Options including workspace slug, repo slug, and pull request ID 107 | * @returns Promise with formatted approval confirmation as Markdown content 108 | */ 109 | async function approve( 110 | options: ApprovePullRequestToolArgsType, 111 | ): Promise<ControllerResponse> { 112 | return approveController.approve(options); 113 | } 114 | 115 | /** 116 | * Request changes on a pull request in Bitbucket 117 | * @param options - Options including workspace slug, repo slug, and pull request ID 118 | * @returns Promise with formatted rejection confirmation as Markdown content 119 | */ 120 | async function reject( 121 | options: RejectPullRequestToolArgsType, 122 | ): Promise<ControllerResponse> { 123 | return rejectController.reject(options); 124 | } 125 | 126 | // Export the controller functions 127 | export default { 128 | list, 129 | get, 130 | listComments, 131 | addComment, 132 | add, 133 | update, 134 | approve, 135 | reject, 136 | }; 137 | ``` -------------------------------------------------------------------------------- /src/controllers/atlassian.workspaces.controller.ts: -------------------------------------------------------------------------------- ```typescript 1 | import atlassianWorkspacesService from '../services/vendor.atlassian.workspaces.service.js'; 2 | import { Logger } from '../utils/logger.util.js'; 3 | import { handleControllerError } from '../utils/error-handler.util.js'; 4 | import { 5 | extractPaginationInfo, 6 | PaginationType, 7 | } from '../utils/pagination.util.js'; 8 | import { ControllerResponse } from '../types/common.types.js'; 9 | import { 10 | ListWorkspacesToolArgsType, 11 | GetWorkspaceToolArgsType, 12 | } from '../tools/atlassian.workspaces.types.js'; 13 | import { 14 | formatWorkspacesList, 15 | formatWorkspaceDetails, 16 | } from './atlassian.workspaces.formatter.js'; 17 | import { ListWorkspacesParams } from '../services/vendor.atlassian.workspaces.types.js'; 18 | import { DEFAULT_PAGE_SIZE, applyDefaults } from '../utils/defaults.util.js'; 19 | import { formatPagination } from '../utils/formatter.util.js'; 20 | 21 | // Create a contextualized logger for this file 22 | const controllerLogger = Logger.forContext( 23 | 'controllers/atlassian.workspaces.controller.ts', 24 | ); 25 | 26 | // Log controller initialization 27 | controllerLogger.debug('Bitbucket workspaces controller initialized'); 28 | 29 | /** 30 | * Controller for managing Bitbucket workspaces. 31 | * Provides functionality for listing workspaces and retrieving workspace details. 
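 *
 * Pagination note (an illustrative mapping inferred from the list() implementation
 * below, not separately documented): the `cursor` argument carries a page number as a
 * string, so a call like list({ limit: 25, cursor: '2' }) is forwarded to the service
 * as { pagelen: 25, page: 2 } against the /2.0/user/permissions/workspaces endpoint.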
32 | */ 33 | 34 | /** 35 | * List Bitbucket workspaces with optional filtering 36 | * @param options - Options for listing workspaces 37 | * @param options.limit - Maximum number of workspaces to return 38 | * @param options.cursor - Pagination cursor for retrieving the next set of results 39 | * @returns Promise with formatted workspace list content including pagination information 40 | */ 41 | async function list( 42 | options: ListWorkspacesToolArgsType, 43 | ): Promise<ControllerResponse> { 44 | const methodLogger = Logger.forContext( 45 | 'controllers/atlassian.workspaces.controller.ts', 46 | 'list', 47 | ); 48 | methodLogger.debug('Listing Bitbucket workspaces...', options); 49 | 50 | try { 51 | // Create defaults object with proper typing 52 | const defaults: Partial<ListWorkspacesToolArgsType> = { 53 | limit: DEFAULT_PAGE_SIZE, 54 | }; 55 | 56 | // Apply defaults 57 | const mergedOptions = applyDefaults<ListWorkspacesToolArgsType>( 58 | options, 59 | defaults, 60 | ); 61 | 62 | // Map controller filters to service params 63 | const serviceParams: ListWorkspacesParams = { 64 | pagelen: mergedOptions.limit, // Default page length 65 | page: mergedOptions.cursor 66 | ? parseInt(mergedOptions.cursor, 10) 67 | : undefined, // Use cursor value for page 68 | // NOTE: Sort parameter is not included as the Bitbucket API's /2.0/user/permissions/workspaces 69 | // endpoint does not support sorting on any field 70 | }; 71 | 72 | methodLogger.debug('Using filters:', serviceParams); 73 | 74 | const workspacesData = 75 | await atlassianWorkspacesService.list(serviceParams); 76 | 77 | methodLogger.debug( 78 | `Retrieved ${workspacesData.values?.length || 0} workspaces`, 79 | ); 80 | 81 | // Extract pagination information using the utility 82 | const pagination = extractPaginationInfo( 83 | workspacesData, 84 | PaginationType.PAGE, 85 | ); 86 | 87 | // Format the workspaces data for display using the formatter 88 | const formattedWorkspaces = formatWorkspacesList(workspacesData); 89 | 90 | // Create the final content by combining the formatted workspaces with pagination information 91 | let finalContent = formattedWorkspaces; 92 | 93 | // Add pagination information if available 94 | if ( 95 | pagination && 96 | (pagination.hasMore || pagination.count !== undefined) 97 | ) { 98 | const paginationString = formatPagination(pagination); 99 | finalContent += '\n\n' + paginationString; 100 | } 101 | 102 | return { 103 | content: finalContent, 104 | }; 105 | } catch (error) { 106 | // Use the standardized error handler 107 | throw handleControllerError(error, { 108 | entityType: 'Workspaces', 109 | operation: 'listing', 110 | source: 'controllers/atlassian.workspaces.controller.ts@list', 111 | additionalInfo: { options }, 112 | }); 113 | } 114 | } 115 | 116 | /** 117 | * Get details of a specific Bitbucket workspace 118 | * @param identifier - Object containing the workspace slug 119 | * @param identifier.workspaceSlug - The slug of the workspace to retrieve 120 | * @returns Promise with formatted workspace details content 121 | * @throws Error if workspace retrieval fails 122 | */ 123 | async function get( 124 | identifier: GetWorkspaceToolArgsType, 125 | ): Promise<ControllerResponse> { 126 | const { workspaceSlug } = identifier; 127 | const methodLogger = Logger.forContext( 128 | 'controllers/atlassian.workspaces.controller.ts', 129 | 'get', 130 | ); 131 | 132 | methodLogger.debug( 133 | `Getting Bitbucket workspace with slug: ${workspaceSlug}...`, 134 | ); 135 | 136 | try { 137 | const workspaceData = 
138 | await atlassianWorkspacesService.get(workspaceSlug); 139 | methodLogger.debug(`Retrieved workspace: ${workspaceData.slug}`); 140 | 141 | // Since membership info isn't directly available, we'll use the workspace data only 142 | methodLogger.debug( 143 | 'Membership info not available, using workspace data only', 144 | ); 145 | 146 | // Format the workspace data for display using the formatter 147 | const formattedWorkspace = formatWorkspaceDetails( 148 | workspaceData, 149 | undefined, // Pass undefined instead of membership data 150 | ); 151 | 152 | return { 153 | content: formattedWorkspace, 154 | }; 155 | } catch (error) { 156 | // Use the standardized error handler 157 | throw handleControllerError(error, { 158 | entityType: 'Workspace', 159 | operation: 'retrieving', 160 | source: 'controllers/atlassian.workspaces.controller.ts@get', 161 | additionalInfo: { identifier }, 162 | }); 163 | } 164 | } 165 | 166 | export default { list, get }; 167 | ``` -------------------------------------------------------------------------------- /src/controllers/atlassian.workspaces.controller.test.ts: -------------------------------------------------------------------------------- ```typescript 1 | import atlassianWorkspacesController from './atlassian.workspaces.controller.js'; 2 | import { getAtlassianCredentials } from '../utils/transport.util.js'; 3 | import { config } from '../utils/config.util.js'; 4 | import { McpError } from '../utils/error.util.js'; 5 | 6 | describe('Atlassian Workspaces Controller', () => { 7 | // Load configuration and check for credentials before all tests 8 | beforeAll(() => { 9 | config.load(); // Ensure config is loaded 10 | const credentials = getAtlassianCredentials(); 11 | if (!credentials) { 12 | console.warn( 13 | 'Skipping Atlassian Workspaces Controller tests: No credentials available', 14 | ); 15 | } 16 | }); 17 | 18 | // Helper function to skip tests when credentials are missing 19 | const skipIfNoCredentials = () => !getAtlassianCredentials(); 20 | 21 | describe('list', () => { 22 | it('should return a formatted list of workspaces in Markdown', async () => { 23 | if (skipIfNoCredentials()) return; 24 | 25 | const result = await atlassianWorkspacesController.list({}); 26 | 27 | // Verify the response structure 28 | expect(result).toHaveProperty('content'); 29 | expect(typeof result.content).toBe('string'); 30 | 31 | // Basic Markdown content checks 32 | if (result.content !== 'No Bitbucket workspaces found.') { 33 | expect(result.content).toMatch(/^# Bitbucket Workspaces/m); 34 | expect(result.content).toContain('**UUID**'); 35 | expect(result.content).toContain('**Slug**'); 36 | expect(result.content).toContain('**Permission Level**'); 37 | 38 | // Check for pagination information in the content string 39 | expect(result.content).toMatch( 40 | /---\s*[\s\S]*\*Showing \d+ (of \d+ total items|\S+ items?)[\s\S]*\*/, 41 | ); 42 | } 43 | }, 30000); // Increased timeout 44 | 45 | it('should handle pagination options (limit/cursor)', async () => { 46 | if (skipIfNoCredentials()) return; 47 | 48 | // Fetch first page 49 | const result1 = await atlassianWorkspacesController.list({ 50 | limit: 1, 51 | }); 52 | 53 | // Extract pagination info from content 54 | const countMatch1 = result1.content.match( 55 | /\*Showing (\d+) items?\.\*/, 56 | ); 57 | const count1 = countMatch1 ? 
parseInt(countMatch1[1], 10) : 0; 58 | expect(count1).toBeLessThanOrEqual(1); 59 | 60 | // Extract cursor from content 61 | const cursorMatch1 = result1.content.match( 62 | /\*Next cursor: `(\d+)`\*/, 63 | ); 64 | const nextCursor = cursorMatch1 ? cursorMatch1[1] : null; 65 | 66 | // Check if pagination indicates more results 67 | const hasMoreResults = result1.content.includes( 68 | 'More results are available.', 69 | ); 70 | 71 | // If there's a next page, fetch it 72 | if (hasMoreResults && nextCursor) { 73 | const result2 = await atlassianWorkspacesController.list({ 74 | limit: 1, 75 | cursor: nextCursor, 76 | }); 77 | 78 | // Ensure content is different (or handle case where only 1 item exists) 79 | if ( 80 | result1.content !== 'No Bitbucket workspaces found.' && 81 | result2.content !== 'No Bitbucket workspaces found.' 82 | ) { 83 | // Only compare if we actually have multiple workspaces 84 | expect(result1.content).not.toEqual(result2.content); 85 | } 86 | } else { 87 | console.warn( 88 | 'Skipping cursor part of pagination test: Only one page of workspaces found.', 89 | ); 90 | } 91 | }, 30000); 92 | }); 93 | 94 | describe('get', () => { 95 | // Helper to get a valid slug for testing 'get' 96 | async function getFirstWorkspaceSlugForController(): Promise< 97 | string | null 98 | > { 99 | if (skipIfNoCredentials()) return null; 100 | try { 101 | const listResult = await atlassianWorkspacesController.list({ 102 | limit: 1, 103 | }); 104 | if (listResult.content === 'No Bitbucket workspaces found.') 105 | return null; 106 | // Extract slug from Markdown content 107 | const slugMatch = listResult.content.match( 108 | /\*\*Slug\*\*:\s+([^\s\n]+)/, 109 | ); 110 | return slugMatch ? slugMatch[1] : null; 111 | } catch (error) { 112 | console.warn( 113 | "Could not fetch workspace list for controller 'get' test setup:", 114 | error, 115 | ); 116 | return null; 117 | } 118 | } 119 | 120 | it('should return formatted details for a valid workspace slug in Markdown', async () => { 121 | const workspaceSlug = await getFirstWorkspaceSlugForController(); 122 | if (!workspaceSlug) { 123 | console.warn( 124 | 'Skipping controller get test: No workspace slug found.', 125 | ); 126 | return; 127 | } 128 | 129 | const result = await atlassianWorkspacesController.get({ 130 | workspaceSlug, 131 | }); 132 | 133 | // Verify the ControllerResponse structure 134 | expect(result).toHaveProperty('content'); 135 | expect(typeof result.content).toBe('string'); 136 | 137 | // Verify Markdown content 138 | expect(result.content).toMatch(/^# Workspace:/m); 139 | expect(result.content).toContain(`**Slug**: ${workspaceSlug}`); 140 | expect(result.content).toContain('## Basic Information'); 141 | expect(result.content).toContain('## Links'); 142 | }, 30000); 143 | 144 | it('should throw McpError for an invalid workspace slug', async () => { 145 | if (skipIfNoCredentials()) return; 146 | 147 | const invalidSlug = 'this-slug-definitely-does-not-exist-12345'; 148 | 149 | // Expect the controller call to reject with an McpError 150 | await expect( 151 | atlassianWorkspacesController.get({ 152 | workspaceSlug: invalidSlug, 153 | }), 154 | ).rejects.toThrow(McpError); 155 | 156 | // Optionally check the status code via the error handler's behavior 157 | try { 158 | await atlassianWorkspacesController.get({ 159 | workspaceSlug: invalidSlug, 160 | }); 161 | } catch (e) { 162 | expect(e).toBeInstanceOf(McpError); 163 | // The controller error handler wraps the service error 164 | expect((e as McpError).statusCode).toBe(404); 
// Expecting Not Found 165 | expect((e as McpError).message).toContain('not found'); 166 | } 167 | }, 30000); 168 | }); 169 | }); 170 | ``` -------------------------------------------------------------------------------- /src/cli/atlassian.workspaces.cli.test.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { CliTestUtil } from '../utils/cli.test.util.js'; 2 | import { getAtlassianCredentials } from '../utils/transport.util.js'; 3 | import { config } from '../utils/config.util.js'; 4 | 5 | describe('Atlassian Workspaces CLI Commands', () => { 6 | // Load configuration and check for credentials before all tests 7 | beforeAll(() => { 8 | // Load configuration from all sources 9 | config.load(); 10 | 11 | // Log warning if credentials aren't available 12 | const credentials = getAtlassianCredentials(); 13 | if (!credentials) { 14 | console.warn( 15 | 'Skipping Atlassian Workspaces CLI tests: No credentials available', 16 | ); 17 | } 18 | }); 19 | 20 | // Helper function to skip tests when credentials are missing 21 | const skipIfNoCredentials = () => { 22 | const credentials = getAtlassianCredentials(); 23 | if (!credentials) { 24 | return true; 25 | } 26 | return false; 27 | }; 28 | 29 | describe('ls-workspaces command', () => { 30 | // Test default behavior (list all workspaces) 31 | it('should list available workspaces', async () => { 32 | if (skipIfNoCredentials()) { 33 | return; 34 | } 35 | 36 | // Run the CLI command 37 | const result = await CliTestUtil.runCommand(['ls-workspaces']); 38 | 39 | // Check command exit code 40 | expect(result.exitCode).toBe(0); 41 | 42 | // Verify the output format 43 | if (!result.stdout.includes('No Bitbucket workspaces found.')) { 44 | // Validate expected Markdown structure - Fixed to match actual output 45 | CliTestUtil.validateOutputContains(result.stdout, [ 46 | '# Bitbucket Workspaces', 47 | '**UUID**', 48 | '**Slug**', 49 | '**Permission Level**', 50 | ]); 51 | 52 | // Validate Markdown formatting 53 | CliTestUtil.validateMarkdownOutput(result.stdout); 54 | } 55 | }, 30000); // Increased timeout for API call 56 | 57 | // Test with pagination 58 | it('should support pagination with --limit flag', async () => { 59 | if (skipIfNoCredentials()) { 60 | return; 61 | } 62 | 63 | // Run the CLI command with limit 64 | const result = await CliTestUtil.runCommand([ 65 | 'ls-workspaces', 66 | '--limit', 67 | '1', 68 | ]); 69 | 70 | // Check command exit code 71 | expect(result.exitCode).toBe(0); 72 | 73 | // If there are multiple workspaces, pagination section should be present 74 | if ( 75 | !result.stdout.includes('No Bitbucket workspaces found.') && 76 | result.stdout.includes('items remaining') 77 | ) { 78 | CliTestUtil.validateOutputContains(result.stdout, [ 79 | 'Pagination', 80 | 'Next cursor:', 81 | ]); 82 | } 83 | }, 30000); // Increased timeout for API call 84 | 85 | // Test with invalid parameters - Fixed to use a truly invalid input 86 | it('should handle invalid parameters properly', async () => { 87 | if (skipIfNoCredentials()) { 88 | return; 89 | } 90 | 91 | // Run the CLI command with a non-existent parameter 92 | const result = await CliTestUtil.runCommand([ 93 | 'ls-workspaces', 94 | '--non-existent-parameter', 95 | 'value', 96 | ]); 97 | 98 | // Should fail with non-zero exit code 99 | expect(result.exitCode).not.toBe(0); 100 | 101 | // Should output error message 102 | expect(result.stderr).toContain('unknown option'); 103 | }, 30000); 104 | }); 105 | 106 | 
describe('get-workspace command', () => { 107 | // Test to fetch a specific workspace 108 | it('should retrieve a specific workspace by slug', async () => { 109 | if (skipIfNoCredentials()) { 110 | return; 111 | } 112 | 113 | // First, get a list of workspaces to find a valid slug 114 | const listResult = await CliTestUtil.runCommand(['ls-workspaces']); 115 | 116 | // Skip if no workspaces are available 117 | if (listResult.stdout.includes('No Bitbucket workspaces found.')) { 118 | console.warn('Skipping test: No workspaces available'); 119 | return; 120 | } 121 | 122 | // Extract a workspace slug from the output 123 | const slugMatch = listResult.stdout.match( 124 | /\*\*Slug\*\*:\s+([^\n]+)/, 125 | ); 126 | if (!slugMatch || !slugMatch[1]) { 127 | console.warn('Skipping test: Could not extract workspace slug'); 128 | return; 129 | } 130 | 131 | const workspaceSlug = slugMatch[1].trim(); 132 | 133 | // Run the get-workspace command with the extracted slug 134 | const getResult = await CliTestUtil.runCommand([ 135 | 'get-workspace', 136 | '--workspace-slug', 137 | workspaceSlug, 138 | ]); 139 | 140 | // Check command exit code 141 | expect(getResult.exitCode).toBe(0); 142 | 143 | // Verify the output structure and content 144 | CliTestUtil.validateOutputContains(getResult.stdout, [ 145 | `# Workspace: `, 146 | `**Slug**: ${workspaceSlug}`, 147 | 'Basic Information', 148 | 'Links', 149 | ]); 150 | 151 | // Validate Markdown formatting 152 | CliTestUtil.validateMarkdownOutput(getResult.stdout); 153 | }, 30000); // Increased timeout for API calls 154 | 155 | // Test with missing required parameter 156 | it('should fail when workspace slug is not provided', async () => { 157 | if (skipIfNoCredentials()) { 158 | return; 159 | } 160 | 161 | // Run command without required parameter 162 | const result = await CliTestUtil.runCommand(['get-workspace']); 163 | 164 | // Should fail with non-zero exit code 165 | expect(result.exitCode).not.toBe(0); 166 | 167 | // Should indicate missing required option 168 | expect(result.stderr).toContain('required option'); 169 | }, 15000); 170 | 171 | // Test with invalid workspace slug 172 | it('should handle invalid workspace slugs gracefully', async () => { 173 | if (skipIfNoCredentials()) { 174 | return; 175 | } 176 | 177 | // Use a deliberately invalid workspace slug 178 | const invalidSlug = 'invalid-workspace-slug-that-does-not-exist'; 179 | 180 | // Run command with invalid slug 181 | const result = await CliTestUtil.runCommand([ 182 | 'get-workspace', 183 | '--workspace-slug', 184 | invalidSlug, 185 | ]); 186 | 187 | // Should fail with non-zero exit code 188 | expect(result.exitCode).not.toBe(0); 189 | 190 | // Should contain error information 191 | expect(result.stderr).toContain('error'); 192 | }, 30000); 193 | }); 194 | }); 195 | ``` -------------------------------------------------------------------------------- /src/controllers/atlassian.search.code.controller.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { Logger } from '../utils/logger.util.js'; 2 | import { ControllerResponse } from '../types/common.types.js'; 3 | import { DEFAULT_PAGE_SIZE } from '../utils/defaults.util.js'; 4 | import atlassianSearchService from '../services/vendor.atlassian.search.service.js'; 5 | import { 6 | extractPaginationInfo, 7 | PaginationType, 8 | } from '../utils/pagination.util.js'; 9 | import { formatPagination } from '../utils/formatter.util.js'; 10 | import { formatCodeSearchResults } from 
'./atlassian.search.formatter.js'; 11 | 12 | /** 13 | * Handle search for code content (uses Bitbucket's Code Search API) 14 | */ 15 | export async function handleCodeSearch( 16 | workspaceSlug: string, 17 | repoSlug?: string, 18 | query?: string, 19 | limit: number = DEFAULT_PAGE_SIZE, 20 | cursor?: string, 21 | language?: string, 22 | extension?: string, 23 | ): Promise<ControllerResponse> { 24 | const methodLogger = Logger.forContext( 25 | 'controllers/atlassian.search.code.controller.ts', 26 | 'handleCodeSearch', 27 | ); 28 | methodLogger.debug('Performing code search'); 29 | 30 | if (!query) { 31 | return { 32 | content: 'Please provide a search query for code search.', 33 | }; 34 | } 35 | 36 | try { 37 | // Convert cursor to page number if provided 38 | let page = 1; 39 | if (cursor) { 40 | const parsedPage = parseInt(cursor, 10); 41 | if (!isNaN(parsedPage)) { 42 | page = parsedPage; 43 | } else { 44 | methodLogger.warn('Invalid page cursor:', cursor); 45 | } 46 | } 47 | 48 | // Use the search service 49 | const searchResponse = await atlassianSearchService.searchCode({ 50 | workspaceSlug: workspaceSlug, 51 | searchQuery: query, 52 | repoSlug: repoSlug, 53 | page: page, 54 | pageLen: limit, 55 | language: language, 56 | extension: extension, 57 | }); 58 | 59 | methodLogger.debug( 60 | `Search complete, found ${searchResponse.size} matches`, 61 | ); 62 | 63 | // Post-filter by language if specified and Bitbucket API returned mixed results 64 | let filteredValues = searchResponse.values || []; 65 | let originalSize = searchResponse.size; 66 | 67 | if (language && filteredValues.length > 0) { 68 | // Language extension mapping for post-filtering 69 | const languageExtMap: Record<string, string[]> = { 70 | hcl: ['.tf', '.tfvars', '.hcl'], 71 | terraform: ['.tf', '.tfvars', '.hcl'], 72 | java: ['.java', '.class', '.jar'], 73 | javascript: ['.js', '.jsx', '.mjs'], 74 | typescript: ['.ts', '.tsx'], 75 | python: ['.py', '.pyw', '.pyc'], 76 | ruby: ['.rb', '.rake'], 77 | go: ['.go'], 78 | rust: ['.rs'], 79 | c: ['.c', '.h'], 80 | cpp: ['.cpp', '.cc', '.cxx', '.h', '.hpp'], 81 | csharp: ['.cs'], 82 | php: ['.php'], 83 | html: ['.html', '.htm'], 84 | css: ['.css'], 85 | shell: ['.sh', '.bash', '.zsh'], 86 | sql: ['.sql'], 87 | yaml: ['.yml', '.yaml'], 88 | json: ['.json'], 89 | xml: ['.xml'], 90 | markdown: ['.md', '.markdown'], 91 | }; 92 | 93 | // Normalize the language name to lowercase 94 | const normalizedLang = language.toLowerCase(); 95 | const extensions = languageExtMap[normalizedLang] || []; 96 | 97 | // Only apply post-filtering if we have extension mappings for this language 98 | if (extensions.length > 0) { 99 | const beforeFilterCount = filteredValues.length; 100 | 101 | // Filter results to only include files with the expected extensions 102 | filteredValues = filteredValues.filter((result) => { 103 | const filePath = result.file.path.toLowerCase(); 104 | return extensions.some((ext) => filePath.endsWith(ext)); 105 | }); 106 | 107 | const afterFilterCount = filteredValues.length; 108 | 109 | if (afterFilterCount !== beforeFilterCount) { 110 | methodLogger.debug( 111 | `Post-filtered code search results by language=${language}: ${afterFilterCount} of ${beforeFilterCount} matched extensions ${extensions.join(', ')}`, 112 | ); 113 | 114 | // Adjust the size estimate 115 | originalSize = searchResponse.size; 116 | const filterRatio = afterFilterCount / beforeFilterCount; 117 | searchResponse.size = Math.max( 118 | afterFilterCount, 119 | Math.ceil(searchResponse.size * filterRatio), 
120 | ); 121 | 122 | methodLogger.debug( 123 | `Adjusted size from ${originalSize} to ${searchResponse.size} based on filtering`, 124 | ); 125 | } 126 | } 127 | } 128 | 129 | // Extract pagination information 130 | const transformedResponse = { 131 | pagelen: limit, 132 | page: page, 133 | size: searchResponse.size, 134 | values: filteredValues, 135 | next: 'available', // Fallback to 'available' since searchResponse doesn't have a next property 136 | }; 137 | 138 | const pagination = extractPaginationInfo( 139 | transformedResponse, 140 | PaginationType.PAGE, 141 | ); 142 | 143 | // Format the code search results 144 | let formattedCode = formatCodeSearchResults({ 145 | ...searchResponse, 146 | values: filteredValues, 147 | }); 148 | 149 | // Add note about language filtering if applied 150 | if (language) { 151 | // Make it clear that language filtering is a best-effort by the API and we've improved it 152 | const languageNote = `> **Note:** Language filtering for '${language}' combines Bitbucket API filtering with client-side filtering for more accurate results. Due to limitations in the Bitbucket API, some files in other languages might still appear in search results, and filtering is based on file extensions rather than content analysis. This is a known limitation of the Bitbucket API that this tool attempts to mitigate through additional filtering.`; 153 | formattedCode = `${languageNote}\n\n${formattedCode}`; 154 | } 155 | 156 | // Add pagination information if available 157 | let finalContent = formattedCode; 158 | if ( 159 | pagination && 160 | (pagination.hasMore || pagination.count !== undefined) 161 | ) { 162 | const paginationString = formatPagination(pagination); 163 | finalContent += '\n\n' + paginationString; 164 | } 165 | 166 | return { 167 | content: finalContent, 168 | }; 169 | } catch (searchError) { 170 | methodLogger.error('Error performing code search:', searchError); 171 | throw searchError; 172 | } 173 | } 174 | ``` -------------------------------------------------------------------------------- /scripts/update-version.js: -------------------------------------------------------------------------------- ```javascript 1 | #!/usr/bin/env node 2 | 3 | /** 4 | * Script to update version numbers across the project 5 | * Usage: node scripts/update-version.js [version] [options] 6 | * Options: 7 | * --dry-run Show what changes would be made without applying them 8 | * --verbose Show detailed logging information 9 | * 10 | * If no version is provided, it will use the version from package.json 11 | */ 12 | 13 | import fs from 'fs'; 14 | import path from 'path'; 15 | import { fileURLToPath } from 'url'; 16 | 17 | // Get the directory name of the current module 18 | const __filename = fileURLToPath(import.meta.url); 19 | const __dirname = path.dirname(__filename); 20 | const rootDir = path.resolve(__dirname, '..'); 21 | 22 | // Parse command line arguments 23 | const args = process.argv.slice(2); 24 | const options = { 25 | dryRun: args.includes('--dry-run'), 26 | verbose: args.includes('--verbose'), 27 | }; 28 | 29 | // Get the version (first non-flag argument) 30 | let newVersion = args.find((arg) => !arg.startsWith('--')); 31 | 32 | // Log helper function 33 | const log = (message, verbose = false) => { 34 | if (!verbose || options.verbose) { 35 | console.log(message); 36 | } 37 | }; 38 | 39 | // File paths that may contain version information 40 | const versionFiles = [ 41 | { 42 | path: path.join(rootDir, 'package.json'), 43 | pattern: /"version": "([^"]*)"/, 44 | 
replacement: (match, currentVersion) => 45 | match.replace(currentVersion, newVersion), 46 | }, 47 | { 48 | path: path.join(rootDir, 'src', 'utils', 'constants.util.ts'), 49 | pattern: /export const VERSION = ['"]([^'"]*)['"]/, 50 | replacement: (match, currentVersion) => 51 | match.replace(currentVersion, newVersion), 52 | }, 53 | // Also update the compiled JavaScript files if they exist 54 | { 55 | path: path.join(rootDir, 'dist', 'utils', 'constants.util.js'), 56 | pattern: /exports.VERSION = ['"]([^'"]*)['"]/, 57 | replacement: (match, currentVersion) => 58 | match.replace(currentVersion, newVersion), 59 | optional: true, // Mark this file as optional 60 | }, 61 | // Additional files can be added here with their patterns and replacement logic 62 | ]; 63 | 64 | /** 65 | * Read the version from package.json 66 | * @returns {string} The version from package.json 67 | */ 68 | function getPackageVersion() { 69 | try { 70 | const packageJsonPath = path.join(rootDir, 'package.json'); 71 | log(`Reading version from ${packageJsonPath}`, true); 72 | 73 | const packageJson = JSON.parse( 74 | fs.readFileSync(packageJsonPath, 'utf8'), 75 | ); 76 | 77 | if (!packageJson.version) { 78 | throw new Error('No version field found in package.json'); 79 | } 80 | 81 | return packageJson.version; 82 | } catch (error) { 83 | console.error(`Error reading package.json: ${error.message}`); 84 | process.exit(1); 85 | } 86 | } 87 | 88 | /** 89 | * Validate the semantic version format 90 | * @param {string} version - The version to validate 91 | * @returns {boolean} True if valid, throws error if invalid 92 | */ 93 | function validateVersion(version) { 94 | // More comprehensive semver regex 95 | const semverRegex = 96 | /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/; 97 | 98 | if (!semverRegex.test(version)) { 99 | throw new Error( 100 | `Invalid version format: ${version}\nPlease use semantic versioning format (e.g., 1.2.3, 1.2.3-beta.1, etc.)`, 101 | ); 102 | } 103 | 104 | return true; 105 | } 106 | 107 | /** 108 | * Update version in a specific file 109 | * @param {Object} fileConfig - Configuration for the file to update 110 | */ 111 | function updateFileVersion(fileConfig) { 112 | const { 113 | path: filePath, 114 | pattern, 115 | replacement, 116 | optional = false, 117 | } = fileConfig; 118 | 119 | try { 120 | log(`Checking ${filePath}...`, true); 121 | 122 | if (!fs.existsSync(filePath)) { 123 | if (optional) { 124 | log(`Optional file not found (skipping): ${filePath}`, true); 125 | return; 126 | } 127 | console.warn(`Warning: File not found: ${filePath}`); 128 | return; 129 | } 130 | 131 | // Read file content 132 | const fileContent = fs.readFileSync(filePath, 'utf8'); 133 | const match = fileContent.match(pattern); 134 | 135 | if (!match) { 136 | console.warn(`Warning: Version pattern not found in ${filePath}`); 137 | return; 138 | } 139 | 140 | const currentVersion = match[1]; 141 | if (currentVersion === newVersion) { 142 | log( 143 | `Version in ${path.basename(filePath)} is already ${newVersion}`, 144 | true, 145 | ); 146 | return; 147 | } 148 | 149 | // Create new content with the updated version 150 | const updatedContent = fileContent.replace(pattern, replacement); 151 | 152 | // Write the changes or log them in dry run mode 153 | if (options.dryRun) { 154 | log( 155 | `Would update version in ${filePath} from ${currentVersion} to ${newVersion}`, 156 | ); 157 | 
} else { 158 | // Create a backup of the original file 159 | fs.writeFileSync(`${filePath}.bak`, fileContent); 160 | log(`Backup created: ${filePath}.bak`, true); 161 | 162 | // Write the updated content 163 | fs.writeFileSync(filePath, updatedContent); 164 | log( 165 | `Updated version in ${path.basename(filePath)} from ${currentVersion} to ${newVersion}`, 166 | ); 167 | } 168 | } catch (error) { 169 | if (optional) { 170 | log(`Error with optional file ${filePath}: ${error.message}`, true); 171 | return; 172 | } 173 | console.error(`Error updating ${filePath}: ${error.message}`); 174 | process.exit(1); 175 | } 176 | } 177 | 178 | // Main execution 179 | try { 180 | // If no version specified, get from package.json 181 | if (!newVersion) { 182 | newVersion = getPackageVersion(); 183 | log( 184 | `No version specified, using version from package.json: ${newVersion}`, 185 | ); 186 | } 187 | 188 | // Validate the version format 189 | validateVersion(newVersion); 190 | 191 | // Update all configured files 192 | for (const fileConfig of versionFiles) { 193 | updateFileVersion(fileConfig); 194 | } 195 | 196 | if (options.dryRun) { 197 | log(`\nDry run completed. No files were modified.`); 198 | } else { 199 | log(`\nVersion successfully updated to ${newVersion}`); 200 | } 201 | } catch (error) { 202 | console.error(`\nVersion update failed: ${error.message}`); 203 | process.exit(1); 204 | } 205 | ``` -------------------------------------------------------------------------------- /src/services/vendor.atlassian.workspaces.test.ts: -------------------------------------------------------------------------------- ```typescript 1 | import atlassianWorkspacesService from './vendor.atlassian.workspaces.service.js'; 2 | import { getAtlassianCredentials } from '../utils/transport.util.js'; 3 | import { config } from '../utils/config.util.js'; 4 | import { McpError } from '../utils/error.util.js'; 5 | 6 | describe('Vendor Atlassian Workspaces Service', () => { 7 | // Load configuration and check for credentials before all tests 8 | beforeAll(() => { 9 | config.load(); // Ensure config is loaded 10 | const credentials = getAtlassianCredentials(); 11 | if (!credentials) { 12 | console.warn( 13 | 'Skipping Atlassian Workspaces Service tests: No credentials available', 14 | ); 15 | } 16 | }); 17 | 18 | // Helper function to skip tests when credentials are missing 19 | const skipIfNoCredentials = () => !getAtlassianCredentials(); 20 | 21 | describe('list', () => { 22 | it('should return a list of workspaces (permissions)', async () => { 23 | if (skipIfNoCredentials()) return; 24 | 25 | const result = await atlassianWorkspacesService.list(); 26 | 27 | // Verify the response structure based on WorkspacePermissionsResponse 28 | expect(result).toHaveProperty('values'); 29 | expect(Array.isArray(result.values)).toBe(true); 30 | expect(result).toHaveProperty('pagelen'); // Bitbucket uses pagelen 31 | expect(result).toHaveProperty('page'); 32 | expect(result).toHaveProperty('size'); 33 | 34 | if (result.values.length > 0) { 35 | const membership = result.values[0]; 36 | expect(membership).toHaveProperty( 37 | 'type', 38 | 'workspace_membership', 39 | ); 40 | expect(membership).toHaveProperty('permission'); 41 | expect(membership).toHaveProperty('user'); 42 | expect(membership).toHaveProperty('workspace'); 43 | expect(membership.workspace).toHaveProperty('slug'); 44 | expect(membership.workspace).toHaveProperty('uuid'); 45 | } 46 | }, 30000); // Increased timeout 47 | 48 | it('should support pagination with pagelen', 
async () => { 49 | if (skipIfNoCredentials()) return; 50 | 51 | const result = await atlassianWorkspacesService.list({ 52 | pagelen: 1, 53 | }); 54 | 55 | expect(result).toHaveProperty('pagelen'); 56 | // Allow pagelen to be greater than requested if API enforces minimum 57 | expect(result.pagelen).toBeGreaterThanOrEqual(1); 58 | expect(result.values.length).toBeLessThanOrEqual(result.pagelen); // Items should not exceed pagelen 59 | 60 | if (result.size > result.pagelen) { 61 | // If there are more items than the page size, expect pagination links 62 | expect(result).toHaveProperty('next'); 63 | } 64 | }, 30000); 65 | 66 | it('should handle query filtering if supported by the API', async () => { 67 | if (skipIfNoCredentials()) return; 68 | 69 | // First get all workspaces to find a potential query term 70 | const allWorkspaces = await atlassianWorkspacesService.list(); 71 | 72 | // Skip if no workspaces available 73 | if (allWorkspaces.values.length === 0) { 74 | console.warn( 75 | 'Skipping query filtering test: No workspaces available', 76 | ); 77 | return; 78 | } 79 | 80 | // Try to search using a workspace name - note that this might not work if 81 | // the API doesn't fully support 'q' parameter for this endpoint 82 | // This test basically checks that the request doesn't fail 83 | const firstWorkspace = allWorkspaces.values[0].workspace; 84 | try { 85 | const result = await atlassianWorkspacesService.list({ 86 | q: `workspace.name="${firstWorkspace.name}"`, 87 | }); 88 | 89 | // We're mostly testing that this request completes without error 90 | expect(result).toHaveProperty('values'); 91 | 92 | // The result might be empty if filtering isn't supported, 93 | // so we don't assert on the number of results returned 94 | } catch (error) { 95 | // If filtering isn't supported, the API might return an error 96 | // This is acceptable, so we just log it 97 | console.warn( 98 | 'Query filtering test encountered an error:', 99 | error instanceof Error ? error.message : String(error), 100 | ); 101 | } 102 | }, 30000); 103 | }); 104 | 105 | describe('get', () => { 106 | // Helper to get a valid slug for testing 'get' 107 | async function getFirstWorkspaceSlug(): Promise<string | null> { 108 | if (skipIfNoCredentials()) return null; 109 | try { 110 | const listResult = await atlassianWorkspacesService.list({ 111 | pagelen: 1, 112 | }); 113 | return listResult.values.length > 0 114 | ? 
listResult.values[0].workspace.slug 115 | : null; 116 | } catch (error) { 117 | console.warn( 118 | "Could not fetch workspace list for 'get' test setup:", 119 | error, 120 | ); 121 | return null; 122 | } 123 | } 124 | 125 | it('should return details for a valid workspace slug', async () => { 126 | const workspaceSlug = await getFirstWorkspaceSlug(); 127 | if (!workspaceSlug) { 128 | console.warn('Skipping get test: No workspace slug found.'); 129 | return; 130 | } 131 | 132 | const result = await atlassianWorkspacesService.get(workspaceSlug); 133 | 134 | // Verify the response structure based on WorkspaceDetailed 135 | expect(result).toHaveProperty('uuid'); 136 | expect(result).toHaveProperty('slug', workspaceSlug); 137 | expect(result).toHaveProperty('name'); 138 | expect(result).toHaveProperty('type', 'workspace'); 139 | expect(result).toHaveProperty('links'); 140 | expect(result.links).toHaveProperty('html'); 141 | }, 30000); 142 | 143 | it('should throw an McpError for an invalid workspace slug', async () => { 144 | if (skipIfNoCredentials()) return; 145 | 146 | const invalidSlug = 'this-slug-definitely-does-not-exist-12345'; 147 | 148 | // Expect the service call to reject with an McpError (likely 404) 149 | await expect( 150 | atlassianWorkspacesService.get(invalidSlug), 151 | ).rejects.toThrow(McpError); 152 | 153 | // Optionally check the status code if needed 154 | try { 155 | await atlassianWorkspacesService.get(invalidSlug); 156 | } catch (e) { 157 | expect(e).toBeInstanceOf(McpError); 158 | expect((e as McpError).statusCode).toBe(404); // Expecting Not Found 159 | } 160 | }, 30000); 161 | }); 162 | }); 163 | ``` -------------------------------------------------------------------------------- /src/utils/config.util.ts: -------------------------------------------------------------------------------- ```typescript 1 | import fs from 'fs'; 2 | import path from 'path'; 3 | import { Logger } from './logger.util.js'; 4 | import dotenv from 'dotenv'; 5 | import os from 'os'; 6 | 7 | /** 8 | * Configuration loader that handles multiple sources with priority: 9 | * 1. Direct ENV pass (process.env) 10 | * 2. .env file in project root 11 | * 3. 
Global config file at $HOME/.mcp/configs.json 12 | */ 13 | class ConfigLoader { 14 | private packageName: string; 15 | private configLoaded: boolean = false; 16 | 17 | /** 18 | * Create a new ConfigLoader instance 19 | * @param packageName The package name to use for global config lookup 20 | */ 21 | constructor(packageName: string) { 22 | this.packageName = packageName; 23 | } 24 | 25 | /** 26 | * Load configuration from all sources with proper priority 27 | */ 28 | load(): void { 29 | const methodLogger = Logger.forContext('utils/config.util.ts', 'load'); 30 | if (this.configLoaded) { 31 | methodLogger.debug('Configuration already loaded, skipping'); 32 | return; 33 | } 34 | 35 | methodLogger.debug('Loading configuration...'); 36 | 37 | // Priority 3: Load from global config file 38 | this.loadFromGlobalConfig(); 39 | 40 | // Priority 2: Load from .env file 41 | this.loadFromEnvFile(); 42 | 43 | // Priority 1: Direct ENV pass is already in process.env 44 | // No need to do anything as it already has highest priority 45 | 46 | this.configLoaded = true; 47 | methodLogger.debug('Configuration loaded successfully'); 48 | } 49 | 50 | /** 51 | * Load configuration from .env file in project root 52 | */ 53 | private loadFromEnvFile(): void { 54 | const methodLogger = Logger.forContext( 55 | 'utils/config.util.ts', 56 | 'loadFromEnvFile', 57 | ); 58 | try { 59 | // Use quiet mode to prevent dotenv from outputting to STDIO 60 | // which interferes with MCP's JSON-RPC communication 61 | const result = dotenv.config({ quiet: true }); 62 | if (result.error) { 63 | methodLogger.debug('No .env file found or error reading it'); 64 | return; 65 | } 66 | methodLogger.debug('Loaded configuration from .env file'); 67 | } catch (error) { 68 | methodLogger.error('Error loading .env file', error); 69 | } 70 | } 71 | 72 | /** 73 | * Load configuration from global config file at $HOME/.mcp/configs.json 74 | */ 75 | private loadFromGlobalConfig(): void { 76 | const methodLogger = Logger.forContext( 77 | 'utils/config.util.ts', 78 | 'loadFromGlobalConfig', 79 | ); 80 | try { 81 | const homedir = os.homedir(); 82 | const globalConfigPath = path.join(homedir, '.mcp', 'configs.json'); 83 | 84 | if (!fs.existsSync(globalConfigPath)) { 85 | methodLogger.debug('Global config file not found'); 86 | return; 87 | } 88 | 89 | const configContent = fs.readFileSync(globalConfigPath, 'utf8'); 90 | const config = JSON.parse(configContent); 91 | 92 | // Determine the potential keys for the current package 93 | const shortKey = 'bitbucket'; // Project-specific short key 94 | const atlassianProductKey = 'atlassian-bitbucket'; // New supported key 95 | const fullPackageName = this.packageName; // e.g., '@aashari/mcp-server-atlassian-bitbucket' 96 | const unscopedPackageName = 97 | fullPackageName.split('/')[1] || fullPackageName; // e.g., 'mcp-server-atlassian-bitbucket' 98 | 99 | // Define the prioritized order of keys to check 100 | const potentialKeys = [ 101 | shortKey, 102 | atlassianProductKey, 103 | fullPackageName, 104 | unscopedPackageName, 105 | ]; 106 | let foundConfigSection: { 107 | environments?: Record<string, unknown>; 108 | } | null = null; 109 | let usedKey: string | null = null; 110 | 111 | for (const key of potentialKeys) { 112 | if ( 113 | config[key] && 114 | typeof config[key] === 'object' && 115 | config[key].environments 116 | ) { 117 | foundConfigSection = config[key]; 118 | usedKey = key; 119 | methodLogger.debug(`Found configuration using key: ${key}`); 120 | break; // Stop once found 121 | } 122 | } 
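// Illustrative example (not part of this file): a minimal ~/.mcp/configs.json
// that the key lookup above would match via the short "bitbucket" key. Values
// are placeholders; any variables listed under "environments" are copied into
// process.env below unless they are already set.
//
// {
//   "bitbucket": {
//     "environments": {
//       "ATLASSIAN_BITBUCKET_USERNAME": "<username>",
//       "ATLASSIAN_BITBUCKET_APP_PASSWORD": "<app-password>",
//       "BITBUCKET_DEFAULT_WORKSPACE": "<workspace-slug>"
//     }
//   }
// }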
123 | 124 | if (!foundConfigSection || !foundConfigSection.environments) { 125 | methodLogger.debug( 126 | `No configuration found for ${ 127 | this.packageName 128 | } using keys: ${potentialKeys.join(', ')}`, 129 | ); 130 | return; 131 | } 132 | 133 | const environments = foundConfigSection.environments; 134 | for (const [key, value] of Object.entries(environments)) { 135 | // Only set if not already defined in process.env 136 | if (process.env[key] === undefined) { 137 | process.env[key] = String(value); 138 | } 139 | } 140 | 141 | methodLogger.debug( 142 | `Loaded configuration from global config file using key: ${usedKey}`, 143 | ); 144 | } catch (error) { 145 | methodLogger.error('Error loading global config file', error); 146 | } 147 | } 148 | 149 | /** 150 | * Get a configuration value 151 | * @param key The configuration key 152 | * @param defaultValue The default value if the key is not found 153 | * @returns The configuration value or the default value 154 | */ 155 | get(key: string, defaultValue?: string): string | undefined { 156 | return process.env[key] || defaultValue; 157 | } 158 | 159 | /** 160 | * Get a boolean configuration value 161 | * @param key The configuration key 162 | * @param defaultValue The default value if the key is not found 163 | * @returns The boolean configuration value or the default value 164 | */ 165 | getBoolean(key: string, defaultValue: boolean = false): boolean { 166 | const value = this.get(key); 167 | if (value === undefined) { 168 | return defaultValue; 169 | } 170 | return value.toLowerCase() === 'true'; 171 | } 172 | 173 | /** 174 | * Get a number configuration value 175 | * @param key The configuration key 176 | * @param defaultValue The default value if the key is not found 177 | * @returns The number configuration value or the default value 178 | */ 179 | getNumber(key: string, defaultValue: number = 0): number { 180 | const value = this.get(key); 181 | if (value === undefined) { 182 | return defaultValue; 183 | } 184 | const parsed = parseInt(value, 10); 185 | return isNaN(parsed) ? defaultValue : parsed; 186 | } 187 | } 188 | 189 | // Create and export a singleton instance with the package name from package.json 190 | export const config = new ConfigLoader( 191 | '@aashari/mcp-server-atlassian-bitbucket', 192 | ); 193 | ``` -------------------------------------------------------------------------------- /src/cli/atlassian.diff.cli.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { Command } from 'commander'; 2 | import { Logger } from '../utils/logger.util.js'; 3 | import { handleCliError } from '../utils/error.util.js'; 4 | import diffController from '../controllers/atlassian.diff.controller.js'; 5 | 6 | // Create a contextualized logger for this file 7 | const cliLogger = Logger.forContext('cli/atlassian.diff.cli.ts'); 8 | 9 | // Log initialization 10 | cliLogger.debug('Bitbucket diff CLI module initialized'); 11 | 12 | /** 13 | * Register diff-related CLI commands 14 | * @param program - Commander instance 15 | */ 16 | function register(program: Command) { 17 | const registerLogger = cliLogger.forMethod('register'); 18 | registerLogger.debug('Registering Bitbucket Diff CLI commands...'); 19 | 20 | // Branch diff command 21 | program 22 | .command('diff-branches') 23 | .description( 24 | 'Display differences between two branches in a repository.\nIMPORTANT: The output shows changes as "destinationBranch → sourceBranch". 
For complete code changes (not just summary), try reversing the branch parameters if initial results show only summary.', 25 | ) 26 | .option( 27 | '-w, --workspace-slug <workspaceSlug>', 28 | 'Workspace slug containing the repository. If not provided, the system will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account).', 29 | ) 30 | .requiredOption( 31 | '-r, --repo-slug <repoSlug>', 32 | 'Repository slug where the branches are located. Example: "my-repo"', 33 | ) 34 | .requiredOption( 35 | '-s, --source-branch <sourceBranch>', 36 | 'Name of the source branch (typically your feature branch). Example: "feature/my-feature"', 37 | ) 38 | .option( 39 | '-d, --destination-branch <destinationBranch>', 40 | 'Name of the destination branch (typically the main branch). Defaults to "main" if not provided.', 41 | ) 42 | .option( 43 | '--full-diff <boolean>', 44 | 'Whether to include the full diff in the response. Defaults to true.', 45 | (value) => value === 'true', 46 | ) 47 | .option( 48 | '-l, --limit <number>', 49 | 'Maximum number of files to show in the diff (1-100). Defaults to 25 if omitted.', 50 | ) 51 | .option( 52 | '-p, --page <number>', 53 | 'Page number for pagination. Starts at 1. Use with limit to paginate results.', 54 | ) 55 | .option( 56 | '-t, --topic <boolean>', 57 | 'Whether to treat the source ref as a topic branch. Defaults to false.', 58 | (value) => value === 'true', 59 | ) 60 | .action(async (options) => { 61 | const actionLogger = cliLogger.forMethod('diff-branches'); 62 | try { 63 | actionLogger.debug('Processing command options:', options); 64 | 65 | // Map CLI options to controller params - keep only type conversions 66 | const controllerOptions = { 67 | workspaceSlug: options.workspaceSlug, 68 | repoSlug: options.repoSlug, 69 | sourceBranch: options.sourceBranch, 70 | destinationBranch: options.destinationBranch, 71 | includeFullDiff: 72 | options.fullDiff !== undefined 73 | ? options.fullDiff 74 | : true, 75 | limit: options.limit 76 | ? parseInt(options.limit, 10) 77 | : undefined, 78 | cursor: options.page 79 | ? parseInt(options.page, 10) 80 | : undefined, 81 | topic: options.topic, 82 | }; 83 | 84 | actionLogger.debug( 85 | 'Calling controller with parameters:', 86 | controllerOptions, 87 | ); 88 | 89 | // Call controller directly 90 | const result = 91 | await diffController.branchDiff(controllerOptions); 92 | 93 | console.log(result.content); 94 | } catch (error) { 95 | actionLogger.error('Operation failed:', error); 96 | handleCliError(error); 97 | } 98 | }); 99 | 100 | // Commit diff command 101 | program 102 | .command('diff-commits') 103 | .description( 104 | 'Display differences between two commits in a repository.\nIMPORTANT: For proper results, the parameter order can matter. If you see "No changes detected", try reversing the commit order.', 105 | ) 106 | .option( 107 | '-w, --workspace-slug <workspaceSlug>', 108 | 'Workspace slug containing the repository. If not provided, the system will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account).', 109 | ) 110 | .requiredOption( 111 | '-r, --repo-slug <repoSlug>', 112 | 'Repository slug where the commits are located. Example: "my-repo"', 113 | ) 114 | .requiredOption( 115 | '-s, --since-commit <sinceCommit>', 116 | 'Commit hash for the newer/later commit. 
Example: "a1b2c3d4"', 117 | ) 118 | .requiredOption( 119 | '-u, --until-commit <untilCommit>', 120 | 'Commit hash for the older/earlier commit. Example: "e5f6g7h8"', 121 | ) 122 | .option( 123 | '--full-diff <boolean>', 124 | 'Whether to include the full diff in the response. Defaults to true.', 125 | (value) => value === 'true', 126 | ) 127 | .option( 128 | '-l, --limit <number>', 129 | 'Maximum number of files to show in the diff (1-100). Defaults to 25 if omitted.', 130 | ) 131 | .option( 132 | '-p, --page <number>', 133 | 'Page number for pagination. Starts at 1. Use with limit to paginate results.', 134 | ) 135 | .action(async (options) => { 136 | const actionLogger = cliLogger.forMethod('diff-commits'); 137 | try { 138 | actionLogger.debug('Processing command options:', options); 139 | 140 | // Map CLI options to controller params - keep only type conversions 141 | const controllerOptions = { 142 | workspaceSlug: options.workspaceSlug, 143 | repoSlug: options.repoSlug, 144 | sinceCommit: options.sinceCommit, 145 | untilCommit: options.untilCommit, 146 | includeFullDiff: 147 | options.fullDiff !== undefined 148 | ? options.fullDiff 149 | : true, 150 | limit: options.limit 151 | ? parseInt(options.limit, 10) 152 | : undefined, 153 | cursor: options.page 154 | ? parseInt(options.page, 10) 155 | : undefined, 156 | }; 157 | 158 | actionLogger.debug( 159 | 'Calling controller with parameters:', 160 | controllerOptions, 161 | ); 162 | 163 | // Call controller directly 164 | const result = 165 | await diffController.commitDiff(controllerOptions); 166 | 167 | console.log(result.content); 168 | } catch (error) { 169 | actionLogger.error('Operation failed:', error); 170 | handleCliError(error); 171 | } 172 | }); 173 | 174 | registerLogger.debug('CLI commands registered successfully'); 175 | } 176 | 177 | export default { register }; 178 | ``` -------------------------------------------------------------------------------- /src/utils/transport.util.test.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { getAtlassianCredentials, fetchAtlassian } from './transport.util.js'; 2 | import { config } from './config.util.js'; 3 | 4 | /** 5 | * Generic response type for testing 6 | */ 7 | interface TestResponse { 8 | values: Array<Record<string, unknown>>; 9 | next?: string; 10 | total?: number; 11 | } 12 | 13 | // NOTE: We are no longer mocking fetch or logger, using real implementations instead 14 | 15 | describe('Transport Utility', () => { 16 | // Load configuration before all tests 17 | beforeAll(() => { 18 | // Load configuration from all sources 19 | config.load(); 20 | }); 21 | 22 | describe('getAtlassianCredentials', () => { 23 | it('should return credentials when environment variables are set', () => { 24 | // This test will be skipped if credentials are not available 25 | const credentials = getAtlassianCredentials(); 26 | if (!credentials) { 27 | return; // Skip silently - no credentials available for testing 28 | } 29 | 30 | // Check if the credentials are for standard Atlassian or Bitbucket-specific 31 | if (credentials.useBitbucketAuth) { 32 | // Verify the Bitbucket-specific credentials 33 | expect(credentials).toHaveProperty('bitbucketUsername'); 34 | expect(credentials).toHaveProperty('bitbucketAppPassword'); 35 | expect(credentials).toHaveProperty('useBitbucketAuth'); 36 | 37 | // Verify the credentials are not empty 38 | expect(credentials.bitbucketUsername).toBeTruthy(); 39 | 
expect(credentials.bitbucketAppPassword).toBeTruthy(); 40 | expect(credentials.useBitbucketAuth).toBe(true); 41 | } else { 42 | // Verify the standard Atlassian credentials 43 | expect(credentials).toHaveProperty('userEmail'); 44 | expect(credentials).toHaveProperty('apiToken'); 45 | 46 | // Verify the credentials are not empty 47 | expect(credentials.userEmail).toBeTruthy(); 48 | expect(credentials.apiToken).toBeTruthy(); 49 | // Note: siteName is optional for API tokens 50 | } 51 | }); 52 | 53 | it('should return null and log a warning when environment variables are missing', () => { 54 | // Store original environment variables 55 | const originalEnv = { ...process.env }; 56 | 57 | // Clear relevant environment variables to simulate missing credentials 58 | delete process.env.ATLASSIAN_SITE_NAME; 59 | delete process.env.ATLASSIAN_USER_EMAIL; 60 | delete process.env.ATLASSIAN_API_TOKEN; 61 | delete process.env.ATLASSIAN_BITBUCKET_USERNAME; 62 | delete process.env.ATLASSIAN_BITBUCKET_APP_PASSWORD; 63 | 64 | // Force reload configuration 65 | config.load(); 66 | 67 | // Call the function 68 | const credentials = getAtlassianCredentials(); 69 | 70 | // Verify the result is null 71 | expect(credentials).toBeNull(); 72 | 73 | // Restore original environment 74 | process.env = originalEnv; 75 | 76 | // Reload config with original environment 77 | config.load(); 78 | }); 79 | }); 80 | 81 | describe('fetchAtlassian', () => { 82 | it('should successfully fetch data from the Atlassian API', async () => { 83 | // This test will be skipped if credentials are not available 84 | const credentials = getAtlassianCredentials(); 85 | if (!credentials) { 86 | return; // Skip silently - no credentials available for testing 87 | } 88 | 89 | // Make a call to a real API endpoint 90 | // For Bitbucket, we'll use the workspaces endpoint 91 | const result = await fetchAtlassian<TestResponse>( 92 | credentials, 93 | '/2.0/workspaces', 94 | { 95 | method: 'GET', 96 | headers: { 97 | 'Content-Type': 'application/json', 98 | }, 99 | }, 100 | ); 101 | 102 | // Verify the response structure from real API 103 | expect(result).toHaveProperty('values'); 104 | expect(Array.isArray(result.values)).toBe(true); 105 | // Different property names than mocked data to match actual API response 106 | if (result.values.length > 0) { 107 | // Verify an actual workspace result 108 | const workspace = result.values[0]; 109 | expect(workspace).toHaveProperty('uuid'); 110 | expect(workspace).toHaveProperty('name'); 111 | expect(workspace).toHaveProperty('slug'); 112 | } 113 | }, 15000); // Increased timeout for real API call 114 | 115 | it('should handle API errors correctly', async () => { 116 | // This test will be skipped if credentials are not available 117 | const credentials = getAtlassianCredentials(); 118 | if (!credentials) { 119 | return; // Skip silently - no credentials available for testing 120 | } 121 | 122 | // Call a non-existent endpoint and expect it to throw 123 | await expect( 124 | fetchAtlassian(credentials, '/2.0/non-existent-endpoint'), 125 | ).rejects.toThrow(); 126 | }, 15000); // Increased timeout for real API call 127 | 128 | it('should normalize paths that do not start with a slash', async () => { 129 | // This test will be skipped if credentials are not available 130 | const credentials = getAtlassianCredentials(); 131 | if (!credentials) { 132 | return; // Skip silently - no credentials available for testing 133 | } 134 | 135 | // Call the function with a path that doesn't start with a slash 136 | const 
result = await fetchAtlassian<TestResponse>( 137 | credentials, 138 | '2.0/workspaces', 139 | { 140 | method: 'GET', 141 | }, 142 | ); 143 | 144 | // Verify the response structure from real API 145 | expect(result).toHaveProperty('values'); 146 | expect(Array.isArray(result.values)).toBe(true); 147 | }, 15000); // Increased timeout for real API call 148 | 149 | it('should support custom request options', async () => { 150 | // This test will be skipped if credentials are not available 151 | const credentials = getAtlassianCredentials(); 152 | if (!credentials) { 153 | return; // Skip silently - no credentials available for testing 154 | } 155 | 156 | // Custom request options with pagination 157 | const options = { 158 | method: 'GET' as const, 159 | headers: { 160 | Accept: 'application/json', 161 | 'Content-Type': 'application/json', 162 | }, 163 | }; 164 | 165 | // Call a real endpoint with pagination parameter 166 | const result = await fetchAtlassian<TestResponse>( 167 | credentials, 168 | '/2.0/workspaces?pagelen=1', 169 | options, 170 | ); 171 | 172 | // Verify the response structure from real API 173 | expect(result).toHaveProperty('values'); 174 | expect(Array.isArray(result.values)).toBe(true); 175 | expect(result.values.length).toBeLessThanOrEqual(1); // Should respect pagelen=1 176 | }, 15000); // Increased timeout for real API call 177 | }); 178 | }); 179 | ``` -------------------------------------------------------------------------------- /src/controllers/atlassian.diff.formatter.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { DiffstatResponse } from '../services/vendor.atlassian.repositories.diff.types.js'; 2 | import { 3 | formatHeading, 4 | formatSeparator, 5 | formatDate, 6 | formatDiff, 7 | } from '../utils/formatter.util.js'; 8 | 9 | /** 10 | * Format diffstat results into Markdown 11 | * 12 | * @param diffstat - Diffstat response containing file changes 13 | * @param baseBranchOrCommit - Name of the base branch or commit (source of the diff) 14 | * @param targetBranchOrCommit - Name of the target branch or commit (destination of the diff) 15 | * @returns Formatted Markdown string 16 | */ 17 | export function formatDiffstat( 18 | diffstat: DiffstatResponse, 19 | baseBranchOrCommit: string, 20 | targetBranchOrCommit: string, 21 | ): string { 22 | const lines: string[] = []; 23 | const files = diffstat.values || []; 24 | 25 | // Title section 26 | lines.push( 27 | formatHeading( 28 | `Diff: ${baseBranchOrCommit} → ${targetBranchOrCommit}`, 29 | 1, 30 | ), 31 | ); 32 | lines.push(''); 33 | 34 | if (files.length === 0) { 35 | lines.push( 36 | '*No changes detected in the diffstat response. This might occur when:*', 37 | ); 38 | lines.push('- The commits or branches are identical'); 39 | lines.push( 40 | '- The changes are purely structural (e.g., merge commits without content changes)', 41 | ); 42 | lines.push( 43 | '- The parameters need to be specified in a different order', 44 | ); 45 | lines.push(''); 46 | lines.push('**Try the following:**'); 47 | lines.push( 48 | '1. For branch comparisons: Reverse the source and destination branch parameters', 49 | ); 50 | lines.push( 51 | '2. For commit comparisons: Ensure newer commit is `sinceCommit` and older commit is `untilCommit`', 52 | ); 53 | lines.push( 54 | '3. 
Check that both references exist and you have access to them', 55 | ); 56 | lines.push(''); 57 | lines.push(formatSeparator()); 58 | lines.push(`*Information retrieved at: ${formatDate(new Date())}*`); 59 | return lines.join('\n'); 60 | } 61 | 62 | // Summary statistics 63 | let totalAdditions = 0; 64 | let totalDeletions = 0; 65 | let conflictedFiles = 0; 66 | 67 | // Collect statistics 68 | files.forEach((file) => { 69 | if (file.lines_added) totalAdditions += file.lines_added; 70 | if (file.lines_removed) totalDeletions += file.lines_removed; 71 | if (file.status === 'merge conflict') conflictedFiles++; 72 | }); 73 | 74 | lines.push(formatHeading('Summary', 2)); 75 | lines.push( 76 | `${files.length} file${files.length !== 1 ? 's' : ''} changed with ${totalAdditions} insertion${totalAdditions !== 1 ? 's' : ''} and ${totalDeletions} deletion${totalDeletions !== 1 ? 's' : ''}.`, 77 | ); 78 | 79 | if (conflictedFiles > 0) { 80 | lines.push(''); 81 | lines.push( 82 | `⚠️ **Merge conflicts detected in ${conflictedFiles} file${conflictedFiles !== 1 ? 's' : ''}.**`, 83 | ); 84 | } 85 | 86 | lines.push(''); 87 | 88 | // File changes section (limit to a reasonable number, like 20) 89 | const maxFilesToShow = 20; 90 | const hasMoreFiles = files.length > maxFilesToShow; 91 | const filesToDisplay = files.slice(0, maxFilesToShow); 92 | 93 | // File list with changes 94 | lines.push(formatHeading('Files Changed', 2)); 95 | lines.push(''); 96 | 97 | filesToDisplay.forEach((file) => { 98 | const changes = []; 99 | if (file.lines_added) changes.push(`+${file.lines_added}`); 100 | if (file.lines_removed) changes.push(`-${file.lines_removed}`); 101 | const changeStr = changes.length > 0 ? ` (${changes.join(', ')})` : ''; 102 | 103 | // Handle potentially null old/new paths 104 | const filePath = file.new?.path || file.old?.path || '(unnamed file)'; 105 | 106 | // Show path, changes, and status if it's a conflict 107 | let line = `- \`${filePath}\`${changeStr}`; 108 | if (file.status === 'merge conflict') { 109 | line += ' **CONFLICT**'; 110 | } 111 | lines.push(line); 112 | }); 113 | 114 | if (hasMoreFiles) { 115 | lines.push(''); 116 | lines.push(`... 
and ${files.length - maxFilesToShow} more files`); 117 | } 118 | 119 | // Standard footer 120 | lines.push(''); 121 | lines.push(formatSeparator()); 122 | lines.push(`*Information retrieved at: ${formatDate(new Date())}*`); 123 | 124 | return lines.join('\n'); 125 | } 126 | 127 | /** 128 | * Format complete diff results, including diffstat summary and raw diff 129 | * 130 | * @param diffstat - Diffstat response containing file changes 131 | * @param rawDiff - Raw unified diff text 132 | * @param baseBranchOrCommit - Name of the base branch or commit 133 | * @param targetBranchOrCommit - Name of the target branch or commit 134 | * @returns Formatted Markdown string 135 | */ 136 | export function formatFullDiff( 137 | diffstat: DiffstatResponse, 138 | rawDiff: string, 139 | baseBranchOrCommit: string, 140 | targetBranchOrCommit: string, 141 | ): string { 142 | const diffstatMd = formatDiffstat( 143 | diffstat, 144 | baseBranchOrCommit, 145 | targetBranchOrCommit, 146 | ); 147 | 148 | // If there's a raw diff but empty diffstat, we should still show the raw diff 149 | // This can happen with structural changes like merges 150 | if ( 151 | rawDiff && 152 | rawDiff.trim() !== '' && 153 | (diffstat.values || []).length === 0 154 | ) { 155 | const lines = diffstatMd.split('\n'); 156 | 157 | // Replace the "No changes detected" message with a more accurate one 158 | const messageStartIndex = lines.findIndex((line) => 159 | line.includes('*No changes detected'), 160 | ); 161 | if (messageStartIndex >= 0) { 162 | lines.splice( 163 | messageStartIndex, 164 | 6, 165 | '*No file changes in diffstat but raw diff content was found. This often happens with:*', 166 | '- Merge commits or trivial merges', 167 | '- Rename-only changes without content modifications', 168 | '- Changes to file metadata or permissions without content changes', 169 | '', 170 | 'If the diff content below is not what you expected, try reversing the parameter order:', 171 | '- For branch comparisons: swap source and destination branch values', 172 | '- For commit comparisons: swap sinceCommit and untilCommit values', 173 | ); 174 | } 175 | 176 | // Insert section heading for the raw diff before the footer 177 | const separatorIndex = lines.findIndex((line) => line.includes('---')); 178 | if (separatorIndex >= 0) { 179 | lines.splice( 180 | separatorIndex, 181 | 0, 182 | '', 183 | formatHeading('Raw Diff Content', 2), 184 | '', 185 | ); 186 | lines.splice(separatorIndex + 3, 0, formatDiff(rawDiff)); 187 | } 188 | 189 | return lines.join('\n'); 190 | } 191 | 192 | if (!rawDiff || rawDiff.trim() === '') { 193 | return diffstatMd; 194 | } 195 | 196 | const lines = diffstatMd.split('\n'); 197 | 198 | // Insert section heading for the raw diff 199 | // Insert before the standard footer (which is the last 2 lines) 200 | lines.splice(lines.length - 3, 0, '', formatHeading('Code Changes', 2), ''); 201 | lines.splice(lines.length - 3, 0, formatDiff(rawDiff)); 202 | 203 | return lines.join('\n'); 204 | } 205 | ``` -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- ```typescript 1 | #!/usr/bin/env node 2 | import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; 3 | import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; 4 | import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'; 5 | import { Logger } from './utils/logger.util.js'; 6 | 
import { config } from './utils/config.util.js'; 7 | import { VERSION, PACKAGE_NAME } from './utils/constants.util.js'; 8 | import { runCli } from './cli/index.js'; 9 | import type { Request, Response } from 'express'; 10 | import express from 'express'; 11 | import cors from 'cors'; 12 | 13 | // Import Bitbucket-specific tools 14 | import atlassianWorkspaces from './tools/atlassian.workspaces.tool.js'; 15 | import atlassianRepositories from './tools/atlassian.repositories.tool.js'; 16 | import atlassianPullRequests from './tools/atlassian.pullrequests.tool.js'; 17 | import atlassianSearch from './tools/atlassian.search.tool.js'; 18 | import atlassianDiff from './tools/atlassian.diff.tool.js'; 19 | 20 | // Create a contextualized logger for this file 21 | const indexLogger = Logger.forContext('index.ts'); 22 | 23 | // Log initialization at debug level 24 | indexLogger.debug('Bitbucket MCP server module loaded'); 25 | 26 | let serverInstance: McpServer | null = null; 27 | let transportInstance: 28 | | StreamableHTTPServerTransport 29 | | StdioServerTransport 30 | | null = null; 31 | 32 | /** 33 | * Start the MCP server with the specified transport mode 34 | * 35 | * @param mode The transport mode to use (stdio or http) 36 | * @returns Promise that resolves to the server instance when started successfully 37 | */ 38 | export async function startServer( 39 | mode: 'stdio' | 'http' = 'stdio', 40 | ): Promise<McpServer> { 41 | const serverLogger = Logger.forContext('index.ts', 'startServer'); 42 | 43 | // Load configuration 44 | serverLogger.info('Starting MCP server initialization...'); 45 | config.load(); 46 | 47 | if (config.getBoolean('DEBUG')) { 48 | serverLogger.debug('Debug mode enabled'); 49 | } 50 | 51 | serverLogger.info(`Initializing Bitbucket MCP server v${VERSION}`); 52 | serverInstance = new McpServer({ 53 | name: PACKAGE_NAME, 54 | version: VERSION, 55 | }); 56 | 57 | // Register all tools 58 | serverLogger.info('Registering MCP tools...'); 59 | atlassianWorkspaces.registerTools(serverInstance); 60 | atlassianRepositories.registerTools(serverInstance); 61 | atlassianPullRequests.registerTools(serverInstance); 62 | atlassianSearch.registerTools(serverInstance); 63 | atlassianDiff.registerTools(serverInstance); 64 | serverLogger.info('All tools registered successfully'); 65 | 66 | if (mode === 'stdio') { 67 | // STDIO Transport 68 | serverLogger.info('Using STDIO transport for MCP communication'); 69 | transportInstance = new StdioServerTransport(); 70 | 71 | try { 72 | await serverInstance.connect(transportInstance); 73 | serverLogger.info( 74 | 'MCP server started successfully on STDIO transport', 75 | ); 76 | setupGracefulShutdown(); 77 | return serverInstance; 78 | } catch (err) { 79 | serverLogger.error( 80 | 'Failed to start server on STDIO transport', 81 | err, 82 | ); 83 | process.exit(1); 84 | } 85 | } else { 86 | // HTTP Transport with Express 87 | serverLogger.info( 88 | 'Using Streamable HTTP transport for MCP communication', 89 | ); 90 | 91 | const app = express(); 92 | app.use(cors()); 93 | app.use(express.json()); 94 | 95 | const mcpEndpoint = '/mcp'; 96 | serverLogger.debug(`MCP endpoint: ${mcpEndpoint}`); 97 | 98 | // Create transport instance 99 | const transport = new StreamableHTTPServerTransport({ 100 | sessionIdGenerator: undefined, 101 | }); 102 | 103 | // Connect server to transport 104 | await serverInstance.connect(transport); 105 | transportInstance = transport; 106 | 107 | // Handle all MCP requests 108 | app.all(mcpEndpoint, (req: Request, res: 
Response) => { 109 | transport 110 | .handleRequest(req, res, req.body) 111 | .catch((err: unknown) => { 112 | serverLogger.error('Error in transport.handleRequest', err); 113 | if (!res.headersSent) { 114 | res.status(500).json({ 115 | error: 'Internal Server Error', 116 | }); 117 | } 118 | }); 119 | }); 120 | 121 | // Health check endpoint 122 | app.get('/', (_req: Request, res: Response) => { 123 | res.send(`Bitbucket MCP Server v${VERSION} is running`); 124 | }); 125 | 126 | // Start HTTP server 127 | const PORT = Number(process.env.PORT ?? 3000); 128 | await new Promise<void>((resolve) => { 129 | app.listen(PORT, () => { 130 | serverLogger.info( 131 | `HTTP transport listening on http://localhost:${PORT}${mcpEndpoint}`, 132 | ); 133 | resolve(); 134 | }); 135 | }); 136 | 137 | setupGracefulShutdown(); 138 | return serverInstance; 139 | } 140 | } 141 | 142 | /** 143 | * Main entry point - this will run when executed directly 144 | * Determines whether to run in CLI or server mode based on command-line arguments 145 | */ 146 | async function main() { 147 | const mainLogger = Logger.forContext('index.ts', 'main'); 148 | 149 | // Load configuration 150 | config.load(); 151 | 152 | // CLI mode - if any arguments are provided 153 | if (process.argv.length > 2) { 154 | mainLogger.info('Starting in CLI mode'); 155 | await runCli(process.argv.slice(2)); 156 | mainLogger.info('CLI execution completed'); 157 | return; 158 | } 159 | 160 | // Server mode - determine transport 161 | const transportMode = (process.env.TRANSPORT_MODE || 'stdio').toLowerCase(); 162 | let mode: 'http' | 'stdio'; 163 | 164 | if (transportMode === 'stdio') { 165 | mode = 'stdio'; 166 | } else if (transportMode === 'http') { 167 | mode = 'http'; 168 | } else { 169 | mainLogger.warn( 170 | `Unknown TRANSPORT_MODE "${transportMode}", defaulting to stdio`, 171 | ); 172 | mode = 'stdio'; 173 | } 174 | 175 | mainLogger.info(`Starting server with ${mode.toUpperCase()} transport`); 176 | await startServer(mode); 177 | mainLogger.info('Server is now running'); 178 | } 179 | 180 | /** 181 | * Set up graceful shutdown handlers for the server 182 | */ 183 | function setupGracefulShutdown() { 184 | const shutdownLogger = Logger.forContext('index.ts', 'shutdown'); 185 | 186 | const shutdown = async () => { 187 | try { 188 | shutdownLogger.info('Shutting down gracefully...'); 189 | 190 | if ( 191 | transportInstance && 192 | 'close' in transportInstance && 193 | typeof transportInstance.close === 'function' 194 | ) { 195 | await transportInstance.close(); 196 | } 197 | 198 | if (serverInstance && typeof serverInstance.close === 'function') { 199 | await serverInstance.close(); 200 | } 201 | 202 | process.exit(0); 203 | } catch (err) { 204 | shutdownLogger.error('Error during shutdown', err); 205 | process.exit(1); 206 | } 207 | }; 208 | 209 | ['SIGINT', 'SIGTERM'].forEach((signal) => { 210 | process.on(signal as NodeJS.Signals, shutdown); 211 | }); 212 | } 213 | 214 | // If this file is being executed directly (not imported), run the main function 215 | if (require.main === module) { 216 | main().catch((err) => { 217 | indexLogger.error('Unhandled error in main process', err); 218 | process.exit(1); 219 | }); 220 | } 221 | ``` -------------------------------------------------------------------------------- /src/services/vendor.atlassian.workspaces.service.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { z } from 'zod'; 2 | import { 3 | createAuthMissingError, 4 | 
createApiError, 5 | McpError, 6 | } from '../utils/error.util.js'; 7 | import { Logger } from '../utils/logger.util.js'; 8 | import { 9 | fetchAtlassian, 10 | getAtlassianCredentials, 11 | } from '../utils/transport.util.js'; 12 | import { 13 | WorkspaceDetailedSchema, 14 | WorkspacePermissionsResponseSchema, 15 | ListWorkspacesParamsSchema, 16 | type ListWorkspacesParams, 17 | } from './vendor.atlassian.workspaces.types.js'; 18 | 19 | /** 20 | * Base API path for Bitbucket REST API v2 21 | * @see https://developer.atlassian.com/cloud/bitbucket/rest/api-group-workspaces/ 22 | * @constant {string} 23 | */ 24 | const API_PATH = '/2.0'; 25 | 26 | /** 27 | * @namespace VendorAtlassianWorkspacesService 28 | * @description Service for interacting with Bitbucket Workspaces API. 29 | * Provides methods for listing workspaces and retrieving workspace details. 30 | * All methods require valid Atlassian credentials configured in the environment. 31 | */ 32 | 33 | // Create a contextualized logger for this file 34 | const serviceLogger = Logger.forContext( 35 | 'services/vendor.atlassian.workspaces.service.ts', 36 | ); 37 | 38 | // Log service initialization 39 | serviceLogger.debug('Bitbucket workspaces service initialized'); 40 | 41 | /** 42 | * List Bitbucket workspaces with optional filtering and pagination 43 | * 44 | * Retrieves a list of workspaces from Bitbucket with support for various filters 45 | * and pagination options. 46 | * 47 | * NOTE: The /2.0/user/permissions/workspaces endpoint does not support sorting, 48 | * despite the ListWorkspacesParams type including a sort parameter. 49 | * 50 | * @async 51 | * @memberof VendorAtlassianWorkspacesService 52 | * @param {ListWorkspacesParams} [params={}] - Optional parameters for customizing the request 53 | * @param {string} [params.q] - Filter by workspace name 54 | * @param {number} [params.page] - Page number 55 | * @param {number} [params.pagelen] - Number of items per page 56 | * @returns {Promise<z.infer<typeof WorkspacePermissionsResponseSchema>>} Promise containing the validated workspaces response 57 | * @throws {McpError} If validation fails, credentials are missing, or API request fails 58 | * @example 59 | * // List workspaces with pagination 60 | * const response = await list({ 61 | * pagelen: 10 62 | * }); 63 | */ 64 | async function list( 65 | params: ListWorkspacesParams = {}, 66 | ): Promise<z.infer<typeof WorkspacePermissionsResponseSchema>> { 67 | const methodLogger = Logger.forContext( 68 | 'services/vendor.atlassian.workspaces.service.ts', 69 | 'list', 70 | ); 71 | methodLogger.debug('Listing Bitbucket workspaces with params:', params); 72 | 73 | // Validate params with Zod 74 | try { 75 | ListWorkspacesParamsSchema.parse(params); 76 | } catch (error) { 77 | if (error instanceof z.ZodError) { 78 | methodLogger.error( 79 | 'Invalid parameters provided to list workspaces:', 80 | error.format(), 81 | ); 82 | throw createApiError( 83 | `Invalid parameters for listing workspaces: ${error.issues.map((e) => e.message).join(', ')}`, 84 | 400, 85 | error, 86 | ); 87 | } 88 | throw error; 89 | } 90 | 91 | const credentials = getAtlassianCredentials(); 92 | if (!credentials) { 93 | throw createAuthMissingError( 94 | 'Atlassian credentials are required for this operation', 95 | ); 96 | } 97 | 98 | // Build query parameters 99 | const queryParams = new URLSearchParams(); 100 | 101 | // Add optional query parameters if provided 102 | // NOTE: Sort is intentionally not included as the /2.0/user/permissions/workspaces endpoint 103 | // 
does not support sorting on any field 104 | if (params.q) { 105 | queryParams.set('q', params.q); 106 | } 107 | if (params.pagelen) { 108 | queryParams.set('pagelen', params.pagelen.toString()); 109 | } 110 | if (params.page) { 111 | queryParams.set('page', params.page.toString()); 112 | } 113 | 114 | const queryString = queryParams.toString() 115 | ? `?${queryParams.toString()}` 116 | : ''; 117 | const path = `${API_PATH}/user/permissions/workspaces${queryString}`; 118 | 119 | methodLogger.debug(`Sending request to: ${path}`); 120 | try { 121 | const rawData = await fetchAtlassian(credentials, path); 122 | // Validate response with Zod schema 123 | try { 124 | const validatedData = 125 | WorkspacePermissionsResponseSchema.parse(rawData); 126 | return validatedData; 127 | } catch (error) { 128 | if (error instanceof z.ZodError) { 129 | methodLogger.error( 130 | 'Invalid response from Bitbucket API:', 131 | error.format(), 132 | ); 133 | throw createApiError( 134 | `Invalid response format from Bitbucket API for workspace list: ${error.message}`, 135 | 500, 136 | error, 137 | ); 138 | } 139 | throw error; 140 | } 141 | } catch (error) { 142 | if (error instanceof McpError) { 143 | throw error; 144 | } 145 | throw createApiError( 146 | `Failed to list workspaces: ${error instanceof Error ? error.message : String(error)}`, 147 | 500, 148 | error, 149 | ); 150 | } 151 | } 152 | 153 | /** 154 | * Get detailed information about a specific Bitbucket workspace 155 | * 156 | * Retrieves comprehensive details about a single workspace. 157 | * 158 | * @async 159 | * @memberof VendorAtlassianWorkspacesService 160 | * @param {string} workspace - The workspace slug 161 | * @returns {Promise<z.infer<typeof WorkspaceDetailedSchema>>} Promise containing the validated workspace information 162 | * @throws {McpError} If validation fails, credentials are missing, or API request fails 163 | * @example 164 | * // Get workspace details 165 | * const workspace = await get('my-workspace'); 166 | */ 167 | async function get( 168 | workspace: string, 169 | ): Promise<z.infer<typeof WorkspaceDetailedSchema>> { 170 | const methodLogger = Logger.forContext( 171 | 'services/vendor.atlassian.workspaces.service.ts', 172 | 'get', 173 | ); 174 | methodLogger.debug(`Getting Bitbucket workspace with slug: ${workspace}`); 175 | 176 | const credentials = getAtlassianCredentials(); 177 | if (!credentials) { 178 | throw createAuthMissingError( 179 | 'Atlassian credentials are required for this operation', 180 | ); 181 | } 182 | 183 | // Currently no query parameters for workspace details API 184 | const path = `${API_PATH}/workspaces/${workspace}`; 185 | 186 | methodLogger.debug(`Sending request to: ${path}`); 187 | try { 188 | const rawData = await fetchAtlassian(credentials, path); 189 | // Validate response with Zod schema 190 | try { 191 | const validatedData = WorkspaceDetailedSchema.parse(rawData); 192 | return validatedData; 193 | } catch (error) { 194 | if (error instanceof z.ZodError) { 195 | methodLogger.error( 196 | 'Invalid response from Bitbucket API:', 197 | error.format(), 198 | ); 199 | throw createApiError( 200 | `Invalid response format from Bitbucket API for workspace details: ${error.message}`, 201 | 500, 202 | error, 203 | ); 204 | } 205 | throw error; 206 | } 207 | } catch (error) { 208 | if (error instanceof McpError) { 209 | throw error; 210 | } 211 | throw createApiError( 212 | `Failed to get workspace details: ${error instanceof Error ? 
error.message : String(error)}`, 213 | 500, 214 | error, 215 | ); 216 | } 217 | } 218 | 219 | export default { list, get }; 220 | ``` -------------------------------------------------------------------------------- /src/utils/error.util.test.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { describe, expect, test } from '@jest/globals'; 2 | import { 3 | ErrorType, 4 | McpError, 5 | createApiError, 6 | createAuthMissingError, 7 | createAuthInvalidError, 8 | createUnexpectedError, 9 | ensureMcpError, 10 | formatErrorForMcpTool, 11 | formatErrorForMcpResource, 12 | getDeepOriginalError, 13 | } from './error.util.js'; 14 | 15 | describe('Error Utilities', () => { 16 | describe('Error creation functions', () => { 17 | test('createAuthMissingError creates an error with AUTH_MISSING type', () => { 18 | const error = createAuthMissingError('Missing credentials'); 19 | expect(error).toBeInstanceOf(McpError); 20 | expect(error.type).toBe(ErrorType.AUTH_MISSING); 21 | expect(error.message).toBe('Missing credentials'); 22 | expect(error.statusCode).toBeUndefined(); 23 | }); 24 | 25 | test('createAuthInvalidError creates an error with AUTH_INVALID type and 401 status', () => { 26 | const error = createAuthInvalidError('Invalid token'); 27 | expect(error).toBeInstanceOf(McpError); 28 | expect(error.type).toBe(ErrorType.AUTH_INVALID); 29 | expect(error.message).toBe('Invalid token'); 30 | expect(error.statusCode).toBe(401); 31 | }); 32 | 33 | test('createApiError creates an error with API_ERROR type and specified status', () => { 34 | const error = createApiError('Not found', 404, { 35 | details: 'Resource missing', 36 | }); 37 | expect(error).toBeInstanceOf(McpError); 38 | expect(error.type).toBe(ErrorType.API_ERROR); 39 | expect(error.message).toBe('Not found'); 40 | expect(error.statusCode).toBe(404); 41 | expect(error.originalError).toEqual({ 42 | details: 'Resource missing', 43 | }); 44 | }); 45 | 46 | test('createUnexpectedError creates an error with UNEXPECTED_ERROR type', () => { 47 | const originalError = new Error('Original error'); 48 | const error = createUnexpectedError( 49 | 'Something went wrong', 50 | originalError, 51 | ); 52 | expect(error).toBeInstanceOf(McpError); 53 | expect(error.type).toBe(ErrorType.UNEXPECTED_ERROR); 54 | expect(error.message).toBe('Something went wrong'); 55 | expect(error.statusCode).toBeUndefined(); 56 | expect(error.originalError).toBe(originalError); 57 | }); 58 | }); 59 | 60 | describe('ensureMcpError function', () => { 61 | test('returns the error if it is already an McpError', () => { 62 | const error = createApiError('API error', 500); 63 | expect(ensureMcpError(error)).toBe(error); 64 | }); 65 | 66 | test('wraps a standard Error with McpError', () => { 67 | const stdError = new Error('Standard error'); 68 | const mcpError = ensureMcpError(stdError); 69 | expect(mcpError).toBeInstanceOf(McpError); 70 | expect(mcpError.message).toBe('Standard error'); 71 | expect(mcpError.type).toBe(ErrorType.UNEXPECTED_ERROR); 72 | expect(mcpError.originalError).toBe(stdError); 73 | }); 74 | 75 | test('wraps a string with McpError', () => { 76 | const mcpError = ensureMcpError('Error message'); 77 | expect(mcpError).toBeInstanceOf(McpError); 78 | expect(mcpError.message).toBe('Error message'); 79 | expect(mcpError.type).toBe(ErrorType.UNEXPECTED_ERROR); 80 | }); 81 | 82 | test('wraps other types with McpError', () => { 83 | const mcpError = ensureMcpError({ message: 'Object error' }); 84 | 
expect(mcpError).toBeInstanceOf(McpError); 85 | expect(mcpError.message).toBe('[object Object]'); 86 | expect(mcpError.type).toBe(ErrorType.UNEXPECTED_ERROR); 87 | }); 88 | }); 89 | 90 | describe('getDeepOriginalError function', () => { 91 | test('returns the deepest error in a chain', () => { 92 | const deepestError = { message: 'Root cause' }; 93 | const level3 = createApiError('Level 3', 500, deepestError); 94 | const level2 = createApiError('Level 2', 500, level3); 95 | const level1 = createApiError('Level 1', 500, level2); 96 | 97 | expect(getDeepOriginalError(level1)).toEqual(deepestError); 98 | }); 99 | 100 | test('handles non-McpError values', () => { 101 | const originalValue = 'Original error text'; 102 | expect(getDeepOriginalError(originalValue)).toBe(originalValue); 103 | }); 104 | 105 | test('stops traversing at maximum depth', () => { 106 | // Create a circular error chain that would cause infinite recursion 107 | const circular1: any = new McpError( 108 | 'Circular 1', 109 | ErrorType.API_ERROR, 110 | ); 111 | const circular2: any = new McpError( 112 | 'Circular 2', 113 | ErrorType.API_ERROR, 114 | ); 115 | circular1.originalError = circular2; 116 | circular2.originalError = circular1; 117 | 118 | // Should not cause infinite recursion 119 | const result = getDeepOriginalError(circular1); 120 | 121 | // Expect either circular1 or circular2 depending on max depth 122 | expect([circular1, circular2]).toContain(result); 123 | }); 124 | }); 125 | 126 | describe('formatErrorForMcpTool function', () => { 127 | test('formats an McpError for MCP tool response', () => { 128 | const originalError = { 129 | code: 'NOT_FOUND', 130 | message: 'Repository does not exist', 131 | }; 132 | const error = createApiError( 133 | 'Resource not found', 134 | 404, 135 | originalError, 136 | ); 137 | 138 | const formatted = formatErrorForMcpTool(error); 139 | 140 | expect(formatted).toHaveProperty('content'); 141 | expect(formatted.content[0].type).toBe('text'); 142 | expect(formatted.content[0].text).toBe('Error: Resource not found'); 143 | 144 | expect(formatted).toHaveProperty('metadata'); 145 | expect(formatted.metadata?.errorType).toBe(ErrorType.API_ERROR); 146 | expect(formatted.metadata?.statusCode).toBe(404); 147 | expect(formatted.metadata?.errorDetails).toEqual(originalError); 148 | }); 149 | 150 | test('formats a non-McpError for MCP tool response', () => { 151 | const error = new Error('Standard error'); 152 | 153 | const formatted = formatErrorForMcpTool(error); 154 | 155 | expect(formatted).toHaveProperty('content'); 156 | expect(formatted.content[0].type).toBe('text'); 157 | expect(formatted.content[0].text).toBe('Error: Standard error'); 158 | 159 | expect(formatted).toHaveProperty('metadata'); 160 | expect(formatted.metadata?.errorType).toBe( 161 | ErrorType.UNEXPECTED_ERROR, 162 | ); 163 | }); 164 | 165 | test('extracts detailed error information from nested errors', () => { 166 | const deepError = { 167 | message: 'API quota exceeded', 168 | type: 'RateLimitError', 169 | }; 170 | const midError = createApiError( 171 | 'Rate limit exceeded', 172 | 429, 173 | deepError, 174 | ); 175 | const topError = createApiError('API error', 429, midError); 176 | 177 | const formatted = formatErrorForMcpTool(topError); 178 | 179 | expect(formatted.content[0].text).toBe('Error: API error'); 180 | expect(formatted.metadata?.errorDetails).toEqual(deepError); 181 | }); 182 | }); 183 | 184 | describe('formatErrorForMcpResource', () => { 185 | it('should format an error for MCP resource response', () 
=> { 186 | const error = createApiError('API error'); 187 | const response = formatErrorForMcpResource(error, 'test://uri'); 188 | 189 | expect(response).toHaveProperty('contents'); 190 | expect(response.contents).toHaveLength(1); 191 | expect(response.contents[0]).toHaveProperty('uri', 'test://uri'); 192 | expect(response.contents[0]).toHaveProperty( 193 | 'text', 194 | 'Error: API error', 195 | ); 196 | expect(response.contents[0]).toHaveProperty( 197 | 'mimeType', 198 | 'text/plain', 199 | ); 200 | expect(response.contents[0]).toHaveProperty( 201 | 'description', 202 | 'Error: API_ERROR', 203 | ); 204 | }); 205 | }); 206 | }); 207 | ``` -------------------------------------------------------------------------------- /src/controllers/atlassian.search.formatter.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { ContentType } from '../utils/atlassian.util.js'; 2 | import { CodeSearchResult } from '../services/vendor.atlassian.search.service.js'; 3 | import { 4 | formatSeparator, 5 | formatDate, 6 | formatUrl, 7 | } from '../utils/formatter.util.js'; 8 | import path from 'path'; 9 | import { getContentTypeDisplay } from '../utils/atlassian.util.js'; 10 | 11 | /** 12 | * Try to guess the language from the file path 13 | */ 14 | function getLanguageHint(filePath: string): string { 15 | const ext = path.extname(filePath).toLowerCase(); 16 | const langMap: Record<string, string> = { 17 | '.js': 'javascript', 18 | '.jsx': 'jsx', 19 | '.ts': 'typescript', 20 | '.tsx': 'tsx', 21 | '.py': 'python', 22 | '.java': 'java', 23 | '.rb': 'ruby', 24 | '.php': 'php', 25 | '.cs': 'csharp', 26 | '.go': 'go', 27 | '.rs': 'rust', 28 | '.c': 'c', 29 | '.cpp': 'cpp', 30 | '.h': 'c', 31 | '.hpp': 'cpp', 32 | '.tf': 'terraform', 33 | '.hcl': 'hcl', 34 | '.sh': 'bash', 35 | '.zsh': 'zsh', 36 | '.json': 'json', 37 | '.yaml': 'yaml', 38 | '.yml': 'yaml', 39 | '.xml': 'xml', 40 | '.md': 'markdown', 41 | '.sql': 'sql', 42 | '.dockerfile': 'dockerfile', 43 | dockerfile: 'dockerfile', 44 | '.gitignore': 'gitignore', 45 | }; 46 | return langMap[ext] || ''; 47 | } 48 | 49 | /** 50 | * Format a single code search result into markdown 51 | * 52 | * @param result The code search result to format 53 | * @returns Formatted markdown string 54 | */ 55 | function formatCodeSearchResult(result: CodeSearchResult): string { 56 | // Format the file path - No highlighting needed here 57 | const filePath = result.file.path || 'Unknown File'; // <-- Use direct path 58 | 59 | // Fix the link text 60 | const fileLink = result.file.links?.self?.href 61 | ? formatUrl(result.file.links.self.href, filePath) // Use filePath for link text 62 | : filePath; 63 | 64 | // Build markdown output 65 | let markdown = `### ${fileLink}\n\n`; // Use fixed fileLink 66 | 67 | // Add match summary 68 | markdown += `${result.content_match_count} ${ 69 | result.content_match_count === 1 ? 
'match' : 'matches' 70 | } found\n\n`; 71 | 72 | // Get language hint for code block 73 | const langHint = getLanguageHint(filePath); 74 | markdown += '```' + langHint + '\n'; // Add language hint 75 | 76 | // Process each content match 77 | result.content_matches.forEach((contentMatch) => { 78 | // Process each line in the content match 79 | contentMatch.lines.forEach((line) => { 80 | // Add line number 81 | markdown += `${line.line}: `; 82 | 83 | // Process segments (some may be highlighted matches) 84 | if (line.segments.length) { 85 | line.segments.forEach((segment) => { 86 | // Use standard bold markdown for highlighting 87 | markdown += segment.match 88 | ? `\\\`${segment.text}\\\`` // <-- Changed highlighting to backticks 89 | : segment.text; 90 | }); 91 | } 92 | 93 | markdown += '\n'; 94 | }); 95 | 96 | // Add space between match groups only if there are multiple lines shown 97 | if (contentMatch.lines.length > 1) { 98 | markdown += '\n'; 99 | } 100 | }); 101 | 102 | markdown += '```\n\n'; 103 | 104 | return markdown; 105 | } 106 | 107 | /** 108 | * Format code search results into markdown 109 | * 110 | * @param response The code search response from the API 111 | * @returns Markdown formatted string of code search results 112 | */ 113 | export function formatCodeSearchResults(searchResponse: { 114 | values?: CodeSearchResult[]; 115 | size: number; 116 | }): string { 117 | const results = searchResponse.values || []; 118 | 119 | if (!results || results.length === 0) { 120 | // Add standard footer even for empty state 121 | return ( 122 | '**No code matches found.**\n\n' + 123 | '\n\n' + 124 | formatSeparator() + 125 | '\n' + 126 | `*Information retrieved at: ${formatDate(new Date())}*` 127 | ); 128 | } 129 | 130 | // Start with a summary 131 | let markdown = `## Code Search Results\n\nFound ${searchResponse.size} matches for the code search query.\n\n`; 132 | 133 | // Format each result 134 | results.forEach((result: CodeSearchResult) => { 135 | markdown += formatCodeSearchResult(result); 136 | }); 137 | 138 | // Add standard footer with timestamp 139 | markdown += '\n\n' + formatSeparator(); 140 | markdown += `\n*Information retrieved at: ${formatDate(new Date())}*`; 141 | 142 | return markdown; 143 | } 144 | 145 | /** 146 | * Format content search results into markdown 147 | * 148 | * @param response The content search response from the API 149 | * @param contentType Optional content type filter that was applied 150 | * @returns Markdown formatted string of content search results 151 | */ 152 | export function formatContentSearchResults( 153 | response: { values?: unknown[]; size: number }, 154 | contentType?: ContentType, 155 | ): string { 156 | const results = response.values || []; 157 | 158 | if (!results || results.length === 0) { 159 | // Add standard footer even for empty state 160 | return ( 161 | '**No content matches found.**\n\n' + 162 | '\n\n' + 163 | formatSeparator() + 164 | '\n' + 165 | `*Information retrieved at: ${formatDate(new Date())}*` 166 | ); 167 | } 168 | 169 | // Start with a summary 170 | const typeStr = contentType 171 | ? 
getContentTypeDisplay(contentType) 172 | : 'Content'; 173 | let markdown = `## ${typeStr} Search Results\n\nFound ${response.size} matches for the content search query.\n\n`; 174 | 175 | // Format each result - this is generic as content results can vary widely 176 | results.forEach((result) => { 177 | // We need to handle result as a generic object since content types vary 178 | const typedResult = result as Record<string, unknown>; 179 | 180 | // Try to determine the type from the result 181 | const type = (typedResult.type as string) || 'Unknown'; 182 | 183 | // Try to get a title/name 184 | let title = 'Untitled'; 185 | if (typedResult.title) { 186 | title = String(typedResult.title); 187 | } else if (typedResult.name) { 188 | title = String(typedResult.name); 189 | } else if (typedResult.summary) { 190 | const summary = String(typedResult.summary); 191 | title = summary.slice(0, 80) + (summary.length > 80 ? '...' : ''); 192 | } 193 | 194 | // Try to get a link 195 | let link = ''; 196 | const links = typedResult.links as 197 | | Record<string, { href?: string }> 198 | | undefined; 199 | if (links?.html?.href) { 200 | link = links.html.href; 201 | } else if (links?.self?.href) { 202 | link = links.self.href; 203 | } 204 | 205 | markdown += '### '; 206 | if (link) { 207 | markdown += formatUrl(link, title); 208 | } else { 209 | markdown += title; 210 | } 211 | markdown += '\n\n'; 212 | 213 | // Add type information 214 | markdown += `**Type**: ${type}\n`; 215 | 216 | // Add update/created date if available 217 | if (typedResult.updated_on) { 218 | markdown += `**Updated**: ${formatDate(typedResult.updated_on as string | Date)}\n`; 219 | } else if (typedResult.created_on) { 220 | markdown += `**Created**: ${formatDate(typedResult.created_on as string | Date)}\n`; 221 | } 222 | 223 | // Add description/content if available (limited to preserve readability) 224 | if (typedResult.description) { 225 | const description = String(typedResult.description); 226 | const limitedDesc = 227 | description.length > 500 228 | ? description.slice(0, 500) + '...' 229 | : description; 230 | markdown += `\n${limitedDesc}\n\n`; 231 | } else if (typedResult.content) { 232 | const content = String(typedResult.content); 233 | const limitedContent = 234 | content.length > 500 ? content.slice(0, 500) + '...' 
: content; 235 | markdown += `\n${limitedContent}\n\n`; 236 | } 237 | 238 | markdown += '\n'; 239 | }); 240 | 241 | // Add standard footer with timestamp 242 | markdown += '\n' + formatSeparator(); 243 | markdown += `\n*Information retrieved at: ${formatDate(new Date())}*`; 244 | 245 | return markdown; 246 | } 247 | ``` -------------------------------------------------------------------------------- /src/controllers/atlassian.repositories.formatter.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { 2 | Repository, 3 | RepositoriesResponse, 4 | PaginatedCommits, 5 | Commit, 6 | } from '../services/vendor.atlassian.repositories.types.js'; 7 | import { PullRequestsResponse } from '../services/vendor.atlassian.pullrequests.types.js'; 8 | import { 9 | formatUrl, 10 | formatHeading, 11 | formatBulletList, 12 | formatSeparator, 13 | formatNumberedList, 14 | formatDate, 15 | } from '../utils/formatter.util.js'; 16 | 17 | /** 18 | * Format a list of repositories for display 19 | * @param repositoriesData - Raw repositories data from the API 20 | * @returns Formatted string with repositories information in markdown format 21 | */ 22 | export function formatRepositoriesList( 23 | repositoriesData: RepositoriesResponse, 24 | ): string { 25 | const repositories = repositoriesData.values || []; 26 | 27 | if (repositories.length === 0) { 28 | return 'No repositories found matching your criteria.'; 29 | } 30 | 31 | const lines: string[] = [formatHeading('Bitbucket Repositories', 1), '']; 32 | 33 | // Format each repository with its details 34 | const formattedList = formatNumberedList(repositories, (repo, _index) => { 35 | const itemLines: string[] = []; 36 | itemLines.push(formatHeading(repo.name, 2)); 37 | 38 | // Basic information 39 | const properties: Record<string, unknown> = { 40 | Name: repo.name, 41 | 'Full Name': repo.full_name, 42 | Owner: 43 | repo.owner?.display_name || repo.owner?.username || 'Unknown', 44 | Description: repo.description || 'No description provided', 45 | 'Project Key': repo.project?.key || 'N/A', 46 | Private: repo.is_private ? 'Yes' : 'No', 47 | Created: repo.created_on ? formatDate(repo.created_on) : 'N/A', 48 | Updated: repo.updated_on ? formatDate(repo.updated_on) : 'N/A', 49 | URL: repo.links?.html?.href 50 | ? formatUrl(repo.links.html.href, repo.full_name) 51 | : 'N/A', 52 | }; 53 | 54 | // Format as a bullet list 55 | itemLines.push(formatBulletList(properties, (key) => key)); 56 | 57 | return itemLines.join('\n'); 58 | }); 59 | 60 | lines.push(formattedList); 61 | 62 | // Add standard footer with timestamp 63 | lines.push('\n\n' + formatSeparator()); 64 | lines.push(`*Information retrieved at: ${formatDate(new Date())}*`); 65 | 66 | return lines.join('\n'); 67 | } 68 | 69 | /** 70 | * Format detailed repository information for display 71 | * @param repositoryData - Raw repository data from the API 72 | * @param pullRequestsData - Optional pull requests data for this repository 73 | * @returns Formatted string with repository details in markdown format 74 | */ 75 | export function formatRepositoryDetails( 76 | repositoryData: Repository, 77 | pullRequestsData?: PullRequestsResponse | null, 78 | ): string { 79 | // Create URL 80 | const repoUrl = repositoryData.links?.html?.href || ''; 81 | 82 | const lines: string[] = [ 83 | formatHeading(`Repository: ${repositoryData.name}`, 1), 84 | '', 85 | `> A ${repositoryData.is_private ? 
'private' : 'public'} repository in the \`${repositoryData.full_name}\` workspace.`, 86 | '', 87 | formatHeading('Basic Information', 2), 88 | ]; 89 | 90 | // Format basic information as a bullet list 91 | const basicProperties: Record<string, unknown> = { 92 | Name: repositoryData.name, 93 | 'Full Name': repositoryData.full_name, 94 | UUID: repositoryData.uuid, 95 | Description: repositoryData.description || 'No description provided', 96 | Language: repositoryData.language || 'Not specified', 97 | 'Main Branch': repositoryData.mainbranch?.name || 'N/A', 98 | Private: repositoryData.is_private ? 'Yes' : 'No', 99 | Size: repositoryData.size 100 | ? `${(repositoryData.size / 1024).toFixed(2)} KB` 101 | : 'Unknown', 102 | 'Created On': repositoryData.created_on 103 | ? formatDate(repositoryData.created_on) 104 | : 'N/A', 105 | 'Updated On': repositoryData.updated_on 106 | ? formatDate(repositoryData.updated_on) 107 | : 'N/A', 108 | }; 109 | 110 | lines.push(formatBulletList(basicProperties, (key) => key)); 111 | 112 | // Owner information 113 | if (repositoryData.owner) { 114 | lines.push(''); 115 | lines.push(formatHeading('Owner', 2)); 116 | 117 | const ownerProperties: Record<string, unknown> = { 118 | Name: 119 | repositoryData.owner.display_name || 120 | repositoryData.owner.username || 121 | 'Unknown', 122 | Type: repositoryData.owner.type || 'Not specified', 123 | }; 124 | 125 | lines.push(formatBulletList(ownerProperties, (key) => key)); 126 | } 127 | 128 | // Links section 129 | lines.push(''); 130 | lines.push(formatHeading('Links', 2)); 131 | 132 | if (repoUrl) { 133 | lines.push(`- ${formatUrl(repoUrl, 'Open in Bitbucket')}`); 134 | } 135 | 136 | // Add recent pull requests section if available 137 | if ( 138 | pullRequestsData && 139 | pullRequestsData.values && 140 | pullRequestsData.values.length > 0 141 | ) { 142 | lines.push(''); 143 | lines.push(formatHeading('Recent Pull Requests', 2)); 144 | 145 | const prList = pullRequestsData.values.slice(0, 25); // Ensure max 25 146 | const formattedPrList = formatNumberedList(prList, (pr) => { 147 | return `**#${pr.id}**: [${pr.title}](${pr.links.html?.href || '#'}) - ${pr.state} by ${pr.author.display_name || 'Unknown'} (${formatDate(pr.updated_on)})`; 148 | }); 149 | 150 | lines.push(formattedPrList); 151 | 152 | if (repoUrl) { 153 | lines.push( 154 | `*View all pull requests in Bitbucket: [${repositoryData.full_name}/pull-requests](${repoUrl}/pull-requests)*`, 155 | ); 156 | } 157 | } else { 158 | // Add the section even if no PRs are available 159 | lines.push(''); 160 | lines.push(formatHeading('Recent Pull Requests', 2)); 161 | lines.push('No open pull requests found for this repository.'); 162 | 163 | if (repoUrl) { 164 | lines.push( 165 | `*View all pull requests in Bitbucket: [${repositoryData.full_name}/pull-requests](${repoUrl}/pull-requests)*`, 166 | ); 167 | } 168 | } 169 | 170 | // Add standard footer with timestamp 171 | lines.push('\n\n' + formatSeparator()); 172 | lines.push(`*Information retrieved at: ${formatDate(new Date())}*`); 173 | 174 | // Optionally keep the direct link 175 | if (repoUrl) { 176 | lines.push(`*View this repository in Bitbucket: ${repoUrl}*`); 177 | } 178 | 179 | return lines.join('\n'); 180 | } 181 | 182 | /** 183 | * Format commit history for display. 184 | * @param commitsData - Raw paginated commits data from the API. 185 | * @param options - Filtering options used to retrieve the history. 186 | * @returns Formatted string with commit history in markdown format. 
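 * @example
 * // Hypothetical usage (the revision and path values are illustrative, not taken from the repository):
 * // const markdown = formatCommitHistory(commitsData, { revision: 'main', path: 'src/index.ts' });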
187 | */ 188 | export function formatCommitHistory( 189 | commitsData: PaginatedCommits, 190 | options: { revision?: string; path?: string } = {}, 191 | ): string { 192 | const commits = commitsData.values || []; 193 | 194 | if (commits.length === 0) { 195 | return 'No commits found matching your criteria.'; 196 | } 197 | 198 | const headerParts = ['Commit History']; 199 | if (options.revision) { 200 | headerParts.push(`for revision \`${options.revision}\``); 201 | } 202 | if (options.path) { 203 | headerParts.push(`on path \`${options.path}\``); 204 | } 205 | 206 | const lines: string[] = [formatHeading(headerParts.join(' '), 1), '']; 207 | 208 | const formattedList = formatNumberedList(commits, (commit: Commit) => { 209 | const commitLines: string[] = []; 210 | const author = 211 | commit.author?.user?.display_name || 212 | commit.author?.raw || 213 | 'Unknown'; 214 | const commitUrl = commit.links?.html?.href; 215 | const shortHash = commit.hash.substring(0, 7); 216 | 217 | // Header: Hash (linked) - Date 218 | commitLines.push( 219 | `**${commitUrl ? formatUrl(commitUrl, shortHash) : shortHash}** - ${formatDate(commit.date)}`, 220 | ); 221 | 222 | // Author 223 | commitLines.push(` Author: ${author}`); 224 | 225 | // Message (indented blockquote) 226 | const message = commit.message.trim().replace(/\n/g, '\n > '); 227 | commitLines.push(' >'); 228 | commitLines.push(` > ${message}`); 229 | 230 | return commitLines.join('\n'); 231 | }); 232 | 233 | lines.push(formattedList); 234 | 235 | // Add standard footer with timestamp 236 | lines.push('\n\n' + formatSeparator()); 237 | lines.push(`*Information retrieved at: ${formatDate(new Date())}*`); 238 | 239 | return lines.join('\n'); 240 | } 241 | ``` -------------------------------------------------------------------------------- /src/utils/pagination.util.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { Logger } from './logger.util.js'; 2 | import { DATA_LIMITS } from './constants.util.js'; 3 | import { ResponsePagination } from '../types/common.types.js'; 4 | 5 | /** 6 | * Represents the possible pagination types. 7 | */ 8 | export enum PaginationType { 9 | CURSOR = 'cursor', // Confluence, Bitbucket (some endpoints) 10 | OFFSET = 'offset', // Jira 11 | PAGE = 'page', // Bitbucket (most endpoints) 12 | } 13 | 14 | /** 15 | * Interface representing the common structure of paginated data from APIs. 16 | * This union type covers properties used by offset, cursor, and page-based pagination. 
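 * Illustrative response shapes (hypothetical values, shown only to clarify how the three styles differ):
 * - Bitbucket page-based: { values: [...], page: 1, pagelen: 25, size: 90, next: 'https://...?page=2' }
 * - Jira offset-based: { values: [...], startAt: 0, maxResults: 50, total: 120 }
 * - Confluence cursor-based: { results: [...], _links: { next: '...?cursor=abc123' } }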
17 | */ 18 | interface PaginationData { 19 | // Shared 20 | results?: unknown[]; 21 | values?: unknown[]; 22 | count?: number; 23 | size?: number; // Total count in Bitbucket page responses 24 | hasMore?: boolean; 25 | _links?: { next?: string }; // Confluence cursor 26 | // Offset-based (Jira) 27 | startAt?: number; 28 | maxResults?: number; 29 | total?: number; 30 | nextPage?: string; // Alternative next indicator for offset 31 | // Page-based (Bitbucket) 32 | page?: number; 33 | pagelen?: number; 34 | next?: string; // Bitbucket page URL 35 | } 36 | 37 | /** 38 | * Extract pagination information from API response 39 | * @param data The API response containing pagination information 40 | * @param paginationType The type of pagination mechanism used 41 | * @returns Object with nextCursor, hasMore, and count properties 42 | */ 43 | export function extractPaginationInfo<T extends Partial<PaginationData>>( 44 | data: T, 45 | paginationType: PaginationType, 46 | ): ResponsePagination | undefined { 47 | if (!data) { 48 | return undefined; 49 | } 50 | 51 | let pagination: ResponsePagination | undefined; 52 | const methodLogger = Logger.forContext( 53 | 'utils/pagination.util.ts', 54 | 'extractPaginationInfo', 55 | ); 56 | 57 | switch (paginationType) { 58 | case PaginationType.PAGE: { 59 | // Bitbucket page-based pagination (page, pagelen, size, next) 60 | if (data.page !== undefined && data.pagelen !== undefined) { 61 | const hasMore = !!data.next; 62 | let nextCursorValue: string | undefined = undefined; 63 | 64 | if (hasMore) { 65 | try { 66 | // First attempt to parse the full URL if it looks like one 67 | if ( 68 | typeof data.next === 'string' && 69 | data.next.includes('://') 70 | ) { 71 | const nextUrl = new URL(data.next); 72 | nextCursorValue = 73 | nextUrl.searchParams.get('page') || undefined; 74 | methodLogger.debug( 75 | `Successfully extracted page from URL: ${nextCursorValue}`, 76 | ); 77 | } else if (data.next === 'available') { 78 | // Handle the 'available' placeholder used in some transformedResponses 79 | nextCursorValue = String(Number(data.page) + 1); 80 | methodLogger.debug( 81 | `Using calculated next page from 'available': ${nextCursorValue}`, 82 | ); 83 | } else if (typeof data.next === 'string') { 84 | // Try to use data.next directly if it's not a URL but still a string 85 | nextCursorValue = data.next; 86 | methodLogger.debug( 87 | `Using next value directly: ${nextCursorValue}`, 88 | ); 89 | } 90 | } catch (e) { 91 | // If URL parsing fails, calculate the next page based on current page 92 | nextCursorValue = String(Number(data.page) + 1); 93 | methodLogger.debug( 94 | `Calculated next page after URL parsing error: ${nextCursorValue}`, 95 | ); 96 | methodLogger.warn( 97 | `Failed to parse next URL: ${data.next}`, 98 | e, 99 | ); 100 | } 101 | } 102 | 103 | pagination = { 104 | hasMore, 105 | count: data.values?.length ?? 
0, 106 | page: data.page, 107 | size: data.pagelen, 108 | total: data.size, 109 | nextCursor: nextCursorValue, // Store next page number as cursor 110 | }; 111 | } 112 | break; 113 | } 114 | 115 | case PaginationType.OFFSET: { 116 | // Jira offset-based pagination 117 | const countOffset = data.values?.length; 118 | if ( 119 | data.startAt !== undefined && 120 | data.maxResults !== undefined && 121 | data.total !== undefined && 122 | data.startAt + data.maxResults < data.total 123 | ) { 124 | pagination = { 125 | hasMore: true, 126 | count: countOffset, 127 | total: data.total, 128 | nextCursor: String(data.startAt + data.maxResults), 129 | }; 130 | } else if (data.nextPage) { 131 | pagination = { 132 | hasMore: true, 133 | count: countOffset, 134 | nextCursor: data.nextPage, 135 | }; 136 | } 137 | break; 138 | } 139 | 140 | case PaginationType.CURSOR: { 141 | // Confluence cursor-based pagination 142 | const countCursor = data.results?.length; 143 | if (data._links && data._links.next) { 144 | const nextUrl = data._links.next; 145 | const cursorMatch = nextUrl.match(/cursor=([^&]+)/); 146 | if (cursorMatch && cursorMatch[1]) { 147 | pagination = { 148 | hasMore: true, 149 | count: countCursor, 150 | nextCursor: decodeURIComponent(cursorMatch[1]), 151 | }; 152 | } 153 | } 154 | break; 155 | } 156 | 157 | default: 158 | methodLogger.warn(`Unknown pagination type: ${paginationType}`); 159 | } 160 | 161 | // Ensure a default pagination object if none was created but data exists 162 | if (!pagination && (data.results || data.values)) { 163 | pagination = { 164 | hasMore: false, 165 | count: data.results?.length ?? data.values?.length ?? 0, 166 | }; 167 | } 168 | 169 | return pagination; 170 | } 171 | 172 | /** 173 | * Validates and enforces page size limits to prevent excessive data exposure (CWE-770) 174 | * @param requestedPageSize The requested page size from the client 175 | * @param contextInfo Optional context for logging (e.g., endpoint name) 176 | * @returns The validated page size (clamped to maximum allowed) 177 | */ 178 | export function validatePageSize( 179 | requestedPageSize?: number, 180 | contextInfo?: string, 181 | ): number { 182 | const methodLogger = Logger.forContext( 183 | 'utils/pagination.util.ts', 184 | 'validatePageSize', 185 | ); 186 | 187 | // Use default if not specified 188 | if (!requestedPageSize || requestedPageSize <= 0) { 189 | const defaultSize = DATA_LIMITS.DEFAULT_PAGE_SIZE; 190 | methodLogger.debug( 191 | `Using default page size: ${defaultSize}${contextInfo ? ` for ${contextInfo}` : ''}`, 192 | ); 193 | return defaultSize; 194 | } 195 | 196 | // Enforce maximum page size limit 197 | if (requestedPageSize > DATA_LIMITS.MAX_PAGE_SIZE) { 198 | const clampedSize = DATA_LIMITS.MAX_PAGE_SIZE; 199 | methodLogger.warn( 200 | `Page size ${requestedPageSize} exceeds maximum limit. Clamped to ${clampedSize}${contextInfo ? ` for ${contextInfo}` : ''}`, 201 | ); 202 | return clampedSize; 203 | } 204 | 205 | methodLogger.debug( 206 | `Using requested page size: ${requestedPageSize}${contextInfo ? 
` for ${contextInfo}` : ''}`, 207 | ); 208 | return requestedPageSize; 209 | } 210 | 211 | /** 212 | * Validates pagination data to ensure it doesn't exceed configured limits 213 | * @param paginationData The pagination data to validate 214 | * @param contextInfo Optional context for logging 215 | * @returns True if data is within limits, false otherwise 216 | */ 217 | export function validatePaginationLimits( 218 | paginationData: { count?: number; size?: number; pagelen?: number }, 219 | contextInfo?: string, 220 | ): boolean { 221 | const methodLogger = Logger.forContext( 222 | 'utils/pagination.util.ts', 223 | 'validatePaginationLimits', 224 | ); 225 | 226 | // Check if the response contains more items than our maximum allowed 227 | const itemCount = paginationData.count ?? 0; 228 | const pageSize = paginationData.size ?? paginationData.pagelen ?? 0; 229 | 230 | if (itemCount > DATA_LIMITS.MAX_PAGE_SIZE) { 231 | methodLogger.warn( 232 | `Response contains ${itemCount} items, exceeding maximum of ${DATA_LIMITS.MAX_PAGE_SIZE}${contextInfo ? ` for ${contextInfo}` : ''}`, 233 | ); 234 | return false; 235 | } 236 | 237 | if (pageSize > DATA_LIMITS.MAX_PAGE_SIZE) { 238 | methodLogger.warn( 239 | `Response page size ${pageSize} exceeds maximum of ${DATA_LIMITS.MAX_PAGE_SIZE}${contextInfo ? ` for ${contextInfo}` : ''}`, 240 | ); 241 | return false; 242 | } 243 | 244 | return true; 245 | } 246 | ``` -------------------------------------------------------------------------------- /src/utils/error-handler.util.test.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { describe, expect, test } from '@jest/globals'; 2 | import { 3 | ErrorCode, 4 | buildErrorContext, 5 | detectErrorType, 6 | createUserFriendlyErrorMessage, 7 | handleControllerError, 8 | } from './error-handler.util.js'; 9 | import { McpError, ErrorType, createApiError } from './error.util.js'; 10 | 11 | describe('Error Handler Utilities', () => { 12 | describe('buildErrorContext function', () => { 13 | test('builds a complete error context object', () => { 14 | const context = buildErrorContext( 15 | 'Repository', 16 | 'retrieving', 17 | 'controllers/repositories.controller.ts@get', 18 | { workspaceSlug: 'atlassian', repoSlug: 'bitbucket' }, 19 | { queryParams: { sort: 'name' } }, 20 | ); 21 | 22 | expect(context).toEqual({ 23 | entityType: 'Repository', 24 | operation: 'retrieving', 25 | source: 'controllers/repositories.controller.ts@get', 26 | entityId: { workspaceSlug: 'atlassian', repoSlug: 'bitbucket' }, 27 | additionalInfo: { queryParams: { sort: 'name' } }, 28 | }); 29 | }); 30 | 31 | test('handles minimal required parameters', () => { 32 | const context = buildErrorContext( 33 | 'Repository', 34 | 'listing', 35 | 'controllers/repositories.controller.ts@list', 36 | ); 37 | 38 | expect(context).toEqual({ 39 | entityType: 'Repository', 40 | operation: 'listing', 41 | source: 'controllers/repositories.controller.ts@list', 42 | }); 43 | expect(context.entityId).toBeUndefined(); 44 | expect(context.additionalInfo).toBeUndefined(); 45 | }); 46 | }); 47 | 48 | describe('detectErrorType function', () => { 49 | test('detects network errors', () => { 50 | const error = new Error('network error: connection refused'); 51 | const result = detectErrorType(error); 52 | expect(result).toEqual({ 53 | code: ErrorCode.NETWORK_ERROR, 54 | statusCode: 500, 55 | }); 56 | }); 57 | 58 | test('detects rate limit errors', () => { 59 | const error = new Error('too many requests'); 60 | const 
result = detectErrorType(error); 61 | expect(result).toEqual({ 62 | code: ErrorCode.RATE_LIMIT_ERROR, 63 | statusCode: 429, 64 | }); 65 | }); 66 | 67 | test('detects not found errors', () => { 68 | const error = new Error('resource not found'); 69 | const result = detectErrorType(error); 70 | expect(result).toEqual({ 71 | code: ErrorCode.NOT_FOUND, 72 | statusCode: 404, 73 | }); 74 | }); 75 | 76 | test('detects access denied errors', () => { 77 | const error = new Error('insufficient permissions'); 78 | const result = detectErrorType(error); 79 | expect(result).toEqual({ 80 | code: ErrorCode.ACCESS_DENIED, 81 | statusCode: 403, 82 | }); 83 | }); 84 | 85 | test('detects validation errors', () => { 86 | const error = new Error('validation failed: invalid input'); 87 | const result = detectErrorType(error); 88 | expect(result).toEqual({ 89 | code: ErrorCode.VALIDATION_ERROR, 90 | statusCode: 400, 91 | }); 92 | }); 93 | 94 | test('defaults to unexpected error', () => { 95 | const error = new Error('something unexpected happened'); 96 | const result = detectErrorType(error); 97 | expect(result).toEqual({ 98 | code: ErrorCode.UNEXPECTED_ERROR, 99 | statusCode: 500, 100 | }); 101 | }); 102 | 103 | test('respects explicit status code from error', () => { 104 | const error = new McpError( 105 | 'Custom error', 106 | ErrorType.API_ERROR, 107 | 418, 108 | ); 109 | const result = detectErrorType(error); 110 | expect(result.statusCode).toBe(418); 111 | }); 112 | 113 | test('detects Bitbucket-specific repository not found errors', () => { 114 | const bitbucketError = { 115 | error: { 116 | message: 'repository not found', 117 | }, 118 | }; 119 | const mcpError = createApiError('API Error', 404, bitbucketError); 120 | const result = detectErrorType(mcpError); 121 | expect(result).toEqual({ 122 | code: ErrorCode.NOT_FOUND, 123 | statusCode: 404, 124 | }); 125 | }); 126 | 127 | test('detects Bitbucket-specific permission errors', () => { 128 | const bitbucketError = { 129 | error: { 130 | message: 'access denied for this repository', 131 | }, 132 | }; 133 | const mcpError = createApiError('API Error', 403, bitbucketError); 134 | const result = detectErrorType(mcpError); 135 | expect(result).toEqual({ 136 | code: ErrorCode.ACCESS_DENIED, 137 | statusCode: 403, 138 | }); 139 | }); 140 | 141 | test('detects Bitbucket-specific validation errors', () => { 142 | const bitbucketError = { 143 | error: { 144 | message: 'invalid parameter: repository name', 145 | }, 146 | }; 147 | const mcpError = createApiError('API Error', 400, bitbucketError); 148 | const result = detectErrorType(mcpError); 149 | expect(result).toEqual({ 150 | code: ErrorCode.VALIDATION_ERROR, 151 | statusCode: 400, 152 | }); 153 | }); 154 | }); 155 | 156 | describe('createUserFriendlyErrorMessage function', () => { 157 | test('creates NOT_FOUND message with entityId string', () => { 158 | const message = createUserFriendlyErrorMessage( 159 | ErrorCode.NOT_FOUND, 160 | { 161 | entityType: 'Repository', 162 | entityId: 'atlassian/bitbucket', 163 | }, 164 | ); 165 | expect(message).toContain( 166 | 'Repository atlassian/bitbucket not found', 167 | ); 168 | }); 169 | 170 | test('creates NOT_FOUND message with entityId object', () => { 171 | const message = createUserFriendlyErrorMessage( 172 | ErrorCode.NOT_FOUND, 173 | { 174 | entityType: 'Repository', 175 | entityId: { 176 | workspaceSlug: 'atlassian', 177 | repoSlug: 'bitbucket', 178 | }, 179 | }, 180 | ); 181 | expect(message).toContain( 182 | 'Repository atlassian/bitbucket not found', 183 | ); 
184 | }); 185 | 186 | test('creates ACCESS_DENIED message', () => { 187 | const message = createUserFriendlyErrorMessage( 188 | ErrorCode.ACCESS_DENIED, 189 | { 190 | entityType: 'Repository', 191 | entityId: 'atlassian/bitbucket', 192 | }, 193 | ); 194 | expect(message).toContain( 195 | 'Access denied for repository atlassian/bitbucket', 196 | ); 197 | }); 198 | 199 | test('creates VALIDATION_ERROR message', () => { 200 | const originalMessage = 'Invalid repository name'; 201 | const message = createUserFriendlyErrorMessage( 202 | ErrorCode.VALIDATION_ERROR, 203 | { 204 | entityType: 'Repository', 205 | operation: 'creating', 206 | }, 207 | originalMessage, 208 | ); 209 | expect(message).toBe( 210 | `${originalMessage} Error details: ${originalMessage}`, 211 | ); 212 | }); 213 | 214 | test('creates NETWORK_ERROR message', () => { 215 | const message = createUserFriendlyErrorMessage( 216 | ErrorCode.NETWORK_ERROR, 217 | { 218 | entityType: 'Repository', 219 | operation: 'retrieving', 220 | }, 221 | ); 222 | expect(message).toContain('Network error'); 223 | expect(message).toContain('Bitbucket API'); 224 | }); 225 | 226 | test('creates RATE_LIMIT_ERROR message', () => { 227 | const message = createUserFriendlyErrorMessage( 228 | ErrorCode.RATE_LIMIT_ERROR, 229 | ); 230 | expect(message).toContain('Bitbucket API rate limit exceeded'); 231 | }); 232 | 233 | test('includes original message for non-specific errors', () => { 234 | const message = createUserFriendlyErrorMessage( 235 | ErrorCode.UNEXPECTED_ERROR, 236 | { 237 | entityType: 'Repository', 238 | operation: 'processing', 239 | }, 240 | 'Something went wrong', 241 | ); 242 | expect(message).toContain('unexpected error'); 243 | expect(message).toContain('Something went wrong'); 244 | }); 245 | }); 246 | 247 | describe('handleControllerError function', () => { 248 | test('throws appropriate API error with user-friendly message', () => { 249 | const originalError = new Error('Repository not found'); 250 | const context = buildErrorContext( 251 | 'Repository', 252 | 'retrieving', 253 | 'controllers/repositories.controller.ts@get', 254 | 'atlassian/bitbucket', 255 | ); 256 | 257 | expect(() => { 258 | handleControllerError(originalError, context); 259 | }).toThrow(McpError); 260 | 261 | try { 262 | handleControllerError(originalError, context); 263 | } catch (error) { 264 | expect(error).toBeInstanceOf(McpError); 265 | expect((error as McpError).type).toBe(ErrorType.API_ERROR); 266 | expect((error as McpError).statusCode).toBe(404); 267 | expect((error as McpError).message).toContain( 268 | 'Repository atlassian/bitbucket not found', 269 | ); 270 | expect((error as McpError).originalError).toBe(originalError); 271 | } 272 | }); 273 | }); 274 | }); 275 | ``` -------------------------------------------------------------------------------- /src/controllers/atlassian.pullrequests.comments.controller.ts: -------------------------------------------------------------------------------- ```typescript 1 | import { ControllerResponse } from '../types/common.types.js'; 2 | import { 3 | ListPullRequestCommentsToolArgsType, 4 | CreatePullRequestCommentToolArgsType, 5 | } from '../tools/atlassian.pullrequests.types.js'; 6 | import { 7 | atlassianPullRequestsService, 8 | Logger, 9 | handleControllerError, 10 | extractPaginationInfo, 11 | PaginationType, 12 | formatPagination, 13 | formatPullRequestComments, 14 | DEFAULT_PAGE_SIZE, 15 | applyDefaults, 16 | enhanceCommentsWithSnippets, 17 | optimizeBitbucketMarkdown, 18 | getDefaultWorkspace, 19 | 
ListCommentsParams, 20 | CreateCommentParams, 21 | } from './atlassian.pullrequests.base.controller.js'; 22 | 23 | /** 24 | * List comments on a Bitbucket pull request 25 | * @param options - Options including workspace slug, repo slug, and pull request ID 26 | * @returns Promise with formatted pull request comments as Markdown content 27 | */ 28 | async function listComments( 29 | options: ListPullRequestCommentsToolArgsType, 30 | ): Promise<ControllerResponse> { 31 | const methodLogger = Logger.forContext( 32 | 'controllers/atlassian.pullrequests.comments.controller.ts', 33 | 'listComments', 34 | ); 35 | 36 | try { 37 | // Create defaults object 38 | const defaults: Partial<ListPullRequestCommentsToolArgsType> = { 39 | limit: DEFAULT_PAGE_SIZE, 40 | }; 41 | 42 | // Apply defaults 43 | const mergedOptions = 44 | applyDefaults<ListPullRequestCommentsToolArgsType>( 45 | options, 46 | defaults, 47 | ); 48 | 49 | // Handle optional workspaceSlug - get default if not provided 50 | if (!mergedOptions.workspaceSlug) { 51 | methodLogger.debug( 52 | 'No workspace provided, fetching default workspace', 53 | ); 54 | const defaultWorkspace = await getDefaultWorkspace(); 55 | if (!defaultWorkspace) { 56 | throw new Error( 57 | 'Could not determine a default workspace. Please provide a workspaceSlug.', 58 | ); 59 | } 60 | mergedOptions.workspaceSlug = defaultWorkspace; 61 | methodLogger.debug( 62 | `Using default workspace: ${mergedOptions.workspaceSlug}`, 63 | ); 64 | } 65 | 66 | const { workspaceSlug, repoSlug, prId } = mergedOptions; 67 | 68 | // Validate required parameters 69 | if (!workspaceSlug || !repoSlug || !prId) { 70 | throw new Error( 71 | 'Workspace slug, repository slug, and pull request ID are required', 72 | ); 73 | } 74 | 75 | methodLogger.debug( 76 | `Listing comments for PR ${workspaceSlug}/${repoSlug}/${prId}`, 77 | { limit: mergedOptions.limit, cursor: mergedOptions.cursor }, 78 | ); 79 | 80 | // Map controller options to service parameters 81 | const serviceParams: ListCommentsParams = { 82 | workspace: workspaceSlug, 83 | repo_slug: repoSlug, 84 | pull_request_id: parseInt(prId, 10), 85 | pagelen: mergedOptions.limit, 86 | page: mergedOptions.cursor 87 | ? parseInt(mergedOptions.cursor, 10) 88 | : undefined, 89 | }; 90 | 91 | // Get comments from the service 92 | const commentsData = 93 | await atlassianPullRequestsService.getComments(serviceParams); 94 | 95 | methodLogger.debug( 96 | `Retrieved ${commentsData.values?.length || 0} comments`, 97 | ); 98 | 99 | // If no comments found, return a simple message 100 | if (!commentsData.values || commentsData.values.length === 0) { 101 | return { content: 'No comments found on this pull request.' 
}; 102 | } 103 | 104 | // Extract pagination information 105 | const pagination = extractPaginationInfo( 106 | commentsData, 107 | PaginationType.PAGE, 108 | ); 109 | 110 | // Enhance comments with code snippets (for inline comments) 111 | const enhancedComments = await enhanceCommentsWithSnippets( 112 | commentsData, 113 | 'listComments', 114 | ); 115 | 116 | // Format the comments using the formatter 117 | const formattedComments = formatPullRequestComments( 118 | enhancedComments, 119 | prId, 120 | ); 121 | 122 | // Create the final content by combining formatted comments with pagination info 123 | let finalContent = formattedComments; 124 | 125 | // Add pagination information if available 126 | if ( 127 | pagination && 128 | (pagination.hasMore || pagination.count !== undefined) 129 | ) { 130 | const paginationString = formatPagination(pagination); 131 | finalContent += '\n\n' + paginationString; 132 | } 133 | 134 | return { 135 | content: finalContent, 136 | }; 137 | } catch (error) { 138 | // Use the standardized error handler 139 | throw handleControllerError(error, { 140 | entityType: 'Pull Request Comments', 141 | operation: 'listing', 142 | source: 'controllers/atlassian.pullrequests.comments.controller.ts@listComments', 143 | additionalInfo: { options }, 144 | }); 145 | } 146 | } 147 | 148 | /** 149 | * Add a comment to a Bitbucket pull request 150 | * @param options - Options including workspace slug, repo slug, PR ID, and comment content 151 | * @returns Promise with a success message as content 152 | */ 153 | async function addComment( 154 | options: CreatePullRequestCommentToolArgsType, 155 | ): Promise<ControllerResponse> { 156 | const methodLogger = Logger.forContext( 157 | 'controllers/atlassian.pullrequests.comments.controller.ts', 158 | 'addComment', 159 | ); 160 | 161 | try { 162 | // Apply defaults if needed (none for this operation) 163 | const mergedOptions = 164 | applyDefaults<CreatePullRequestCommentToolArgsType>(options, {}); 165 | 166 | // Handle optional workspaceSlug - get default if not provided 167 | if (!mergedOptions.workspaceSlug) { 168 | methodLogger.debug( 169 | 'No workspace provided, fetching default workspace', 170 | ); 171 | const defaultWorkspace = await getDefaultWorkspace(); 172 | if (!defaultWorkspace) { 173 | throw new Error( 174 | 'Could not determine a default workspace. Please provide a workspaceSlug.', 175 | ); 176 | } 177 | mergedOptions.workspaceSlug = defaultWorkspace; 178 | methodLogger.debug( 179 | `Using default workspace: ${mergedOptions.workspaceSlug}`, 180 | ); 181 | } 182 | 183 | const { workspaceSlug, repoSlug, prId, content, inline } = 184 | mergedOptions; 185 | 186 | // Validate required parameters 187 | if (!workspaceSlug || !repoSlug || !prId || !content) { 188 | throw new Error( 189 | 'Workspace slug, repository slug, pull request ID, and comment content are required', 190 | ); 191 | } 192 | 193 | // For inline comments, both file path and line number are required 194 | if (inline && (!inline.path || inline.line === undefined)) { 195 | throw new Error( 196 | 'Both file path and line number are required for inline comments', 197 | ); 198 | } 199 | 200 | // Prepare the raw content, applying any Bitbucket-specific markdown optimizations 201 | const optimizedContent = optimizeBitbucketMarkdown(content); 202 | 203 | methodLogger.debug( 204 | `Adding${ 205 | inline ? 
' inline' : '' 206 | } comment to PR ${workspaceSlug}/${repoSlug}/${prId}`, 207 | { 208 | contentLength: optimizedContent.length, 209 | isInline: !!inline, 210 | inlinePath: inline?.path, 211 | inlineLine: inline?.line, 212 | }, 213 | ); 214 | 215 | // Map controller options to service parameters 216 | const serviceParams: CreateCommentParams = { 217 | workspace: workspaceSlug, 218 | repo_slug: repoSlug, 219 | pull_request_id: parseInt(prId, 10), 220 | content: { 221 | raw: optimizedContent, 222 | }, 223 | }; 224 | 225 | // For inline comments, add the inline property 226 | if (inline) { 227 | serviceParams.inline = { 228 | path: inline.path, 229 | to: inline.line, 230 | }; 231 | } 232 | 233 | // For replies, add the parent property 234 | if (mergedOptions.parentId) { 235 | serviceParams.parent = { 236 | id: parseInt(mergedOptions.parentId, 10), 237 | }; 238 | } 239 | 240 | // Create the comment through the service 241 | const commentResult = 242 | await atlassianPullRequestsService.createComment(serviceParams); 243 | 244 | methodLogger.debug('Comment created successfully', { 245 | commentId: commentResult.id, 246 | isInline: !!inline, 247 | }); 248 | 249 | // Return a success message 250 | const commentType = inline ? 'inline' : ''; 251 | return { 252 | content: `${commentType} Comment successfully added to pull request #${prId}. Comment ID: ${commentResult.id}`, 253 | }; 254 | } catch (error) { 255 | // Use the standardized error handler 256 | throw handleControllerError(error, { 257 | entityType: 'Pull Request Comment', 258 | operation: 'adding', 259 | source: 'controllers/atlassian.pullrequests.comments.controller.ts@addComment', 260 | additionalInfo: { options }, 261 | }); 262 | } 263 | } 264 | 265 | // Export the controller functions 266 | export default { listComments, addComment }; 267 | ```
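Taken together, `listComments` and `addComment` accept plain tool-argument objects, resolve a default workspace when `workspaceSlug` is omitted, and return a `ControllerResponse` whose `content` field is Markdown. The sketch below is illustrative only; the import path, workspace, repository slug, pull request id, and comment values are assumptions rather than anything defined in this repository.

```typescript
// Hypothetical usage sketch for the controller above. The import path, workspace,
// repository slug, pull request id, and comment text are assumptions for illustration only.
import pullRequestComments from './controllers/atlassian.pullrequests.comments.controller.js';

async function demo(): Promise<void> {
	// List the first page of comments; omitting workspaceSlug would fall back to
	// the default workspace resolved via getDefaultWorkspace().
	const listed = await pullRequestComments.listComments({
		workspaceSlug: 'my-workspace',
		repoSlug: 'my-repo',
		prId: '42',
		limit: 25,
	});
	console.log(listed.content); // Markdown; pagination details are appended when more pages exist.

	// Add an inline comment anchored to a specific file and line.
	const added = await pullRequestComments.addComment({
		workspaceSlug: 'my-workspace',
		repoSlug: 'my-repo',
		prId: '42',
		content: 'Consider extracting this block into a helper.',
		inline: { path: 'src/index.ts', line: 10 },
	});
	console.log(added.content); // Success message that includes the new comment id.
}

demo().catch((err) => console.error(err));
```

Both functions route failures through `handleControllerError`, so a caller of this sketch should expect an `McpError` carrying a user-friendly message rather than a raw API error.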