This is page 3 of 6. Use http://codebase.md/aashari/mcp-server-atlassian-bitbucket?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .env.example
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── ci-dependabot-auto-merge.yml
│       ├── ci-dependency-check.yml
│       └── ci-semantic-release.yml
├── .gitignore
├── .gitkeep
├── .npmignore
├── .npmrc
├── .prettierrc
├── .releaserc.json
├── .trigger-ci
├── CHANGELOG.md
├── eslint.config.mjs
├── jest.setup.js
├── package-lock.json
├── package.json
├── README.md
├── scripts
│   ├── ensure-executable.js
│   ├── package.json
│   └── update-version.js
├── src
│   ├── cli
│   │   ├── atlassian.diff.cli.ts
│   │   ├── atlassian.pullrequests.cli.test.ts
│   │   ├── atlassian.pullrequests.cli.ts
│   │   ├── atlassian.repositories.cli.test.ts
│   │   ├── atlassian.repositories.cli.ts
│   │   ├── atlassian.search.cli.test.ts
│   │   ├── atlassian.search.cli.ts
│   │   ├── atlassian.workspaces.cli.test.ts
│   │   ├── atlassian.workspaces.cli.ts
│   │   └── index.ts
│   ├── controllers
│   │   ├── atlassian.diff.controller.ts
│   │   ├── atlassian.diff.formatter.ts
│   │   ├── atlassian.pullrequests.approve.controller.ts
│   │   ├── atlassian.pullrequests.base.controller.ts
│   │   ├── atlassian.pullrequests.comments.controller.ts
│   │   ├── atlassian.pullrequests.controller.test.ts
│   │   ├── atlassian.pullrequests.controller.ts
│   │   ├── atlassian.pullrequests.create.controller.ts
│   │   ├── atlassian.pullrequests.formatter.ts
│   │   ├── atlassian.pullrequests.get.controller.ts
│   │   ├── atlassian.pullrequests.list.controller.ts
│   │   ├── atlassian.pullrequests.reject.controller.ts
│   │   ├── atlassian.pullrequests.update.controller.ts
│   │   ├── atlassian.repositories.branch.controller.ts
│   │   ├── atlassian.repositories.commit.controller.ts
│   │   ├── atlassian.repositories.content.controller.ts
│   │   ├── atlassian.repositories.controller.test.ts
│   │   ├── atlassian.repositories.details.controller.ts
│   │   ├── atlassian.repositories.formatter.ts
│   │   ├── atlassian.repositories.list.controller.ts
│   │   ├── atlassian.search.code.controller.ts
│   │   ├── atlassian.search.content.controller.ts
│   │   ├── atlassian.search.controller.test.ts
│   │   ├── atlassian.search.controller.ts
│   │   ├── atlassian.search.formatter.ts
│   │   ├── atlassian.search.pullrequests.controller.ts
│   │   ├── atlassian.search.repositories.controller.ts
│   │   ├── atlassian.workspaces.controller.test.ts
│   │   ├── atlassian.workspaces.controller.ts
│   │   └── atlassian.workspaces.formatter.ts
│   ├── index.ts
│   ├── services
│   │   ├── vendor.atlassian.pullrequests.service.ts
│   │   ├── vendor.atlassian.pullrequests.test.ts
│   │   ├── vendor.atlassian.pullrequests.types.ts
│   │   ├── vendor.atlassian.repositories.diff.service.ts
│   │   ├── vendor.atlassian.repositories.diff.types.ts
│   │   ├── vendor.atlassian.repositories.service.test.ts
│   │   ├── vendor.atlassian.repositories.service.ts
│   │   ├── vendor.atlassian.repositories.types.ts
│   │   ├── vendor.atlassian.search.service.ts
│   │   ├── vendor.atlassian.search.types.ts
│   │   ├── vendor.atlassian.workspaces.service.ts
│   │   ├── vendor.atlassian.workspaces.test.ts
│   │   └── vendor.atlassian.workspaces.types.ts
│   ├── tools
│   │   ├── atlassian.diff.tool.ts
│   │   ├── atlassian.diff.types.ts
│   │   ├── atlassian.pullrequests.tool.ts
│   │   ├── atlassian.pullrequests.types.test.ts
│   │   ├── atlassian.pullrequests.types.ts
│   │   ├── atlassian.repositories.tool.ts
│   │   ├── atlassian.repositories.types.ts
│   │   ├── atlassian.search.tool.ts
│   │   ├── atlassian.search.types.ts
│   │   ├── atlassian.workspaces.tool.ts
│   │   └── atlassian.workspaces.types.ts
│   ├── types
│   │   └── common.types.ts
│   └── utils
│       ├── adf.util.test.ts
│       ├── adf.util.ts
│       ├── atlassian.util.ts
│       ├── bitbucket-error-detection.test.ts
│       ├── cli.test.util.ts
│       ├── config.util.test.ts
│       ├── config.util.ts
│       ├── constants.util.ts
│       ├── defaults.util.ts
│       ├── diff.util.ts
│       ├── error-handler.util.test.ts
│       ├── error-handler.util.ts
│       ├── error.util.test.ts
│       ├── error.util.ts
│       ├── formatter.util.ts
│       ├── logger.util.ts
│       ├── markdown.util.test.ts
│       ├── markdown.util.ts
│       ├── pagination.util.ts
│       ├── path.util.test.ts
│       ├── path.util.ts
│       ├── query.util.ts
│       ├── shell.util.ts
│       ├── transport.util.test.ts
│       ├── transport.util.ts
│       └── workspace.util.ts
├── STYLE_GUIDE.md
└── tsconfig.json
```
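
The layout follows a tool → controller → service layering, with shared helpers under `utils/`. As a rough, hypothetical sketch of how those layers fit together (the real registration lives in `src/tools/atlassian.repositories.tool.ts`, which is not reproduced on this page; the import paths and the `listBranchesTool` wrapper below are illustrative only):

```typescript
// Hypothetical glue code; the actual tool registration lives in
// src/tools/atlassian.repositories.tool.ts (not shown on this page).
import { ListBranchesToolArgs } from './src/tools/atlassian.repositories.types.js';
import { handleListBranches } from './src/controllers/atlassian.repositories.branch.controller.js';

async function listBranchesTool(rawArgs: unknown) {
	// tools/ layer: validate raw MCP arguments against the Zod schema.
	const args = ListBranchesToolArgs.parse(rawArgs);

	// controllers/ layer: applies defaults, resolves the default workspace,
	// calls the vendor service (services/ layer) and formats Markdown output.
	return handleListBranches(args);
}
```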

# Files

--------------------------------------------------------------------------------
/src/tools/atlassian.repositories.types.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import { z } from 'zod';
  2 | 
  3 | /**
  4 |  * Base pagination arguments for all tools
  5 |  */
  6 | const PaginationArgs = {
  7 | 	limit: z
  8 | 		.number()
  9 | 		.int()
 10 | 		.positive()
 11 | 		.max(100)
 12 | 		.optional()
 13 | 		.describe(
 14 | 			'Maximum number of items to return (1-100). Controls the response size. Defaults to 25 if omitted.',
 15 | 		),
 16 | 
 17 | 	cursor: z
 18 | 		.string()
 19 | 		.optional()
 20 | 		.describe(
 21 | 			'Pagination cursor for retrieving the next set of results. Obtained from previous response when more results are available.',
 22 | 		),
 23 | };
 24 | 
 25 | /**
 26 |  * Schema for list-repositories tool arguments
 27 |  */
 28 | export const ListRepositoriesToolArgs = z.object({
 29 | 	/**
 30 | 	 * Workspace slug containing the repositories
 31 | 	 */
 32 | 	workspaceSlug: z
 33 | 		.string()
 34 | 		.optional()
 35 | 		.describe(
 36 | 			'Workspace slug containing the repositories. If not provided, the system will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account). Example: "myteam"',
 37 | 		),
 38 | 
 39 | 	/**
 40 | 	 * Optional query to filter repositories
 41 | 	 */
 42 | 	query: z
 43 | 		.string()
 44 | 		.optional()
 45 | 		.describe(
 46 | 			'Query string to filter repositories by name or other properties (text search). Example: "api" for repositories with "api" in the name/description. If omitted, returns all repositories.',
 47 | 		),
 48 | 
 49 | 	/**
 50 | 	 * Optional sort parameter
 51 | 	 */
 52 | 	sort: z
 53 | 		.string()
 54 | 		.optional()
 55 | 		.describe(
 56 | 			'Field to sort results by. Common values: "name", "created_on", "updated_on". Prefix with "-" for descending order. Example: "-updated_on" for most recently updated first.',
 57 | 		),
 58 | 
 59 | 	/**
 60 | 	 * Optional role filter
 61 | 	 */
 62 | 	role: z
 63 | 		.string()
 64 | 		.optional()
 65 | 		.describe(
 66 | 			'Filter repositories by the authenticated user\'s role. Common values: "owner", "admin", "contributor", "member". If omitted, returns repositories of all roles.',
 67 | 		),
 68 | 
 69 | 	/**
 70 | 	 * Optional project key filter
 71 | 	 */
 72 | 	projectKey: z
 73 | 		.string()
 74 | 		.optional()
 75 | 		.describe('Filter repositories by project key. Example: "PROJ"'),
 76 | 
 77 | 	/**
 78 | 	 * Maximum number of repositories to return (default: 25)
 79 | 	 */
 80 | 	...PaginationArgs,
 81 | });
 82 | 
 83 | export type ListRepositoriesToolArgsType = z.infer<
 84 | 	typeof ListRepositoriesToolArgs
 85 | >;
 86 | 
 87 | /**
 88 |  * Schema for get-repository tool arguments
 89 |  */
 90 | export const GetRepositoryToolArgs = z.object({
 91 | 	/**
 92 | 	 * Workspace slug containing the repository
 93 | 	 */
 94 | 	workspaceSlug: z
 95 | 		.string()
 96 | 		.optional()
 97 | 		.describe(
 98 | 			'Workspace slug containing the repository. If not provided, the system will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account). Example: "myteam"',
 99 | 		),
100 | 
101 | 	/**
102 | 	 * Repository slug to retrieve
103 | 	 */
104 | 	repoSlug: z
105 | 		.string()
106 | 		.min(1, 'Repository slug is required')
107 | 		.describe(
108 | 			'Repository slug to retrieve. This must be a valid repository in the specified workspace. Example: "project-api"',
109 | 		),
110 | });
111 | 
112 | export type GetRepositoryToolArgsType = z.infer<typeof GetRepositoryToolArgs>;
113 | 
114 | /**
115 |  * Schema for get-commit-history tool arguments.
116 |  */
117 | export const GetCommitHistoryToolArgs = z.object({
118 | 	workspaceSlug: z
119 | 		.string()
120 | 		.optional()
121 | 		.describe(
122 | 			'Workspace slug containing the repository. If not provided, the system will use your default workspace. Example: "myteam"',
123 | 		),
124 | 	repoSlug: z
125 | 		.string()
126 | 		.min(1, 'Repository slug is required')
127 | 		.describe(
128 | 			'Repository slug whose commit history is to be retrieved. Example: "project-api"',
129 | 		),
130 | 	revision: z
131 | 		.string()
132 | 		.optional()
133 | 		.describe(
134 | 			'Optional branch name, tag, or commit hash to view history from. If omitted, uses the default branch.',
135 | 		),
136 | 	path: z
137 | 		.string()
138 | 		.optional()
139 | 		.describe(
140 | 			'Optional file path to filter commit history. Only shows commits affecting this file.',
141 | 		),
142 | 	...PaginationArgs, // Includes limit and cursor
143 | });
144 | 
145 | export type GetCommitHistoryToolArgsType = z.infer<
146 | 	typeof GetCommitHistoryToolArgs
147 | >;
148 | 
149 | /**
150 |  * Schema for create-branch tool arguments.
151 |  */
152 | export const CreateBranchToolArgsSchema = z.object({
153 | 	workspaceSlug: z
154 | 		.string()
155 | 		.optional()
156 | 		.describe(
157 | 			'Workspace slug containing the repository. If not provided, the system will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account). Example: "myteam"',
158 | 		),
159 | 	repoSlug: z
160 | 		.string()
161 | 		.min(1, 'Repository slug is required')
162 | 		.describe('Repository slug where the branch will be created.'),
163 | 	newBranchName: z
164 | 		.string()
165 | 		.min(1, 'New branch name is required')
166 | 		.describe('The name for the new branch.'),
167 | 	sourceBranchOrCommit: z
168 | 		.string()
169 | 		.min(1, 'Source branch or commit is required')
170 | 		.describe('The name of the branch or the commit hash to branch from.'),
171 | });
172 | 
173 | export type CreateBranchToolArgsType = z.infer<
174 | 	typeof CreateBranchToolArgsSchema
175 | >;
176 | 
177 | /**
178 |  * Schema for clone-repository tool arguments.
179 |  */
180 | export const CloneRepositoryToolArgs = z.object({
181 | 	workspaceSlug: z
182 | 		.string()
183 | 		.optional()
184 | 		.describe(
185 | 			'Bitbucket workspace slug containing the repository. If not provided, the tool will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account). Example: "myteam"',
186 | 		),
187 | 	repoSlug: z
188 | 		.string()
189 | 		.min(1, 'Repository slug is required')
190 | 		.describe(
191 | 			'Repository name/slug to clone. This is the short name of the repository. Example: "project-api"',
192 | 		),
193 | 	targetPath: z
194 | 		.string()
195 | 		.min(1, 'Target path is required')
196 | 		.describe(
197 | 			'Directory path where the repository will be cloned. IMPORTANT: Absolute paths are strongly recommended (e.g., "/home/user/projects" or "C:\\Users\\name\\projects"). Relative paths will be resolved relative to the server\'s working directory, which may not be what you expect. The repository will be cloned into a subdirectory at targetPath/repoSlug. Make sure you have write permissions to this location.',
198 | 		),
199 | });
200 | 
201 | export type CloneRepositoryToolArgsType = z.infer<
202 | 	typeof CloneRepositoryToolArgs
203 | >;
204 | 
205 | /**
206 |  * Schema for get-file-content tool arguments.
207 |  */
208 | export const GetFileContentToolArgs = z.object({
209 | 	workspaceSlug: z
210 | 		.string()
211 | 		.optional()
212 | 		.describe(
213 | 			'Workspace slug containing the repository. If not provided, the system will use your default workspace. Example: "myteam"',
214 | 		),
215 | 	repoSlug: z
216 | 		.string()
217 | 		.min(1, 'Repository slug is required')
218 | 		.describe(
219 | 			'Repository slug containing the file. Example: "project-api"',
220 | 		),
221 | 	filePath: z
222 | 		.string()
223 | 		.min(1, 'File path is required')
224 | 		.describe(
225 | 			'Path to the file within the repository. Example: "README.md" or "src/main.js"',
226 | 		),
227 | 	revision: z
228 | 		.string()
229 | 		.optional()
230 | 		.describe(
231 | 			'Optional branch name, tag, or commit hash to retrieve the file from. If omitted, uses the default branch.',
232 | 		),
233 | });
234 | 
235 | export type GetFileContentToolArgsType = z.infer<typeof GetFileContentToolArgs>;
236 | 
237 | /**
238 |  * Schema for list-branches tool arguments
239 |  */
240 | export const ListBranchesToolArgs = z.object({
241 | 	/**
242 | 	 * Workspace slug containing the repository
243 | 	 */
244 | 	workspaceSlug: z
245 | 		.string()
246 | 		.optional()
247 | 		.describe(
248 | 			'Workspace slug containing the repository. If not provided, the system will use your default workspace. Example: "myteam"',
249 | 		),
250 | 
251 | 	/**
252 | 	 * Repository slug to list branches from
253 | 	 */
254 | 	repoSlug: z
255 | 		.string()
256 | 		.min(1, 'Repository slug is required')
257 | 		.describe(
258 | 			'Repository slug to list branches from. Must be a valid repository slug in the specified workspace. Example: "project-api"',
259 | 		),
260 | 
261 | 	/**
262 | 	 * Optional query to filter branches
263 | 	 */
264 | 	query: z
265 | 		.string()
266 | 		.optional()
267 | 		.describe(
268 | 			'Query string to filter branches by name or other properties (text search).',
269 | 		),
270 | 
271 | 	/**
272 | 	 * Optional sort parameter
273 | 	 */
274 | 	sort: z
275 | 		.string()
276 | 		.optional()
277 | 		.describe(
278 | 			'Field to sort branches by. Common values: "name" (default), "-name", "target.date". Prefix with "-" for descending order.',
279 | 		),
280 | 
281 | 	/**
282 | 	 * Maximum number of branches to return (default: 25)
283 | 	 */
284 | 	...PaginationArgs,
285 | });
286 | 
287 | export type ListBranchesToolArgsType = z.infer<typeof ListBranchesToolArgs>;
288 | 
```
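
The exported schemas are plain Zod objects, so tool arguments can be validated directly with `parse`/`safeParse`. A minimal sketch (the import path and sample values are illustrative) showing the 1-100 `limit` bound and the required `repoSlug` being enforced:

```typescript
import {
	ListRepositoriesToolArgs,
	GetCommitHistoryToolArgs,
} from './src/tools/atlassian.repositories.types.js'; // illustrative path

// Valid input: workspaceSlug omitted (resolved later), limit within 1-100.
const listArgs = ListRepositoriesToolArgs.parse({
	query: 'api',
	sort: '-updated_on',
	limit: 25,
});
console.log(listArgs.query); // "api"

// Invalid input: an empty repoSlug and an out-of-range limit are both rejected.
const history = GetCommitHistoryToolArgs.safeParse({ repoSlug: '', limit: 500 });
if (!history.success) {
	console.log(history.error.issues.map((issue) => issue.message));
	// e.g. ["Repository slug is required", "Number must be less than or equal to 100"]
}
```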

--------------------------------------------------------------------------------
/src/utils/error.util.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import { Logger } from './logger.util.js';
  2 | import { formatSeparator } from './formatter.util.js';
  3 | 
  4 | /**
  5 |  * Error types for MCP errors
  6 |  */
  7 | export type McpErrorType =
  8 | 	| 'AUTHENTICATION_REQUIRED'
  9 | 	| 'NOT_FOUND'
 10 | 	| 'VALIDATION_ERROR'
 11 | 	| 'RATE_LIMIT_EXCEEDED'
 12 | 	| 'API_ERROR'
 13 | 	| 'UNEXPECTED_ERROR';
 14 | 
 15 | /**
 16 |  * Error types for classification
 17 |  */
 18 | export enum ErrorType {
 19 | 	AUTH_MISSING = 'AUTH_MISSING',
 20 | 	AUTH_INVALID = 'AUTH_INVALID',
 21 | 	API_ERROR = 'API_ERROR',
 22 | 	UNEXPECTED_ERROR = 'UNEXPECTED_ERROR',
 23 | }
 24 | 
 25 | /**
 26 |  * Custom error class with type classification
 27 |  */
 28 | export class McpError extends Error {
 29 | 	type: ErrorType;
 30 | 	errorType?: McpErrorType; // Add errorType property used by error-handler.util.ts
 31 | 	statusCode?: number;
 32 | 	originalError?: unknown;
 33 | 
 34 | 	constructor(
 35 | 		message: string,
 36 | 		type: ErrorType,
 37 | 		statusCode?: number,
 38 | 		originalError?: unknown,
 39 | 	) {
 40 | 		super(message);
 41 | 		this.name = 'McpError';
 42 | 		this.type = type;
 43 | 		this.statusCode = statusCode;
 44 | 		this.originalError = originalError;
 45 | 
 46 | 		// Set errorType based on type
 47 | 		switch (type) {
 48 | 			case ErrorType.AUTH_MISSING:
 49 | 			case ErrorType.AUTH_INVALID:
 50 | 				this.errorType = 'AUTHENTICATION_REQUIRED';
 51 | 				break;
 52 | 			case ErrorType.API_ERROR:
 53 | 				this.errorType =
 54 | 					statusCode === 404
 55 | 						? 'NOT_FOUND'
 56 | 						: statusCode === 429
 57 | 							? 'RATE_LIMIT_EXCEEDED'
 58 | 							: 'API_ERROR';
 59 | 				break;
 60 | 			case ErrorType.UNEXPECTED_ERROR:
 61 | 			default:
 62 | 				this.errorType = 'UNEXPECTED_ERROR';
 63 | 				break;
 64 | 		}
 65 | 	}
 66 | }
 67 | 
 68 | /**
 69 |  * Helper to unwrap nested McpErrors and return the deepest original error.
 70 |  * This is useful when an McpError contains another McpError as `originalError`
 71 |  * which in turn may wrap the vendor (Bitbucket) error text or object.
 72 |  */
 73 | export function getDeepOriginalError(error: unknown): unknown {
 74 | 	if (!error) {
 75 | 		return error;
 76 | 	}
 77 | 
 78 | 	let current = error;
 79 | 	let depth = 0;
 80 | 	const maxDepth = 10; // Prevent infinite loops on cyclic error chains
 81 | 
 82 | 	while (
 83 | 		depth < maxDepth &&
 84 | 		current instanceof Error &&
 85 | 		'originalError' in current &&
 86 | 		current.originalError
 87 | 	) {
 88 | 		current = current.originalError;
 89 | 		depth++;
 90 | 	}
 91 | 
 92 | 	return current;
 93 | }
 94 | 
 95 | /**
 96 |  * Create an authentication missing error
 97 |  */
 98 | export function createAuthMissingError(
 99 | 	message: string = 'Authentication credentials are missing',
100 | 	originalError?: unknown,
101 | ): McpError {
102 | 	return new McpError(
103 | 		message,
104 | 		ErrorType.AUTH_MISSING,
105 | 		undefined,
106 | 		originalError,
107 | 	);
108 | }
109 | 
110 | /**
111 |  * Create an authentication invalid error
112 |  */
113 | export function createAuthInvalidError(
114 | 	message: string = 'Authentication credentials are invalid',
115 | 	originalError?: unknown,
116 | ): McpError {
117 | 	return new McpError(message, ErrorType.AUTH_INVALID, 401, originalError);
118 | }
119 | 
120 | /**
121 |  * Create an API error
122 |  */
123 | export function createApiError(
124 | 	message: string,
125 | 	statusCode?: number,
126 | 	originalError?: unknown,
127 | ): McpError {
128 | 	return new McpError(
129 | 		message,
130 | 		ErrorType.API_ERROR,
131 | 		statusCode,
132 | 		originalError,
133 | 	);
134 | }
135 | 
136 | /**
137 |  * Create an unexpected error
138 |  */
139 | export function createUnexpectedError(
140 | 	message: string = 'An unexpected error occurred',
141 | 	originalError?: unknown,
142 | ): McpError {
143 | 	return new McpError(
144 | 		message,
145 | 		ErrorType.UNEXPECTED_ERROR,
146 | 		undefined,
147 | 		originalError,
148 | 	);
149 | }
150 | 
151 | /**
152 |  * Ensure an error is an McpError
153 |  */
154 | export function ensureMcpError(error: unknown): McpError {
155 | 	if (error instanceof McpError) {
156 | 		return error;
157 | 	}
158 | 
159 | 	if (error instanceof Error) {
160 | 		return createUnexpectedError(error.message, error);
161 | 	}
162 | 
163 | 	return createUnexpectedError(String(error));
164 | }
165 | 
166 | /**
167 |  * Format error for MCP tool response
168 |  */
169 | export function formatErrorForMcpTool(error: unknown): {
170 | 	content: Array<{ type: 'text'; text: string }>;
171 | 	metadata?: {
172 | 		errorType: ErrorType;
173 | 		statusCode?: number;
174 | 		errorDetails?: unknown;
175 | 	};
176 | } {
177 | 	const methodLogger = Logger.forContext(
178 | 		'utils/error.util.ts',
179 | 		'formatErrorForMcpTool',
180 | 	);
181 | 	const mcpError = ensureMcpError(error);
182 | 	methodLogger.error(`${mcpError.type} error`, mcpError);
183 | 
184 | 	// Get the deep original error for additional context
185 | 	const originalError = getDeepOriginalError(mcpError.originalError);
186 | 
187 | 	// Safely extract details from the original error
188 | 	const errorDetails =
189 | 		originalError instanceof Error
190 | 			? { message: originalError.message }
191 | 			: originalError;
192 | 
193 | 	return {
194 | 		content: [
195 | 			{
196 | 				type: 'text' as const,
197 | 				text: `Error: ${mcpError.message}`,
198 | 			},
199 | 		],
200 | 		metadata: {
201 | 			errorType: mcpError.type,
202 | 			statusCode: mcpError.statusCode,
203 | 			errorDetails,
204 | 		},
205 | 	};
206 | }
207 | 
208 | /**
209 |  * Format error for MCP resource response
210 |  */
211 | export function formatErrorForMcpResource(
212 | 	error: unknown,
213 | 	uri: string,
214 | ): {
215 | 	contents: Array<{
216 | 		uri: string;
217 | 		text: string;
218 | 		mimeType: string;
219 | 		description?: string;
220 | 	}>;
221 | } {
222 | 	const methodLogger = Logger.forContext(
223 | 		'utils/error.util.ts',
224 | 		'formatErrorForMcpResource',
225 | 	);
226 | 	const mcpError = ensureMcpError(error);
227 | 	methodLogger.error(`${mcpError.type} error`, mcpError);
228 | 
229 | 	return {
230 | 		contents: [
231 | 			{
232 | 				uri,
233 | 				text: `Error: ${mcpError.message}`,
234 | 				mimeType: 'text/plain',
235 | 				description: `Error: ${mcpError.type}`,
236 | 			},
237 | 		],
238 | 	};
239 | }
240 | 
241 | /**
242 |  * Handle error in CLI context with improved user feedback
243 |  */
244 | export function handleCliError(error: unknown): never {
245 | 	const methodLogger = Logger.forContext(
246 | 		'utils/error.util.ts',
247 | 		'handleCliError',
248 | 	);
249 | 	const mcpError = ensureMcpError(error);
250 | 	methodLogger.error(`${mcpError.type} error`, mcpError);
251 | 
252 | 	// Get the deep original error for more context
253 | 	const originalError = getDeepOriginalError(mcpError.originalError);
254 | 
255 | 	// Build a well-formatted CLI output using markdown-style helpers
256 | 	const cliLines: string[] = [];
257 | 
258 | 	// Primary error headline
259 | 	cliLines.push(`❌  ${mcpError.message}`);
260 | 
261 | 	// Status code (if any)
262 | 	if (mcpError.statusCode) {
263 | 		cliLines.push(`HTTP Status: ${mcpError.statusCode}`);
264 | 	}
265 | 
266 | 	// Separator
267 | 	cliLines.push(formatSeparator());
268 | 
269 | 	// Provide helpful context based on error type
270 | 	if (mcpError.type === ErrorType.AUTH_MISSING) {
271 | 		cliLines.push(
272 | 			'Tip: Make sure to set up your Atlassian credentials in the configuration file or environment variables:',
273 | 		);
274 | 		cliLines.push(
275 | 			'- ATLASSIAN_SITE_NAME, ATLASSIAN_USER_EMAIL, and ATLASSIAN_API_TOKEN; or',
276 | 		);
277 | 		cliLines.push(
278 | 			'- ATLASSIAN_BITBUCKET_USERNAME and ATLASSIAN_BITBUCKET_APP_PASSWORD',
279 | 		);
280 | 	} else if (mcpError.type === ErrorType.AUTH_INVALID) {
281 | 		cliLines.push(
282 | 			'Tip: Check that your Atlassian API token or app password is correct and has not expired.',
283 | 		);
284 | 		cliLines.push(
285 | 			'Also verify that the configured user has access to the requested resource.',
286 | 		);
287 | 	} else if (mcpError.type === ErrorType.API_ERROR) {
288 | 		if (mcpError.statusCode === 429) {
289 | 			cliLines.push(
290 | 				'Tip: You may have exceeded your Bitbucket API rate limits. Try again later.',
291 | 			);
292 | 		}
293 | 	}
294 | 
295 | 	// Vendor error details (if available)
296 | 	if (originalError) {
297 | 		cliLines.push('Bitbucket API Error:');
298 | 		cliLines.push('```');
299 | 		if (typeof originalError === 'object' && originalError !== null) {
300 | 			// Try to extract the most useful parts of Bitbucket's error response
301 | 			const origErr = originalError as Record<string, unknown>;
302 | 			if (origErr.error && typeof origErr.error === 'object') {
303 | 				// Format {"error": {"message": "..."}} structure
304 | 				const bitbucketError = origErr.error as Record<string, unknown>;
305 | 				cliLines.push(
306 | 					`Message: ${bitbucketError.message || 'Unknown error'}`,
307 | 				);
308 | 				if (bitbucketError.detail)
309 | 					cliLines.push(`Detail: ${bitbucketError.detail}`);
310 | 			} else if (origErr.message) {
311 | 				// Simple message
312 | 				cliLines.push(`${String(origErr.message)}`);
313 | 			} else {
314 | 				// Fall back to JSON representation for anything else
315 | 				cliLines.push(JSON.stringify(originalError, null, 2));
316 | 			}
317 | 		} else {
318 | 			cliLines.push(String(originalError).trim());
319 | 		}
320 | 		cliLines.push('```');
321 | 	}
322 | 
323 | 	// Display DEBUG tip
324 | 	if (!process.env.DEBUG || !process.env.DEBUG.includes('mcp:')) {
325 | 		cliLines.push(
326 | 			'For more detailed error information, run with DEBUG=mcp:* environment variable.',
327 | 		);
328 | 	}
329 | 
330 | 	console.error(cliLines.join('\n'));
331 | 	process.exit(1);
332 | }
333 | 
```
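
A minimal sketch of how these helpers compose (the import path and the sample Bitbucket payload are hypothetical): an API failure is wrapped with `createApiError`, a 404 status maps `errorType` to `NOT_FOUND`, `formatErrorForMcpTool` turns it into an MCP tool response, and `getDeepOriginalError` surfaces the vendor payload:

```typescript
import {
	createApiError,
	formatErrorForMcpTool,
	getDeepOriginalError,
} from './src/utils/error.util.js'; // illustrative path

// Hypothetical Bitbucket error payload returned by a failed API call.
const vendorError = {
	error: { message: 'Repository not found', detail: 'Check the workspace and repo slug' },
};

// Wrap it as an API error; a 404 status maps errorType to 'NOT_FOUND'.
const wrapped = createApiError('Failed to fetch repository', 404, vendorError);
console.log(wrapped.errorType); // "NOT_FOUND"

// MCP tool response: human-readable text plus structured metadata.
const toolResponse = formatErrorForMcpTool(wrapped);
console.log(toolResponse.content[0].text); // "Error: Failed to fetch repository"
console.log(toolResponse.metadata?.statusCode); // 404

// The deepest original error (the vendor payload) remains reachable.
console.log(getDeepOriginalError(wrapped.originalError)); // vendorError
```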

--------------------------------------------------------------------------------
/src/controllers/atlassian.repositories.branch.controller.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import atlassianRepositoriesService from '../services/vendor.atlassian.repositories.service.js';
  2 | import { Logger } from '../utils/logger.util.js';
  3 | import { handleControllerError } from '../utils/error-handler.util.js';
  4 | import { DEFAULT_PAGE_SIZE, applyDefaults } from '../utils/defaults.util.js';
  5 | import {
  6 | 	extractPaginationInfo,
  7 | 	PaginationType,
  8 | } from '../utils/pagination.util.js';
  9 | import { formatPagination } from '../utils/formatter.util.js';
 10 | import { ControllerResponse } from '../types/common.types.js';
 11 | import {
 12 | 	CreateBranchToolArgsType,
 13 | 	ListBranchesToolArgsType,
 14 | } from '../tools/atlassian.repositories.types.js';
 15 | import { CreateBranchParams } from '../services/vendor.atlassian.repositories.types.js';
 16 | import { getDefaultWorkspace } from '../utils/workspace.util.js';
 17 | 
 18 | // Logger instance for this module
 19 | const logger = Logger.forContext(
 20 | 	'controllers/atlassian.repositories.branch.controller.ts',
 21 | );
 22 | 
 23 | /**
 24 |  * Creates a new branch in a repository.
 25 |  * @param options Options including workspace, repo, new branch name, and source target.
 26 |  * @returns Confirmation message.
 27 |  */
 28 | export async function handleCreateBranch(
 29 | 	options: CreateBranchToolArgsType,
 30 | ): Promise<ControllerResponse> {
 31 | 	const { repoSlug, newBranchName, sourceBranchOrCommit } = options;
 32 | 	let { workspaceSlug } = options;
 33 | 	const methodLogger = logger.forMethod('handleCreateBranch');
 34 | 
 35 | 	try {
 36 | 		methodLogger.debug('Creating new branch with options:', options);
 37 | 
 38 | 		// Handle optional workspaceSlug
 39 | 		if (!workspaceSlug) {
 40 | 			methodLogger.debug(
 41 | 				'No workspace provided, fetching default workspace',
 42 | 			);
 43 | 			const defaultWorkspace = await getDefaultWorkspace();
 44 | 			if (!defaultWorkspace) {
 45 | 				throw new Error(
 46 | 					'No default workspace found. Please provide a workspace slug.',
 47 | 				);
 48 | 			}
 49 | 			workspaceSlug = defaultWorkspace;
 50 | 			methodLogger.debug(`Using default workspace: ${defaultWorkspace}`);
 51 | 		}
 52 | 
 53 | 		if (!repoSlug) {
 54 | 			throw new Error('Repository slug is required');
 55 | 		}
 56 | 
 57 | 		if (!newBranchName) {
 58 | 			throw new Error('New branch name is required');
 59 | 		}
 60 | 
 61 | 		if (!sourceBranchOrCommit) {
 62 | 			throw new Error(
 63 | 				'Source branch or commit is required as the starting point',
 64 | 			);
 65 | 		}
 66 | 
 67 | 		// First, check if branch already exists to avoid potential errors
 68 | 		methodLogger.debug('Checking if branch already exists');
 69 | 		try {
 70 | 			// Call API to check if branch exists
 71 | 			// Note: rather than relying on a dedicated "get branch" endpoint, we
 72 | 			// query the list-branches endpoint with a name filter to check existence
 73 | 			const existingBranches =
 74 | 				await atlassianRepositoriesService.listBranches({
 75 | 					workspace: workspaceSlug,
 76 | 					repo_slug: repoSlug,
 77 | 					q: `name="${newBranchName}"`,
 78 | 				});
 79 | 
 80 | 			// If we get matching branches, assume the branch exists
 81 | 			if (existingBranches.values && existingBranches.values.length > 0) {
 82 | 				methodLogger.warn(
 83 | 					`Branch '${newBranchName}' already exists in ${workspaceSlug}/${repoSlug}`,
 84 | 				);
 85 | 				return {
 86 | 					content: `⚠️ Branch \`${newBranchName}\` already exists in the repository.`,
 87 | 				};
 88 | 			}
 89 | 		} catch (error) {
 90 | 			// If error is 404, branch doesn't exist and we can proceed
 91 | 			if ((error as { statusCode?: number }).statusCode !== 404) {
 92 | 				throw error; // Other errors should be propagated
 93 | 			}
 94 | 			methodLogger.debug(
 95 | 				`Branch '${newBranchName}' does not exist, proceeding with creation`,
 96 | 			);
 97 | 		}
 98 | 
 99 | 		// Prepare the branch creation parameters
100 | 		const createParams: CreateBranchParams = {
101 | 			workspace: workspaceSlug,
102 | 			repo_slug: repoSlug,
103 | 			name: newBranchName,
104 | 			target: {
105 | 				hash: sourceBranchOrCommit,
106 | 			},
107 | 		};
108 | 
109 | 		// Create the branch
110 | 		methodLogger.debug('Creating branch with params:', createParams);
111 | 		const result =
112 | 			await atlassianRepositoriesService.createBranch(createParams);
113 | 
114 | 		// Confirm success with a meaningful message
115 | 		methodLogger.debug('Branch created successfully:', result);
116 | 		return {
117 | 			content: `✅ Successfully created branch \`${newBranchName}\` from \`${sourceBranchOrCommit}\` in ${workspaceSlug}/${repoSlug}.`,
118 | 		};
119 | 	} catch (error) {
120 | 		throw handleControllerError(error, {
121 | 			entityType: 'Branch',
122 | 			operation: 'create',
123 | 			source: 'controllers/atlassian.repositories.branch.controller.ts@handleCreateBranch',
124 | 			additionalInfo: options,
125 | 		});
126 | 	}
127 | }
128 | 
129 | /**
130 |  * Lists branches in a repository with optional filtering
131 |  *
132 |  * @param options - Options containing workspaceSlug, repoSlug, and filters
133 |  * @returns Formatted list of branches and pagination information
134 |  */
135 | export async function handleListBranches(
136 | 	options: ListBranchesToolArgsType,
137 | ): Promise<ControllerResponse> {
138 | 	const methodLogger = logger.forMethod('handleListBranches');
139 | 	methodLogger.debug('Listing branches with options:', options);
140 | 
141 | 	try {
142 | 		// Apply defaults
143 | 		const defaults: Partial<ListBranchesToolArgsType> = {
144 | 			limit: DEFAULT_PAGE_SIZE,
145 | 			sort: 'name', // Default sort by name
146 | 		};
147 | 		const params = applyDefaults<ListBranchesToolArgsType>(
148 | 			options,
149 | 			defaults,
150 | 		);
151 | 
152 | 		// Handle optional workspaceSlug
153 | 		if (!params.workspaceSlug) {
154 | 			methodLogger.debug(
155 | 				'No workspace provided, fetching default workspace',
156 | 			);
157 | 			const defaultWorkspace = await getDefaultWorkspace();
158 | 			if (!defaultWorkspace) {
159 | 				throw new Error(
160 | 					'No default workspace found. Please provide a workspace slug.',
161 | 				);
162 | 			}
163 | 			params.workspaceSlug = defaultWorkspace;
164 | 			methodLogger.debug(`Using default workspace: ${defaultWorkspace}`);
165 | 		}
166 | 
167 | 		// Required parameters check
168 | 		if (!params.repoSlug) {
169 | 			throw new Error('Repository slug is required');
170 | 		}
171 | 
172 | 		// Call the service to list branches
173 | 		methodLogger.debug('Listing branches with params:', {
174 | 			workspace: params.workspaceSlug,
175 | 			repo_slug: params.repoSlug,
176 | 			q: params.query ? `name ~ "${params.query}"` : undefined,
177 | 			sort: params.sort,
178 | 			pagelen: params.limit,
179 | 			page: params.cursor ? parseInt(params.cursor, 10) : undefined,
180 | 		});
181 | 
182 | 		const branchesData = await atlassianRepositoriesService.listBranches({
183 | 			workspace: params.workspaceSlug,
184 | 			repo_slug: params.repoSlug,
185 | 			q: params.query ? `name ~ "${params.query}"` : undefined,
186 | 			sort: params.sort,
187 | 			pagelen: params.limit,
188 | 			page: params.cursor ? parseInt(params.cursor, 10) : undefined,
189 | 		});
190 | 
191 | 		methodLogger.debug(
192 | 			`Retrieved ${branchesData.values?.length || 0} branches`,
193 | 		);
194 | 
195 | 		// Extract pagination information
196 | 		const pagination = extractPaginationInfo(
197 | 			branchesData,
198 | 			PaginationType.PAGE,
199 | 		);
200 | 
201 | 		// Format branches data into Markdown
202 | 		let content = '';
203 | 
204 | 		if (!branchesData.values || branchesData.values.length === 0) {
205 | 			content = 'No branches found in this repository.';
206 | 		} else {
207 | 			content = `# Branches in \`${params.workspaceSlug}/${params.repoSlug}\`\n\n`;
208 | 
209 | 			if (params.query) {
210 | 				content += `Filtered by query: "${params.query}"\n\n`;
211 | 			}
212 | 
213 | 			branchesData.values.forEach((branch) => {
214 | 				// Using target.hash to get the commit hash this branch points to
215 | 				const commitHash =
216 | 					branch.target?.hash?.substring(0, 8) || 'N/A';
217 | 				content += `## ${branch.name}\n\n`;
218 | 				content += `- **Latest Commit**: ${commitHash}\n`;
219 | 				content += `- **Type**: ${branch.type || 'branch'}\n`;
220 | 
221 | 				if (branch.default_merge_strategy) {
222 | 					content += `- **Default Merge Strategy**: ${branch.default_merge_strategy}\n`;
223 | 				}
224 | 
225 | 				if (branch.merge_strategies && branch.merge_strategies.length) {
226 | 					content += `- **Available Merge Strategies**: ${branch.merge_strategies.join(
227 | 						', ',
228 | 					)}\n`;
229 | 				}
230 | 
231 | 				content += '\n';
232 | 			});
233 | 		}
234 | 
235 | 		// Add pagination information if available
236 | 		if (
237 | 			pagination &&
238 | 			(pagination.hasMore || pagination.count !== undefined)
239 | 		) {
240 | 			const paginationString = formatPagination(pagination);
241 | 			content += paginationString;
242 | 		}
243 | 
244 | 		return { content };
245 | 	} catch (error) {
246 | 		throw handleControllerError(error, {
247 | 			entityType: 'Branches',
248 | 			operation: 'listing',
249 | 			source: 'controllers/atlassian.repositories.branch.controller.ts@handleListBranches',
250 | 			additionalInfo: options,
251 | 		});
252 | 	}
253 | }
254 | 
```
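
Both handlers return a `ControllerResponse` whose `content` is Markdown. A minimal usage sketch, assuming Bitbucket credentials are already configured in the environment; the import path and the workspace, repository, and branch names below are hypothetical:

```typescript
import {
	handleCreateBranch,
	handleListBranches,
} from './src/controllers/atlassian.repositories.branch.controller.js'; // illustrative path

// Assumes Bitbucket credentials are configured in the environment
// (e.g. ATLASSIAN_BITBUCKET_USERNAME / ATLASSIAN_BITBUCKET_APP_PASSWORD);
// repository and branch names below are hypothetical.
async function main() {
	// workspaceSlug omitted: the controller falls back to the default workspace.
	const branches = await handleListBranches({
		repoSlug: 'project-api',
		query: 'feature',
		limit: 10,
	});
	console.log(branches.content); // Markdown list of matching branches

	const created = await handleCreateBranch({
		repoSlug: 'project-api',
		newBranchName: 'feature/new-idea',
		sourceBranchOrCommit: 'main',
	});
	console.log(created.content); // success confirmation or "already exists" warning
}

main().catch(console.error);
```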

--------------------------------------------------------------------------------
/src/services/vendor.atlassian.search.service.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import { fetchAtlassian } from '../utils/transport.util.js';
  2 | import { Logger } from '../utils/logger.util.js';
  3 | import { NETWORK_TIMEOUTS } from '../utils/constants.util.js';
  4 | import { URLSearchParams } from 'url';
  5 | import { getAtlassianCredentials } from '../utils/transport.util.js';
  6 | import {
  7 | 	ContentSearchParams,
  8 | 	ContentSearchResponse,
  9 | } from './vendor.atlassian.search.types.js';
 10 | 
 11 | const logger = Logger.forContext('services/vendor.atlassian.search.service.ts');
 12 | 
 13 | /**
 14 |  * Search options for code search in a workspace
 15 |  */
 16 | export interface SearchCodeParams {
 17 | 	workspaceSlug: string;
 18 | 	searchQuery: string;
 19 | 	page?: number;
 20 | 	pageLen?: number;
 21 | 	repoSlug?: string;
 22 | 	fields?: string;
 23 | 	language?: string;
 24 | 	extension?: string;
 25 | }
 26 | 
 27 | /**
 28 |  * Search options for commit search in a repository
 29 |  */
 30 | export interface SearchCommitsParams {
 31 | 	workspaceSlug: string;
 32 | 	repoSlug: string;
 33 | 	searchQuery: string;
 34 | 	page?: number;
 35 | 	pageLen?: number;
 36 | 	fields?: string;
 37 | }
 38 | 
 39 | /**
 40 |  * Response type for code search API
 41 |  */
 42 | export interface CodeSearchResponse {
 43 | 	size: number;
 44 | 	page: number;
 45 | 	pagelen: number;
 46 | 	query_substituted: boolean;
 47 | 	values: CodeSearchResult[];
 48 | }
 49 | 
 50 | /**
 51 |  * Response type for commits API
 52 |  */
 53 | export interface CommitsResponse {
 54 | 	size: number;
 55 | 	page: number;
 56 | 	pagelen: number;
 57 | 	next?: string;
 58 | 	previous?: string;
 59 | 	values: CommitResult[];
 60 | }
 61 | 
 62 | /**
 63 |  * Commit result type
 64 |  */
 65 | export interface CommitResult {
 66 | 	hash: string;
 67 | 	date: string;
 68 | 	message: string;
 69 | 	type: string;
 70 | 	author: {
 71 | 		raw: string;
 72 | 		type: string;
 73 | 		user?: {
 74 | 			display_name: string;
 75 | 			account_id: string;
 76 | 			links: {
 77 | 				self: { href: string };
 78 | 				avatar: { href: string };
 79 | 			};
 80 | 		};
 81 | 	};
 82 | 	links: {
 83 | 		self: { href: string };
 84 | 		html: { href: string };
 85 | 	};
 86 | 	repository?: {
 87 | 		name: string;
 88 | 		full_name: string;
 89 | 		links: {
 90 | 			self: { href: string };
 91 | 			html: { href: string };
 92 | 		};
 93 | 	};
 94 | }
 95 | 
 96 | /**
 97 |  * Code search result type
 98 |  */
 99 | export interface CodeSearchResult {
100 | 	type: string;
101 | 	content_match_count: number;
102 | 	content_matches: ContentMatch[];
103 | 	path_matches: PathMatch[];
104 | 	file: {
105 | 		path: string;
106 | 		type: string;
107 | 		links: {
108 | 			self: {
109 | 				href: string;
110 | 			};
111 | 		};
112 | 	};
113 | }
114 | 
115 | /**
116 |  * Content match type
117 |  */
118 | export interface ContentMatch {
119 | 	lines: {
120 | 		line: number;
121 | 		segments: {
122 | 			text: string;
123 | 			match?: boolean;
124 | 		}[];
125 | 	}[];
126 | }
127 | 
128 | /**
129 |  * Path match type
130 |  */
131 | export interface PathMatch {
132 | 	text: string;
133 | 	match?: boolean;
134 | }
135 | 
136 | /**
137 |  * Search for commits in a repository using the Bitbucket API
138 |  *
139 |  * @param {SearchCommitsParams} params - Parameters for the commit search
140 |  * @returns {Promise<CommitsResponse>} The search results from the Bitbucket API
141 |  */
142 | export async function searchCommits(
143 | 	params: SearchCommitsParams,
144 | ): Promise<CommitsResponse> {
145 | 	// Build the query parameters - the Bitbucket API allows searching commits by message
146 | 	const queryParams = new URLSearchParams();
147 | 
148 | 	// If search query is provided, add it as a q parameter
149 | 	if (params.searchQuery) {
150 | 		queryParams.append('q', `message ~ "${params.searchQuery}"`);
151 | 	}
152 | 
153 | 	// Add optional pagination parameters
154 | 	if (params.page) {
155 | 		queryParams.append('page', params.page.toString());
156 | 	}
157 | 
158 | 	if (params.pageLen) {
159 | 		queryParams.append('pagelen', params.pageLen.toString());
160 | 	}
161 | 
162 | 	// Add optional fields parameter for enhanced responses
163 | 	if (params.fields) {
164 | 		queryParams.append('fields', params.fields);
165 | 	}
166 | 
167 | 	// Get credentials for API call
168 | 	const credentials = getAtlassianCredentials();
169 | 	if (!credentials) {
170 | 		throw new Error('No Atlassian credentials available');
171 | 	}
172 | 
173 | 	// Set useBitbucketAuth to true since we're calling the Bitbucket API
174 | 	credentials.useBitbucketAuth = true;
175 | 
176 | 	// Create API path for Bitbucket commits
177 | 	const path = `/2.0/repositories/${params.workspaceSlug}/${params.repoSlug}/commits${
178 | 		queryParams.toString() ? '?' + queryParams.toString() : ''
179 | 	}`;
180 | 
181 | 	// Track searching commits in repository
182 | 	logger.debug(
183 | 		`Searching commits in repository: ${params.workspaceSlug}/${params.repoSlug}`,
184 | 		{
185 | 			searchQuery: params.searchQuery,
186 | 			path,
187 | 		},
188 | 	);
189 | 
190 | 	// Call Bitbucket API with credentials and path
191 | 	return fetchAtlassian(credentials, path, {
192 | 		timeout: NETWORK_TIMEOUTS.SEARCH_REQUEST_TIMEOUT,
193 | 	});
194 | }
195 | 
196 | /**
197 |  * Search for code in a workspace using the Bitbucket API
198 |  *
199 |  * @param {SearchCodeParams} params - Parameters for the code search
200 |  * @returns {Promise<CodeSearchResponse>} The search results from the Bitbucket API
201 |  */
202 | export async function searchCode(
203 | 	params: SearchCodeParams,
204 | ): Promise<CodeSearchResponse> {
205 | 	// If repoSlug is provided, enhance the search query with repo: syntax
206 | 	const searchQuery = params.repoSlug
207 | 		? `${params.searchQuery} repo:${params.repoSlug}`
208 | 		: params.searchQuery;
209 | 
210 | 	// Language mapping to handle common alternative names
211 | 	const languageMapping: Record<string, string> = {
212 | 		hcl: 'terraform',
213 | 		tf: 'terraform',
214 | 		typescript: 'ts',
215 | 		javascript: 'js',
216 | 		python: 'py',
217 | 		// Add more mappings as needed
218 | 	};
219 | 
220 | 	// Append language and extension filters if provided
221 | 	let finalSearchQuery = searchQuery;
222 | 	if (params.language) {
223 | 		// Use the mapped language name if available, otherwise use the original
224 | 		const mappedLanguage = params.language.toLowerCase();
225 | 		const apiLanguage = languageMapping[mappedLanguage] || mappedLanguage;
226 | 
227 | 		logger.debug(
228 | 			`Language mapping: "${mappedLanguage}" -> "${apiLanguage}"`,
229 | 		);
230 | 		finalSearchQuery += ` lang:${apiLanguage}`;
231 | 	}
232 | 	if (params.extension) {
233 | 		finalSearchQuery += ` ext:${params.extension}`;
234 | 	}
235 | 
236 | 	// Build the query parameters
237 | 	const queryParams = new URLSearchParams({
238 | 		search_query: finalSearchQuery,
239 | 	});
240 | 
241 | 	// Add optional pagination parameters
242 | 	if (params.page) {
243 | 		queryParams.append('page', params.page.toString());
244 | 	}
245 | 
246 | 	if (params.pageLen) {
247 | 		queryParams.append('pagelen', params.pageLen.toString());
248 | 	}
249 | 
250 | 	// Add optional fields parameter for enhanced responses
251 | 	if (params.fields) {
252 | 		queryParams.append('fields', params.fields);
253 | 	}
254 | 
255 | 	// Get credentials for API call
256 | 	const credentials = getAtlassianCredentials();
257 | 	if (!credentials) {
258 | 		throw new Error('No Atlassian credentials available');
259 | 	}
260 | 
261 | 	// Set useBitbucketAuth to true since we're calling the Bitbucket API
262 | 	credentials.useBitbucketAuth = true;
263 | 
264 | 	// Create API path for Bitbucket code search
265 | 	const path = `/2.0/workspaces/${params.workspaceSlug}/search/code?${queryParams.toString()}`;
266 | 
267 | 	// Track searching code in workspace
268 | 	logger.debug(`Searching code in workspace: ${params.workspaceSlug}`, {
269 | 		searchQuery: finalSearchQuery,
270 | 		path,
271 | 	});
272 | 
273 | 	// Call Bitbucket API with credentials and path
274 | 	return fetchAtlassian(credentials, path, {
275 | 		timeout: NETWORK_TIMEOUTS.SEARCH_REQUEST_TIMEOUT,
276 | 	});
277 | }
278 | 
279 | /**
280 |  * Search for content in Bitbucket
281 |  *
282 |  * @param params Search parameters
283 |  * @returns Content search response
284 |  */
285 | async function searchContent(
286 | 	params: ContentSearchParams,
287 | ): Promise<ContentSearchResponse> {
288 | 	const logger = Logger.forContext(
289 | 		'services/vendor.atlassian.search.service.ts',
290 | 		'searchContent',
291 | 	);
292 | 
293 | 	try {
294 | 		const credentials = getAtlassianCredentials();
295 | 		if (!credentials) {
296 | 			throw new Error(
297 | 				'Atlassian credentials are required for content search',
298 | 			);
299 | 		}
300 | 
301 | 		// Build query parameters
302 | 		const queryParams = new URLSearchParams();
303 | 
304 | 		// Format the query
305 | 		queryParams.set('q', params.query);
306 | 
307 | 		// Add pagination parameters
308 | 		queryParams.set('pagelen', String(params.limit || 25));
309 | 		queryParams.set('page', String(params.page || 1));
310 | 
311 | 		// Add content type filter if specified
312 | 		if (params.contentType) {
313 | 			queryParams.set('content_type', params.contentType);
314 | 		}
315 | 
316 | 		// Construct URL based on whether a repository is specified
317 | 		let url = `/2.0/search/${params.workspaceSlug}`;
318 | 		if (params.repoSlug) {
319 | 			url = `/2.0/search/${params.workspaceSlug}/${params.repoSlug}`;
320 | 		}
321 | 
322 | 		// Add query parameters
323 | 		url += `?${queryParams.toString()}`;
324 | 
325 | 		logger.debug(`Content search request URL: ${url}`);
326 | 
327 | 		// Make the request
328 | 		const response = await fetchAtlassian<ContentSearchResponse>(
329 | 			credentials,
330 | 			url,
331 | 			{
332 | 				timeout: NETWORK_TIMEOUTS.SEARCH_REQUEST_TIMEOUT,
333 | 			},
334 | 		);
335 | 
336 | 		return response;
337 | 	} catch (error) {
338 | 		logger.error('Content search failed:', error);
339 | 		throw error;
340 | 	}
341 | }
342 | 
343 | export default {
344 | 	searchCode,
345 | 	searchCommits,
346 | 	searchContent,
347 | };
348 | 
```
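
A minimal usage sketch of the default export (workspace, repository, and query values are hypothetical; credentials are assumed to be resolvable by `getAtlassianCredentials()` from the environment): `searchCode` rewrites `language`/`extension` filters into `lang:`/`ext:` query terms, while `searchCommits` builds a `message ~ "..."` filter internally:

```typescript
import searchService from './src/services/vendor.atlassian.search.service.js'; // illustrative path

// Workspace/repository names and queries are hypothetical; credentials are
// assumed to be resolvable by getAtlassianCredentials() from the environment.
async function runSearches() {
	// Code search scoped to one repository and filtered to Terraform files;
	// the service remaps "hcl" to "terraform" before building lang:/ext: terms.
	const code = await searchService.searchCode({
		workspaceSlug: 'myteam',
		searchQuery: 'aws_s3_bucket',
		repoSlug: 'infra',
		language: 'hcl',
		pageLen: 10,
	});
	console.log(`${code.size} matching files`);

	// Commit search: the service wraps the query as `message ~ "hotfix"`.
	const commits = await searchService.searchCommits({
		workspaceSlug: 'myteam',
		repoSlug: 'project-api',
		searchQuery: 'hotfix',
	});
	for (const commit of commits.values) {
		console.log(commit.hash.slice(0, 8), commit.message.split('\n')[0]);
	}
}

runSearches().catch(console.error);
```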

--------------------------------------------------------------------------------
/src/services/vendor.atlassian.repositories.types.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import { z } from 'zod';
  2 | 
  3 | /**
  4 |  * Types for Atlassian Bitbucket Repositories API
  5 |  */
  6 | 
  7 | // Link href schema
  8 | const LinkSchema = z.object({
  9 | 	href: z.string(),
 10 | 	name: z.string().optional(),
 11 | });
 12 | 
 13 | /**
 14 |  * Repository SCM type
 15 |  */
 16 | export const RepositorySCMSchema = z.enum(['git', 'hg']);
 17 | 
 18 | /**
 19 |  * Repository fork policy
 20 |  */
 21 | export const RepositoryForkPolicySchema = z.enum([
 22 | 	'allow_forks',
 23 | 	'no_public_forks',
 24 | 	'no_forks',
 25 | ]);
 26 | 
 27 | /**
 28 |  * Repository links object
 29 |  */
 30 | export const RepositoryLinksSchema = z.object({
 31 | 	self: LinkSchema.optional(),
 32 | 	html: LinkSchema.optional(),
 33 | 	avatar: LinkSchema.optional(),
 34 | 	pullrequests: LinkSchema.optional(),
 35 | 	commits: LinkSchema.optional(),
 36 | 	forks: LinkSchema.optional(),
 37 | 	watchers: LinkSchema.optional(),
 38 | 	downloads: LinkSchema.optional(),
 39 | 	clone: z.array(LinkSchema).optional(),
 40 | 	hooks: LinkSchema.optional(),
 41 | 	issues: LinkSchema.optional(),
 42 | });
 43 | 
 44 | /**
 45 |  * Repository owner links schema
 46 |  */
 47 | const OwnerLinksSchema = z.object({
 48 | 	self: LinkSchema.optional(),
 49 | 	html: LinkSchema.optional(),
 50 | 	avatar: LinkSchema.optional(),
 51 | });
 52 | 
 53 | /**
 54 |  * Repository owner object
 55 |  */
 56 | export const RepositoryOwnerSchema = z.object({
 57 | 	type: z.enum(['user', 'team']),
 58 | 	username: z.string().optional(),
 59 | 	display_name: z.string().optional(),
 60 | 	uuid: z.string().optional(),
 61 | 	links: OwnerLinksSchema.optional(),
 62 | });
 63 | 
 64 | /**
 65 |  * Repository branch object
 66 |  */
 67 | export const RepositoryBranchSchema = z.object({
 68 | 	type: z.literal('branch'),
 69 | 	name: z.string(),
 70 | });
 71 | 
 72 | /**
 73 |  * Repository project links schema
 74 |  */
 75 | const ProjectLinksSchema = z.object({
 76 | 	self: LinkSchema.optional(),
 77 | 	html: LinkSchema.optional(),
 78 | });
 79 | 
 80 | /**
 81 |  * Repository project object
 82 |  */
 83 | export const RepositoryProjectSchema = z.object({
 84 | 	type: z.literal('project'),
 85 | 	key: z.string(),
 86 | 	uuid: z.string(),
 87 | 	name: z.string(),
 88 | 	links: ProjectLinksSchema.optional(),
 89 | });
 90 | 
 91 | /**
 92 |  * Repository object returned from the API
 93 |  */
 94 | export const RepositorySchema = z.object({
 95 | 	type: z.literal('repository'),
 96 | 	uuid: z.string(),
 97 | 	full_name: z.string(),
 98 | 	name: z.string(),
 99 | 	description: z.string().optional(),
100 | 	is_private: z.boolean(),
101 | 	fork_policy: RepositoryForkPolicySchema.optional(),
102 | 	created_on: z.string().optional(),
103 | 	updated_on: z.string().optional(),
104 | 	size: z.number().optional(),
105 | 	language: z.string().optional(),
106 | 	has_issues: z.boolean().optional(),
107 | 	has_wiki: z.boolean().optional(),
108 | 	scm: RepositorySCMSchema,
109 | 	owner: RepositoryOwnerSchema,
110 | 	mainbranch: RepositoryBranchSchema.optional(),
111 | 	project: RepositoryProjectSchema.optional(),
112 | 	links: RepositoryLinksSchema,
113 | });
114 | export type Repository = z.infer<typeof RepositorySchema>;
115 | 
116 | /**
117 |  * Parameters for listing repositories
118 |  */
119 | export const ListRepositoriesParamsSchema = z.object({
120 | 	workspace: z.string(),
121 | 	q: z.string().optional(),
122 | 	sort: z.string().optional(),
123 | 	page: z.number().optional(),
124 | 	pagelen: z.number().optional(),
125 | 	role: z.string().optional(),
126 | });
127 | export type ListRepositoriesParams = z.infer<
128 | 	typeof ListRepositoriesParamsSchema
129 | >;
130 | 
131 | /**
132 |  * Parameters for getting a repository by identifier
133 |  */
134 | export const GetRepositoryParamsSchema = z.object({
135 | 	workspace: z.string(),
136 | 	repo_slug: z.string(),
137 | });
138 | export type GetRepositoryParams = z.infer<typeof GetRepositoryParamsSchema>;
139 | 
140 | /**
141 |  * API response for listing repositories
142 |  */
143 | export const RepositoriesResponseSchema = z.object({
144 | 	pagelen: z.number(),
145 | 	page: z.number(),
146 | 	size: z.number(),
147 | 	next: z.string().optional(),
148 | 	previous: z.string().optional(),
149 | 	values: z.array(RepositorySchema),
150 | });
151 | export type RepositoriesResponse = z.infer<typeof RepositoriesResponseSchema>;
152 | 
153 | // --- Commit History Types ---
154 | 
155 | /**
156 |  * Parameters for listing commits.
157 |  */
158 | export const ListCommitsParamsSchema = z.object({
159 | 	workspace: z.string(),
160 | 	repo_slug: z.string(),
161 | 	include: z.string().optional(), // Branch, tag, or hash to include history from
162 | 	exclude: z.string().optional(), // Branch, tag, or hash to exclude history up to
163 | 	path: z.string().optional(), // File path to filter commits by
164 | 	page: z.number().optional(),
165 | 	pagelen: z.number().optional(),
166 | });
167 | export type ListCommitsParams = z.infer<typeof ListCommitsParamsSchema>;
168 | 
169 | /**
170 |  * Commit author user links schema
171 |  */
172 | const CommitAuthorUserLinksSchema = z.object({
173 | 	self: LinkSchema.optional(),
174 | 	avatar: LinkSchema.optional(),
175 | });
176 | 
177 | /**
178 |  * Commit author user schema
179 |  */
180 | const CommitAuthorUserSchema = z.object({
181 | 	display_name: z.string().optional(),
182 | 	nickname: z.string().optional(),
183 | 	account_id: z.string().optional(),
184 | 	uuid: z.string().optional(),
185 | 	type: z.string(), // Usually 'user'
186 | 	links: CommitAuthorUserLinksSchema.optional(),
187 | });
188 | 
189 | /**
190 |  * Commit author schema
191 |  */
192 | export const CommitAuthorSchema = z.object({
193 | 	raw: z.string(),
194 | 	type: z.string(), // Usually 'author'
195 | 	user: CommitAuthorUserSchema.optional(),
196 | });
197 | 
198 | /**
199 |  * Commit links schema
200 |  */
201 | const CommitLinksSchema = z.object({
202 | 	self: LinkSchema.optional(),
203 | 	html: LinkSchema.optional(),
204 | 	diff: LinkSchema.optional(),
205 | 	approve: LinkSchema.optional(),
206 | 	comments: LinkSchema.optional(),
207 | });
208 | 
209 | /**
210 |  * Commit summary schema
211 |  */
212 | const CommitSummarySchema = z.object({
213 | 	raw: z.string().optional(),
214 | 	markup: z.string().optional(),
215 | 	html: z.string().optional(),
216 | });
217 | 
218 | /**
219 |  * Commit parent schema
220 |  */
221 | const CommitParentSchema = z.object({
222 | 	hash: z.string(),
223 | 	type: z.string(),
224 | 	links: z.unknown(),
225 | });
226 | 
227 | /**
228 |  * Represents a single commit in the history.
229 |  */
230 | export const CommitSchema = z.object({
231 | 	hash: z.string(),
232 | 	type: z.string(), // Usually 'commit'
233 | 	author: CommitAuthorSchema,
234 | 	date: z.string(), // ISO 8601 format date string
235 | 	message: z.string(),
236 | 	links: CommitLinksSchema,
237 | 	summary: CommitSummarySchema.optional(),
238 | 	parents: z.array(CommitParentSchema),
239 | });
240 | export type Commit = z.infer<typeof CommitSchema>;
241 | 
242 | /**
243 |  * API response for listing commits (paginated).
244 |  */
245 | export const PaginatedCommitsSchema = z.object({
246 | 	pagelen: z.number(),
247 | 	page: z.number().optional(),
248 | 	size: z.number().optional(),
249 | 	next: z.string().optional(),
250 | 	previous: z.string().optional(),
251 | 	values: z.array(CommitSchema),
252 | });
253 | export type PaginatedCommits = z.infer<typeof PaginatedCommitsSchema>;
254 | 
255 | /**
256 |  * Parameters for creating a branch.
257 |  */
258 | export const CreateBranchParamsSchema = z.object({
259 | 	workspace: z.string(),
260 | 	repo_slug: z.string(),
261 | 	name: z.string(), // New branch name
262 | 	target: z.object({
263 | 		hash: z.string(), // Source branch name or commit hash
264 | 	}),
265 | });
266 | export type CreateBranchParams = z.infer<typeof CreateBranchParamsSchema>;
267 | 
268 | /**
269 |  * Response object when creating a branch.
270 |  * Contains details about the newly created branch reference.
271 |  */
272 | export const BranchRefSchema = z.object({
273 | 	type: z.literal('branch'),
274 | 	name: z.string(),
275 | 	target: z.object({
276 | 		hash: z.string(),
277 | 		type: z.string(), // e.g., 'commit'
278 | 	}),
279 | });
280 | export type BranchRef = z.infer<typeof BranchRefSchema>;
281 | 
282 | /**
283 |  * Parameters for getting a file's content from a repository.
284 |  */
285 | export const GetFileContentParamsSchema = z.object({
286 | 	workspace: z.string(),
287 | 	repo_slug: z.string(),
288 | 	commit: z.string(), // Branch name, tag, or commit hash
289 | 	path: z.string(), // File path within the repository
290 | });
291 | export type GetFileContentParams = z.infer<typeof GetFileContentParamsSchema>;
292 | 
293 | /**
294 |  * Represents a branch target (usually a commit).
295 |  */
296 | export const BranchTargetSchema = z.object({
297 | 	hash: z.string(),
298 | 	type: z.string(), // Usually 'commit'
299 | });
300 | 
301 | /**
302 |  * Represents a branch in a Bitbucket repository.
303 |  */
304 | export const BranchSchema = z.object({
305 | 	name: z.string(),
306 | 	type: z.literal('branch'),
307 | 	target: BranchTargetSchema,
308 | 	merge_strategies: z.array(z.string()).optional(),
309 | 	default_merge_strategy: z.string().optional(),
310 | 	links: z.record(z.string(), z.unknown()).optional(),
311 | });
312 | 
313 | /**
314 |  * Parameters for listing branches in a repository.
315 |  */
316 | export const ListBranchesParamsSchema = z.object({
317 | 	workspace: z.string(),
318 | 	repo_slug: z.string(),
319 | 	page: z.number().optional(),
320 | 	pagelen: z.number().optional(),
321 | 	q: z.string().optional(), // Query for filtering branches
322 | 	sort: z.string().optional(), // Sort field
323 | });
324 | export type ListBranchesParams = z.infer<typeof ListBranchesParamsSchema>;
325 | 
326 | /**
327 |  * API response for listing branches (paginated).
328 |  */
329 | export const BranchesResponseSchema = z.object({
330 | 	pagelen: z.number(),
331 | 	page: z.number().optional(),
332 | 	size: z.number().optional(),
333 | 	next: z.string().optional(),
334 | 	previous: z.string().optional(),
335 | 	values: z.array(BranchSchema),
336 | });
337 | export type BranchesResponse = z.infer<typeof BranchesResponseSchema>;
338 | 
```
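
Because these are runtime Zod schemas rather than plain interfaces, API payloads and request parameters can be validated before use. A small sketch with an abbreviated, hypothetical repository payload (the import path is likewise illustrative):

```typescript
import {
	RepositorySchema,
	CreateBranchParamsSchema,
} from './src/services/vendor.atlassian.repositories.types.js'; // illustrative path

// Abbreviated, hypothetical payload in the shape the Bitbucket API returns.
const payload = {
	type: 'repository',
	uuid: '{a1b2c3d4-0000-0000-0000-000000000000}',
	full_name: 'myteam/project-api',
	name: 'project-api',
	is_private: true,
	scm: 'git',
	owner: { type: 'team', display_name: 'My Team' },
	links: { html: { href: 'https://bitbucket.org/myteam/project-api' } },
};

const parsed = RepositorySchema.safeParse(payload);
console.log(parsed.success ? parsed.data.full_name : parsed.error.issues);

// Request parameters can be validated the same way before a call is made;
// target.hash accepts either a source branch name or a commit hash.
const params = CreateBranchParamsSchema.parse({
	workspace: 'myteam',
	repo_slug: 'project-api',
	name: 'feature/new-idea',
	target: { hash: 'main' },
});
console.log(params.name); // "feature/new-idea"
```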

--------------------------------------------------------------------------------
/src/services/vendor.atlassian.pullrequests.types.ts:
--------------------------------------------------------------------------------

```typescript
  1 | /**
  2 |  * Types for Atlassian Bitbucket Pull Requests API
  3 |  */
  4 | 
  5 | import { Repository } from './vendor.atlassian.repositories.types.js';
  6 | 
  7 | /**
  8 |  * Pull request state
  9 |  */
 10 | export type PullRequestState = 'OPEN' | 'MERGED' | 'DECLINED' | 'SUPERSEDED';
 11 | 
 12 | /**
 13 |  * Pull request author or user reference
 14 |  */
 15 | export interface PullRequestUser {
 16 | 	type: 'user' | 'team';
 17 | 	uuid?: string;
 18 | 	display_name?: string;
 19 | 	nickname?: string;
 20 | 	account_id?: string;
 21 | 	links?: {
 22 | 		self?: { href: string };
 23 | 		html?: { href: string };
 24 | 		avatar?: { href: string };
 25 | 	};
 26 | }
 27 | 
 28 | /**
 29 |  * Content representation for rendering
 30 |  */
 31 | export interface ContentRepresentation {
 32 | 	raw: string;
 33 | 	markup: string;
 34 | 	html: string;
 35 | }
 36 | 
 37 | /**
 38 |  * Rendered content fields
 39 |  */
 40 | export interface RenderedContent {
 41 | 	title?: ContentRepresentation;
 42 | 	description?: ContentRepresentation;
 43 | 	reason?: ContentRepresentation;
 44 | }
 45 | 
 46 | /**
 47 |  * Pull request summary
 48 |  */
 49 | export interface PullRequestSummary {
 50 | 	raw: string;
 51 | 	markup: string;
 52 | 	html: string;
 53 | }
 54 | 
 55 | /**
 56 |  * Pull request links object
 57 |  */
 58 | export interface PullRequestLinks {
 59 | 	self?: { href: string; name?: string };
 60 | 	html?: { href: string; name?: string };
 61 | 	commits?: { href: string; name?: string };
 62 | 	approve?: { href: string; name?: string };
 63 | 	diff?: { href: string; name?: string };
 64 | 	diffstat?: { href: string; name?: string };
 65 | 	comments?: { href: string; name?: string };
 66 | 	activity?: { href: string; name?: string };
 67 | 	merge?: { href: string; name?: string };
 68 | 	decline?: { href: string; name?: string };
 69 | }
 70 | 
 71 | /**
 72 |  * Pull request branch reference
 73 |  */
 74 | export interface PullRequestBranchRef {
 75 | 	repository: Partial<Repository>;
 76 | 	branch: {
 77 | 		name: string;
 78 | 		merge_strategies?: string[];
 79 | 		default_merge_strategy?: string;
 80 | 	};
 81 | 	commit?: {
 82 | 		hash: string;
 83 | 	};
 84 | }
 85 | 
 86 | /**
 87 |  * Pull request object returned from the API
 88 |  */
 89 | export interface PullRequest {
 90 | 	type: 'pullrequest';
 91 | 	id: number;
 92 | 	title: string;
 93 | 	rendered?: RenderedContent;
 94 | 	summary?: PullRequestSummary;
 95 | 	state: PullRequestState;
 96 | 	author: PullRequestUser;
 97 | 	source: PullRequestBranchRef;
 98 | 	destination: PullRequestBranchRef;
 99 | 	merge_commit?: {
100 | 		hash: string;
101 | 	};
102 | 	comment_count?: number;
103 | 	task_count?: number;
104 | 	close_source_branch?: boolean;
105 | 	closed_by?: PullRequestUser;
106 | 	reason?: string;
107 | 	created_on: string;
108 | 	updated_on: string;
109 | 	reviewers?: PullRequestUser[];
110 | 	participants?: PullRequestUser[];
111 | 	links: PullRequestLinks;
112 | }
113 | 
114 | /**
115 |  * Extended pull request object with optional fields
116 |  * @remarks Currently identical to PullRequest, but allows for future extension
117 |  */
118 | export type PullRequestDetailed = PullRequest;
119 | 
120 | /**
121 |  * Parameters for listing pull requests
122 |  */
123 | export interface ListPullRequestsParams {
124 | 	workspace: string;
125 | 	repo_slug: string;
126 | 	state?: PullRequestState | PullRequestState[];
127 | 	q?: string;
128 | 	sort?: string;
129 | 	page?: number;
130 | 	pagelen?: number;
131 | }
132 | 
133 | /**
134 |  * Parameters for getting a pull request by ID
135 |  */
136 | export interface GetPullRequestParams {
137 | 	workspace: string;
138 | 	repo_slug: string;
139 | 	pull_request_id: number;
140 | }
141 | 
142 | /**
143 |  * API response for listing pull requests
144 |  */
145 | export interface PullRequestsResponse {
146 | 	pagelen: number;
147 | 	page: number;
148 | 	size: number;
149 | 	next?: string;
150 | 	previous?: string;
151 | 	values: PullRequest[];
152 | }
153 | 
154 | /**
155 |  * Parameters for getting pull request comments
156 |  */
157 | export interface GetPullRequestCommentsParams {
158 | 	/**
159 | 	 * The workspace slug or UUID
160 | 	 */
161 | 	workspace: string;
162 | 
163 | 	/**
164 | 	 * The repository slug or UUID
165 | 	 */
166 | 	repo_slug: string;
167 | 
168 | 	/**
169 | 	 * The pull request ID
170 | 	 */
171 | 	pull_request_id: number;
172 | 
173 | 	/**
174 | 	 * Page number for pagination
175 | 	 */
176 | 	page?: number;
177 | 
178 | 	/**
179 | 	 * Number of items per page
180 | 	 */
181 | 	pagelen?: number;
182 | 
183 | 	/**
184 | 	 * Property to sort by (e.g., 'created_on', '-updated_on')
185 | 	 */
186 | 	sort?: string;
187 | }
188 | 
189 | /**
190 |  * Parameters for creating a comment on a pull request
191 |  */
192 | export interface CreatePullRequestCommentParams {
193 | 	/**
194 | 	 * The workspace slug or UUID
195 | 	 */
196 | 	workspace: string;
197 | 
198 | 	/**
199 | 	 * The repository slug or UUID
200 | 	 */
201 | 	repo_slug: string;
202 | 
203 | 	/**
204 | 	 * The pull request ID
205 | 	 */
206 | 	pull_request_id: number;
207 | 
208 | 	/**
209 | 	 * The content of the comment
210 | 	 */
211 | 	content: {
212 | 		/**
213 | 		 * Raw comment text (can contain markdown)
214 | 		 */
215 | 		raw: string;
216 | 	};
217 | 
218 | 	/**
219 | 	 * Optional inline comment location
220 | 	 */
221 | 	inline?: {
222 | 		/**
223 | 		 * The file path for the inline comment
224 | 		 */
225 | 		path: string;
226 | 
227 | 		/**
228 | 		 * The line number in the file
229 | 		 */
230 | 		to?: number;
231 | 	};
232 | 
233 | 	/**
234 | 	 * For threaded comments, ID of the parent comment
235 | 	 */
236 | 	parent?: {
237 | 		id: number;
238 | 	};
239 | }
240 | 
241 | /**
242 |  * Inline comment position information
243 |  */
244 | export interface InlineCommentPosition {
245 | 	/**
246 | 	 * The file path the comment is on
247 | 	 */
248 | 	path: string;
249 | 
250 | 	/**
251 | 	 * The original file path if renamed/moved
252 | 	 */
253 | 	from_path?: string;
254 | 
255 | 	/**
256 | 	 * Line number in the "from" file
257 | 	 */
258 | 	from?: number;
259 | 
260 | 	/**
261 | 	 * Line number in the "to" file
262 | 	 */
263 | 	to?: number;
264 | }
265 | 
266 | /**
267 |  * Pull request comment object
268 |  */
269 | export interface PullRequestComment {
270 | 	/**
271 | 	 * Comment ID
272 | 	 */
273 | 	id: number;
274 | 
275 | 	/**
276 | 	 * Comment content
277 | 	 */
278 | 	content: {
279 | 		raw: string;
280 | 		markup?: string;
281 | 		html?: string;
282 | 		type?: string;
283 | 	};
284 | 
285 | 	/**
286 | 	 * User who created the comment
287 | 	 */
288 | 	user: PullRequestUser;
289 | 
290 | 	/**
291 | 	 * When the comment was created
292 | 	 */
293 | 	created_on: string;
294 | 
295 | 	/**
296 | 	 * When the comment was last updated
297 | 	 */
298 | 	updated_on: string;
299 | 
300 | 	/**
301 | 	 * Whether the comment has been deleted
302 | 	 */
303 | 	deleted?: boolean;
304 | 
305 | 	/**
306 | 	 * For inline comments, contains file and line information
307 | 	 */
308 | 	inline?: InlineCommentPosition;
309 | 
310 | 	/**
311 | 	 * For threaded comments, ID of the parent comment
312 | 	 */
313 | 	parent?: {
314 | 		id: number;
315 | 	};
316 | 
317 | 	/**
318 | 	 * Links related to this comment
319 | 	 */
320 | 	links?: {
321 | 		self?: { href: string };
322 | 		html?: { href: string };
323 | 		code?: { href: string };
324 | 	};
325 | 
326 | 	/**
327 | 	 * Type of the object
328 | 	 */
329 | 	type: 'pullrequest_comment';
330 | }
331 | 
332 | /**
333 |  * API response for listing pull request comments
334 |  */
335 | export interface PullRequestCommentsResponse {
336 | 	/**
337 | 	 * Number of items per page
338 | 	 */
339 | 	pagelen: number;
340 | 
341 | 	/**
342 | 	 * Current page number
343 | 	 */
344 | 	page: number;
345 | 
346 | 	/**
347 | 	 * Total number of items
348 | 	 */
349 | 	size: number;
350 | 
351 | 	/**
352 | 	 * URL for the next page, if available
353 | 	 */
354 | 	next?: string;
355 | 
356 | 	/**
357 | 	 * URL for the previous page, if available
358 | 	 */
359 | 	previous?: string;
360 | 
361 | 	/**
362 | 	 * Array of comment objects
363 | 	 */
364 | 	values: PullRequestComment[];
365 | 
366 | 	/**
367 | 	 * Reference to the pull request these comments belong to
368 | 	 */
369 | 	pullrequest?: {
370 | 		id: number;
371 | 		title?: string;
372 | 	};
373 | }
374 | 
375 | /**
376 |  * Parameters for creating a pull request
377 |  */
378 | export interface CreatePullRequestParams {
379 | 	/**
380 | 	 * The workspace slug or UUID
381 | 	 */
382 | 	workspace: string;
383 | 
384 | 	/**
385 | 	 * The repository slug or UUID
386 | 	 */
387 | 	repo_slug: string;
388 | 
389 | 	/**
390 | 	 * Title of the pull request
391 | 	 */
392 | 	title: string;
393 | 
394 | 	/**
395 | 	 * Source branch information
396 | 	 */
397 | 	source: {
398 | 		branch: {
399 | 			name: string;
400 | 		};
401 | 	};
402 | 
403 | 	/**
404 | 	 * Destination branch information
405 | 	 */
406 | 	destination: {
407 | 		branch: {
408 | 			name: string;
409 | 		};
410 | 	};
411 | 
412 | 	/**
413 | 	 * Optional description for the pull request
414 | 	 */
415 | 	description?: string;
416 | 
417 | 	/**
418 | 	 * Whether to close the source branch after merge
419 | 	 */
420 | 	close_source_branch?: boolean;
421 | }
422 | 
423 | /**
424 |  * Diffstat response representing changes in a pull request
425 |  */
426 | export interface DiffstatResponse {
427 | 	pagelen?: number;
428 | 	values: DiffstatFileChange[];
429 | 	page?: number;
430 | 	size?: number;
431 | }
432 | 
433 | /**
434 |  * Individual file change in a diffstat
435 |  */
436 | export interface DiffstatFileChange {
437 | 	status: string;
438 | 	old?: {
439 | 		path: string;
440 | 		type?: string;
441 | 	};
442 | 	new?: {
443 | 		path: string;
444 | 		type?: string;
445 | 	};
446 | 	lines_added?: number;
447 | 	lines_removed?: number;
448 | }
449 | 
450 | /**
451 |  * Parameters for updating a pull request
452 |  */
453 | export interface UpdatePullRequestParams {
454 | 	/**
455 | 	 * The workspace slug or UUID
456 | 	 */
457 | 	workspace: string;
458 | 
459 | 	/**
460 | 	 * The repository slug or UUID
461 | 	 */
462 | 	repo_slug: string;
463 | 
464 | 	/**
465 | 	 * The pull request ID
466 | 	 */
467 | 	pull_request_id: number;
468 | 
469 | 	/**
470 | 	 * Updated title of the pull request
471 | 	 */
472 | 	title?: string;
473 | 
474 | 	/**
475 | 	 * Updated description for the pull request
476 | 	 */
477 | 	description?: string;
478 | }
479 | 
480 | /**
481 |  * Parameters for approving a pull request
482 |  */
483 | export interface ApprovePullRequestParams {
484 | 	/**
485 | 	 * The workspace slug or UUID
486 | 	 */
487 | 	workspace: string;
488 | 
489 | 	/**
490 | 	 * The repository slug or UUID
491 | 	 */
492 | 	repo_slug: string;
493 | 
494 | 	/**
495 | 	 * The pull request ID
496 | 	 */
497 | 	pull_request_id: number;
498 | }
499 | 
500 | /**
501 |  * Parameters for requesting changes on a pull request
502 |  */
503 | export interface RejectPullRequestParams {
504 | 	/**
505 | 	 * The workspace slug or UUID
506 | 	 */
507 | 	workspace: string;
508 | 
509 | 	/**
510 | 	 * The repository slug or UUID
511 | 	 */
512 | 	repo_slug: string;
513 | 
514 | 	/**
515 | 	 * The pull request ID
516 | 	 */
517 | 	pull_request_id: number;
518 | }
519 | 
520 | /**
521 |  * Pull request participant representing approval/rejection status
522 |  */
523 | export interface PullRequestParticipant {
524 | 	/**
525 | 	 * Type of the object
526 | 	 */
527 | 	type: 'participant';
528 | 
529 | 	/**
530 | 	 * User information
531 | 	 */
532 | 	user: PullRequestUser;
533 | 
534 | 	/**
535 | 	 * Participant role
536 | 	 */
537 | 	role: 'PARTICIPANT' | 'REVIEWER';
538 | 
539 | 	/**
540 | 	 * Whether the participant has approved the PR
541 | 	 */
542 | 	approved: boolean;
543 | 
544 | 	/**
545 | 	 * Participant state
546 | 	 */
547 | 	state: 'approved' | 'changes_requested' | null;
548 | 
549 | 	/**
550 | 	 * When the participant last participated
551 | 	 */
552 | 	participated_on: string;
553 | }
554 | 
```
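
As a rough illustration of how these interfaces fit together, here is a hypothetical payload-construction sketch (the branch names, comment text, and IDs are assumptions, not values from this repository):

```typescript
// Hypothetical illustration of the interfaces above (not repository source).
import type {
	CreatePullRequestParams,
	CreatePullRequestCommentParams,
} from './vendor.atlassian.pullrequests.types.js';

// A minimal "create pull request" payload: source and destination are
// nested branch objects, and close_source_branch is optional.
const createParams: CreatePullRequestParams = {
	workspace: 'my-workspace', // assumed sample value
	repo_slug: 'my-repo', // assumed sample value
	title: 'Add feature X',
	source: { branch: { name: 'feature/x' } },
	destination: { branch: { name: 'main' } },
	description: 'Implements feature X.',
	close_source_branch: true,
};

// An inline comment targets a file path and, optionally, a line in the
// new version of the file via `inline.to`.
const commentParams: CreatePullRequestCommentParams = {
	workspace: 'my-workspace',
	repo_slug: 'my-repo',
	pull_request_id: 42, // assumed sample ID
	content: { raw: 'Consider extracting this into a helper.' },
	inline: { path: 'src/index.ts', to: 10 },
};

export { createParams, commentParams };
```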

--------------------------------------------------------------------------------
/src/controllers/atlassian.repositories.list.controller.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import atlassianRepositoriesService from '../services/vendor.atlassian.repositories.service.js';
  2 | import { Logger } from '../utils/logger.util.js';
  3 | import { handleControllerError } from '../utils/error-handler.util.js';
  4 | import { DEFAULT_PAGE_SIZE, applyDefaults } from '../utils/defaults.util.js';
  5 | import {
  6 | 	extractPaginationInfo,
  7 | 	PaginationType,
  8 | } from '../utils/pagination.util.js';
  9 | import { formatPagination } from '../utils/formatter.util.js';
 10 | import { ControllerResponse } from '../types/common.types.js';
 11 | import { ListRepositoriesToolArgsType } from '../tools/atlassian.repositories.types.js';
 12 | import { formatRepositoriesList } from './atlassian.repositories.formatter.js';
 13 | import { ListRepositoriesParams } from '../services/vendor.atlassian.repositories.types.js';
 14 | import { getDefaultWorkspace } from '../utils/workspace.util.js';
 15 | import { formatBitbucketQuery } from '../utils/query.util.js';
 16 | 
 17 | // Create a contextualized logger for this file
 18 | const logger = Logger.forContext(
 19 | 	'controllers/atlassian.repositories.list.controller.ts',
 20 | );
 21 | 
 22 | /**
 23 |  * Lists repositories for a specific workspace with pagination and filtering options
 24 |  * @param options - Options for listing repositories including workspaceSlug
 25 |  * @returns Formatted list of repositories with pagination information
 26 |  */
 27 | export async function handleRepositoriesList(
 28 | 	options: ListRepositoriesToolArgsType,
 29 | ): Promise<ControllerResponse> {
 30 | 	const methodLogger = logger.forMethod('handleRepositoriesList');
 31 | 	methodLogger.debug('Listing Bitbucket repositories...', options);
 32 | 
 33 | 	try {
 34 | 		// Get workspace slug from options or default
 35 | 		let workspaceSlug = options.workspaceSlug;
 36 | 		if (!workspaceSlug) {
 37 | 			methodLogger.debug(
 38 | 				'No workspace slug provided, using default workspace',
 39 | 			);
 40 | 			const defaultWorkspace = await getDefaultWorkspace();
 41 | 
 42 | 			if (!defaultWorkspace) {
 43 | 				throw new Error(
 44 | 					'No workspace slug provided and no default workspace could be determined. Please provide a workspace slug or configure a default workspace.',
 45 | 				);
 46 | 			}
 47 | 
 48 | 			workspaceSlug = defaultWorkspace;
 49 | 			methodLogger.debug(`Using default workspace: ${workspaceSlug}`);
 50 | 		}
 51 | 
 52 | 		// Create defaults object with proper typing
 53 | 		const defaults: Partial<ListRepositoriesToolArgsType> = {
 54 | 			limit: DEFAULT_PAGE_SIZE,
 55 | 			sort: '-updated_on',
 56 | 		};
 57 | 
 58 | 		// Apply defaults
 59 | 		const mergedOptions = applyDefaults<ListRepositoriesToolArgsType>(
 60 | 			{ ...options, workspaceSlug },
 61 | 			defaults,
 62 | 		);
 63 | 
 64 | 		// Format the query for Bitbucket API if provided
 65 | 		// Combine query and projectKey if both are present
 66 | 		const queryParts: string[] = [];
 67 | 		if (mergedOptions.query) {
 68 | 			// Assuming formatBitbucketQuery handles basic name/description search
 69 | 			queryParts.push(formatBitbucketQuery(mergedOptions.query));
 70 | 		}
 71 | 		if (mergedOptions.projectKey) {
 72 | 			queryParts.push(`project.key = "${mergedOptions.projectKey}"`);
 73 | 		}
 74 | 		const combinedQuery = queryParts.join(' AND '); // Combine with AND
 75 | 
 76 | 		if (combinedQuery) {
 77 | 			logger.info(`Searching repositories with query: ${combinedQuery}`);
 78 | 		}
 79 | 
 80 | 		// Map controller options to service parameters
 81 | 		const serviceParams: ListRepositoriesParams = {
 82 | 			// Required workspace
 83 | 			workspace: workspaceSlug,
 84 | 			// Handle limit with default value
 85 | 			pagelen: mergedOptions.limit,
 86 | 			// Map cursor to page for page-based pagination
 87 | 			page: mergedOptions.cursor
 88 | 				? parseInt(mergedOptions.cursor, 10)
 89 | 				: undefined,
 90 | 			// Set default sort to updated_on descending if not specified
 91 | 			sort: mergedOptions.sort,
 92 | 			// Optional filter parameters
 93 | 			...(combinedQuery && { q: combinedQuery }), // <-- Use combined query
 94 | 			...(mergedOptions.role && { role: mergedOptions.role }),
 95 | 		};
 96 | 
 97 | 		methodLogger.debug('Using service parameters:', serviceParams);
 98 | 
 99 | 		const repositoriesData =
100 | 			await atlassianRepositoriesService.list(serviceParams);
101 | 		// Log only the count of repositories returned instead of the entire response
102 | 		methodLogger.debug(
103 | 			`Retrieved ${repositoriesData.values?.length || 0} repositories`,
104 | 		);
105 | 
106 | 		// Post-filter by project key if provided and Bitbucket API returned extra results
107 | 		if (mergedOptions.projectKey && repositoriesData.values) {
108 | 			const originalCount = repositoriesData.values.length;
109 | 
110 | 			// Only keep repositories with exact project key match
111 | 			// NOTE: This filtering is done client-side since Bitbucket API doesn't directly support
112 | 			// filtering by project key in its query parameters. This means all repositories are first
113 | 			// fetched and then filtered locally, which may result in fewer results than expected
114 | 			// if the limit parameter is also used.
115 | 			repositoriesData.values = repositoriesData.values.filter(
116 | 				(repo) => repo.project?.key === mergedOptions.projectKey,
117 | 			);
118 | 
119 | 			const filteredCount = repositoriesData.values.length;
120 | 
121 | 			// Log filtering results to help with debugging
122 | 			if (filteredCount !== originalCount) {
123 | 				methodLogger.debug(
124 | 					`Post-filtered repositories by projectKey=${mergedOptions.projectKey}: ${filteredCount} of ${originalCount} matched.`,
125 | 				);
126 | 
127 | 				// Adjust the size to reflect the actual filtered count (matters for pagination)
128 | 				if (repositoriesData.size) {
129 | 					// Adjust total size proportionally based on how many were filtered out
130 | 					const originalSize = repositoriesData.size;
131 | 					const filterRatio = filteredCount / originalCount;
132 | 					const estimatedTotalSize = Math.ceil(
133 | 						originalSize * filterRatio,
134 | 					);
135 | 					repositoriesData.size = Math.max(
136 | 						filteredCount,
137 | 						estimatedTotalSize,
138 | 					);
139 | 					methodLogger.debug(
140 | 						`Adjusted size from ${originalSize} to ${repositoriesData.size} based on filtering ratio`,
141 | 					);
142 | 				}
143 | 
144 | 				// If this is the first page and we have fewer results than requested, try to fetch more
145 | 				if (
146 | 					filteredCount <
147 | 						(serviceParams.pagelen || DEFAULT_PAGE_SIZE) &&
148 | 					repositoriesData.next
149 | 				) {
150 | 					methodLogger.debug(
151 | 						`After filtering, only ${filteredCount} items remain. Fetching more pages to supplement...`,
152 | 					);
153 | 
154 | 					// Keep fetching next pages until we have enough items or no more pages
155 | 					let nextPageUrl: string | undefined = repositoriesData.next;
156 | 					let totalItemsNeeded =
157 | 						(serviceParams.pagelen || DEFAULT_PAGE_SIZE) -
158 | 						filteredCount;
159 | 
160 | 					while (nextPageUrl && totalItemsNeeded > 0) {
161 | 						try {
162 | 							// Extract the next page number
163 | 							let nextPage: number | undefined;
164 | 							try {
165 | 								const nextUrl = new URL(nextPageUrl);
166 | 								const pageParam =
167 | 									nextUrl.searchParams.get('page');
168 | 								if (pageParam) {
169 | 									nextPage = parseInt(pageParam, 10);
170 | 								}
171 | 							} catch (e) {
172 | 								methodLogger.warn(
173 | 									`Could not extract next page from URL: ${nextPageUrl}`,
174 | 									e,
175 | 								);
176 | 								break;
177 | 							}
178 | 
179 | 							if (!nextPage) break;
180 | 
181 | 							// Fetch the next page
182 | 							const nextPageParams = {
183 | 								...serviceParams,
184 | 								page: nextPage,
185 | 							};
186 | 
187 | 							const nextPageData =
188 | 								await atlassianRepositoriesService.list(
189 | 									nextPageParams,
190 | 								);
191 | 
192 | 							// Filter the next page results
193 | 							if (nextPageData.values) {
194 | 								const nextPageFiltered =
195 | 									nextPageData.values.filter(
196 | 										(repo) =>
197 | 											repo.project?.key ===
198 | 											mergedOptions.projectKey,
199 | 									);
200 | 
201 | 								// Add items to reach the requested limit
202 | 								const itemsToAdd = nextPageFiltered.slice(
203 | 									0,
204 | 									totalItemsNeeded,
205 | 								);
206 | 
207 | 								if (itemsToAdd.length > 0) {
208 | 									repositoriesData.values = [
209 | 										...repositoriesData.values,
210 | 										...itemsToAdd,
211 | 									];
212 | 
213 | 									totalItemsNeeded -= itemsToAdd.length;
214 | 
215 | 									methodLogger.debug(
216 | 										`Added ${itemsToAdd.length} items from page ${nextPage} to reach requested limit. ${totalItemsNeeded} more needed.`,
217 | 									);
218 | 								}
219 | 
220 | 								// Update next page URL for the loop
221 | 								nextPageUrl = nextPageData.next || undefined;
222 | 
223 | 								// If we've fetched all filtered items from this page but there are more pages
224 | 								// and we still need more items, continue to the next page
225 | 								if (
226 | 									nextPageFiltered.length <=
227 | 										itemsToAdd.length &&
228 | 									totalItemsNeeded > 0
229 | 								) {
230 | 									continue;
231 | 								}
232 | 
233 | 								// If we got all the items we need, update pagination accordingly
234 | 								if (totalItemsNeeded <= 0) {
235 | 									// We have enough items now, but there are more available
236 | 									if (nextPageData.next) {
237 | 										repositoriesData.next =
238 | 											nextPageData.next;
239 | 									}
240 | 									break;
241 | 								}
242 | 							} else {
243 | 								// No values in the response, stop fetching
244 | 								nextPageUrl = undefined;
245 | 							}
246 | 						} catch (fetchError) {
247 | 							// Log the error but continue with what we have
248 | 							methodLogger.warn(
249 | 								`Error fetching page to supplement filtered results:`,
250 | 								fetchError,
251 | 							);
252 | 							break;
253 | 						}
254 | 					}
255 | 				}
256 | 			}
257 | 		}
258 | 
259 | 		// Extract pagination information using the utility
260 | 		const pagination = extractPaginationInfo(
261 | 			repositoriesData,
262 | 			PaginationType.PAGE,
263 | 		);
264 | 
265 | 		// Format the repositories data for display using the formatter
266 | 		const formattedRepositories = formatRepositoriesList(repositoriesData);
267 | 
268 | 		// Create the final content by combining the formatted repositories with pagination information
269 | 		let finalContent = formattedRepositories;
270 | 
271 | 		// Add pagination information if available
272 | 		if (
273 | 			pagination &&
274 | 			(pagination.hasMore || pagination.count !== undefined)
275 | 		) {
276 | 			const paginationString = formatPagination(pagination);
277 | 			finalContent += '\n\n' + paginationString;
278 | 		}
279 | 
280 | 		return {
281 | 			content: finalContent,
282 | 		};
283 | 	} catch (error) {
284 | 		// Use the standardized error handler
285 | 		throw handleControllerError(error, {
286 | 			entityType: 'Repositories',
287 | 			operation: 'listing',
288 | 			source: 'controllers/atlassian.repositories.list.controller.ts@handleRepositoriesList',
289 | 			additionalInfo: { options },
290 | 		});
291 | 	}
292 | }
293 | 
```
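
A hypothetical invocation of this controller (illustration only; the workspace, project key, and query values are assumptions, and the full argument shape is defined in `atlassian.repositories.types.ts`):

```typescript
// Hypothetical caller of handleRepositoriesList (illustration only).
import { handleRepositoriesList } from './atlassian.repositories.list.controller.js';

async function listProjectRepos(): Promise<void> {
	// workspaceSlug may be omitted, in which case the controller falls back
	// to getDefaultWorkspace(). The projectKey filter is applied client-side
	// on top of the Bitbucket query, as described in the comments above.
	const result = await handleRepositoriesList({
		workspaceSlug: 'my-workspace', // assumed sample value
		projectKey: 'PROJ', // assumed sample value
		query: 'api',
		limit: 10,
	});

	// result.content is pre-formatted Markdown plus a pagination footer.
	console.log(result.content);
}

export { listProjectRepos };
```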

--------------------------------------------------------------------------------
/src/controllers/atlassian.diff.controller.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import { Logger } from '../utils/logger.util.js';
  2 | import { handleControllerError } from '../utils/error-handler.util.js';
  3 | import { DEFAULT_PAGE_SIZE, applyDefaults } from '../utils/defaults.util.js';
  4 | import { ControllerResponse } from '../types/common.types.js';
  5 | import {
  6 | 	extractPaginationInfo,
  7 | 	PaginationType,
  8 | } from '../utils/pagination.util.js';
  9 | import { formatPagination } from '../utils/formatter.util.js';
 10 | import * as diffService from '../services/vendor.atlassian.repositories.diff.service.js';
 11 | import { formatDiffstat, formatFullDiff } from './atlassian.diff.formatter.js';
 12 | import { getDefaultWorkspace } from '../utils/workspace.util.js';
 13 | 
 14 | const controllerLogger = Logger.forContext(
 15 | 	'controllers/atlassian.diff.controller.ts',
 16 | );
 17 | controllerLogger.debug('Bitbucket diff controller initialized');
 18 | 
 19 | /**
 20 |  * Base interface that extends Record<string, unknown> for error handling compatibility
 21 |  */
 22 | interface BaseDiffOptions extends Record<string, unknown> {
 23 | 	workspaceSlug?: string;
 24 | 	repoSlug: string;
 25 | 	includeFullDiff?: boolean;
 26 | 	limit?: number;
 27 | 	cursor?: number;
 28 | 	topic?: boolean;
 29 | }
 30 | 
 31 | /**
 32 |  * Interface for branch diff options
 33 |  */
 34 | interface BranchDiffOptions extends BaseDiffOptions {
 35 | 	sourceBranch: string;
 36 | 	destinationBranch?: string;
 37 | }
 38 | 
 39 | /**
 40 |  * Interface for commit diff options
 41 |  */
 42 | interface CommitDiffOptions extends BaseDiffOptions {
 43 | 	sinceCommit: string;
 44 | 	untilCommit: string;
 45 | }
 46 | 
 47 | /**
 48 |  * Compare two branches and return the differences
 49 |  *
 50 |  * @param options - Options for branch comparison
 51 |  * @returns Promise with formatted diff content and pagination
 52 |  */
 53 | async function branchDiff(
 54 | 	options: BranchDiffOptions,
 55 | ): Promise<ControllerResponse> {
 56 | 	const methodLogger = controllerLogger.forMethod('branchDiff');
 57 | 
 58 | 	try {
 59 | 		methodLogger.debug('Comparing branches', options);
 60 | 
 61 | 		// Apply defaults
 62 | 		const defaults = {
 63 | 			limit: DEFAULT_PAGE_SIZE,
 64 | 			includeFullDiff: true,
 65 | 			destinationBranch: 'main', // Default to main if not provided
 66 | 			topic: false, // Default to topic=false which shows all changes between branches
 67 | 		};
 68 | 
 69 | 		// Explicitly cast the result of applyDefaults to preserve the original types
 70 | 		const params = applyDefaults(options, defaults) as typeof options &
 71 | 			typeof defaults;
 72 | 
 73 | 		// Handle optional workspaceSlug
 74 | 		if (!params.workspaceSlug) {
 75 | 			methodLogger.debug(
 76 | 				'No workspace provided, fetching default workspace',
 77 | 			);
 78 | 			const defaultWorkspace = await getDefaultWorkspace();
 79 | 			if (!defaultWorkspace) {
 80 | 				throw new Error(
 81 | 					'Could not determine a default workspace. Please provide a workspaceSlug.',
 82 | 				);
 83 | 			}
 84 | 			params.workspaceSlug = defaultWorkspace;
 85 | 			methodLogger.debug(
 86 | 				`Using default workspace: ${params.workspaceSlug}`,
 87 | 			);
 88 | 		}
 89 | 
 90 | 		// Construct the spec (e.g., "main..feature")
 91 | 		// NOTE: Bitbucket API expects the destination branch first, then the source branch
 92 | 		// This is the opposite of what some Git tools use (e.g., git diff source..destination)
 93 | 		// The diff shows changes that would need to be applied to destination to match source
 94 | 		//
 95 | 		// IMPORTANT: This behavior is counterintuitive in two ways:
 96 | 		// 1. The parameter names "sourceBranch" and "destinationBranch" suggest a certain direction,
 97 | 		//    but the output is displayed as "destinationBranch → sourceBranch"
 98 | 		// 2. When comparing branches with newer content in the feature branch (source), full diffs
 99 | 		//    might only show when using parameters in one order, and only summaries in the other order
100 | 		//
101 | 		// We document this behavior clearly in the CLI and Tool interfaces
102 | 		const spec = `${params.destinationBranch}..${params.sourceBranch}`;
103 | 
104 | 		methodLogger.debug(`Using diff spec: ${spec}`);
105 | 
106 | 		try {
107 | 			// Fetch diffstat for the branches
108 | 			const diffstat = await diffService.getDiffstat({
109 | 				workspace: params.workspaceSlug,
110 | 				repo_slug: params.repoSlug,
111 | 				spec,
112 | 				pagelen: params.limit,
113 | 				cursor: params.cursor,
114 | 				topic: params.topic,
115 | 			});
116 | 
117 | 			// Extract pagination info
118 | 			const pagination = extractPaginationInfo(
119 | 				diffstat,
120 | 				PaginationType.PAGE,
121 | 			);
122 | 
123 | 			// Fetch full diff if requested
124 | 			let rawDiff: string | null = null;
125 | 			if (params.includeFullDiff) {
126 | 				rawDiff = await diffService.getRawDiff({
127 | 					workspace: params.workspaceSlug,
128 | 					repo_slug: params.repoSlug,
129 | 					spec,
130 | 				});
131 | 			}
132 | 
133 | 			// Format the results
134 | 			let content =
135 | 				params.includeFullDiff && rawDiff
136 | 					? formatFullDiff(
137 | 							diffstat,
138 | 							rawDiff,
139 | 							params.destinationBranch,
140 | 							params.sourceBranch,
141 | 						)
142 | 					: formatDiffstat(
143 | 							diffstat,
144 | 							params.destinationBranch,
145 | 							params.sourceBranch,
146 | 						);
147 | 
148 | 			// Add pagination information if available
149 | 			if (
150 | 				pagination &&
151 | 				(pagination.hasMore || pagination.count !== undefined)
152 | 			) {
153 | 				const paginationString = formatPagination(pagination);
154 | 				content += '\n\n' + paginationString;
155 | 			}
156 | 
157 | 			return {
158 | 				content,
159 | 			};
160 | 		} catch (error) {
161 | 			// Enhance error handling for common diff-specific errors
162 | 			if (
163 | 				error instanceof Error &&
164 | 				error.message.includes(
165 | 					'source or destination could not be found',
166 | 				)
167 | 			) {
168 | 				// Create a more user-friendly error message
169 | 				throw new Error(
170 | 					`Unable to generate diff between '${params.sourceBranch}' and '${params.destinationBranch}'. ` +
171 | 						`One or both of these branches may not exist in the repository. ` +
172 | 						`Please verify both branch names and ensure you have access to view them.`,
173 | 				);
174 | 			}
175 | 			// Re-throw other errors to be handled by the outer catch block
176 | 			throw error;
177 | 		}
178 | 	} catch (error) {
179 | 		throw handleControllerError(error, {
180 | 			entityType: 'Branch Diff',
181 | 			operation: 'comparing branches',
182 | 			source: 'controllers/atlassian.diff.controller.ts@branchDiff',
183 | 			additionalInfo: options,
184 | 		});
185 | 	}
186 | }
187 | 
188 | /**
189 |  * Compare two commits and return the differences
190 |  *
191 |  * @param options - Options for commit comparison
192 |  * @returns Promise with formatted diff content and pagination
193 |  */
194 | async function commitDiff(
195 | 	options: CommitDiffOptions,
196 | ): Promise<ControllerResponse> {
197 | 	const methodLogger = controllerLogger.forMethod('commitDiff');
198 | 
199 | 	try {
200 | 		methodLogger.debug('Comparing commits', options);
201 | 
202 | 		// Apply defaults
203 | 		const defaults = {
204 | 			limit: DEFAULT_PAGE_SIZE,
205 | 			includeFullDiff: true,
206 | 			topic: false, // Default to topic=false which shows all changes between commits
207 | 		};
208 | 
209 | 		// Explicitly cast the result of applyDefaults to preserve the original types
210 | 		const params = applyDefaults(options, defaults) as typeof options &
211 | 			typeof defaults;
212 | 
213 | 		// Handle optional workspaceSlug
214 | 		if (!params.workspaceSlug) {
215 | 			methodLogger.debug(
216 | 				'No workspace provided, fetching default workspace',
217 | 			);
218 | 			const defaultWorkspace = await getDefaultWorkspace();
219 | 			if (!defaultWorkspace) {
220 | 				throw new Error(
221 | 					'Could not determine a default workspace. Please provide a workspaceSlug.',
222 | 				);
223 | 			}
224 | 			params.workspaceSlug = defaultWorkspace;
225 | 			methodLogger.debug(
226 | 				`Using default workspace: ${params.workspaceSlug}`,
227 | 			);
228 | 		}
229 | 
230 | 		// Construct the spec (e.g., "a1b2c3d..e4f5g6h")
231 | 		// NOTE: Bitbucket API expects the base/since commit first, then the target/until commit
232 | 		// The diff shows changes that would need to be applied to base to match target
233 | 		//
234 | 		// IMPORTANT: The parameter names are counterintuitive to how they must be used:
235 | 		// 1. For proper results with full code changes, sinceCommit should be the NEWER commit,
236 | 		//    and untilCommit should be the OLDER commit (reverse chronological order)
237 | 		// 2. If used with chronological order (older → newer), the result may show "No changes detected"
238 | 		//
239 | 		// We document this behavior clearly in the CLI and Tool interfaces
240 | 		const spec = `${params.sinceCommit}..${params.untilCommit}`;
241 | 
242 | 		methodLogger.debug(`Using diff spec: ${spec}`);
243 | 
244 | 		try {
245 | 			// Fetch diffstat for the commits
246 | 			const diffstat = await diffService.getDiffstat({
247 | 				workspace: params.workspaceSlug,
248 | 				repo_slug: params.repoSlug,
249 | 				spec,
250 | 				pagelen: params.limit,
251 | 				cursor: params.cursor,
252 | 			});
253 | 
254 | 			// Extract pagination info
255 | 			const pagination = extractPaginationInfo(
256 | 				diffstat,
257 | 				PaginationType.PAGE,
258 | 			);
259 | 
260 | 			// Fetch full diff if requested
261 | 			let rawDiff: string | null = null;
262 | 			if (params.includeFullDiff) {
263 | 				rawDiff = await diffService.getRawDiff({
264 | 					workspace: params.workspaceSlug,
265 | 					repo_slug: params.repoSlug,
266 | 					spec,
267 | 				});
268 | 			}
269 | 
270 | 			// Format the results
271 | 			let content =
272 | 				params.includeFullDiff && rawDiff
273 | 					? formatFullDiff(
274 | 							diffstat,
275 | 							rawDiff,
276 | 							params.sinceCommit,
277 | 							params.untilCommit,
278 | 						)
279 | 					: formatDiffstat(
280 | 							diffstat,
281 | 							params.sinceCommit,
282 | 							params.untilCommit,
283 | 						);
284 | 
285 | 			// Add pagination information if available
286 | 			if (
287 | 				pagination &&
288 | 				(pagination.hasMore || pagination.count !== undefined)
289 | 			) {
290 | 				const paginationString = formatPagination(pagination);
291 | 				content += '\n\n' + paginationString;
292 | 			}
293 | 
294 | 			return {
295 | 				content,
296 | 			};
297 | 		} catch (error) {
298 | 			// Enhance error handling for common diff-specific errors
299 | 			if (
300 | 				error instanceof Error &&
301 | 				error.message.includes(
302 | 					'source or destination could not be found',
303 | 				)
304 | 			) {
305 | 				// Create a more user-friendly error message
306 | 				throw new Error(
307 | 					`Unable to generate diff between commits '${params.sinceCommit}' and '${params.untilCommit}'. ` +
308 | 						`One or both of these commits may not exist in the repository or may be in the wrong order. ` +
309 | 						`Please verify both commit hashes and ensure you have access to view them.`,
310 | 				);
311 | 			}
312 | 			// Re-throw other errors to be handled by the outer catch block
313 | 			throw error;
314 | 		}
315 | 	} catch (error) {
316 | 		throw handleControllerError(error, {
317 | 			entityType: 'Commit Diff',
318 | 			operation: 'comparing commits',
319 | 			source: 'controllers/atlassian.diff.controller.ts@commitDiff',
320 | 			additionalInfo: options,
321 | 		});
322 | 	}
323 | }
324 | 
325 | export default { branchDiff, commitDiff };
326 | 
```
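
To make the spec-ordering caveats above concrete, here is a hypothetical call sketch (the branch names and commit hashes are assumptions, not values from this repository):

```typescript
// Hypothetical caller of the diff controller (illustration only).
import diffController from './atlassian.diff.controller.js';

async function showDiffs(): Promise<void> {
	// Branch comparison: the spec is built as "destination..source", so the
	// output reads as destinationBranch → sourceBranch.
	const branchResult = await diffController.branchDiff({
		workspaceSlug: 'my-workspace', // assumed sample value
		repoSlug: 'my-repo', // assumed sample value
		sourceBranch: 'feature/x',
		destinationBranch: 'main',
		includeFullDiff: true,
	});
	console.log(branchResult.content);

	// Commit comparison: per the note above, sinceCommit should be the NEWER
	// commit and untilCommit the OLDER one to get full code changes.
	const commitResult = await diffController.commitDiff({
		workspaceSlug: 'my-workspace',
		repoSlug: 'my-repo',
		sinceCommit: 'e4f5a6b', // assumed newer commit hash
		untilCommit: 'a1b2c3d', // assumed older commit hash
	});
	console.log(commitResult.content);
}

export { showDiffs };
```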

--------------------------------------------------------------------------------
/src/controllers/atlassian.search.controller.test.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import atlassianSearchController from './atlassian.search.controller.js';
  2 | import { getAtlassianCredentials } from '../utils/transport.util.js';
  3 | import { config } from '../utils/config.util.js';
  4 | import { handleRepositoriesList } from './atlassian.repositories.list.controller.js';
  5 | import atlassianWorkspacesController from './atlassian.workspaces.controller.js';
  6 | 
  7 | describe('Atlassian Search Controller', () => {
  8 | 	// Load configuration and check for credentials before all tests
  9 | 	beforeAll(() => {
 10 | 		config.load(); // Ensure config is loaded
 11 | 		const credentials = getAtlassianCredentials();
 12 | 		if (!credentials) {
 13 | 			console.warn(
 14 | 				'Skipping Atlassian Search Controller tests: No credentials available',
 15 | 			);
 16 | 		}
 17 | 	});
 18 | 
 19 | 	// Helper function to skip tests when credentials are missing
 20 | 	const skipIfNoCredentials = () => !getAtlassianCredentials();
 21 | 
 22 | 	// Helper to get valid repository information for testing
 23 | 	async function getRepositoryInfo(): Promise<{
 24 | 		workspaceSlug: string;
 25 | 		repoSlug: string;
 26 | 	} | null> {
 27 | 		if (skipIfNoCredentials()) return null;
 28 | 
 29 | 		try {
 30 | 			// First get a workspace
 31 | 			const workspacesResult = await atlassianWorkspacesController.list({
 32 | 				limit: 1,
 33 | 			});
 34 | 
 35 | 			if (workspacesResult.content === 'No Bitbucket workspaces found.') {
 36 | 				return null;
 37 | 			}
 38 | 
 39 | 			// Extract workspace slug
 40 | 			const workspaceMatch = workspacesResult.content.match(
 41 | 				/\*\*Slug\*\*:\s+([^\s\n]+)/,
 42 | 			);
 43 | 			const workspaceSlug = workspaceMatch ? workspaceMatch[1] : null;
 44 | 
 45 | 			if (!workspaceSlug) return null;
 46 | 
 47 | 			// Get a repository from this workspace
 48 | 			const reposResult = await handleRepositoriesList({
 49 | 				workspaceSlug,
 50 | 				limit: 1,
 51 | 			});
 52 | 
 53 | 			if (
 54 | 				reposResult.content ===
 55 | 				'No repositories found in this workspace.'
 56 | 			) {
 57 | 				return null;
 58 | 			}
 59 | 
 60 | 			// Extract repo slug
 61 | 			const repoSlugMatch = reposResult.content.match(
 62 | 				/\*\*Slug\*\*:\s+([^\s\n]+)/,
 63 | 			);
 64 | 			const repoSlug = repoSlugMatch ? repoSlugMatch[1] : null;
 65 | 
 66 | 			if (!repoSlug) return null;
 67 | 
 68 | 			return { workspaceSlug, repoSlug };
 69 | 		} catch (error) {
 70 | 			console.warn(
 71 | 				'Could not fetch repository info for search tests:',
 72 | 				error,
 73 | 			);
 74 | 			return null;
 75 | 		}
 76 | 	}
 77 | 
 78 | 	describe('search', () => {
 79 | 		it('should search across all scopes when type=code', async () => {
 80 | 			if (skipIfNoCredentials()) return;
 81 | 
 82 | 			const repoInfo = await getRepositoryInfo();
 83 | 			if (!repoInfo) {
 84 | 				return; // Skip silently - no repository info available for testing
 85 | 			}
 86 | 
 87 | 			const result = await atlassianSearchController.search({
 88 | 				workspace: repoInfo.workspaceSlug,
 89 | 				repo: repoInfo.repoSlug,
 90 | 				type: 'code',
 91 | 				query: 'initial commit',
 92 | 			});
 93 | 
 94 | 			// Verify the response structure
 95 | 			expect(result).toHaveProperty('content');
 96 | 			expect(typeof result.content).toBe('string');
 97 | 
 98 | 			// Should include code search results header
 99 | 			expect(result.content).toContain('Code Search Results');
100 | 		}, 30000);
101 | 
102 | 		it('should search only repositories when type=repositories', async () => {
103 | 			if (skipIfNoCredentials()) return;
104 | 
105 | 			const repoInfo = await getRepositoryInfo();
106 | 			if (!repoInfo) {
107 | 				return; // Skip silently - no repository info available for testing
108 | 			}
109 | 
110 | 			const result = await atlassianSearchController.search({
111 | 				workspace: repoInfo.workspaceSlug,
112 | 				type: 'repositories',
113 | 				query: repoInfo.repoSlug,
114 | 			});
115 | 
116 | 			// Verify the response structure
117 | 			expect(result).toHaveProperty('content');
118 | 			expect(typeof result.content).toBe('string');
119 | 
120 | 			// Should include only repository section
121 | 			expect(result.content).toContain('Repository Search Results');
122 | 			expect(result.content).not.toContain('Pull Request Search Results');
123 | 		}, 30000);
124 | 
125 | 		it('should search only pull requests when type=pullrequests', async () => {
126 | 			if (skipIfNoCredentials()) return;
127 | 
128 | 			const repoInfo = await getRepositoryInfo();
129 | 			if (!repoInfo) {
130 | 				return; // Skip silently - no repository info available for testing
131 | 			}
132 | 
133 | 			const result = await atlassianSearchController.search({
134 | 				workspace: repoInfo.workspaceSlug,
135 | 				repo: repoInfo.repoSlug,
136 | 				type: 'pullrequests',
137 | 				query: 'test',
138 | 			});
139 | 
140 | 			// Verify the response structure
141 | 			expect(result).toHaveProperty('content');
142 | 			expect(typeof result.content).toBe('string');
143 | 
144 | 			// Should include only PR section
145 | 			expect(result.content).not.toContain('Repository Search Results');
146 | 			expect(result.content).toContain('Pull Request Search Results');
147 | 		}, 30000);
148 | 
149 | 		it('should filter results with query parameter', async () => {
150 | 			if (skipIfNoCredentials()) return;
151 | 
152 | 			const repoInfo = await getRepositoryInfo();
153 | 			if (!repoInfo) {
154 | 				return; // Skip silently - no repository info available for testing
155 | 			}
156 | 
157 | 			// Use a query that might match something (repository name itself often works)
158 | 			const result = await atlassianSearchController.search({
159 | 				workspace: repoInfo.workspaceSlug,
160 | 				query: repoInfo.repoSlug,
161 | 				type: 'repositories',
162 | 			});
163 | 
164 | 			// Verify the response structure
165 | 			expect(result).toHaveProperty('content');
166 | 			expect(typeof result.content).toBe('string');
167 | 
168 | 			// If results are found, content should include the query term
169 | 			const resultsFound = !result.content.includes('No results found');
170 | 			if (resultsFound) {
171 | 				expect(result.content.toLowerCase()).toContain(
172 | 					repoInfo.repoSlug.toLowerCase(),
173 | 				);
174 | 			}
175 | 		}, 30000);
176 | 
177 | 		it('should handle pagination options (limit/cursor)', async () => {
178 | 			if (skipIfNoCredentials()) return;
179 | 
180 | 			const repoInfo = await getRepositoryInfo();
181 | 			if (!repoInfo) {
182 | 				return; // Skip silently - no repository info available for testing
183 | 			}
184 | 
185 | 			// Fetch first page with limit 1
186 | 			const result1 = await atlassianSearchController.search({
187 | 				workspace: repoInfo.workspaceSlug,
188 | 				type: 'repositories',
189 | 				limit: 1,
190 | 				query: repoInfo.repoSlug,
191 | 			});
192 | 
193 | 			// Extract pagination information from content
194 | 			const hasMoreResults = result1.content.includes(
195 | 				'More results are available.',
196 | 			);
197 | 			const cursorMatch = result1.content.match(
198 | 				/\*Next cursor: `([^`]+)`\*/,
199 | 			);
200 | 			const nextCursor = cursorMatch ? cursorMatch[1] : null;
201 | 
202 | 			// If pagination is possible, test cursor-based pagination
203 | 			if (hasMoreResults && nextCursor) {
204 | 				const result2 = await atlassianSearchController.search({
205 | 					workspace: repoInfo.workspaceSlug,
206 | 					type: 'repositories',
207 | 					limit: 1,
208 | 					cursor: nextCursor,
209 | 					query: repoInfo.repoSlug,
210 | 				});
211 | 
212 | 				// Both responses should have proper structure
213 | 				expect(result2).toHaveProperty('content');
214 | 
215 | 				// The content should be different
216 | 				expect(result1.content).not.toEqual(result2.content);
217 | 			} else {
218 | 				console.warn(
219 | 					'Skipping cursor part of pagination test: Either no second page available or no items found.',
220 | 				);
221 | 			}
222 | 		}, 30000);
223 | 
224 | 		it('should give an error when workspace is missing or empty', async () => {
225 | 			if (skipIfNoCredentials()) return;
226 | 
227 | 			// Empty workspace should return an error message
228 | 			const result = await atlassianSearchController.search({
229 | 				type: 'repositories',
230 | 				workspace: '', // Empty workspace should trigger error
231 | 				query: 'test',
232 | 			});
233 | 
234 | 			// Verify the response structure
235 | 			expect(result).toHaveProperty('content');
236 | 			expect(typeof result.content).toBe('string');
237 | 
238 | 			// Content should include error message
239 | 			expect(result.content).toContain('Error:');
240 | 			expect(result.content).toContain('workspace');
241 | 		}, 30000);
242 | 
243 | 		it('should work without a repo when type=repositories', async () => {
244 | 			if (skipIfNoCredentials()) return;
245 | 
246 | 			const repoInfo = await getRepositoryInfo();
247 | 			if (!repoInfo) {
248 | 				return; // Skip silently - no repository info available for testing
249 | 			}
250 | 
251 | 			// Should not throw an error when repo is missing but type is repositories
252 | 			const result = await atlassianSearchController.search({
253 | 				workspace: repoInfo.workspaceSlug,
254 | 				type: 'repositories',
255 | 				query: repoInfo.repoSlug,
256 | 			});
257 | 
258 | 			// Verify the response structure
259 | 			expect(result).toHaveProperty('content');
260 | 			expect(typeof result.content).toBe('string');
261 | 		}, 30000);
262 | 
263 | 		it('should require repo when type=pullrequests', async () => {
264 | 			if (skipIfNoCredentials()) return;
265 | 
266 | 			const repoInfo = await getRepositoryInfo();
267 | 			if (!repoInfo) {
268 | 				return; // Skip silently - no repository info available for testing
269 | 			}
270 | 
271 | 			// When searching pull requests without a repo, should return an error message
272 | 			const result = await atlassianSearchController.search({
273 | 				workspace: repoInfo.workspaceSlug,
274 | 				type: 'pullrequests',
275 | 				query: 'test',
276 | 				// Intentionally omit repo
277 | 			});
278 | 
279 | 			// Content should include an error message
280 | 			expect(result.content).toContain('Error:');
281 | 			expect(result.content).toContain('required');
282 | 		}, 30000);
283 | 
284 | 		it('should handle no results scenario', async () => {
285 | 			if (skipIfNoCredentials()) return;
286 | 
287 | 			const repoInfo = await getRepositoryInfo();
288 | 			if (!repoInfo) {
289 | 				return; // Skip silently - no repository info available for testing
290 | 			}
291 | 
292 | 			// Use a query string that will definitely not match anything
293 | 			const noMatchQuery = 'xzqwxtrv12345xyz987nonexistentstring';
294 | 
295 | 			const result = await atlassianSearchController.search({
296 | 				workspace: repoInfo.workspaceSlug,
297 | 				query: noMatchQuery,
298 | 				type: 'code',
299 | 				repo: repoInfo.repoSlug,
300 | 			});
301 | 
302 | 			// Verify the response structure
303 | 			expect(result).toHaveProperty('content');
304 | 			expect(typeof result.content).toBe('string');
305 | 
306 | 			// Content should show no results
307 | 			expect(result.content).toContain('No code matches found');
308 | 		}, 30000);
309 | 
310 | 		it('should handle errors for invalid workspace', async () => {
311 | 			if (skipIfNoCredentials()) return;
312 | 
313 | 			const invalidWorkspace =
314 | 				'this-workspace-definitely-does-not-exist-12345';
315 | 
316 | 			// Expect the controller call to reject when underlying controllers fail
317 | 			await expect(
318 | 				atlassianSearchController.search({
319 | 					workspace: invalidWorkspace,
320 | 					type: 'repositories',
321 | 					query: 'test-query', // Add a query to avoid the query validation error
322 | 				}),
323 | 			).rejects.toThrow();
324 | 		}, 30000);
325 | 	});
326 | });
327 | 
```
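
One alternative to the repeated early-return credential guard used in these tests is Jest's conditional `it.skip`; a hypothetical sketch (not used in this repository, and it assumes configuration has already been loaded by the time the module is evaluated):

```typescript
// Hypothetical variant of the credential guard (illustration only).
import { getAtlassianCredentials } from '../utils/transport.util.js';

// Choose `it` or `it.skip` once, so skipped tests are reported as skipped
// instead of silently passing via an early return inside the test body.
const itWithCreds = getAtlassianCredentials() ? it : it.skip;

describe('example guarded suite', () => {
	itWithCreds('runs only when credentials are configured', async () => {
		expect(true).toBe(true);
	});
});
```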

--------------------------------------------------------------------------------
/src/controllers/atlassian.repositories.content.controller.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import atlassianRepositoriesService from '../services/vendor.atlassian.repositories.service.js';
  2 | import { Logger } from '../utils/logger.util.js';
  3 | import { handleControllerError } from '../utils/error-handler.util.js';
  4 | import { ControllerResponse } from '../types/common.types.js';
  5 | import { CloneRepositoryToolArgsType } from '../tools/atlassian.repositories.types.js';
  6 | import { getDefaultWorkspace } from '../utils/workspace.util.js';
  7 | import { executeShellCommand } from '../utils/shell.util.js';
  8 | import * as path from 'path';
  9 | import * as fs from 'fs/promises';
 10 | import { constants } from 'fs';
 11 | 
 12 | // Logger instance for this module
 13 | const logger = Logger.forContext(
 14 | 	'controllers/atlassian.repositories.content.controller.ts',
 15 | );
 16 | 
 17 | /**
 18 |  * Clones a Bitbucket repository to the local filesystem
 19 |  * @param options Options including repository identifiers and target path
 20 |  * @returns Information about the cloned repository
 21 |  */
 22 | export async function handleCloneRepository(
 23 | 	options: CloneRepositoryToolArgsType,
 24 | ): Promise<ControllerResponse> {
 25 | 	const methodLogger = logger.forMethod('handleCloneRepository');
 26 | 	methodLogger.debug('Cloning repository with options:', options);
 27 | 
 28 | 	try {
 29 | 		// Handle optional workspaceSlug
 30 | 		let { workspaceSlug } = options;
 31 | 		if (!workspaceSlug) {
 32 | 			methodLogger.debug(
 33 | 				'No workspace provided, fetching default workspace',
 34 | 			);
 35 | 			const defaultWorkspace = await getDefaultWorkspace();
 36 | 			if (!defaultWorkspace) {
 37 | 				throw new Error(
 38 | 					'No default workspace found. Please provide a workspace slug.',
 39 | 				);
 40 | 			}
 41 | 			workspaceSlug = defaultWorkspace;
 42 | 			methodLogger.debug(`Using default workspace: ${defaultWorkspace}`);
 43 | 		}
 44 | 
 45 | 		// Required parameters check
 46 | 		const { repoSlug, targetPath } = options;
 47 | 		if (!repoSlug) {
 48 | 			throw new Error('Repository slug is required');
 49 | 		}
 50 | 		if (!targetPath) {
 51 | 			throw new Error('Target path is required');
 52 | 		}
 53 | 
 54 | 		// Normalize and resolve the target path
 55 | 		// If it's a relative path, convert it to absolute based on current working directory
 56 | 		const processedTargetPath = path.isAbsolute(targetPath)
 57 | 			? targetPath
 58 | 			: path.resolve(process.cwd(), targetPath);
 59 | 
 60 | 		methodLogger.debug(
 61 | 			`Normalized target path: ${processedTargetPath} (original: ${targetPath})`,
 62 | 		);
 63 | 
 64 | 		// Validate directory access and permissions before proceeding
 65 | 		try {
 66 | 			// Check if target directory exists
 67 | 			try {
 68 | 				await fs.access(processedTargetPath, constants.F_OK);
 69 | 				methodLogger.debug(
 70 | 					`Target directory exists: ${processedTargetPath}`,
 71 | 				);
 72 | 
 73 | 				// If it exists, check if we have write permission
 74 | 				try {
 75 | 					await fs.access(processedTargetPath, constants.W_OK);
 76 | 					methodLogger.debug(
 77 | 						`Have write permission to: ${processedTargetPath}`,
 78 | 					);
 79 | 				} catch {
 80 | 					throw new Error(
 81 | 						`Permission denied: You don't have write access to the target directory: ${processedTargetPath}`,
 82 | 					);
 83 | 				}
 84 | 			} catch {
 85 | 				// Directory doesn't exist, try to create it
 86 | 				methodLogger.debug(
 87 | 					`Target directory doesn't exist, creating: ${processedTargetPath}`,
 88 | 				);
 89 | 				try {
 90 | 					await fs.mkdir(processedTargetPath, { recursive: true });
 91 | 					methodLogger.debug(
 92 | 						`Successfully created directory: ${processedTargetPath}`,
 93 | 					);
 94 | 				} catch (mkdirError) {
 95 | 					throw new Error(
 96 | 						`Failed to create target directory ${processedTargetPath}: ${(mkdirError as Error).message}. Please ensure you have write permissions to the parent directory.`,
 97 | 					);
 98 | 				}
 99 | 			}
100 | 		} catch (accessError) {
101 | 			methodLogger.error('Path access error:', accessError);
102 | 			throw accessError;
103 | 		}
104 | 
105 | 		// Get repository details to determine clone URL
106 | 		methodLogger.debug(
107 | 			`Getting repository details for ${workspaceSlug}/${repoSlug}`,
108 | 		);
109 | 		const repoDetails = await atlassianRepositoriesService.get({
110 | 			workspace: workspaceSlug,
111 | 			repo_slug: repoSlug,
112 | 		});
113 | 
114 | 		// Find SSH clone URL (preferred) or fall back to HTTPS
115 | 		let cloneUrl: string | undefined;
116 | 		let cloneProtocol: string = 'SSH'; // Default to SSH
117 | 
118 | 		if (repoDetails.links?.clone) {
119 | 			// First try to find SSH clone URL
120 | 			const sshClone = repoDetails.links.clone.find(
121 | 				(link) => link.name === 'ssh',
122 | 			);
123 | 
124 | 			if (sshClone) {
125 | 				cloneUrl = sshClone.href;
126 | 			} else {
127 | 				// Fall back to HTTPS if SSH is not available
128 | 				const httpsClone = repoDetails.links.clone.find(
129 | 					(link) => link.name === 'https',
130 | 				);
131 | 
132 | 				if (httpsClone) {
133 | 					cloneUrl = httpsClone.href;
134 | 					cloneProtocol = 'HTTPS';
135 | 					methodLogger.warn(
136 | 						'SSH clone URL not found, falling back to HTTPS',
137 | 					);
138 | 				}
139 | 			}
140 | 		}
141 | 
142 | 		if (!cloneUrl) {
143 | 			throw new Error(
144 | 				'Could not find a valid clone URL for the repository',
145 | 			);
146 | 		}
147 | 
148 | 		// Determine full target directory path
149 | 		// Clone into a subdirectory named after the repo slug
150 | 		const targetDir = path.join(processedTargetPath, repoSlug);
151 | 		methodLogger.debug(`Will clone to: ${targetDir}`);
152 | 
153 | 		// Check if directory already exists
154 | 		try {
155 | 			const stats = await fs.stat(targetDir);
156 | 			if (stats.isDirectory()) {
157 | 				methodLogger.warn(
158 | 					`Target directory already exists: ${targetDir}`,
159 | 				);
160 | 				return {
161 | 					content: `⚠️ Target directory \`${targetDir}\` already exists. Please choose a different target path or remove the existing directory.`,
162 | 				};
163 | 			}
164 | 		} catch {
165 | 			// Error means directory doesn't exist, which is what we want
166 | 			methodLogger.debug(
167 | 				`Target directory doesn't exist, proceeding with clone`,
168 | 			);
169 | 		}
170 | 
171 | 		// Execute git clone command
172 | 		methodLogger.debug(`Cloning from URL (${cloneProtocol}): ${cloneUrl}`);
173 | 		const command = `git clone ${cloneUrl} "${targetDir}"`;
174 | 
175 | 		try {
176 | 			const result = await executeShellCommand(
177 | 				command,
178 | 				'cloning repository',
179 | 			);
180 | 
181 | 			// Return success message with more detailed information
182 | 			return {
183 | 				content:
184 | 					`✅ Successfully cloned repository \`${workspaceSlug}/${repoSlug}\` to \`${targetDir}\` using ${cloneProtocol}.\n\n` +
185 | 					`**Details:**\n` +
186 | 					`- **Repository**: ${workspaceSlug}/${repoSlug}\n` +
187 | 					`- **Clone Protocol**: ${cloneProtocol}\n` +
188 | 					`- **Target Location**: ${targetDir}\n\n` +
189 | 					`**Output:**\n\`\`\`\n${result}\n\`\`\`\n\n` +
190 | 					`**Note**: If this is your first time cloning with SSH, ensure your SSH keys are set up correctly.`,
191 | 			};
192 | 		} catch (cloneError) {
193 | 			// Enhanced error message with troubleshooting steps
194 | 			const errorMsg = `Failed to clone repository: ${(cloneError as Error).message}`;
195 | 			let troubleshooting = '';
196 | 
197 | 			if (cloneProtocol === 'SSH') {
198 | 				troubleshooting =
199 | 					`\n\n**Troubleshooting SSH Clone Issues:**\n` +
200 | 					`1. Ensure you have SSH keys set up with Bitbucket\n` +
201 | 					`2. Check if your SSH agent is running: \`eval "$(ssh-agent -s)"; ssh-add\`\n` +
202 | 					`3. Verify connectivity: \`ssh -T git@bitbucket.org\`\n` +
203 | 					`4. Try using HTTPS instead (modify your tool call with a different repository URL)`;
204 | 			} else {
205 | 				troubleshooting =
206 | 					`\n\n**Troubleshooting HTTPS Clone Issues:**\n` +
207 | 					`1. Check your Bitbucket credentials\n` +
208 | 					`2. Ensure the target directory is writable\n` +
209 | 					`3. Try running the command manually to see detailed errors`;
210 | 			}
211 | 
212 | 			throw new Error(errorMsg + troubleshooting);
213 | 		}
214 | 	} catch (error) {
215 | 		throw handleControllerError(error, {
216 | 			entityType: 'Repository',
217 | 			operation: 'clone',
218 | 			source: 'controllers/atlassian.repositories.content.controller.ts@handleCloneRepository',
219 | 			additionalInfo: options,
220 | 		});
221 | 	}
222 | }
223 | 
224 | /**
225 |  * Retrieves file content from a repository
226 |  * @param options Options including repository identifiers and file path
227 |  * @returns The file content as text
228 |  */
229 | export async function handleGetFileContent(options: {
230 | 	workspaceSlug?: string;
231 | 	repoSlug: string;
232 | 	path: string;
233 | 	ref?: string;
234 | }): Promise<ControllerResponse> {
235 | 	const methodLogger = logger.forMethod('handleGetFileContent');
236 | 	methodLogger.debug('Getting file content with options:', options);
237 | 
238 | 	try {
239 | 		// Required parameters check
240 | 		const { repoSlug, path: filePath } = options;
241 | 		let { workspaceSlug } = options;
242 | 
243 | 		if (!workspaceSlug) {
244 | 			methodLogger.debug(
245 | 				'No workspace provided, fetching default workspace',
246 | 			);
247 | 			const defaultWorkspace = await getDefaultWorkspace();
248 | 			if (!defaultWorkspace) {
249 | 				throw new Error(
250 | 					'No default workspace found. Please provide a workspace slug.',
251 | 				);
252 | 			}
253 | 			workspaceSlug = defaultWorkspace;
254 | 			methodLogger.debug(`Using default workspace: ${defaultWorkspace}`);
255 | 		}
256 | 
257 | 		if (!repoSlug) {
258 | 			throw new Error('Repository slug is required');
259 | 		}
260 | 		if (!filePath) {
261 | 			throw new Error('File path is required');
262 | 		}
263 | 
264 | 		// Get repository details to determine the correct default branch
265 | 		let commitRef = options.ref;
266 | 		if (!commitRef) {
267 | 			methodLogger.debug(
268 | 				`No ref provided, fetching repository details to get default branch`,
269 | 			);
270 | 			try {
271 | 				const repoDetails = await atlassianRepositoriesService.get({
272 | 					workspace: workspaceSlug,
273 | 					repo_slug: repoSlug,
274 | 				});
275 | 
276 | 				// Use the repository's actual default branch
277 | 				if (repoDetails.mainbranch?.name) {
278 | 					commitRef = repoDetails.mainbranch.name;
279 | 					methodLogger.debug(
280 | 						`Using repository default branch: ${commitRef}`,
281 | 					);
282 | 				} else {
283 | 					// Fallback to common default branches
284 | 					commitRef = 'main';
285 | 					methodLogger.debug(
286 | 						`No default branch found, falling back to: ${commitRef}`,
287 | 					);
288 | 				}
289 | 			} catch (repoError) {
290 | 				methodLogger.warn(
291 | 					'Failed to get repository details, using fallback branch',
292 | 					repoError,
293 | 				);
294 | 				commitRef = 'main';
295 | 			}
296 | 		}
297 | 
298 | 		// Get file content from service
299 | 		methodLogger.debug(
300 | 			`Fetching file content for ${workspaceSlug}/${repoSlug}/${filePath}`,
301 | 			{ ref: commitRef },
302 | 		);
303 | 		const fileContent = await atlassianRepositoriesService.getFileContent({
304 | 			workspace: workspaceSlug,
305 | 			repo_slug: repoSlug,
306 | 			path: filePath,
307 | 			commit: commitRef,
308 | 		});
309 | 
310 | 		// Return the file content as is
311 | 		methodLogger.debug(
312 | 			`Retrieved file content (${fileContent.length} characters)`,
313 | 		);
314 | 		return {
315 | 			content: fileContent,
316 | 		};
317 | 	} catch (error) {
318 | 		throw handleControllerError(error, {
319 | 			entityType: 'File Content',
320 | 			operation: 'get',
321 | 			source: 'controllers/atlassian.repositories.content.controller.ts@handleGetFileContent',
322 | 			additionalInfo: options,
323 | 		});
324 | 	}
325 | }
326 | 
```
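
To illustrate how a caller might consume this controller, here is a minimal usage sketch (not part of the repository source). The relative import path and the repository/file values are assumptions for illustration; the `content` field on the returned `ControllerResponse` is taken from the return statements above.

```typescript
// Hypothetical usage sketch -- assumes the ESM ".js" import convention used elsewhere in this project.
import { handleGetFileContent } from './atlassian.repositories.content.controller.js';

async function printReadme(): Promise<void> {
	// workspaceSlug is optional: when omitted, the controller resolves the default workspace.
	const result = await handleGetFileContent({
		repoSlug: 'project-api', // assumed repository slug
		path: 'README.md', // assumed file path
		// ref omitted -> controller falls back to the repository's default branch (or 'main')
	});
	console.log(result.content);
}

printReadme().catch((err) => console.error(err));
```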

--------------------------------------------------------------------------------
/src/utils/logger.util.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import * as fs from 'fs';
  2 | import * as path from 'path';
  3 | import * as os from 'os';
  4 | import * as crypto from 'crypto';
  5 | 
  6 | /**
  7 |  * Format a timestamp for logging
  8 |  * @returns Formatted timestamp [HH:MM:SS]
  9 |  */
 10 | function getTimestamp(): string {
 11 | 	const now = new Date();
 12 | 	return `[${now.toISOString().split('T')[1].split('.')[0]}]`;
 13 | }
 14 | 
 15 | /**
 16 |  * Safely convert object to string with size limits
 17 |  * @param obj Object to stringify
 18 |  * @param maxLength Maximum length of the resulting string
 19 |  * @returns Safely stringified object
 20 |  */
 21 | function safeStringify(obj: unknown, maxLength = 1000): string {
 22 | 	try {
 23 | 		const str = JSON.stringify(obj);
 24 | 		if (str.length <= maxLength) {
 25 | 			return str;
 26 | 		}
 27 | 		return `${str.substring(0, maxLength)}... (truncated, ${str.length} chars total)`;
 28 | 	} catch {
 29 | 		return '[Object cannot be stringified]';
 30 | 	}
 31 | }
 32 | 
 33 | /**
 34 |  * Extract essential values from larger objects for logging
 35 |  * @param obj The object to extract values from
 36 |  * @param keys Keys to extract (if available)
 37 |  * @returns Object containing only the specified keys
 38 |  */
 39 | function extractEssentialValues(
 40 | 	obj: Record<string, unknown>,
 41 | 	keys: string[],
 42 | ): Record<string, unknown> {
 43 | 	const result: Record<string, unknown> = {};
 44 | 	keys.forEach((key) => {
 45 | 		if (Object.prototype.hasOwnProperty.call(obj, key)) {
 46 | 			result[key] = obj[key];
 47 | 		}
 48 | 	});
 49 | 	return result;
 50 | }
 51 | 
 52 | /**
 53 |  * Format source path consistently using the standardized format:
 54 |  * [module/file.ts@function] or [module/file.ts]
 55 |  *
 56 |  * @param filePath File path (with or without src/ prefix)
 57 |  * @param functionName Optional function name
 58 |  * @returns Formatted source path according to standard pattern
 59 |  */
 60 | function formatSourcePath(filePath: string, functionName?: string): string {
 61 | 	// Always strip 'src/' prefix for consistency
 62 | 	const normalizedPath = filePath.replace(/^src\//, '');
 63 | 
 64 | 	return functionName
 65 | 		? `[${normalizedPath}@${functionName}]`
 66 | 		: `[${normalizedPath}]`;
 67 | }
 68 | 
 69 | /**
 70 |  * Check if debug logging is enabled for a specific module
 71 |  *
 72 |  * This function parses the DEBUG environment variable to determine if a specific
 73 |  * module should have debug logging enabled. The DEBUG variable can be:
 74 |  * - 'true' or '1': Enable all debug logging
 75 |  * - Comma-separated list of modules: Enable debug only for those modules
 76 |  * - Module patterns with wildcards: e.g., 'controllers/*' enables all controllers
 77 |  *
 78 |  * Examples:
 79 |  * - DEBUG=true
 80 |  * - DEBUG=controllers/*,services/aws.sso.auth.service.ts
 81 |  * - DEBUG=transport,utils/formatter*
 82 |  *
 83 |  * @param modulePath The module path to check against DEBUG patterns
 84 |  * @returns true if debug is enabled for this module, false otherwise
 85 |  */
 86 | function isDebugEnabledForModule(modulePath: string): boolean {
 87 | 	const debugEnv = process.env.DEBUG;
 88 | 
 89 | 	if (!debugEnv) {
 90 | 		return false;
 91 | 	}
 92 | 
 93 | 	// If debug is set to true or 1, enable all debug logging
 94 | 	if (debugEnv === 'true' || debugEnv === '1') {
 95 | 		return true;
 96 | 	}
 97 | 
 98 | 	// Parse comma-separated debug patterns
 99 | 	const debugPatterns = debugEnv.split(',').map((p) => p.trim());
100 | 
101 | 	// Check if the module matches any pattern
102 | 	return debugPatterns.some((pattern) => {
103 | 		// Convert glob-like patterns to regex
104 | 		// '*' becomes '.*' (matches any characters, including across path segments)
105 | 		// '?' becomes '.' (matches a single character)
106 | 		const regexPattern = pattern
107 | 			.replace(/\*/g, '.*') // Convert * to regex .*
108 | 			.replace(/\?/g, '.'); // Convert ? to regex .
109 | 
110 | 		const regex = new RegExp(`^${regexPattern}$`);
111 | 		return (
112 | 			regex.test(modulePath) ||
113 | 			// Check for pattern matches without the 'src/' prefix
114 | 			regex.test(modulePath.replace(/^src\//, ''))
115 | 		);
116 | 	});
117 | }
118 | 
119 | // Generate a unique session ID for this process
120 | const SESSION_ID = crypto.randomUUID();
121 | 
122 | // Get the package name from environment variables or default to 'mcp-server'
123 | const getPkgName = (): string => {
124 | 	try {
125 | 		// Try to get it from package.json first if available
126 | 		const packageJsonPath = path.resolve(process.cwd(), 'package.json');
127 | 		if (fs.existsSync(packageJsonPath)) {
128 | 			const packageJson = JSON.parse(
129 | 				fs.readFileSync(packageJsonPath, 'utf8'),
130 | 			);
131 | 			if (packageJson.name) {
132 | 				// Extract the last part of the name if it's scoped
133 | 				const match = packageJson.name.match(/(@[\w-]+\/)?(.+)/);
134 | 				return match ? match[2] : packageJson.name;
135 | 			}
136 | 		}
137 | 	} catch {
138 | 		// Silently fail and use default
139 | 	}
140 | 
141 | 	// Fallback to environment variable or default
142 | 	return process.env.PACKAGE_NAME || 'mcp-server';
143 | };
144 | 
145 | // MCP logs directory setup
146 | const HOME_DIR = os.homedir();
147 | const MCP_DATA_DIR = path.join(HOME_DIR, '.mcp', 'data');
148 | const CLI_NAME = getPkgName();
149 | 
150 | // Ensure the MCP data directory exists
151 | if (!fs.existsSync(MCP_DATA_DIR)) {
152 | 	fs.mkdirSync(MCP_DATA_DIR, { recursive: true });
153 | }
154 | 
155 | // Create the log file path with session ID
156 | const LOG_FILENAME = `${CLI_NAME}.${SESSION_ID}.log`;
157 | const LOG_FILEPATH = path.join(MCP_DATA_DIR, LOG_FILENAME);
158 | 
159 | // Write initial log header
160 | fs.writeFileSync(
161 | 	LOG_FILEPATH,
162 | 	`# ${CLI_NAME} Log Session\n` +
163 | 		`Session ID: ${SESSION_ID}\n` +
164 | 		`Started: ${new Date().toISOString()}\n` +
165 | 		`Process ID: ${process.pid}\n` +
166 | 		`Working Directory: ${process.cwd()}\n` +
167 | 		`Command: ${process.argv.join(' ')}\n\n` +
168 | 		`## Log Entries\n\n`,
169 | 	'utf8',
170 | );
171 | 
172 | // Logger singleton to track initialization
173 | let isLoggerInitialized = false;
174 | 
175 | /**
176 |  * Logger class for consistent logging across the application.
177 |  *
178 |  * RECOMMENDED USAGE:
179 |  *
180 |  * 1. Create a file-level logger using the static forContext method:
181 |  *    ```
182 |  *    const logger = Logger.forContext('controllers/myController.ts');
183 |  *    ```
184 |  *
185 |  * 2. For method-specific logging, create a method logger:
186 |  *    ```
187 |  *    const methodLogger = Logger.forContext('controllers/myController.ts', 'myMethod');
188 |  *    ```
189 |  *
190 |  * 3. Avoid using raw string prefixes in log messages. Instead, use contextualized loggers.
191 |  *
192 |  * 4. For debugging objects, use the debugResponse method to log only essential properties.
193 |  *
194 |  * 5. Set DEBUG environment variable to control which modules show debug logs:
195 |  *    - DEBUG=true (enable all debug logs)
196 |  *    - DEBUG=controllers/*,services/* (enable for specific module groups)
197 |  *    - DEBUG=transport,utils/formatter* (enable specific modules, supports wildcards)
198 |  */
199 | class Logger {
200 | 	private context?: string;
201 | 	private modulePath: string;
202 | 	private static sessionId = SESSION_ID;
203 | 	private static logFilePath = LOG_FILEPATH;
204 | 
205 | 	constructor(context?: string, modulePath: string = '') {
206 | 		this.context = context;
207 | 		this.modulePath = modulePath;
208 | 
209 | 		// Log initialization message only once
210 | 		if (!isLoggerInitialized) {
211 | 			this.info(
212 | 				`Logger initialized with session ID: ${Logger.sessionId}`,
213 | 			);
214 | 			this.info(`Logs will be saved to: ${Logger.logFilePath}`);
215 | 			isLoggerInitialized = true;
216 | 		}
217 | 	}
218 | 
219 | 	/**
220 | 	 * Create a contextualized logger for a specific file or component.
221 | 	 * This is the preferred method for creating loggers.
222 | 	 *
223 | 	 * @param filePath The file path (e.g., 'controllers/aws.sso.auth.controller.ts')
224 | 	 * @param functionName Optional function name for more specific context
225 | 	 * @returns A new Logger instance with the specified context
226 | 	 *
227 | 	 * @example
228 | 	 * // File-level logger
229 | 	 * const logger = Logger.forContext('controllers/myController.ts');
230 | 	 *
231 | 	 * // Method-level logger
232 | 	 * const methodLogger = Logger.forContext('controllers/myController.ts', 'myMethod');
233 | 	 */
234 | 	static forContext(filePath: string, functionName?: string): Logger {
235 | 		return new Logger(formatSourcePath(filePath, functionName), filePath);
236 | 	}
237 | 
238 | 	/**
239 | 	 * Create a method level logger from a context logger
240 | 	 * @param method Method name
241 | 	 * @returns A new logger with the method context
242 | 	 */
243 | 	forMethod(method: string): Logger {
244 | 		return Logger.forContext(this.modulePath, method);
245 | 	}
246 | 
247 | 	private _formatMessage(message: string): string {
248 | 		return this.context ? `${this.context} ${message}` : message;
249 | 	}
250 | 
251 | 	private _formatArgs(args: unknown[]): unknown[] {
252 | 		// If the first argument is an object and not an Error, safely stringify it
253 | 		if (
254 | 			args.length > 0 &&
255 | 			typeof args[0] === 'object' &&
256 | 			args[0] !== null &&
257 | 			!(args[0] instanceof Error)
258 | 		) {
259 | 			args[0] = safeStringify(args[0]);
260 | 		}
261 | 		return args;
262 | 	}
263 | 
264 | 	_log(
265 | 		level: 'info' | 'warn' | 'error' | 'debug',
266 | 		message: string,
267 | 		...args: unknown[]
268 | 	) {
269 | 		// Skip debug messages if not enabled for this module
270 | 		if (level === 'debug' && !isDebugEnabledForModule(this.modulePath)) {
271 | 			return;
272 | 		}
273 | 
274 | 		const timestamp = getTimestamp();
275 | 		const prefix = `${timestamp} [${level.toUpperCase()}]`;
276 | 		let logMessage = `${prefix} ${this._formatMessage(message)}`;
277 | 
278 | 		const formattedArgs = this._formatArgs(args);
279 | 		if (formattedArgs.length > 0) {
280 | 			// Handle errors specifically
281 | 			if (formattedArgs[0] instanceof Error) {
282 | 				const error = formattedArgs[0] as Error;
283 | 				logMessage += ` Error: ${error.message}`;
284 | 				if (error.stack) {
285 | 					logMessage += `\n${error.stack}`;
286 | 				}
287 | 				// If there are more args, add them after the error
288 | 				if (formattedArgs.length > 1) {
289 | 					logMessage += ` ${formattedArgs
290 | 						.slice(1)
291 | 						.map((arg) =>
292 | 							typeof arg === 'string' ? arg : safeStringify(arg),
293 | 						)
294 | 						.join(' ')}`;
295 | 				}
296 | 			} else {
297 | 				logMessage += ` ${formattedArgs
298 | 					.map((arg) =>
299 | 						typeof arg === 'string' ? arg : safeStringify(arg),
300 | 					)
301 | 					.join(' ')}`;
302 | 			}
303 | 		}
304 | 
305 | 		// Write to log file
306 | 		try {
307 | 			fs.appendFileSync(Logger.logFilePath, `${logMessage}\n`, 'utf8');
308 | 		} catch (err) {
309 | 			// If we can't write to the log file, log the error to console
310 | 			console.error(`Failed to write to log file: ${err}`);
311 | 		}
312 | 
313 | 		if (process.env.NODE_ENV === 'test') {
314 | 			console[level](logMessage);
315 | 		} else {
316 | 			console.error(logMessage);
317 | 		}
318 | 	}
319 | 
320 | 	info(message: string, ...args: unknown[]) {
321 | 		this._log('info', message, ...args);
322 | 	}
323 | 
324 | 	warn(message: string, ...args: unknown[]) {
325 | 		this._log('warn', message, ...args);
326 | 	}
327 | 
328 | 	error(message: string, ...args: unknown[]) {
329 | 		this._log('error', message, ...args);
330 | 	}
331 | 
332 | 	debug(message: string, ...args: unknown[]) {
333 | 		this._log('debug', message, ...args);
334 | 	}
335 | 
336 | 	/**
337 | 	 * Log essential information about an API response
338 | 	 * @param message Log message
339 | 	 * @param response API response object
340 | 	 * @param essentialKeys Keys to extract from the response
341 | 	 */
342 | 	debugResponse(
343 | 		message: string,
344 | 		response: Record<string, unknown>,
345 | 		essentialKeys: string[],
346 | 	) {
347 | 		const essentialInfo = extractEssentialValues(response, essentialKeys);
348 | 		this.debug(message, essentialInfo);
349 | 	}
350 | 
351 | 	/**
352 | 	 * Get the current session ID
353 | 	 * @returns The UUID for the current logging session
354 | 	 */
355 | 	static getSessionId(): string {
356 | 		return Logger.sessionId;
357 | 	}
358 | 
359 | 	/**
360 | 	 * Get the current log file path
361 | 	 * @returns The path to the current log file
362 | 	 */
363 | 	static getLogFilePath(): string {
364 | 		return Logger.logFilePath;
365 | 	}
366 | }
367 | 
368 | // Only export the Logger class to enforce contextual logging via Logger.forContext
369 | export { Logger };
370 | 
```
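
As a quick illustration of the contextual-logging pattern described in the class docblock, the sketch below (the controller file name is invented for illustration) shows a file-level logger, a method-level logger, and the `debugResponse` helper. Debug output is gated by the `DEBUG` environment variable as documented above.

```typescript
// Hypothetical usage sketch based on the Logger API defined above.
import { Logger } from '../utils/logger.util.js';

// File-level logger: context is rendered as [controllers/myController.ts]
const logger = Logger.forContext('controllers/myController.ts');

export async function listThings(): Promise<void> {
	// Method-level logger: context becomes [controllers/myController.ts@listThings]
	const methodLogger = logger.forMethod('listThings');
	methodLogger.info('Listing things');

	const response: Record<string, unknown> = {
		size: 3,
		page: 1,
		values: ['a', 'b', 'c'],
		internal: 'noise',
	};
	// Logs only the listed keys instead of the whole payload
	methodLogger.debugResponse('Received response', response, ['size', 'page']);
}

// Enable debug output per module, e.g.:
//   DEBUG=true            -> all modules
//   DEBUG=controllers/*   -> only controller modules
```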

--------------------------------------------------------------------------------
/src/utils/transport.util.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import { Logger } from './logger.util.js';
  2 | import { config } from './config.util.js';
  3 | import { NETWORK_TIMEOUTS, DATA_LIMITS } from './constants.util.js';
  4 | import {
  5 | 	createAuthInvalidError,
  6 | 	createApiError,
  7 | 	createUnexpectedError,
  8 | 	McpError,
  9 | } from './error.util.js';
 10 | 
 11 | /**
 12 |  * Interface for Atlassian API credentials
 13 |  */
 14 | export interface AtlassianCredentials {
 15 | 	// Standard Atlassian credentials
 16 | 	siteName?: string;
 17 | 	userEmail?: string;
 18 | 	apiToken?: string;
 19 | 	// Bitbucket-specific credentials (alternative approach)
 20 | 	bitbucketUsername?: string;
 21 | 	bitbucketAppPassword?: string;
 22 | 	// Indicates which auth method to use
 23 | 	useBitbucketAuth?: boolean;
 24 | }
 25 | 
 26 | /**
 27 |  * Interface for HTTP request options
 28 |  */
 29 | export interface RequestOptions {
 30 | 	method?: 'GET' | 'POST' | 'PUT' | 'DELETE';
 31 | 	headers?: Record<string, string>;
 32 | 	body?: unknown;
 33 | 	timeout?: number;
 34 | }
 35 | 
 36 | // Create a contextualized logger for this file
 37 | const transportLogger = Logger.forContext('utils/transport.util.ts');
 38 | 
 39 | // Log transport utility initialization
 40 | transportLogger.debug('Transport utility initialized');
 41 | 
 42 | /**
 43 |  * Get Atlassian credentials from environment variables
 44 |  * @returns AtlassianCredentials object or null if credentials are missing
 45 |  */
 46 | export function getAtlassianCredentials(): AtlassianCredentials | null {
 47 | 	const methodLogger = Logger.forContext(
 48 | 		'utils/transport.util.ts',
 49 | 		'getAtlassianCredentials',
 50 | 	);
 51 | 
 52 | 	// First try standard Atlassian credentials (preferred for consistency)
 53 | 	const siteName = config.get('ATLASSIAN_SITE_NAME');
 54 | 	const userEmail = config.get('ATLASSIAN_USER_EMAIL');
 55 | 	const apiToken = config.get('ATLASSIAN_API_TOKEN');
 56 | 
 57 | 	// If standard credentials are available, use them
 58 | 	if (userEmail && apiToken) {
 59 | 		methodLogger.debug('Using standard Atlassian credentials');
 60 | 		return {
 61 | 			siteName,
 62 | 			userEmail,
 63 | 			apiToken,
 64 | 			useBitbucketAuth: false,
 65 | 		};
 66 | 	}
 67 | 
 68 | 	// If standard credentials are not available, try Bitbucket-specific credentials
 69 | 	const bitbucketUsername = config.get('ATLASSIAN_BITBUCKET_USERNAME');
 70 | 	const bitbucketAppPassword = config.get('ATLASSIAN_BITBUCKET_APP_PASSWORD');
 71 | 
 72 | 	if (bitbucketUsername && bitbucketAppPassword) {
 73 | 		methodLogger.debug('Using Bitbucket-specific credentials');
 74 | 		return {
 75 | 			bitbucketUsername,
 76 | 			bitbucketAppPassword,
 77 | 			useBitbucketAuth: true,
 78 | 		};
 79 | 	}
 80 | 
 81 | 	// If neither set of credentials is available, return null
 82 | 	methodLogger.warn(
 83 | 		'Missing Atlassian credentials. Please set either ATLASSIAN_SITE_NAME, ATLASSIAN_USER_EMAIL, and ATLASSIAN_API_TOKEN environment variables, or ATLASSIAN_BITBUCKET_USERNAME and ATLASSIAN_BITBUCKET_APP_PASSWORD for Bitbucket-specific auth.',
 84 | 	);
 85 | 	return null;
 86 | }
 87 | 
 88 | /**
 89 |  * Fetch data from Atlassian API
 90 |  * @param credentials Atlassian API credentials
 91 |  * @param path API endpoint path (without base URL)
 92 |  * @param options Request options
 93 |  * @returns Response data
 94 |  */
 95 | export async function fetchAtlassian<T>(
 96 | 	credentials: AtlassianCredentials,
 97 | 	path: string,
 98 | 	options: RequestOptions = {},
 99 | ): Promise<T> {
100 | 	const methodLogger = Logger.forContext(
101 | 		'utils/transport.util.ts',
102 | 		'fetchAtlassian',
103 | 	);
104 | 
105 | 	const baseUrl = 'https://api.bitbucket.org';
106 | 
107 | 	// Set up auth headers based on credential type
108 | 	let authHeader: string;
109 | 
110 | 	if (credentials.useBitbucketAuth) {
111 | 		// Bitbucket API uses a different auth format
112 | 		if (
113 | 			!credentials.bitbucketUsername ||
114 | 			!credentials.bitbucketAppPassword
115 | 		) {
116 | 			throw createAuthInvalidError(
117 | 				'Missing Bitbucket username or app password',
118 | 			);
119 | 		}
120 | 		authHeader = `Basic ${Buffer.from(
121 | 			`${credentials.bitbucketUsername}:${credentials.bitbucketAppPassword}`,
122 | 		).toString('base64')}`;
123 | 	} else {
124 | 		// Standard Atlassian API (Jira, Confluence)
125 | 		if (!credentials.userEmail || !credentials.apiToken) {
126 | 			throw createAuthInvalidError('Missing Atlassian credentials');
127 | 		}
128 | 		authHeader = `Basic ${Buffer.from(
129 | 			`${credentials.userEmail}:${credentials.apiToken}`,
130 | 		).toString('base64')}`;
131 | 	}
132 | 
133 | 	// Ensure path starts with a slash
134 | 	const normalizedPath = path.startsWith('/') ? path : `/${path}`;
135 | 
136 | 	// Construct the full URL
137 | 	const url = `${baseUrl}${normalizedPath}`;
138 | 
139 | 	// Set up authentication and headers
140 | 	const headers = {
141 | 		Authorization: authHeader,
142 | 		'Content-Type': 'application/json',
143 | 		Accept: 'application/json',
144 | 		...options.headers,
145 | 	};
146 | 
147 | 	// Prepare request options
148 | 	const requestOptions: RequestInit = {
149 | 		method: options.method || 'GET',
150 | 		headers,
151 | 		body: options.body ? JSON.stringify(options.body) : undefined,
152 | 	};
153 | 
154 | 	methodLogger.debug(`Calling Atlassian API: ${url}`);
155 | 
156 | 	// Set up timeout handling with configurable values
157 | 	const defaultTimeout = config.getNumber(
158 | 		'ATLASSIAN_REQUEST_TIMEOUT',
159 | 		NETWORK_TIMEOUTS.DEFAULT_REQUEST_TIMEOUT,
160 | 	);
161 | 	const timeoutMs = options.timeout ?? defaultTimeout;
162 | 	const controller = new AbortController();
163 | 	const timeoutId = setTimeout(() => {
164 | 		methodLogger.warn(`Request timeout after ${timeoutMs}ms: ${url}`);
165 | 		controller.abort();
166 | 	}, timeoutMs);
167 | 
168 | 	// Add abort signal to request options
169 | 	requestOptions.signal = controller.signal;
170 | 
171 | 	try {
172 | 		const response = await fetch(url, requestOptions);
173 | 		clearTimeout(timeoutId);
174 | 
175 | 		// Log the raw response status and headers
176 | 		methodLogger.debug(
177 | 			`Raw response received: ${response.status} ${response.statusText}`,
178 | 			{
179 | 				url,
180 | 				status: response.status,
181 | 				statusText: response.statusText,
182 | 				headers: Object.fromEntries(response.headers.entries()),
183 | 			},
184 | 		);
185 | 
186 | 		// Validate response size to prevent excessive memory usage (CWE-770)
187 | 		const contentLength = response.headers.get('content-length');
188 | 		if (contentLength) {
189 | 			const responseSize = parseInt(contentLength, 10);
190 | 			if (responseSize > DATA_LIMITS.MAX_RESPONSE_SIZE) {
191 | 				methodLogger.warn(
192 | 					`Response size ${responseSize} bytes exceeds limit of ${DATA_LIMITS.MAX_RESPONSE_SIZE} bytes`,
193 | 				);
194 | 				throw createApiError(
195 | 					`Response size (${Math.round(responseSize / (1024 * 1024))}MB) exceeds maximum limit of ${Math.round(DATA_LIMITS.MAX_RESPONSE_SIZE / (1024 * 1024))}MB`,
196 | 					413,
197 | 					{ responseSize, limit: DATA_LIMITS.MAX_RESPONSE_SIZE },
198 | 				);
199 | 			}
200 | 		}
201 | 
202 | 		if (!response.ok) {
203 | 			const errorText = await response.text();
204 | 			methodLogger.error(
205 | 				`API error: ${response.status} ${response.statusText}`,
206 | 				errorText,
207 | 			);
208 | 
209 | 			// Try to parse the error response
210 | 			let errorMessage = `${response.status} ${response.statusText}`;
211 | 			let parsedBitbucketError = null;
212 | 
213 | 			try {
214 | 				if (
215 | 					errorText &&
216 | 					(errorText.startsWith('{') || errorText.startsWith('['))
217 | 				) {
218 | 					const parsedError = JSON.parse(errorText);
219 | 
220 | 					// Extract specific error details from various Bitbucket API response formats
221 | 					if (
222 | 						parsedError.type === 'error' &&
223 | 						parsedError.error &&
224 | 						parsedError.error.message
225 | 					) {
226 | 						// Format: {"type":"error", "error":{"message":"...", "detail":"..."}}
227 | 						parsedBitbucketError = parsedError.error;
228 | 						errorMessage = parsedBitbucketError.message;
229 | 						if (parsedBitbucketError.detail) {
230 | 							errorMessage += ` Detail: ${parsedBitbucketError.detail}`;
231 | 						}
232 | 					} else if (parsedError.error && parsedError.error.message) {
233 | 						// Alternative error format: {"error": {"message": "..."}}
234 | 						parsedBitbucketError = parsedError.error;
235 | 						errorMessage = parsedBitbucketError.message;
236 | 					} else if (
237 | 						parsedError.errors &&
238 | 						Array.isArray(parsedError.errors) &&
239 | 						parsedError.errors.length > 0
240 | 					) {
241 | 						// Format: {"errors":[{"status":400,"code":"INVALID_REQUEST_PARAMETER","title":"..."}]}
242 | 						const atlassianError = parsedError.errors[0];
243 | 						if (atlassianError.title) {
244 | 							errorMessage = atlassianError.title;
245 | 							parsedBitbucketError = atlassianError;
246 | 						}
247 | 					} else if (parsedError.message) {
248 | 						// Format: {"message":"Some error message"}
249 | 						errorMessage = parsedError.message;
250 | 						parsedBitbucketError = parsedError;
251 | 					}
252 | 				}
253 | 			} catch (parseError) {
254 | 				methodLogger.debug(`Error parsing error response:`, parseError);
255 | 				// Fall back to the default error message
256 | 			}
257 | 
258 | 			// Log the parsed error or raw error text
259 | 			methodLogger.debug(
260 | 				'Parsed Bitbucket error:',
261 | 				parsedBitbucketError || errorText,
262 | 			);
263 | 
264 | 			// Use parsedBitbucketError (or errorText if parsing failed) as originalError
265 | 			const originalErrorForMcp = parsedBitbucketError || errorText;
266 | 
267 | 			// Handle common Bitbucket API error status codes
268 | 			if (response.status === 401) {
269 | 				throw createAuthInvalidError(
270 | 					`Bitbucket API: Authentication failed - ${errorMessage}`,
271 | 					originalErrorForMcp,
272 | 				);
273 | 			}
274 | 
275 | 			if (response.status === 403) {
276 | 				throw createApiError(
277 | 					`Bitbucket API: Permission denied - ${errorMessage}`,
278 | 					403,
279 | 					originalErrorForMcp,
280 | 				);
281 | 			}
282 | 
283 | 			if (response.status === 404) {
284 | 				throw createApiError(
285 | 					`Bitbucket API: Resource not found - ${errorMessage}`,
286 | 					404,
287 | 					originalErrorForMcp,
288 | 				);
289 | 			}
290 | 
291 | 			if (response.status === 429) {
292 | 				throw createApiError(
293 | 					`Bitbucket API: Rate limit exceeded - ${errorMessage}`,
294 | 					429,
295 | 					originalErrorForMcp,
296 | 				);
297 | 			}
298 | 
299 | 			if (response.status >= 500) {
300 | 				throw createApiError(
301 | 					`Bitbucket API: Service error - ${errorMessage}`,
302 | 					response.status,
303 | 					originalErrorForMcp,
304 | 				);
305 | 			}
306 | 
307 | 			// For other API errors, preserve the original vendor message
308 | 			throw createApiError(
309 | 				`Bitbucket API Error: ${errorMessage}`,
310 | 				response.status,
311 | 				originalErrorForMcp,
312 | 			);
313 | 		}
314 | 
315 | 		// Check if the response is expected to be plain text
316 | 		const contentType = response.headers.get('content-type') || '';
317 | 		if (contentType.includes('text/plain')) {
318 | 			// If we're expecting text (like a diff), return the raw text
319 | 			const textResponse = await response.text();
320 | 			methodLogger.debug(
321 | 				`Text response received (truncated)`,
322 | 				textResponse.substring(0, 200) + '...',
323 | 			);
324 | 			return textResponse as unknown as T;
325 | 		}
326 | 
327 | 		// For JSON responses, proceed as before
328 | 		// Clone the response to log its content without consuming it
329 | 		const clonedResponse = response.clone();
330 | 		try {
331 | 			const responseJson = await clonedResponse.json();
332 | 			methodLogger.debug(`Response body:`, responseJson);
333 | 		} catch {
334 | 			methodLogger.debug(
335 | 				`Could not parse response as JSON, returning raw content`,
336 | 			);
337 | 		}
338 | 
339 | 		return response.json() as Promise<T>;
340 | 	} catch (error) {
341 | 		clearTimeout(timeoutId);
342 | 		methodLogger.error(`Request failed`, error);
343 | 
344 | 		// If it's already an McpError, just rethrow it
345 | 		if (error instanceof McpError) {
346 | 			throw error;
347 | 		}
348 | 
349 | 		// Handle timeout errors
350 | 		if (error instanceof Error && error.name === 'AbortError') {
351 | 			methodLogger.error(
352 | 				`Request timed out after ${timeoutMs}ms: ${url}`,
353 | 			);
354 | 			throw createApiError(
355 | 				`Request timeout: Bitbucket API did not respond within ${timeoutMs / 1000} seconds`,
356 | 				408,
357 | 				error,
358 | 			);
359 | 		}
360 | 
361 | 		// Handle network errors more explicitly
362 | 		if (error instanceof TypeError) {
363 | 			// TypeError is typically a network/fetch error in this context
364 | 			const errorMessage = error.message || 'Network error occurred';
365 | 			methodLogger.debug(`Network error details: ${errorMessage}`);
366 | 
367 | 			throw createApiError(
368 | 				`Network error while calling Bitbucket API: ${errorMessage}`,
369 | 				500, // This will be classified as NETWORK_ERROR by detectErrorType
370 | 				error,
371 | 			);
372 | 		}
373 | 
374 | 		// Handle JSON parsing errors
375 | 		if (error instanceof SyntaxError) {
376 | 			methodLogger.debug(`JSON parsing error: ${error.message}`);
377 | 
378 | 			throw createApiError(
379 | 				`Invalid response format from Bitbucket API: ${error.message}`,
380 | 				500,
381 | 				error,
382 | 			);
383 | 		}
384 | 
385 | 		// Generic error handler for any other types of errors
386 | 		throw createUnexpectedError(
387 | 			`Unexpected error while calling Bitbucket API: ${error instanceof Error ? error.message : String(error)}`,
388 | 			error,
389 | 		);
390 | 	}
391 | }
392 | 
```
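
A minimal sketch of how a service might call this transport layer. Only `getAtlassianCredentials`, `fetchAtlassian`, and the `RequestOptions.timeout` field come from the file above; the `/2.0/workspaces` path and the response shape are illustrative assumptions.

```typescript
// Hypothetical usage sketch -- endpoint path and response shape are assumptions.
import {
	getAtlassianCredentials,
	fetchAtlassian,
} from '../utils/transport.util.js';

interface WorkspaceListResponse {
	values: Array<{ slug: string; name: string }>;
}

export async function listWorkspaceSlugs(): Promise<string[]> {
	const credentials = getAtlassianCredentials();
	if (!credentials) {
		throw new Error('Atlassian credentials are not configured');
	}

	// GET is the default method; a per-request timeout overrides the configured default.
	const data = await fetchAtlassian<WorkspaceListResponse>(
		credentials,
		'/2.0/workspaces',
		{ timeout: 10_000 },
	);
	return data.values.map((workspace) => workspace.slug);
}
```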

--------------------------------------------------------------------------------
/src/tools/atlassian.pullrequests.types.ts:
--------------------------------------------------------------------------------

```typescript
  1 | import { z } from 'zod';
  2 | 
  3 | /**
  4 |  * Base pagination arguments for all tools
  5 |  */
  6 | const PaginationArgs = {
  7 | 	limit: z
  8 | 		.number()
  9 | 		.int()
 10 | 		.positive()
 11 | 		.max(100)
 12 | 		.optional()
 13 | 		.describe(
 14 | 			'Maximum number of items to return (1-100). Controls the response size. Defaults to 25 if omitted.',
 15 | 		),
 16 | 
 17 | 	cursor: z
 18 | 		.string()
 19 | 		.optional()
 20 | 		.describe(
 21 | 			'Pagination cursor for retrieving the next set of results. Obtained from previous response when more results are available.',
 22 | 		),
 23 | };
 24 | 
 25 | /**
 26 |  * Schema for list-pull-requests tool arguments
 27 |  */
 28 | export const ListPullRequestsToolArgs = z.object({
 29 | 	/**
 30 | 	 * Workspace slug containing the repository
 31 | 	 */
 32 | 	workspaceSlug: z
 33 | 		.string()
 34 | 		.optional()
 35 | 		.describe(
 36 | 			'Workspace slug containing the repository. If not provided, the system will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account). Example: "myteam"',
 37 | 		),
 38 | 
 39 | 	/**
 40 | 	 * Repository slug containing the pull requests
 41 | 	 */
 42 | 	repoSlug: z
 43 | 		.string()
 44 | 		.min(1, 'Repository slug is required')
 45 | 		.describe(
 46 | 			'Repository slug containing the pull requests. This must be a valid repository in the specified workspace. Example: "project-api"',
 47 | 		),
 48 | 
 49 | 	/**
 50 | 	 * Filter by pull request state
 51 | 	 */
 52 | 	state: z
 53 | 		.enum(['OPEN', 'MERGED', 'DECLINED', 'SUPERSEDED'])
 54 | 		.optional()
 55 | 		.describe(
 56 | 			'Filter pull requests by state. Options: "OPEN" (active PRs), "MERGED" (completed PRs), "DECLINED" (rejected PRs), or "SUPERSEDED" (replaced PRs). If omitted, defaults to showing all states.',
 57 | 		),
 58 | 
 59 | 	/**
 60 | 	 * Filter query for pull requests
 61 | 	 */
 62 | 	query: z
 63 | 		.string()
 64 | 		.optional()
 65 | 		.describe(
 66 | 			'Filter pull requests by title, description, or author (text search). Uses Bitbucket query syntax.',
 67 | 		),
 68 | 
 69 | 	/**
 70 | 	 * Pagination parameters (limit, cursor). Limit defaults to 25 if omitted.
 71 | 	 */
 72 | 	...PaginationArgs,
 73 | });
 74 | 
 75 | export type ListPullRequestsToolArgsType = z.infer<
 76 | 	typeof ListPullRequestsToolArgs
 77 | >;
 78 | 
 79 | /**
 80 |  * Schema for get-pull-request tool arguments
 81 |  */
 82 | export const GetPullRequestToolArgs = z.object({
 83 | 	/**
 84 | 	 * Workspace slug containing the repository
 85 | 	 */
 86 | 	workspaceSlug: z
 87 | 		.string()
 88 | 		.optional()
 89 | 		.describe(
 90 | 			'Workspace slug containing the repository. If not provided, the system will use your default workspace. Example: "myteam"',
 91 | 		),
 92 | 
 93 | 	/**
 94 | 	 * Repository slug containing the pull request
 95 | 	 */
 96 | 	repoSlug: z
 97 | 		.string()
 98 | 		.min(1, 'Repository slug is required')
 99 | 		.describe(
100 | 			'Repository slug containing the pull request. This must be a valid repository in the specified workspace. Example: "project-api"',
101 | 		),
102 | 
103 | 	/**
104 | 	 * Pull request identifier
105 | 	 */
106 | 	prId: z
107 | 		.string()
108 | 		.min(1, 'Pull request ID is required')
109 | 		.describe(
110 | 			'Numeric ID of the pull request to retrieve as a string. Must be a valid pull request ID in the specified repository. Example: "42"',
111 | 		),
112 | 
113 | 	/**
114 | 	 * Optional flag to request the full diff
115 | 	 */
116 | 	includeFullDiff: z
117 | 		.boolean()
118 | 		.optional()
119 | 		.describe(
120 | 			'Set to true to retrieve the full diff content instead of just the summary. Default: true (rich output by default)',
121 | 		)
122 | 		.default(true),
123 | 
124 | 	/**
125 | 	 * Optional flag to include comments
126 | 	 */
127 | 	includeComments: z
128 | 		.boolean()
129 | 		.optional()
130 | 		.describe(
131 | 			'Set to true to retrieve comments for the pull request. Default: false. Note: Enabling this may increase response time for pull requests with many comments due to additional API calls.',
132 | 		)
133 | 		.default(false),
134 | });
135 | 
136 | export type GetPullRequestToolArgsType = z.infer<typeof GetPullRequestToolArgs>;
137 | 
138 | /**
139 |  * Schema for list-pr-comments tool arguments
140 |  */
141 | export const ListPullRequestCommentsToolArgs = z.object({
142 | 	/**
143 | 	 * Workspace slug containing the repository
144 | 	 */
145 | 	workspaceSlug: z
146 | 		.string()
147 | 		.optional()
148 | 		.describe(
149 | 			'Workspace slug containing the repository. If not provided, the system will use your default workspace. Example: "myteam"',
150 | 		),
151 | 
152 | 	/**
153 | 	 * Repository slug containing the pull request
154 | 	 */
155 | 	repoSlug: z
156 | 		.string()
157 | 		.min(1, 'Repository slug is required')
158 | 		.describe(
159 | 			'Repository slug containing the pull request. This must be a valid repository in the specified workspace. Example: "project-api"',
160 | 		),
161 | 
162 | 	/**
163 | 	 * Pull request identifier
164 | 	 */
165 | 	prId: z
166 | 		.string()
167 | 		.min(1, 'Pull request ID is required')
168 | 		.describe(
169 | 			'Numeric ID of the pull request to retrieve comments from as a string. Must be a valid pull request ID in the specified repository. Example: "42"',
170 | 		),
171 | 
172 | 	/**
173 | 	 * Pagination parameters
174 | 	 */
175 | 	...PaginationArgs,
176 | });
177 | 
178 | export type ListPullRequestCommentsToolArgsType = z.infer<
179 | 	typeof ListPullRequestCommentsToolArgs
180 | >;
181 | 
182 | /**
183 |  * Schema for create-pr-comment tool arguments
184 |  */
185 | export const CreatePullRequestCommentToolArgs = z.object({
186 | 	/**
187 | 	 * Workspace slug containing the repository
188 | 	 */
189 | 	workspaceSlug: z
190 | 		.string()
191 | 		.optional()
192 | 		.describe(
193 | 			'Workspace slug containing the repository. If not provided, the system will use your default workspace. Example: "myteam"',
194 | 		),
195 | 
196 | 	/**
197 | 	 * Repository slug containing the pull request
198 | 	 */
199 | 	repoSlug: z
200 | 		.string()
201 | 		.min(1, 'Repository slug is required')
202 | 		.describe(
203 | 			'Repository slug containing the pull request. This must be a valid repository in the specified workspace. Example: "project-api"',
204 | 		),
205 | 
206 | 	/**
207 | 	 * Pull request identifier
208 | 	 */
209 | 	prId: z
210 | 		.string()
211 | 		.min(1, 'Pull request ID is required')
212 | 		.describe(
213 | 			'Numeric ID of the pull request to add a comment to as a string. Must be a valid pull request ID in the specified repository. Example: "42"',
214 | 		),
215 | 
216 | 	/**
217 | 	 * Comment content
218 | 	 */
219 | 	content: z
220 | 		.string()
221 | 		.min(1, 'Comment content is required')
222 | 		.describe(
223 | 			'The content of the comment to add to the pull request in Markdown format. Bitbucket Cloud natively accepts Markdown - supports headings, lists, code blocks, links, and other standard Markdown syntax.',
224 | 		),
225 | 
226 | 	/**
227 | 	 * Optional inline location for the comment
228 | 	 */
229 | 	inline: z
230 | 		.object({
231 | 			path: z
232 | 				.string()
233 | 				.min(1, 'File path is required for inline comments')
234 | 				.describe('The file path to add the comment to.'),
235 | 			line: z
236 | 				.number()
237 | 				.int()
238 | 				.positive()
239 | 				.describe('The line number to add the comment to.'),
240 | 		})
241 | 		.optional()
242 | 		.describe(
243 | 			'Optional inline location for the comment. If provided, this will create a comment on a specific line in a file.',
244 | 		),
245 | 
246 | 	parentId: z
247 | 		.string()
248 | 		.optional()
249 | 		.describe(
250 | 			'The ID of the parent comment to reply to. If not provided, the comment will be a top-level comment.',
251 | 		),
252 | });
253 | 
254 | /**
255 |  * Type for create pull request comment tool arguments (inferred from schema)
256 |  */
257 | export type CreatePullRequestCommentToolArgsType = z.infer<
258 | 	typeof CreatePullRequestCommentToolArgs
259 | >;
260 | 
261 | /**
262 |  * Arguments schema for the pull_requests_create tool
263 |  */
264 | export const CreatePullRequestToolArgs = z.object({
265 | 	/**
266 | 	 * Workspace slug containing the repository
267 | 	 */
268 | 	workspaceSlug: z
269 | 		.string()
270 | 		.optional()
271 | 		.describe(
272 | 			'Workspace slug containing the repository. If not provided, the system will use your default workspace. Example: "myteam"',
273 | 		),
274 | 
275 | 	/**
276 | 	 * Repository slug to create the pull request in
277 | 	 */
278 | 	repoSlug: z
279 | 		.string()
280 | 		.min(1, 'Repository slug is required')
281 | 		.describe(
282 | 			'Repository slug to create the pull request in. This must be a valid repository in the specified workspace. Example: "project-api"',
283 | 		),
284 | 
285 | 	/**
286 | 	 * Title of the pull request
287 | 	 */
288 | 	title: z
289 | 		.string()
290 | 		.min(1, 'Pull request title is required')
291 | 		.describe('Title for the pull request. Example: "Add new feature"'),
292 | 
293 | 	/**
294 | 	 * Source branch name
295 | 	 */
296 | 	sourceBranch: z
297 | 		.string()
298 | 		.min(1, 'Source branch name is required')
299 | 		.describe(
300 | 			'Source branch name (the branch containing your changes). Example: "feature/new-login"',
301 | 		),
302 | 
303 | 	/**
304 | 	 * Destination branch name
305 | 	 */
306 | 	destinationBranch: z
307 | 		.string()
308 | 		.optional()
309 | 		.describe(
310 | 			'Destination branch name (the branch you want to merge into, defaults to main). Example: "develop"',
311 | 		),
312 | 
313 | 	/**
314 | 	 * Description for the pull request
315 | 	 */
316 | 	description: z
317 | 		.string()
318 | 		.optional()
319 | 		.describe(
320 | 			'Optional description for the pull request in Markdown format. Supports standard Markdown syntax including headings, lists, code blocks, and links.',
321 | 		),
322 | 
323 | 	/**
324 | 	 * Whether to close the source branch after merge
325 | 	 */
326 | 	closeSourceBranch: z
327 | 		.boolean()
328 | 		.optional()
329 | 		.describe(
330 | 			'Whether to close the source branch after the pull request is merged. Default: false',
331 | 		),
332 | });
333 | 
334 | export type CreatePullRequestToolArgsType = z.infer<
335 | 	typeof CreatePullRequestToolArgs
336 | >;
337 | 
338 | /**
339 |  * Schema for update-pull-request tool arguments
340 |  */
341 | export const UpdatePullRequestToolArgs = z.object({
342 | 	/**
343 | 	 * Workspace slug containing the repository
344 | 	 */
345 | 	workspaceSlug: z
346 | 		.string()
347 | 		.optional()
348 | 		.describe(
349 | 			'Workspace slug containing the repository. If not provided, the system will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account). Example: "myteam"',
350 | 		),
351 | 
352 | 	/**
353 | 	 * Repository slug containing the pull request
354 | 	 */
355 | 	repoSlug: z
356 | 		.string()
357 | 		.min(1, 'Repository slug is required')
358 | 		.describe(
359 | 			'Repository slug containing the pull request. This must be a valid repository in the specified workspace. Example: "project-api"',
360 | 		),
361 | 
362 | 	/**
363 | 	 * Pull request ID
364 | 	 */
365 | 	pullRequestId: z
366 | 		.number()
367 | 		.int()
368 | 		.positive()
369 | 		.describe('Pull request ID to update. Example: 123'),
370 | 
371 | 	/**
372 | 	 * Updated title for the pull request
373 | 	 */
374 | 	title: z
375 | 		.string()
376 | 		.optional()
377 | 		.describe(
378 | 			'Updated title for the pull request. Example: "Updated Feature Implementation"',
379 | 		),
380 | 
381 | 	/**
382 | 	 * Updated description for the pull request
383 | 	 */
384 | 	description: z
385 | 		.string()
386 | 		.optional()
387 | 		.describe(
388 | 			'Updated description for the pull request in Markdown format. Supports standard Markdown syntax including headings, lists, code blocks, and links.',
389 | 		),
390 | });
391 | 
392 | export type UpdatePullRequestToolArgsType = z.infer<
393 | 	typeof UpdatePullRequestToolArgs
394 | >;
395 | 
396 | /**
397 |  * Schema for approve-pull-request tool arguments
398 |  */
399 | export const ApprovePullRequestToolArgs = z.object({
400 | 	/**
401 | 	 * Workspace slug containing the repository
402 | 	 */
403 | 	workspaceSlug: z
404 | 		.string()
405 | 		.optional()
406 | 		.describe(
407 | 			'Workspace slug containing the repository. If not provided, the system will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account). Example: "myteam"',
408 | 		),
409 | 
410 | 	/**
411 | 	 * Repository slug containing the pull request
412 | 	 */
413 | 	repoSlug: z
414 | 		.string()
415 | 		.min(1, 'Repository slug is required')
416 | 		.describe(
417 | 			'Repository slug containing the pull request. This must be a valid repository in the specified workspace. Example: "project-api"',
418 | 		),
419 | 
420 | 	/**
421 | 	 * Pull request ID
422 | 	 */
423 | 	pullRequestId: z
424 | 		.number()
425 | 		.int()
426 | 		.positive()
427 | 		.describe('Pull request ID to approve. Example: 123'),
428 | });
429 | 
430 | export type ApprovePullRequestToolArgsType = z.infer<
431 | 	typeof ApprovePullRequestToolArgs
432 | >;
433 | 
434 | /**
435 |  * Schema for reject-pull-request tool arguments
436 |  */
437 | export const RejectPullRequestToolArgs = z.object({
438 | 	/**
439 | 	 * Workspace slug containing the repository
440 | 	 */
441 | 	workspaceSlug: z
442 | 		.string()
443 | 		.optional()
444 | 		.describe(
445 | 			'Workspace slug containing the repository. If not provided, the system will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account). Example: "myteam"',
446 | 		),
447 | 
448 | 	/**
449 | 	 * Repository slug containing the pull request
450 | 	 */
451 | 	repoSlug: z
452 | 		.string()
453 | 		.min(1, 'Repository slug is required')
454 | 		.describe(
455 | 			'Repository slug containing the pull request. This must be a valid repository in the specified workspace. Example: "project-api"',
456 | 		),
457 | 
458 | 	/**
459 | 	 * Pull request ID
460 | 	 */
461 | 	pullRequestId: z
462 | 		.number()
463 | 		.int()
464 | 		.positive()
465 | 		.describe('Pull request ID to request changes on. Example: 123'),
466 | });
467 | 
468 | export type RejectPullRequestToolArgsType = z.infer<
469 | 	typeof RejectPullRequestToolArgs
470 | >;
471 | 
```
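
Because these schemas are plain Zod objects, tool handlers can validate incoming arguments with `safeParse` and get typed values back. A small sketch follows; the raw input values are made up for illustration.

```typescript
// Hypothetical validation sketch using the Zod schemas defined above.
import {
	ListPullRequestsToolArgs,
	type ListPullRequestsToolArgsType,
} from './atlassian.pullrequests.types.js';

const rawInput: unknown = {
	repoSlug: 'project-api',
	state: 'OPEN',
	limit: 10,
};

// safeParse reports validation problems instead of throwing.
const parsed = ListPullRequestsToolArgs.safeParse(rawInput);
if (!parsed.success) {
	console.error('Invalid arguments:', parsed.error.issues);
} else {
	const args: ListPullRequestsToolArgsType = parsed.data;
	// workspaceSlug is optional, so downstream code falls back to the default workspace.
	console.log(args.repoSlug, args.state, args.limit);
}
```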

--------------------------------------------------------------------------------
/src/utils/adf.util.ts:
--------------------------------------------------------------------------------

```typescript
  1 | /**
  2 |  * Utility functions for converting Atlassian Document Format (ADF) to Markdown
  3 |  *
  4 |  * NOTE: Unlike Jira, Bitbucket Cloud API natively accepts and returns Markdown format.
  5 |  * This utility only includes adfToMarkdown for potential edge cases where Bitbucket
  6 |  * might return ADF content (though this is rare).
  7 |  *
  8 |  * Functions like markdownToAdf and textToAdf (needed in the Jira project) are NOT needed
  9 |  * in the Bitbucket integration and have been removed to avoid confusion.
 10 |  */
 11 | 
 12 | import { Logger } from './logger.util.js';
 13 | // Placeholder for AdfDocument type if specific types are needed for Bitbucket
 14 | // For now, assuming a similar structure to Jira's AdfDocument
 15 | type AdfDocument = {
 16 | 	version: number;
 17 | 	type: 'doc';
 18 | 	content: AdfNode[];
 19 | };
 20 | 
 21 | // Create a contextualized logger for this file
 22 | const adfLogger = Logger.forContext('utils/adf.util.ts');
 23 | 
 24 | // Log ADF utility initialization
 25 | adfLogger.debug('ADF utility initialized');
 26 | 
 27 | /**
 28 |  * Interface for ADF node
 29 |  */
 30 | interface AdfNode {
 31 | 	type: string;
 32 | 	text?: string;
 33 | 	content?: AdfNode[];
 34 | 	attrs?: Record<string, unknown>;
 35 | 	marks?: Array<{ type: string; attrs?: Record<string, unknown> }>;
 36 | }
 37 | 
 38 | /**
 39 |  * Convert Atlassian Document Format (ADF) to Markdown
 40 |  *
 41 |  * @param adf - The ADF content to convert (can be string or object)
 42 |  * @returns The converted Markdown content
 43 |  */
 44 | export function adfToMarkdown(adf: unknown): string {
 45 | 	const methodLogger = Logger.forContext(
 46 | 		'utils/adf.util.ts',
 47 | 		'adfToMarkdown',
 48 | 	);
 49 | 
 50 | 	try {
 51 | 		// Handle empty or undefined input
 52 | 		if (!adf) {
 53 | 			return '';
 54 | 		}
 55 | 
 56 | 		// Parse ADF if it's a string
 57 | 		let adfDoc: AdfDocument;
 58 | 		if (typeof adf === 'string') {
 59 | 			try {
 60 | 				adfDoc = JSON.parse(adf);
 61 | 			} catch {
 62 | 				return adf; // Return as-is if not valid JSON
 63 | 			}
 64 | 		} else if (typeof adf === 'object') {
 65 | 			adfDoc = adf as AdfDocument;
 66 | 		} else {
 67 | 			return String(adf);
 68 | 		}
 69 | 
 70 | 		// Check if it's a valid ADF document
 71 | 		if (!adfDoc.content || !Array.isArray(adfDoc.content)) {
 72 | 			return '';
 73 | 		}
 74 | 
 75 | 		// Process the document
 76 | 		const markdown = processAdfContent(adfDoc.content);
 77 | 		methodLogger.debug(
 78 | 			`Converted ADF to Markdown, length: ${markdown.length}`,
 79 | 		);
 80 | 		return markdown;
 81 | 	} catch (error) {
 82 | 		methodLogger.error(
 83 | 			'[src/utils/adf.util.ts@adfToMarkdown] Error converting ADF to Markdown:',
 84 | 			error,
 85 | 		);
 86 | 		return '*Error converting description format*';
 87 | 	}
 88 | }
 89 | 
 90 | /**
 91 |  * Process ADF content nodes
 92 |  */
 93 | function processAdfContent(content: AdfNode[]): string {
 94 | 	if (!content || !Array.isArray(content)) {
 95 | 		return '';
 96 | 	}
 97 | 
 98 | 	return content.map((node) => processAdfNode(node)).join('\n\n');
 99 | }
100 | 
101 | /**
102 |  * Process mention node
103 |  */
104 | function processMention(node: AdfNode): string {
105 | 	if (!node.attrs) {
106 | 		return '';
107 | 	}
108 | 
109 | 	const text = node.attrs.text || node.attrs.displayName || '';
110 | 	if (!text) {
111 | 		return '';
112 | 	}
113 | 
114 | 	// Format as @username to preserve the mention format
115 | 	// Remove any existing @ symbol to avoid double @@ in the output
116 | 	const cleanText =
117 | 		typeof text === 'string' && text.startsWith('@')
118 | 			? text.substring(1)
119 | 			: text;
120 | 	return `@${cleanText}`;
121 | }
122 | 
123 | /**
124 |  * Process a single ADF node
125 |  */
126 | function processAdfNode(node: AdfNode): string {
127 | 	if (!node || !node.type) {
128 | 		return '';
129 | 	}
130 | 
131 | 	switch (node.type) {
132 | 		case 'paragraph':
133 | 			return processParagraph(node);
134 | 		case 'heading':
135 | 			return processHeading(node);
136 | 		case 'bulletList':
137 | 			return processBulletList(node);
138 | 		case 'orderedList':
139 | 			return processOrderedList(node);
140 | 		case 'listItem':
141 | 			return processListItem(node);
142 | 		case 'codeBlock':
143 | 			return processCodeBlock(node);
144 | 		case 'blockquote':
145 | 			return processBlockquote(node);
146 | 		case 'rule':
147 | 			return '---';
148 | 		case 'mediaGroup':
149 | 			return processMediaGroup(node);
150 | 		case 'media':
151 | 			return processMedia(node);
152 | 		case 'table':
153 | 			return processTable(node);
154 | 		case 'text':
155 | 			return processText(node);
156 | 		case 'mention':
157 | 			return processMention(node);
158 | 		case 'inlineCard':
159 | 			return processInlineCard(node);
160 | 		case 'emoji':
161 | 			return processEmoji(node);
162 | 		case 'date':
163 | 			return processDate(node);
164 | 		case 'status':
165 | 			return processStatus(node);
166 | 		default:
167 | 			// For unknown node types, try to process content if available
168 | 			if (node.content) {
169 | 				return processAdfContent(node.content);
170 | 			}
171 | 			return '';
172 | 	}
173 | }
174 | 
175 | /**
176 |  * Process paragraph node
177 |  */
178 | function processParagraph(node: AdfNode): string {
179 | 	if (!node.content) {
180 | 		return '';
181 | 	}
182 | 
183 | 	// Process each child node and join them with proper spacing
184 | 	return node.content
185 | 		.map((childNode, index) => {
186 | 			// Add a space between text nodes if needed
187 | 			const needsSpace =
188 | 				index > 0 &&
189 | 				childNode.type === 'text' &&
190 | 				node.content![index - 1].type === 'text' &&
191 | 				!childNode.text?.startsWith(' ') &&
192 | 				!node.content![index - 1].text?.endsWith(' ');
193 | 
194 | 			return (needsSpace ? ' ' : '') + processAdfNode(childNode);
195 | 		})
196 | 		.join('');
197 | }
198 | 
199 | /**
200 |  * Process heading node
201 |  */
202 | function processHeading(node: AdfNode): string {
203 | 	if (!node.content || !node.attrs) {
204 | 		return '';
205 | 	}
206 | 
207 | 	const level = typeof node.attrs.level === 'number' ? node.attrs.level : 1;
208 | 	const headingMarker = '#'.repeat(level);
209 | 	const content = node.content
210 | 		.map((childNode) => processAdfNode(childNode))
211 | 		.join('');
212 | 
213 | 	return `${headingMarker} ${content}`;
214 | }
215 | 
216 | /**
217 |  * Process bullet list node
218 |  */
219 | function processBulletList(node: AdfNode): string {
220 | 	if (!node.content) {
221 | 		return '';
222 | 	}
223 | 
224 | 	return node.content.map((item) => processAdfNode(item)).join('\n');
225 | }
226 | 
227 | /**
228 |  * Process ordered list node
229 |  */
230 | function processOrderedList(node: AdfNode): string {
231 | 	if (!node.content) {
232 | 		return '';
233 | 	}
234 | 
235 | 	return node.content
236 | 		.map((item, index) => {
237 | 			const processedItem = processAdfNode(item);
238 | 			// Replace the first "- " with "1. ", "2. ", etc.
239 | 			return processedItem.replace(/^- /, `${index + 1}. `);
240 | 		})
241 | 		.join('\n');
242 | }
243 | 
244 | /**
245 |  * Process list item node
246 |  */
247 | function processListItem(node: AdfNode): string {
248 | 	if (!node.content) {
249 | 		return '';
250 | 	}
251 | 
252 | 	const content = node.content
253 | 		.map((childNode) => {
254 | 			const processed = processAdfNode(childNode);
255 | 			// For nested lists, add indentation
256 | 			if (
257 | 				childNode.type === 'bulletList' ||
258 | 				childNode.type === 'orderedList'
259 | 			) {
260 | 				return processed
261 | 					.split('\n')
262 | 					.map((line) => `  ${line}`)
263 | 					.join('\n');
264 | 			}
265 | 			return processed;
266 | 		})
267 | 		.join('\n');
268 | 
269 | 	return `- ${content}`;
270 | }
271 | 
272 | /**
273 |  * Process code block node
274 |  */
275 | function processCodeBlock(node: AdfNode): string {
276 | 	if (!node.content) {
277 | 		return '```\n```';
278 | 	}
279 | 
280 | 	const language = node.attrs?.language || '';
281 | 	const code = node.content
282 | 		.map((childNode) => processAdfNode(childNode))
283 | 		.join('');
284 | 
285 | 	return `\`\`\`${language}\n${code}\n\`\`\``;
286 | }
287 | 
288 | /**
289 |  * Process blockquote node
290 |  */
291 | function processBlockquote(node: AdfNode): string {
292 | 	if (!node.content) {
293 | 		return '';
294 | 	}
295 | 
296 | 	const content = node.content
297 | 		.map((childNode) => processAdfNode(childNode))
298 | 		.join('\n\n');
299 | 
300 | 	// Add > to each line
301 | 	return content
302 | 		.split('\n')
303 | 		.map((line) => `> ${line}`)
304 | 		.join('\n');
305 | }
306 | 
307 | /**
308 |  * Process media group node
309 |  */
310 | function processMediaGroup(node: AdfNode): string {
311 | 	if (!node.content) {
312 | 		return '';
313 | 	}
314 | 
315 | 	return node.content
316 | 		.map((mediaNode) => {
317 | 			if (mediaNode.type === 'media' && mediaNode.attrs) {
318 | 				const { id, type } = mediaNode.attrs;
319 | 				if (type === 'file') {
320 | 					return `[Attachment: ${id}]`;
321 | 				} else if (type === 'link') {
322 | 					return `[External Link]`;
323 | 				}
324 | 			}
325 | 			return '';
326 | 		})
327 | 		.filter(Boolean)
328 | 		.join('\n');
329 | }
330 | 
331 | /**
332 |  * Process media node
333 |  */
334 | function processMedia(node: AdfNode): string {
335 | 	if (!node.attrs) {
336 | 		return '';
337 | 	}
338 | 
339 | 	// Handle file attachments
340 | 	if (node.attrs.type === 'file') {
341 | 		const id = node.attrs.id || '';
342 | 		const altText = node.attrs.alt ? node.attrs.alt : `Attachment: ${id}`;
343 | 		return `![${altText}](attachment:${id})`;
344 | 	}
345 | 
346 | 	// Handle external media (e.g., YouTube embeds)
347 | 	if (node.attrs.type === 'external' && node.attrs.url) {
348 | 		return `[External Media](${node.attrs.url})`;
349 | 	}
350 | 
351 | 	return '';
352 | }
353 | 
354 | /**
355 |  * Process table node
356 |  */
357 | function processTable(node: AdfNode): string {
358 | 	if (!node.content) {
359 | 		return '';
360 | 	}
361 | 
362 | 	const rows: string[][] = [];
363 | 
364 | 	// Process table rows
365 | 	node.content.forEach((row) => {
366 | 		if (row.type === 'tableRow' && row.content) {
367 | 			const cells: string[] = [];
368 | 
369 | 			row.content.forEach((cell) => {
370 | 				if (
371 | 					(cell.type === 'tableCell' ||
372 | 						cell.type === 'tableHeader') &&
373 | 					cell.content
374 | 				) {
375 | 					const cellContent = cell.content
376 | 						.map((cellNode) => processAdfNode(cellNode))
377 | 						.join('');
378 | 					cells.push(cellContent.trim());
379 | 				}
380 | 			});
381 | 
382 | 			if (cells.length > 0) {
383 | 				rows.push(cells);
384 | 			}
385 | 		}
386 | 	});
387 | 
388 | 	if (rows.length === 0) {
389 | 		return '';
390 | 	}
391 | 
392 | 	// Create markdown table
393 | 	const columnCount = Math.max(...rows.map((row) => row.length));
394 | 
395 | 	// Ensure all rows have the same number of columns
396 | 	const normalizedRows = rows.map((row) => {
397 | 		while (row.length < columnCount) {
398 | 			row.push('');
399 | 		}
400 | 		return row;
401 | 	});
402 | 
403 | 	// Create header row
404 | 	const headerRow = normalizedRows[0].map((cell) => cell || '');
405 | 
406 | 	// Create separator row
407 | 	const separatorRow = headerRow.map(() => '---');
408 | 
409 | 	// Create content rows
410 | 	const contentRows = normalizedRows.slice(1);
411 | 
412 | 	// Build the table
413 | 	const tableRows = [
414 | 		headerRow.join(' | '),
415 | 		separatorRow.join(' | '),
416 | 		...contentRows.map((row) => row.join(' | ')),
417 | 	];
418 | 
419 | 	return tableRows.join('\n');
420 | }
421 | 
422 | /**
423 |  * Process text node
424 |  */
425 | function processText(node: AdfNode): string {
426 | 	if (!node.text) {
427 | 		return '';
428 | 	}
429 | 
430 | 	let text = node.text;
431 | 
432 | 	// Apply marks if available
433 | 	if (node.marks && node.marks.length > 0) {
434 | 		// Sort marks to ensure consistent application (process links last)
435 | 		const sortedMarks = [...node.marks].sort((a, b) => {
436 | 			if (a.type === 'link') return 1;
437 | 			if (b.type === 'link') return -1;
438 | 			return 0;
439 | 		});
440 | 
441 | 		// Apply non-link marks first
442 | 		sortedMarks.forEach((mark) => {
443 | 			switch (mark.type) {
444 | 				case 'strong':
445 | 					text = `**${text}**`;
446 | 					break;
447 | 				case 'em':
448 | 					text = `*${text}*`;
449 | 					break;
450 | 				case 'code':
451 | 					text = `\`${text}\``;
452 | 					break;
453 | 				case 'strike':
454 | 					text = `~~${text}~~`;
455 | 					break;
456 | 				case 'underline':
457 | 					// Markdown doesn't support underline, use emphasis instead
458 | 					text = `_${text}_`;
459 | 					break;
460 | 				case 'textColor':
461 | 					// Ignore in Markdown (no equivalent)
462 | 					break;
463 | 				case 'superscript':
464 | 					// Some flavors of Markdown support ^superscript^
465 | 					text = `^${text}^`;
466 | 					break;
467 | 				case 'subscript':
468 | 					// Some flavors of Markdown support ~subscript~
469 | 					// but this conflicts with strikethrough
470 | 					text = `~${text}~`;
471 | 					break;
472 | 				case 'link':
473 | 					if (mark.attrs && mark.attrs.href) {
474 | 						text = `[${text}](${mark.attrs.href})`;
475 | 					}
476 | 					break;
477 | 			}
478 | 		});
479 | 	}
480 | 
481 | 	return text;
482 | }
483 | 
484 | /**
485 |  * Process inline card node (references to Jira issues, Confluence pages, etc.)
486 |  */
487 | function processInlineCard(node: AdfNode): string {
488 | 	if (!node.attrs) {
489 | 		return '[Link]';
490 | 	}
491 | 
492 | 	const url = (node.attrs.url as string) || '';
493 | 	// Extract the name/ID from the URL if possible
494 | 	const match = url.match(/\/([^/]+)$/);
495 | 	const name = match ? match[1] : 'Link';
496 | 
497 | 	return `[${name}](${url})`;
498 | }
499 | 
500 | /**
501 |  * Process emoji node
502 |  */
503 | function processEmoji(node: AdfNode): string {
504 | 	if (!node.attrs) {
505 | 		return '';
506 | 	}
507 | 
508 | 	// Return shortName if available, otherwise fallback
509 | 	return (
510 | 		(node.attrs.shortName as string) || (node.attrs.id as string) || '📝'
511 | 	);
512 | }
513 | 
514 | /**
515 |  * Process date node
516 |  */
517 | function processDate(node: AdfNode): string {
518 | 	if (!node.attrs) {
519 | 		return '';
520 | 	}
521 | 
522 | 	return (node.attrs.timestamp as string) || '';
523 | }
524 | 
525 | /**
526 |  * Process status node (status lozenges)
527 |  */
528 | function processStatus(node: AdfNode): string {
529 | 	if (!node.attrs) {
530 | 		return '[Status]';
531 | 	}
532 | 
533 | 	const text = (node.attrs.text as string) || 'Status';
534 | 	// Markdown doesn't support colored lozenges, so we use brackets
535 | 	return `[${text}]`;
536 | }
537 | 
538 | /**
539 |  * The following functions have been removed since they are not needed in Bitbucket:
540 |  * - textToAdf (removed)
541 |  * - markdownToAdf (removed)
542 |  *
543 |  * Unlike Jira, Bitbucket's API natively accepts Markdown content,
544 |  * so there's no need to convert Markdown to ADF when sending data.
545 |  * Instead, see formatter.util.ts and optimizeBitbucketMarkdown() for
546 |  * Bitbucket-specific markdown handling.
547 |  */
548 | 
```
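
For the rare case where Bitbucket does return ADF, here is a minimal sketch of converting it with `adfToMarkdown`; the sample document below is invented for illustration.

```typescript
// Hypothetical usage sketch for adfToMarkdown with a hand-written ADF document.
import { adfToMarkdown } from '../utils/adf.util.js';

const adfDocument = {
	version: 1,
	type: 'doc',
	content: [
		{
			type: 'paragraph',
			content: [
				{ type: 'text', text: 'Deploy ' },
				{ type: 'text', text: 'carefully', marks: [{ type: 'strong' }] },
			],
		},
	],
};

// Accepts an object or a JSON string; falls back gracefully on invalid input.
console.log(adfToMarkdown(adfDocument)); // "Deploy **carefully**"
```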