#
tokens: 47571/50000 24/114 files (page 2/4)
lines: off (toggle) GitHub
raw markdown copy
This is page 2 of 4. Use http://codebase.md/aashari/mcp-server-atlassian-bitbucket?lines=false&page={x} to view the full context.

# Directory Structure

```
├── .env.example
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── ci-dependabot-auto-merge.yml
│       ├── ci-dependency-check.yml
│       └── ci-semantic-release.yml
├── .gitignore
├── .gitkeep
├── .npmignore
├── .npmrc
├── .prettierrc
├── .releaserc.json
├── .trigger-ci
├── CHANGELOG.md
├── eslint.config.mjs
├── jest.setup.js
├── package-lock.json
├── package.json
├── README.md
├── scripts
│   ├── ensure-executable.js
│   ├── package.json
│   └── update-version.js
├── src
│   ├── cli
│   │   ├── atlassian.diff.cli.ts
│   │   ├── atlassian.pullrequests.cli.test.ts
│   │   ├── atlassian.pullrequests.cli.ts
│   │   ├── atlassian.repositories.cli.test.ts
│   │   ├── atlassian.repositories.cli.ts
│   │   ├── atlassian.search.cli.test.ts
│   │   ├── atlassian.search.cli.ts
│   │   ├── atlassian.workspaces.cli.test.ts
│   │   ├── atlassian.workspaces.cli.ts
│   │   └── index.ts
│   ├── controllers
│   │   ├── atlassian.diff.controller.ts
│   │   ├── atlassian.diff.formatter.ts
│   │   ├── atlassian.pullrequests.approve.controller.ts
│   │   ├── atlassian.pullrequests.base.controller.ts
│   │   ├── atlassian.pullrequests.comments.controller.ts
│   │   ├── atlassian.pullrequests.controller.test.ts
│   │   ├── atlassian.pullrequests.controller.ts
│   │   ├── atlassian.pullrequests.create.controller.ts
│   │   ├── atlassian.pullrequests.formatter.ts
│   │   ├── atlassian.pullrequests.get.controller.ts
│   │   ├── atlassian.pullrequests.list.controller.ts
│   │   ├── atlassian.pullrequests.reject.controller.ts
│   │   ├── atlassian.pullrequests.update.controller.ts
│   │   ├── atlassian.repositories.branch.controller.ts
│   │   ├── atlassian.repositories.commit.controller.ts
│   │   ├── atlassian.repositories.content.controller.ts
│   │   ├── atlassian.repositories.controller.test.ts
│   │   ├── atlassian.repositories.details.controller.ts
│   │   ├── atlassian.repositories.formatter.ts
│   │   ├── atlassian.repositories.list.controller.ts
│   │   ├── atlassian.search.code.controller.ts
│   │   ├── atlassian.search.content.controller.ts
│   │   ├── atlassian.search.controller.test.ts
│   │   ├── atlassian.search.controller.ts
│   │   ├── atlassian.search.formatter.ts
│   │   ├── atlassian.search.pullrequests.controller.ts
│   │   ├── atlassian.search.repositories.controller.ts
│   │   ├── atlassian.workspaces.controller.test.ts
│   │   ├── atlassian.workspaces.controller.ts
│   │   └── atlassian.workspaces.formatter.ts
│   ├── index.ts
│   ├── services
│   │   ├── vendor.atlassian.pullrequests.service.ts
│   │   ├── vendor.atlassian.pullrequests.test.ts
│   │   ├── vendor.atlassian.pullrequests.types.ts
│   │   ├── vendor.atlassian.repositories.diff.service.ts
│   │   ├── vendor.atlassian.repositories.diff.types.ts
│   │   ├── vendor.atlassian.repositories.service.test.ts
│   │   ├── vendor.atlassian.repositories.service.ts
│   │   ├── vendor.atlassian.repositories.types.ts
│   │   ├── vendor.atlassian.search.service.ts
│   │   ├── vendor.atlassian.search.types.ts
│   │   ├── vendor.atlassian.workspaces.service.ts
│   │   ├── vendor.atlassian.workspaces.test.ts
│   │   └── vendor.atlassian.workspaces.types.ts
│   ├── tools
│   │   ├── atlassian.diff.tool.ts
│   │   ├── atlassian.diff.types.ts
│   │   ├── atlassian.pullrequests.tool.ts
│   │   ├── atlassian.pullrequests.types.test.ts
│   │   ├── atlassian.pullrequests.types.ts
│   │   ├── atlassian.repositories.tool.ts
│   │   ├── atlassian.repositories.types.ts
│   │   ├── atlassian.search.tool.ts
│   │   ├── atlassian.search.types.ts
│   │   ├── atlassian.workspaces.tool.ts
│   │   └── atlassian.workspaces.types.ts
│   ├── types
│   │   └── common.types.ts
│   └── utils
│       ├── adf.util.test.ts
│       ├── adf.util.ts
│       ├── atlassian.util.ts
│       ├── bitbucket-error-detection.test.ts
│       ├── cli.test.util.ts
│       ├── config.util.test.ts
│       ├── config.util.ts
│       ├── constants.util.ts
│       ├── defaults.util.ts
│       ├── diff.util.ts
│       ├── error-handler.util.test.ts
│       ├── error-handler.util.ts
│       ├── error.util.test.ts
│       ├── error.util.ts
│       ├── formatter.util.ts
│       ├── logger.util.ts
│       ├── markdown.util.test.ts
│       ├── markdown.util.ts
│       ├── pagination.util.ts
│       ├── path.util.test.ts
│       ├── path.util.ts
│       ├── query.util.ts
│       ├── shell.util.ts
│       ├── transport.util.test.ts
│       ├── transport.util.ts
│       └── workspace.util.ts
├── STYLE_GUIDE.md
└── tsconfig.json
```

# Files

--------------------------------------------------------------------------------
/src/utils/config.util.ts:
--------------------------------------------------------------------------------

```typescript
import fs from 'fs';
import path from 'path';
import { Logger } from './logger.util.js';
import dotenv from 'dotenv';
import os from 'os';

/**
 * Configuration loader that handles multiple sources with priority:
 * 1. Direct ENV pass (process.env)
 * 2. .env file in project root
 * 3. Global config file at $HOME/.mcp/configs.json
 */
class ConfigLoader {
	// Package name used to locate this package's section in the global config file.
	private packageName: string;
	// Guards against re-running load(); subsequent load() calls are no-ops.
	private configLoaded: boolean = false;

	/**
	 * Create a new ConfigLoader instance
	 * @param packageName The package name to use for global config lookup
	 */
	constructor(packageName: string) {
		this.packageName = packageName;
	}

	/**
	 * Load configuration from all sources with proper priority.
	 *
	 * Both loaders below only set variables that are not already present in
	 * process.env, so the earlier a source is applied, the higher its
	 * effective priority: direct ENV (priority 1) is already present, then
	 * the .env file (priority 2), then the global config file (priority 3).
	 */
	load(): void {
		const methodLogger = Logger.forContext('utils/config.util.ts', 'load');
		if (this.configLoaded) {
			methodLogger.debug('Configuration already loaded, skipping');
			return;
		}

		methodLogger.debug('Loading configuration...');

		// Priority 1: Direct ENV pass is already in process.env and is never
		// overwritten by the loaders below.

		// Priority 2: Load from .env file. This must run BEFORE the global
		// config loader — dotenv never overwrites existing process.env
		// entries, so loading the global config first would incorrectly give
		// it precedence over the .env file.
		this.loadFromEnvFile();

		// Priority 3: Load from global config file (lowest priority; only
		// fills keys still undefined after the steps above).
		this.loadFromGlobalConfig();

		this.configLoaded = true;
		methodLogger.debug('Configuration loaded successfully');
	}

	/**
	 * Load configuration from .env file in project root.
	 * Missing or unreadable .env files are treated as a normal condition
	 * (debug-logged), not an error.
	 */
	private loadFromEnvFile(): void {
		const methodLogger = Logger.forContext(
			'utils/config.util.ts',
			'loadFromEnvFile',
		);
		try {
			// Use quiet mode to prevent dotenv from outputting to STDIO
			// which interferes with MCP's JSON-RPC communication
			const result = dotenv.config({ quiet: true });
			if (result.error) {
				methodLogger.debug('No .env file found or error reading it');
				return;
			}
			methodLogger.debug('Loaded configuration from .env file');
		} catch (error) {
			methodLogger.error('Error loading .env file', error);
		}
	}

	/**
	 * Load configuration from global config file at $HOME/.mcp/configs.json.
	 *
	 * The file may key this package's section under several aliases; the
	 * first matching key (in priority order) wins. Values are only applied
	 * to process.env keys that are still undefined.
	 */
	private loadFromGlobalConfig(): void {
		const methodLogger = Logger.forContext(
			'utils/config.util.ts',
			'loadFromGlobalConfig',
		);
		try {
			const homedir = os.homedir();
			const globalConfigPath = path.join(homedir, '.mcp', 'configs.json');

			if (!fs.existsSync(globalConfigPath)) {
				methodLogger.debug('Global config file not found');
				return;
			}

			const configContent = fs.readFileSync(globalConfigPath, 'utf8');
			const config = JSON.parse(configContent);

			// Determine the potential keys for the current package
			const shortKey = 'bitbucket'; // Project-specific short key
			const atlassianProductKey = 'atlassian-bitbucket'; // New supported key
			const fullPackageName = this.packageName; // e.g., '@aashari/mcp-server-atlassian-bitbucket'
			const unscopedPackageName =
				fullPackageName.split('/')[1] || fullPackageName; // e.g., 'mcp-server-atlassian-bitbucket'

			// Define the prioritized order of keys to check
			const potentialKeys = [
				shortKey,
				atlassianProductKey,
				fullPackageName,
				unscopedPackageName,
			];
			let foundConfigSection: {
				environments?: Record<string, unknown>;
			} | null = null;
			let usedKey: string | null = null;

			for (const key of potentialKeys) {
				if (
					config[key] &&
					typeof config[key] === 'object' &&
					config[key].environments
				) {
					foundConfigSection = config[key];
					usedKey = key;
					methodLogger.debug(`Found configuration using key: ${key}`);
					break; // Stop once found
				}
			}

			if (!foundConfigSection || !foundConfigSection.environments) {
				methodLogger.debug(
					`No configuration found for ${
						this.packageName
					} using keys: ${potentialKeys.join(', ')}`,
				);
				return;
			}

			const environments = foundConfigSection.environments;
			for (const [key, value] of Object.entries(environments)) {
				// Only set if not already defined in process.env, preserving
				// the priority of ENV and .env sources loaded earlier.
				if (process.env[key] === undefined) {
					process.env[key] = String(value);
				}
			}

			methodLogger.debug(
				`Loaded configuration from global config file using key: ${usedKey}`,
			);
		} catch (error) {
			methodLogger.error('Error loading global config file', error);
		}
	}

	/**
	 * Get a configuration value
	 * @param key The configuration key
	 * @param defaultValue The default value if the key is not found
	 * @returns The configuration value or the default value
	 *
	 * Note: uses `||`, so a variable explicitly set to the empty string is
	 * treated as unset and falls back to the default.
	 */
	get(key: string, defaultValue?: string): string | undefined {
		return process.env[key] || defaultValue;
	}

	/**
	 * Get a boolean configuration value
	 * @param key The configuration key
	 * @param defaultValue The default value if the key is not found
	 * @returns True only when the value is the string 'true' (case-insensitive);
	 *          any other set value yields false
	 */
	getBoolean(key: string, defaultValue: boolean = false): boolean {
		const value = this.get(key);
		if (value === undefined) {
			return defaultValue;
		}
		return value.toLowerCase() === 'true';
	}

	/**
	 * Get a number configuration value
	 * @param key The configuration key
	 * @param defaultValue The default value if the key is not found or not parseable
	 * @returns The configuration value parsed as a base-10 integer, or the default
	 */
	getNumber(key: string, defaultValue: number = 0): number {
		const value = this.get(key);
		if (value === undefined) {
			return defaultValue;
		}
		const parsed = parseInt(value, 10);
		return isNaN(parsed) ? defaultValue : parsed;
	}
}

// Create and export a singleton instance keyed to this package's npm name
// (must match package.json so global-config lookups resolve correctly).
export const config = new ConfigLoader(
	'@aashari/mcp-server-atlassian-bitbucket',
);

```

--------------------------------------------------------------------------------
/src/cli/atlassian.diff.cli.ts:
--------------------------------------------------------------------------------

```typescript
import { Command } from 'commander';
import { Logger } from '../utils/logger.util.js';
import { handleCliError } from '../utils/error.util.js';
import diffController from '../controllers/atlassian.diff.controller.js';

// Create a contextualized logger for this file
const cliLogger = Logger.forContext('cli/atlassian.diff.cli.ts');

// Log initialization
cliLogger.debug('Bitbucket diff CLI module initialized');

/**
 * Register diff-related CLI commands
 * @param program - Commander instance
 */
/**
 * Register diff-related CLI commands
 * @param program - Commander instance
 */
function register(program: Command) {
	const log = cliLogger.forMethod('register');
	log.debug('Registering Bitbucket Diff CLI commands...');

	// Shared option-value parsers (Commander hands us raw strings).
	const parseBooleanFlag = (value: string) => value === 'true';
	const parseOptionalInt = (raw?: string) =>
		raw ? parseInt(raw, 10) : undefined;

	// Branch diff command
	program
		.command('diff-branches')
		.description(
			'Display differences between two branches in a repository.\nIMPORTANT: The output shows changes as "destinationBranch → sourceBranch". For complete code changes (not just summary), try reversing the branch parameters if initial results show only summary.',
		)
		.option(
			'-w, --workspace-slug <workspaceSlug>',
			'Workspace slug containing the repository. If not provided, the system will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account).',
		)
		.requiredOption(
			'-r, --repo-slug <repoSlug>',
			'Repository slug where the branches are located. Example: "my-repo"',
		)
		.requiredOption(
			'-s, --source-branch <sourceBranch>',
			'Name of the source branch (typically your feature branch). Example: "feature/my-feature"',
		)
		.option(
			'-d, --destination-branch <destinationBranch>',
			'Name of the destination branch (typically the main branch). Defaults to "main" if not provided.',
		)
		.option(
			'--full-diff <boolean>',
			'Whether to include the full diff in the response. Defaults to true.',
			parseBooleanFlag,
		)
		.option(
			'-l, --limit <number>',
			'Maximum number of files to show in the diff (1-100). Defaults to 25 if omitted.',
		)
		.option(
			'-p, --page <number>',
			'Page number for pagination. Starts at 1. Use with limit to paginate results.',
		)
		.option(
			'-t, --topic <boolean>',
			'Whether to treat the source ref as a topic branch. Defaults to false.',
			parseBooleanFlag,
		)
		.action(async (options) => {
			const commandLogger = cliLogger.forMethod('diff-branches');
			try {
				commandLogger.debug('Processing command options:', options);

				// Translate CLI options into controller parameters,
				// applying only type conversions and defaults.
				const controllerOptions = {
					workspaceSlug: options.workspaceSlug,
					repoSlug: options.repoSlug,
					sourceBranch: options.sourceBranch,
					destinationBranch: options.destinationBranch,
					includeFullDiff:
						options.fullDiff === undefined
							? true
							: options.fullDiff,
					limit: parseOptionalInt(options.limit),
					cursor: parseOptionalInt(options.page),
					topic: options.topic,
				};

				commandLogger.debug(
					'Calling controller with parameters:',
					controllerOptions,
				);

				// Delegate straight to the controller and print its Markdown.
				const result =
					await diffController.branchDiff(controllerOptions);
				console.log(result.content);
			} catch (error) {
				commandLogger.error('Operation failed:', error);
				handleCliError(error);
			}
		});

	// Commit diff command
	program
		.command('diff-commits')
		.description(
			'Display differences between two commits in a repository.\nIMPORTANT: For proper results, the parameter order can matter. If you see "No changes detected", try reversing the commit order.',
		)
		.option(
			'-w, --workspace-slug <workspaceSlug>',
			'Workspace slug containing the repository. If not provided, the system will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account).',
		)
		.requiredOption(
			'-r, --repo-slug <repoSlug>',
			'Repository slug where the commits are located. Example: "my-repo"',
		)
		.requiredOption(
			'-s, --since-commit <sinceCommit>',
			'Commit hash for the newer/later commit. Example: "a1b2c3d4"',
		)
		.requiredOption(
			'-u, --until-commit <untilCommit>',
			'Commit hash for the older/earlier commit. Example: "e5f6g7h8"',
		)
		.option(
			'--full-diff <boolean>',
			'Whether to include the full diff in the response. Defaults to true.',
			parseBooleanFlag,
		)
		.option(
			'-l, --limit <number>',
			'Maximum number of files to show in the diff (1-100). Defaults to 25 if omitted.',
		)
		.option(
			'-p, --page <number>',
			'Page number for pagination. Starts at 1. Use with limit to paginate results.',
		)
		.action(async (options) => {
			const commandLogger = cliLogger.forMethod('diff-commits');
			try {
				commandLogger.debug('Processing command options:', options);

				// Translate CLI options into controller parameters,
				// applying only type conversions and defaults.
				const controllerOptions = {
					workspaceSlug: options.workspaceSlug,
					repoSlug: options.repoSlug,
					sinceCommit: options.sinceCommit,
					untilCommit: options.untilCommit,
					includeFullDiff:
						options.fullDiff === undefined
							? true
							: options.fullDiff,
					limit: parseOptionalInt(options.limit),
					cursor: parseOptionalInt(options.page),
				};

				commandLogger.debug(
					'Calling controller with parameters:',
					controllerOptions,
				);

				// Delegate straight to the controller and print its Markdown.
				const result =
					await diffController.commitDiff(controllerOptions);
				console.log(result.content);
			} catch (error) {
				commandLogger.error('Operation failed:', error);
				handleCliError(error);
			}
		});

	log.debug('CLI commands registered successfully');
}

export default { register };

```

--------------------------------------------------------------------------------
/src/utils/transport.util.test.ts:
--------------------------------------------------------------------------------

```typescript
import { getAtlassianCredentials, fetchAtlassian } from './transport.util.js';
import { config } from './config.util.js';

/**
 * Generic response type for testing
 */
interface TestResponse {
	values: Array<Record<string, unknown>>;
	next?: string;
	total?: number;
}

// NOTE: We are no longer mocking fetch or logger, using real implementations instead

// Integration suite: hits the real Atlassian/Bitbucket API with whatever
// credentials the environment provides, and skips silently when none exist.
describe('Transport Utility', () => {
	// Load configuration before all tests
	beforeAll(() => {
		// Load configuration from all sources
		config.load();
	});

	describe('getAtlassianCredentials', () => {
		it('should return credentials when environment variables are set', () => {
			// This test will be skipped if credentials are not available
			const credentials = getAtlassianCredentials();
			if (!credentials) {
				return; // Skip silently - no credentials available for testing
			}

			// Check if the credentials are for standard Atlassian or Bitbucket-specific
			if (credentials.useBitbucketAuth) {
				// Verify the Bitbucket-specific credentials
				expect(credentials).toHaveProperty('bitbucketUsername');
				expect(credentials).toHaveProperty('bitbucketAppPassword');
				expect(credentials).toHaveProperty('useBitbucketAuth');

				// Verify the credentials are not empty
				expect(credentials.bitbucketUsername).toBeTruthy();
				expect(credentials.bitbucketAppPassword).toBeTruthy();
				expect(credentials.useBitbucketAuth).toBe(true);
			} else {
				// Verify the standard Atlassian credentials
				expect(credentials).toHaveProperty('userEmail');
				expect(credentials).toHaveProperty('apiToken');

				// Verify the credentials are not empty
				expect(credentials.userEmail).toBeTruthy();
				expect(credentials.apiToken).toBeTruthy();
				// Note: siteName is optional for API tokens
			}
		});

		it('should return null and log a warning when environment variables are missing', () => {
			// Store original environment variables
			// (shallow snapshot of process.env so we can restore it below)
			const originalEnv = { ...process.env };

			// Clear relevant environment variables to simulate missing credentials
			delete process.env.ATLASSIAN_SITE_NAME;
			delete process.env.ATLASSIAN_USER_EMAIL;
			delete process.env.ATLASSIAN_API_TOKEN;
			delete process.env.ATLASSIAN_BITBUCKET_USERNAME;
			delete process.env.ATLASSIAN_BITBUCKET_APP_PASSWORD;

			// Force reload configuration
			// NOTE(review): ConfigLoader caches after the first load(), so this
			// call may be a no-op; the delete statements above are what actually
			// remove the credentials. Confirm against config.util.ts.
			config.load();

			// Call the function
			const credentials = getAtlassianCredentials();

			// Verify the result is null
			expect(credentials).toBeNull();

			// Restore original environment
			// (replaces the whole process.env object with the snapshot)
			process.env = originalEnv;

			// Reload config with original environment
			// NOTE(review): same caching caveat as above applies here.
			config.load();
		});
	});

	describe('fetchAtlassian', () => {
		it('should successfully fetch data from the Atlassian API', async () => {
			// This test will be skipped if credentials are not available
			const credentials = getAtlassianCredentials();
			if (!credentials) {
				return; // Skip silently - no credentials available for testing
			}

			// Make a call to a real API endpoint
			// For Bitbucket, we'll use the workspaces endpoint
			const result = await fetchAtlassian<TestResponse>(
				credentials,
				'/2.0/workspaces',
				{
					method: 'GET',
					headers: {
						'Content-Type': 'application/json',
					},
				},
			);

			// Verify the response structure from real API
			expect(result).toHaveProperty('values');
			expect(Array.isArray(result.values)).toBe(true);
			// Different property names than mocked data to match actual API response
			if (result.values.length > 0) {
				// Verify an actual workspace result
				const workspace = result.values[0];
				expect(workspace).toHaveProperty('uuid');
				expect(workspace).toHaveProperty('name');
				expect(workspace).toHaveProperty('slug');
			}
		}, 15000); // Increased timeout for real API call

		it('should handle API errors correctly', async () => {
			// This test will be skipped if credentials are not available
			const credentials = getAtlassianCredentials();
			if (!credentials) {
				return; // Skip silently - no credentials available for testing
			}

			// Call a non-existent endpoint and expect it to throw
			await expect(
				fetchAtlassian(credentials, '/2.0/non-existent-endpoint'),
			).rejects.toThrow();
		}, 15000); // Increased timeout for real API call

		it('should normalize paths that do not start with a slash', async () => {
			// This test will be skipped if credentials are not available
			const credentials = getAtlassianCredentials();
			if (!credentials) {
				return; // Skip silently - no credentials available for testing
			}

			// Call the function with a path that doesn't start with a slash;
			// a successful response implies the transport prefixed the slash.
			const result = await fetchAtlassian<TestResponse>(
				credentials,
				'2.0/workspaces',
				{
					method: 'GET',
				},
			);

			// Verify the response structure from real API
			expect(result).toHaveProperty('values');
			expect(Array.isArray(result.values)).toBe(true);
		}, 15000); // Increased timeout for real API call

		it('should support custom request options', async () => {
			// This test will be skipped if credentials are not available
			const credentials = getAtlassianCredentials();
			if (!credentials) {
				return; // Skip silently - no credentials available for testing
			}

			// Custom request options with pagination
			const options = {
				method: 'GET' as const,
				headers: {
					Accept: 'application/json',
					'Content-Type': 'application/json',
				},
			};

			// Call a real endpoint with pagination parameter
			const result = await fetchAtlassian<TestResponse>(
				credentials,
				'/2.0/workspaces?pagelen=1',
				options,
			);

			// Verify the response structure from real API
			expect(result).toHaveProperty('values');
			expect(Array.isArray(result.values)).toBe(true);
			expect(result.values.length).toBeLessThanOrEqual(1); // Should respect pagelen=1
		}, 15000); // Increased timeout for real API call
	});
});

```

--------------------------------------------------------------------------------
/src/controllers/atlassian.diff.formatter.ts:
--------------------------------------------------------------------------------

```typescript
import { DiffstatResponse } from '../services/vendor.atlassian.repositories.diff.types.js';
import {
	formatHeading,
	formatSeparator,
	formatDate,
	formatDiff,
} from '../utils/formatter.util.js';

/**
 * Format diffstat results into Markdown
 *
 * @param diffstat - Diffstat response containing file changes
 * @param baseBranchOrCommit - Name of the base branch or commit (source of the diff)
 * @param targetBranchOrCommit - Name of the target branch or commit (destination of the diff)
 * @returns Formatted Markdown string
 */
export function formatDiffstat(
	diffstat: DiffstatResponse,
	baseBranchOrCommit: string,
	targetBranchOrCommit: string,
): string {
	const files = diffstat.values || [];
	const out: string[] = [];

	// Title section
	out.push(
		formatHeading(
			`Diff: ${baseBranchOrCommit} → ${targetBranchOrCommit}`,
			1,
		),
		'',
	);

	// Empty diffstat: emit troubleshooting guidance instead of a file list.
	if (files.length === 0) {
		out.push(
			'*No changes detected in the diffstat response. This might occur when:*',
			'- The commits or branches are identical',
			'- The changes are purely structural (e.g., merge commits without content changes)',
			'- The parameters need to be specified in a different order',
			'',
			'**Try the following:**',
			'1. For branch comparisons: Reverse the source and destination branch parameters',
			'2. For commit comparisons: Ensure newer commit is `sinceCommit` and older commit is `untilCommit`',
			'3. Check that both references exist and you have access to them',
			'',
			formatSeparator(),
			`*Information retrieved at: ${formatDate(new Date())}*`,
		);
		return out.join('\n');
	}

	// Aggregate line and conflict statistics across all files.
	let totalAdditions = 0;
	let totalDeletions = 0;
	let conflictedFiles = 0;
	for (const file of files) {
		if (file.lines_added) totalAdditions += file.lines_added;
		if (file.lines_removed) totalDeletions += file.lines_removed;
		if (file.status === 'merge conflict') conflictedFiles++;
	}

	// English pluralization helper for the summary strings below.
	const plural = (n: number) => (n !== 1 ? 's' : '');

	out.push(formatHeading('Summary', 2));
	out.push(
		`${files.length} file${plural(files.length)} changed with ${totalAdditions} insertion${plural(totalAdditions)} and ${totalDeletions} deletion${plural(totalDeletions)}.`,
	);

	if (conflictedFiles > 0) {
		out.push('');
		out.push(
			`⚠️ **Merge conflicts detected in ${conflictedFiles} file${plural(conflictedFiles)}.**`,
		);
	}

	out.push('');

	// Cap the per-file listing at a readable size.
	const maxFilesToShow = 20;
	const shownFiles = files.slice(0, maxFilesToShow);

	// File list with changes
	out.push(formatHeading('Files Changed', 2));
	out.push('');

	for (const file of shownFiles) {
		const deltas: string[] = [];
		if (file.lines_added) deltas.push(`+${file.lines_added}`);
		if (file.lines_removed) deltas.push(`-${file.lines_removed}`);
		const changeStr = deltas.length > 0 ? ` (${deltas.join(', ')})` : '';

		// A rename or delete may leave either side of the path pair unset.
		const filePath = file.new?.path || file.old?.path || '(unnamed file)';

		// Show path, changes, and a marker when the file is conflicted.
		const suffix = file.status === 'merge conflict' ? ' **CONFLICT**' : '';
		out.push(`- \`${filePath}\`${changeStr}${suffix}`);
	}

	if (files.length > maxFilesToShow) {
		out.push('');
		out.push(`... and ${files.length - maxFilesToShow} more files`);
	}

	// Standard footer
	out.push('');
	out.push(formatSeparator());
	out.push(`*Information retrieved at: ${formatDate(new Date())}*`);

	return out.join('\n');
}

/**
 * Format complete diff results, including diffstat summary and raw diff
 *
 * @param diffstat - Diffstat response containing file changes
 * @param rawDiff - Raw unified diff text
 * @param baseBranchOrCommit - Name of the base branch or commit
 * @param targetBranchOrCommit - Name of the target branch or commit
 * @returns Formatted Markdown string
 */
export function formatFullDiff(
	diffstat: DiffstatResponse,
	rawDiff: string,
	baseBranchOrCommit: string,
	targetBranchOrCommit: string,
): string {
	const diffstatMd = formatDiffstat(
		diffstat,
		baseBranchOrCommit,
		targetBranchOrCommit,
	);

	// If there's a raw diff but empty diffstat, we should still show the raw diff
	// This can happen with structural changes like merges
	if (
		rawDiff &&
		rawDiff.trim() !== '' &&
		(diffstat.values || []).length === 0
	) {
		const lines = diffstatMd.split('\n');

		// Replace the "No changes detected" message with a more accurate one.
		// The splice removes exactly 6 lines that formatDiffstat emits for an
		// empty diffstat (intro line, three bullets, blank line, and the
		// '**Try the following:**' header); the numbered suggestions that
		// follow them are intentionally kept.
		const messageStartIndex = lines.findIndex((line) =>
			line.includes('*No changes detected'),
		);
		if (messageStartIndex >= 0) {
			lines.splice(
				messageStartIndex,
				6,
				'*No file changes in diffstat but raw diff content was found. This often happens with:*',
				'- Merge commits or trivial merges',
				'- Rename-only changes without content modifications',
				'- Changes to file metadata or permissions without content changes',
				'',
				'If the diff content below is not what you expected, try reversing the parameter order:',
				'- For branch comparisons: swap source and destination branch values',
				'- For commit comparisons: swap sinceCommit and untilCommit values',
			);
		}

		// Insert section heading for the raw diff before the footer
		// NOTE(review): this assumes the first line containing '---' is the
		// footer separator produced by formatSeparator() — confirm that no
		// earlier message line can contain '---'.
		const separatorIndex = lines.findIndex((line) => line.includes('---'));
		if (separatorIndex >= 0) {
			lines.splice(
				separatorIndex,
				0,
				'',
				formatHeading('Raw Diff Content', 2),
				'',
			);
			// The insertion above shifted the separator 3 slots down, so this
			// places the diff body between the new heading and the footer.
			lines.splice(separatorIndex + 3, 0, formatDiff(rawDiff));
		}

		return lines.join('\n');
	}

	// No raw diff to append: the diffstat summary is the whole output.
	if (!rawDiff || rawDiff.trim() === '') {
		return diffstatMd;
	}

	const lines = diffstatMd.split('\n');

	// Insert section heading for the raw diff
	// Insert before the standard footer (which is the last 2 lines)
	// NOTE(review): formatDiffstat actually ends with 3 footer lines (blank
	// line, separator, timestamp); length - 3 targets the blank line that
	// precedes the separator, so the inserted content lands above the footer.
	lines.splice(lines.length - 3, 0, '', formatHeading('Code Changes', 2), '');
	lines.splice(lines.length - 3, 0, formatDiff(rawDiff));

	return lines.join('\n');
}

```

--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------

```typescript
#!/usr/bin/env node
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import { Logger } from './utils/logger.util.js';
import { config } from './utils/config.util.js';
import { VERSION, PACKAGE_NAME } from './utils/constants.util.js';
import { runCli } from './cli/index.js';
import type { Request, Response } from 'express';
import express from 'express';
import cors from 'cors';

// Import Bitbucket-specific tools
import atlassianWorkspaces from './tools/atlassian.workspaces.tool.js';
import atlassianRepositories from './tools/atlassian.repositories.tool.js';
import atlassianPullRequests from './tools/atlassian.pullrequests.tool.js';
import atlassianSearch from './tools/atlassian.search.tool.js';
import atlassianDiff from './tools/atlassian.diff.tool.js';

// Create a contextualized logger for this file
const indexLogger = Logger.forContext('index.ts');

// Log initialization at debug level
indexLogger.debug('Bitbucket MCP server module loaded');

// Module-level singletons assigned by startServer(); holding them here lets
// the graceful-shutdown path (set up after the server connects) reach the
// live server and transport instances. Null until startServer() runs.
let serverInstance: McpServer | null = null;
let transportInstance:
	| StreamableHTTPServerTransport
	| StdioServerTransport
	| null = null;

/**
 * Start the MCP server with the specified transport mode.
 *
 * Creates the McpServer, registers all Bitbucket tools, and connects the
 * requested transport. The server and transport are stored in module state
 * so the graceful-shutdown handler can close them later.
 *
 * @param mode The transport mode to use (stdio or http)
 * @returns Promise that resolves to the server instance when started successfully
 * @throws If the HTTP listener fails to bind (e.g. the port is already in use)
 */
export async function startServer(
	mode: 'stdio' | 'http' = 'stdio',
): Promise<McpServer> {
	const serverLogger = Logger.forContext('index.ts', 'startServer');

	// Load configuration
	serverLogger.info('Starting MCP server initialization...');
	config.load();

	if (config.getBoolean('DEBUG')) {
		serverLogger.debug('Debug mode enabled');
	}

	serverLogger.info(`Initializing Bitbucket MCP server v${VERSION}`);
	serverInstance = new McpServer({
		name: PACKAGE_NAME,
		version: VERSION,
	});

	// Register all tools
	serverLogger.info('Registering MCP tools...');
	atlassianWorkspaces.registerTools(serverInstance);
	atlassianRepositories.registerTools(serverInstance);
	atlassianPullRequests.registerTools(serverInstance);
	atlassianSearch.registerTools(serverInstance);
	atlassianDiff.registerTools(serverInstance);
	serverLogger.info('All tools registered successfully');

	if (mode === 'stdio') {
		// STDIO Transport
		serverLogger.info('Using STDIO transport for MCP communication');
		transportInstance = new StdioServerTransport();

		try {
			await serverInstance.connect(transportInstance);
			serverLogger.info(
				'MCP server started successfully on STDIO transport',
			);
			setupGracefulShutdown();
			return serverInstance;
		} catch (err) {
			serverLogger.error(
				'Failed to start server on STDIO transport',
				err,
			);
			process.exit(1);
		}
	} else {
		// HTTP Transport with Express
		serverLogger.info(
			'Using Streamable HTTP transport for MCP communication',
		);

		const app = express();
		app.use(cors());
		app.use(express.json());

		const mcpEndpoint = '/mcp';
		serverLogger.debug(`MCP endpoint: ${mcpEndpoint}`);

		// Create transport instance (stateless: no session id generation)
		const transport = new StreamableHTTPServerTransport({
			sessionIdGenerator: undefined,
		});

		// Connect server to transport
		await serverInstance.connect(transport);
		transportInstance = transport;

		// Handle all MCP requests
		app.all(mcpEndpoint, (req: Request, res: Response) => {
			transport
				.handleRequest(req, res, req.body)
				.catch((err: unknown) => {
					serverLogger.error('Error in transport.handleRequest', err);
					if (!res.headersSent) {
						res.status(500).json({
							error: 'Internal Server Error',
						});
					}
				});
		});

		// Health check endpoint
		app.get('/', (_req: Request, res: Response) => {
			res.send(`Bitbucket MCP Server v${VERSION} is running`);
		});

		// Start HTTP server.
		// BUGFIX: the previous implementation only resolved on successful
		// listen; a bind failure (e.g. EADDRINUSE) was emitted as an 'error'
		// event on the server object and the returned promise never settled,
		// leaving startup hung with no surfaced error. Reject on 'error' so
		// failures propagate to the caller.
		const PORT = Number(process.env.PORT ?? 3000);
		await new Promise<void>((resolve, reject) => {
			const httpServer = app.listen(PORT, () => {
				serverLogger.info(
					`HTTP transport listening on http://localhost:${PORT}${mcpEndpoint}`,
				);
				resolve();
			});
			httpServer.once('error', reject);
		});

		setupGracefulShutdown();
		return serverInstance;
	}
}

/**
 * Main entry point - runs when this file is executed directly.
 *
 * Dispatches to CLI mode when command-line arguments are present;
 * otherwise starts the MCP server with the transport selected via the
 * TRANSPORT_MODE environment variable (defaulting to stdio).
 */
async function main() {
	const mainLogger = Logger.forContext('index.ts', 'main');

	// Load configuration
	config.load();

	// CLI mode - any extra argv entries mean a CLI command was given
	const cliArgs = process.argv.slice(2);
	if (cliArgs.length > 0) {
		mainLogger.info('Starting in CLI mode');
		await runCli(cliArgs);
		mainLogger.info('CLI execution completed');
		return;
	}

	// Server mode - resolve the transport from the environment
	const requested = (process.env.TRANSPORT_MODE || 'stdio').toLowerCase();
	let mode: 'http' | 'stdio' = 'stdio';
	if (requested === 'http') {
		mode = 'http';
	} else if (requested !== 'stdio') {
		// Unknown values fall back to stdio with a warning
		mainLogger.warn(
			`Unknown TRANSPORT_MODE "${requested}", defaulting to stdio`,
		);
	}

	mainLogger.info(`Starting server with ${mode.toUpperCase()} transport`);
	await startServer(mode);
	mainLogger.info('Server is now running');
}

/**
 * Install SIGINT/SIGTERM handlers that close the transport and server
 * (in that order) before exiting the process.
 */
function setupGracefulShutdown() {
	const shutdownLogger = Logger.forContext('index.ts', 'shutdown');

	const shutdown = async () => {
		try {
			shutdownLogger.info('Shutting down gracefully...');

			// Snapshot the module-level handles before closing them
			const transport = transportInstance;
			if (
				transport &&
				'close' in transport &&
				typeof transport.close === 'function'
			) {
				await transport.close();
			}

			const server = serverInstance;
			if (server && typeof server.close === 'function') {
				await server.close();
			}

			process.exit(0);
		} catch (err) {
			shutdownLogger.error('Error during shutdown', err);
			process.exit(1);
		}
	};

	for (const signal of ['SIGINT', 'SIGTERM'] as const) {
		process.on(signal, shutdown);
	}
}

// If this file is being executed directly (not imported), run the main function.
// NOTE(review): `require.main === module` assumes CommonJS output; confirm the
// build target compiles to CJS (under native ESM `require` is undefined).
if (require.main === module) {
	main().catch((err) => {
		indexLogger.error('Unhandled error in main process', err);
		process.exit(1);
	});
}

```

--------------------------------------------------------------------------------
/src/services/vendor.atlassian.workspaces.service.ts:
--------------------------------------------------------------------------------

```typescript
import { z } from 'zod';
import {
	createAuthMissingError,
	createApiError,
	McpError,
} from '../utils/error.util.js';
import { Logger } from '../utils/logger.util.js';
import {
	fetchAtlassian,
	getAtlassianCredentials,
} from '../utils/transport.util.js';
import {
	WorkspaceDetailedSchema,
	WorkspacePermissionsResponseSchema,
	ListWorkspacesParamsSchema,
	type ListWorkspacesParams,
} from './vendor.atlassian.workspaces.types.js';

/**
 * Base API path for Bitbucket REST API v2
 * @see https://developer.atlassian.com/cloud/bitbucket/rest/api-group-workspaces/
 * @constant {string}
 */
const API_PATH = '/2.0';

/**
 * @namespace VendorAtlassianWorkspacesService
 * @description Service for interacting with Bitbucket Workspaces API.
 * Provides methods for listing workspaces and retrieving workspace details.
 * All methods require valid Atlassian credentials configured in the environment.
 */

// Create a contextualized logger for this file; each exported method also
// creates its own method-scoped logger via Logger.forContext
const serviceLogger = Logger.forContext(
	'services/vendor.atlassian.workspaces.service.ts',
);

// Log service initialization
serviceLogger.debug('Bitbucket workspaces service initialized');

/**
 * List Bitbucket workspaces with optional filtering and pagination
 *
 * Retrieves a list of workspaces from Bitbucket with support for various filters
 * and pagination options.
 *
 * NOTE: The /2.0/user/permissions/workspaces endpoint does not support sorting,
 * despite the ListWorkspacesParams type including a sort parameter.
 *
 * @async
 * @memberof VendorAtlassianWorkspacesService
 * @param {ListWorkspacesParams} [params={}] - Optional parameters for customizing the request
 * @param {string} [params.q] - Filter by workspace name
 * @param {number} [params.page] - Page number
 * @param {number} [params.pagelen] - Number of items per page
 * @returns {Promise<z.infer<typeof WorkspacePermissionsResponseSchema>>} Promise containing the validated workspaces response
 * @throws {McpError} If validation fails, credentials are missing, or API request fails
 * @example
 * // List workspaces with pagination
 * const response = await list({
 *   pagelen: 10
 * });
 */
async function list(
	params: ListWorkspacesParams = {},
): Promise<z.infer<typeof WorkspacePermissionsResponseSchema>> {
	const methodLogger = Logger.forContext(
		'services/vendor.atlassian.workspaces.service.ts',
		'list',
	);
	methodLogger.debug('Listing Bitbucket workspaces with params:', params);

	// Validate params with Zod
	try {
		ListWorkspacesParamsSchema.parse(params);
	} catch (error) {
		if (error instanceof z.ZodError) {
			methodLogger.error(
				'Invalid parameters provided to list workspaces:',
				error.format(),
			);
			throw createApiError(
				`Invalid parameters for listing workspaces: ${error.issues.map((e) => e.message).join(', ')}`,
				400,
				error,
			);
		}
		throw error;
	}

	const credentials = getAtlassianCredentials();
	if (!credentials) {
		throw createAuthMissingError(
			'Atlassian credentials are required for this operation',
		);
	}

	// Build query parameters
	const queryParams = new URLSearchParams();

	// Add optional query parameters if provided
	// NOTE: Sort is intentionally not included as the /2.0/user/permissions/workspaces endpoint
	// does not support sorting on any field
	if (params.q) {
		queryParams.set('q', params.q);
	}
	if (params.pagelen) {
		queryParams.set('pagelen', params.pagelen.toString());
	}
	if (params.page) {
		queryParams.set('page', params.page.toString());
	}

	const queryString = queryParams.toString()
		? `?${queryParams.toString()}`
		: '';
	const path = `${API_PATH}/user/permissions/workspaces${queryString}`;

	methodLogger.debug(`Sending request to: ${path}`);
	try {
		const rawData = await fetchAtlassian(credentials, path);
		// Validate response with Zod schema
		try {
			const validatedData =
				WorkspacePermissionsResponseSchema.parse(rawData);
			return validatedData;
		} catch (error) {
			if (error instanceof z.ZodError) {
				methodLogger.error(
					'Invalid response from Bitbucket API:',
					error.format(),
				);
				throw createApiError(
					`Invalid response format from Bitbucket API for workspace list: ${error.message}`,
					500,
					error,
				);
			}
			throw error;
		}
	} catch (error) {
		if (error instanceof McpError) {
			throw error;
		}
		throw createApiError(
			`Failed to list workspaces: ${error instanceof Error ? error.message : String(error)}`,
			500,
			error,
		);
	}
}

/**
 * Get detailed information about a specific Bitbucket workspace
 *
 * Retrieves comprehensive details about a single workspace.
 *
 * @async
 * @memberof VendorAtlassianWorkspacesService
 * @param {string} workspace - The workspace slug
 * @returns {Promise<z.infer<typeof WorkspaceDetailedSchema>>} Promise containing the validated workspace information
 * @throws {McpError} If validation fails, credentials are missing, or API request fails
 * @example
 * // Get workspace details
 * const workspace = await get('my-workspace');
 */
async function get(
	workspace: string,
): Promise<z.infer<typeof WorkspaceDetailedSchema>> {
	const methodLogger = Logger.forContext(
		'services/vendor.atlassian.workspaces.service.ts',
		'get',
	);
	methodLogger.debug(`Getting Bitbucket workspace with slug: ${workspace}`);

	const credentials = getAtlassianCredentials();
	if (!credentials) {
		throw createAuthMissingError(
			'Atlassian credentials are required for this operation',
		);
	}

	// Currently no query parameters for workspace details API
	const path = `${API_PATH}/workspaces/${workspace}`;

	methodLogger.debug(`Sending request to: ${path}`);
	try {
		const rawData = await fetchAtlassian(credentials, path);
		// Validate response with Zod schema
		try {
			const validatedData = WorkspaceDetailedSchema.parse(rawData);
			return validatedData;
		} catch (error) {
			if (error instanceof z.ZodError) {
				methodLogger.error(
					'Invalid response from Bitbucket API:',
					error.format(),
				);
				throw createApiError(
					`Invalid response format from Bitbucket API for workspace details: ${error.message}`,
					500,
					error,
				);
			}
			throw error;
		}
	} catch (error) {
		if (error instanceof McpError) {
			throw error;
		}
		throw createApiError(
			`Failed to get workspace details: ${error instanceof Error ? error.message : String(error)}`,
			500,
			error,
		);
	}
}

export default { list, get };

```

--------------------------------------------------------------------------------
/src/utils/error.util.test.ts:
--------------------------------------------------------------------------------

```typescript
import { describe, expect, test } from '@jest/globals';
import {
	ErrorType,
	McpError,
	createApiError,
	createAuthMissingError,
	createAuthInvalidError,
	createUnexpectedError,
	ensureMcpError,
	formatErrorForMcpTool,
	formatErrorForMcpResource,
	getDeepOriginalError,
} from './error.util.js';

describe('Error Utilities', () => {
	describe('Error creation functions', () => {
		test('createAuthMissingError creates an error with AUTH_MISSING type', () => {
			const error = createAuthMissingError('Missing credentials');
			expect(error).toBeInstanceOf(McpError);
			expect(error.type).toBe(ErrorType.AUTH_MISSING);
			expect(error.message).toBe('Missing credentials');
			expect(error.statusCode).toBeUndefined();
		});

		test('createAuthInvalidError creates an error with AUTH_INVALID type and 401 status', () => {
			const error = createAuthInvalidError('Invalid token');
			expect(error).toBeInstanceOf(McpError);
			expect(error.type).toBe(ErrorType.AUTH_INVALID);
			expect(error.message).toBe('Invalid token');
			expect(error.statusCode).toBe(401);
		});

		test('createApiError creates an error with API_ERROR type and specified status', () => {
			const error = createApiError('Not found', 404, {
				details: 'Resource missing',
			});
			expect(error).toBeInstanceOf(McpError);
			expect(error.type).toBe(ErrorType.API_ERROR);
			expect(error.message).toBe('Not found');
			expect(error.statusCode).toBe(404);
			expect(error.originalError).toEqual({
				details: 'Resource missing',
			});
		});

		test('createUnexpectedError creates an error with UNEXPECTED_ERROR type', () => {
			const originalError = new Error('Original error');
			const error = createUnexpectedError(
				'Something went wrong',
				originalError,
			);
			expect(error).toBeInstanceOf(McpError);
			expect(error.type).toBe(ErrorType.UNEXPECTED_ERROR);
			expect(error.message).toBe('Something went wrong');
			expect(error.statusCode).toBeUndefined();
			expect(error.originalError).toBe(originalError);
		});
	});

	describe('ensureMcpError function', () => {
		test('returns the error if it is already an McpError', () => {
			const error = createApiError('API error', 500);
			expect(ensureMcpError(error)).toBe(error);
		});

		test('wraps a standard Error with McpError', () => {
			const stdError = new Error('Standard error');
			const mcpError = ensureMcpError(stdError);
			expect(mcpError).toBeInstanceOf(McpError);
			expect(mcpError.message).toBe('Standard error');
			expect(mcpError.type).toBe(ErrorType.UNEXPECTED_ERROR);
			expect(mcpError.originalError).toBe(stdError);
		});

		test('wraps a string with McpError', () => {
			const mcpError = ensureMcpError('Error message');
			expect(mcpError).toBeInstanceOf(McpError);
			expect(mcpError.message).toBe('Error message');
			expect(mcpError.type).toBe(ErrorType.UNEXPECTED_ERROR);
		});

		test('wraps other types with McpError', () => {
			const mcpError = ensureMcpError({ message: 'Object error' });
			expect(mcpError).toBeInstanceOf(McpError);
			expect(mcpError.message).toBe('[object Object]');
			expect(mcpError.type).toBe(ErrorType.UNEXPECTED_ERROR);
		});
	});

	describe('getDeepOriginalError function', () => {
		test('returns the deepest error in a chain', () => {
			const deepestError = { message: 'Root cause' };
			const level3 = createApiError('Level 3', 500, deepestError);
			const level2 = createApiError('Level 2', 500, level3);
			const level1 = createApiError('Level 1', 500, level2);

			expect(getDeepOriginalError(level1)).toEqual(deepestError);
		});

		test('handles non-McpError values', () => {
			const originalValue = 'Original error text';
			expect(getDeepOriginalError(originalValue)).toBe(originalValue);
		});

		test('stops traversing at maximum depth', () => {
			// Create a circular error chain that would cause infinite recursion
			const circular1: any = new McpError(
				'Circular 1',
				ErrorType.API_ERROR,
			);
			const circular2: any = new McpError(
				'Circular 2',
				ErrorType.API_ERROR,
			);
			circular1.originalError = circular2;
			circular2.originalError = circular1;

			// Should not cause infinite recursion
			const result = getDeepOriginalError(circular1);

			// Expect either circular1 or circular2 depending on max depth
			expect([circular1, circular2]).toContain(result);
		});
	});

	describe('formatErrorForMcpTool function', () => {
		test('formats an McpError for MCP tool response', () => {
			const originalError = {
				code: 'NOT_FOUND',
				message: 'Repository does not exist',
			};
			const error = createApiError(
				'Resource not found',
				404,
				originalError,
			);

			const formatted = formatErrorForMcpTool(error);

			expect(formatted).toHaveProperty('content');
			expect(formatted.content[0].type).toBe('text');
			expect(formatted.content[0].text).toBe('Error: Resource not found');

			expect(formatted).toHaveProperty('metadata');
			expect(formatted.metadata?.errorType).toBe(ErrorType.API_ERROR);
			expect(formatted.metadata?.statusCode).toBe(404);
			expect(formatted.metadata?.errorDetails).toEqual(originalError);
		});

		test('formats a non-McpError for MCP tool response', () => {
			const error = new Error('Standard error');

			const formatted = formatErrorForMcpTool(error);

			expect(formatted).toHaveProperty('content');
			expect(formatted.content[0].type).toBe('text');
			expect(formatted.content[0].text).toBe('Error: Standard error');

			expect(formatted).toHaveProperty('metadata');
			expect(formatted.metadata?.errorType).toBe(
				ErrorType.UNEXPECTED_ERROR,
			);
		});

		test('extracts detailed error information from nested errors', () => {
			const deepError = {
				message: 'API quota exceeded',
				type: 'RateLimitError',
			};
			const midError = createApiError(
				'Rate limit exceeded',
				429,
				deepError,
			);
			const topError = createApiError('API error', 429, midError);

			const formatted = formatErrorForMcpTool(topError);

			expect(formatted.content[0].text).toBe('Error: API error');
			expect(formatted.metadata?.errorDetails).toEqual(deepError);
		});
	});

	describe('formatErrorForMcpResource', () => {
		it('should format an error for MCP resource response', () => {
			const error = createApiError('API error');
			const response = formatErrorForMcpResource(error, 'test://uri');

			expect(response).toHaveProperty('contents');
			expect(response.contents).toHaveLength(1);
			expect(response.contents[0]).toHaveProperty('uri', 'test://uri');
			expect(response.contents[0]).toHaveProperty(
				'text',
				'Error: API error',
			);
			expect(response.contents[0]).toHaveProperty(
				'mimeType',
				'text/plain',
			);
			expect(response.contents[0]).toHaveProperty(
				'description',
				'Error: API_ERROR',
			);
		});
	});
});

```

--------------------------------------------------------------------------------
/src/controllers/atlassian.search.formatter.ts:
--------------------------------------------------------------------------------

```typescript
import { ContentType } from '../utils/atlassian.util.js';
import { CodeSearchResult } from '../services/vendor.atlassian.search.service.js';
import {
	formatSeparator,
	formatDate,
	formatUrl,
} from '../utils/formatter.util.js';
import path from 'path';
import { getContentTypeDisplay } from '../utils/atlassian.util.js';

/**
 * Try to guess the syntax-highlighting language from a file path.
 *
 * Looks up the file extension first, then falls back to the lowercased
 * basename so extensionless files (e.g. `Dockerfile`) and dotfiles
 * (e.g. `.gitignore`) — for which `path.extname()` returns '' — are
 * still recognized.
 *
 * @param filePath - Repository-relative file path
 * @returns Markdown code-fence language hint, or '' when unknown
 */
function getLanguageHint(filePath: string): string {
	const ext = path.extname(filePath).toLowerCase();
	const extMap: Record<string, string> = {
		'.js': 'javascript',
		'.jsx': 'jsx',
		'.ts': 'typescript',
		'.tsx': 'tsx',
		'.py': 'python',
		'.java': 'java',
		'.rb': 'ruby',
		'.php': 'php',
		'.cs': 'csharp',
		'.go': 'go',
		'.rs': 'rust',
		'.c': 'c',
		'.cpp': 'cpp',
		'.h': 'c',
		'.hpp': 'cpp',
		'.tf': 'terraform',
		'.hcl': 'hcl',
		'.sh': 'bash',
		'.zsh': 'zsh',
		'.json': 'json',
		'.yaml': 'yaml',
		'.yml': 'yaml',
		'.xml': 'xml',
		'.md': 'markdown',
		'.sql': 'sql',
		'.dockerfile': 'dockerfile',
		'.gitignore': 'gitignore',
	};
	const byExt = extMap[ext];
	if (byExt) {
		return byExt;
	}

	// BUGFIX: the old map keyed `dockerfile` (no dot) against the
	// *extension*, but path.extname() never yields that value — dotfiles
	// and extensionless files produce '' — so those entries were dead.
	// Match on the basename instead.
	const base = path.basename(filePath).toLowerCase();
	const baseMap: Record<string, string> = {
		dockerfile: 'dockerfile',
		'.gitignore': 'gitignore',
	};
	return baseMap[base] || '';
}

/**
 * Format a single code search result into markdown.
 *
 * Renders a linked file heading, a match count, and the matching lines
 * inside a fenced code block (matched segments are wrapped in escaped
 * backticks).
 *
 * @param result The code search result to format
 * @returns Formatted markdown string
 */
function formatCodeSearchResult(result: CodeSearchResult): string {
	const filePath = result.file.path || 'Unknown File';
	const selfHref = result.file.links?.self?.href;
	const fileLink = selfHref ? formatUrl(selfHref, filePath) : filePath;

	const parts: string[] = [`### ${fileLink}\n\n`];

	// Match count summary with singular/plural handling
	const matchWord = result.content_match_count === 1 ? 'match' : 'matches';
	parts.push(`${result.content_match_count} ${matchWord} found\n\n`);

	// Open a fenced code block with a best-effort language hint
	parts.push('```' + getLanguageHint(filePath) + '\n');

	for (const contentMatch of result.content_matches) {
		for (const line of contentMatch.lines) {
			let rendered = `${line.line}: `;
			if (line.segments.length) {
				for (const segment of line.segments) {
					// Matched segments are wrapped in escaped backticks
					rendered += segment.match
						? `\\\`${segment.text}\\\``
						: segment.text;
				}
			}
			parts.push(rendered + '\n');
		}

		// Blank line between match groups only when multiple lines were shown
		if (contentMatch.lines.length > 1) {
			parts.push('\n');
		}
	}

	parts.push('```\n\n');

	return parts.join('');
}

/**
 * Format code search results into markdown.
 *
 * @param searchResponse The code search response from the API
 * @returns Markdown formatted string of code search results
 */
export function formatCodeSearchResults(searchResponse: {
	values?: CodeSearchResult[];
	size: number;
}): string {
	const results = searchResponse.values ?? [];

	// Standard footer with retrieval timestamp (used in both branches)
	const footer =
		formatSeparator() +
		'\n' +
		`*Information retrieved at: ${formatDate(new Date())}*`;

	if (results.length === 0) {
		// Footer is kept even for the empty state
		return '**No code matches found.**\n\n' + '\n\n' + footer;
	}

	const chunks: string[] = [
		`## Code Search Results\n\nFound ${searchResponse.size} matches for the code search query.\n\n`,
	];

	for (const result of results) {
		chunks.push(formatCodeSearchResult(result));
	}

	chunks.push('\n\n' + footer);

	return chunks.join('');
}

/**
 * Format content search results into markdown
 *
 * Results are handled generically because the content search API can return
 * heterogeneous entities; each entry is probed for a title/name, a link,
 * a type, dates, and a description/content body.
 *
 * @param response The content search response from the API
 * @param contentType Optional content type filter that was applied
 * @returns Markdown formatted string of content search results
 */
export function formatContentSearchResults(
	response: { values?: unknown[]; size: number },
	contentType?: ContentType,
): string {
	const results = response.values || [];

	if (!results || results.length === 0) {
		// Add standard footer even for empty state
		return (
			'**No content matches found.**\n\n' +
			'\n\n' +
			formatSeparator() +
			'\n' +
			`*Information retrieved at: ${formatDate(new Date())}*`
		);
	}

	// Start with a summary; heading reflects the applied content-type filter
	const typeStr = contentType
		? getContentTypeDisplay(contentType)
		: 'Content';
	let markdown = `## ${typeStr} Search Results\n\nFound ${response.size} matches for the content search query.\n\n`;

	// Format each result - this is generic as content results can vary widely
	results.forEach((result) => {
		// We need to handle result as a generic object since content types vary
		const typedResult = result as Record<string, unknown>;

		// Try to determine the type from the result
		const type = (typedResult.type as string) || 'Unknown';

		// Try to get a title/name; fall back to a summary truncated to 80 chars
		let title = 'Untitled';
		if (typedResult.title) {
			title = String(typedResult.title);
		} else if (typedResult.name) {
			title = String(typedResult.name);
		} else if (typedResult.summary) {
			const summary = String(typedResult.summary);
			title = summary.slice(0, 80) + (summary.length > 80 ? '...' : '');
		}

		// Try to get a link; the browser-facing html link is preferred over self.
		// NOTE(review): assumes the standard Bitbucket `links.{html,self}.href`
		// shape — confirm against the search service response types
		let link = '';
		const links = typedResult.links as
			| Record<string, { href?: string }>
			| undefined;
		if (links?.html?.href) {
			link = links.html.href;
		} else if (links?.self?.href) {
			link = links.self.href;
		}

		markdown += '### ';
		if (link) {
			markdown += formatUrl(link, title);
		} else {
			markdown += title;
		}
		markdown += '\n\n';

		// Add type information
		markdown += `**Type**: ${type}\n`;

		// Add update/created date if available (updated takes precedence)
		if (typedResult.updated_on) {
			markdown += `**Updated**: ${formatDate(typedResult.updated_on as string | Date)}\n`;
		} else if (typedResult.created_on) {
			markdown += `**Created**: ${formatDate(typedResult.created_on as string | Date)}\n`;
		}

		// Add description/content if available (capped at 500 chars to preserve readability)
		if (typedResult.description) {
			const description = String(typedResult.description);
			const limitedDesc =
				description.length > 500
					? description.slice(0, 500) + '...'
					: description;
			markdown += `\n${limitedDesc}\n\n`;
		} else if (typedResult.content) {
			const content = String(typedResult.content);
			const limitedContent =
				content.length > 500 ? content.slice(0, 500) + '...' : content;
			markdown += `\n${limitedContent}\n\n`;
		}

		markdown += '\n';
	});

	// Add standard footer with timestamp
	markdown += '\n' + formatSeparator();
	markdown += `\n*Information retrieved at: ${formatDate(new Date())}*`;

	return markdown;
}

```

--------------------------------------------------------------------------------
/src/controllers/atlassian.repositories.formatter.ts:
--------------------------------------------------------------------------------

```typescript
import {
	Repository,
	RepositoriesResponse,
	PaginatedCommits,
	Commit,
} from '../services/vendor.atlassian.repositories.types.js';
import { PullRequestsResponse } from '../services/vendor.atlassian.pullrequests.types.js';
import {
	formatUrl,
	formatHeading,
	formatBulletList,
	formatSeparator,
	formatNumberedList,
	formatDate,
} from '../utils/formatter.util.js';

/**
 * Format a list of repositories for display
 * @param repositoriesData - Raw repositories data from the API
 * @returns Formatted string with repositories information in markdown format
 */
export function formatRepositoriesList(
	repositoriesData: RepositoriesResponse,
): string {
	const repositories = repositoriesData.values || [];

	if (repositories.length === 0) {
		return 'No repositories found matching your criteria.';
	}

	const lines: string[] = [formatHeading('Bitbucket Repositories', 1), ''];

	// Format each repository with its details
	const formattedList = formatNumberedList(repositories, (repo, _index) => {
		const itemLines: string[] = [];
		itemLines.push(formatHeading(repo.name, 2));

		// Basic information
		const properties: Record<string, unknown> = {
			Name: repo.name,
			'Full Name': repo.full_name,
			Owner:
				repo.owner?.display_name || repo.owner?.username || 'Unknown',
			Description: repo.description || 'No description provided',
			'Project Key': repo.project?.key || 'N/A',
			Private: repo.is_private ? 'Yes' : 'No',
			Created: repo.created_on ? formatDate(repo.created_on) : 'N/A',
			Updated: repo.updated_on ? formatDate(repo.updated_on) : 'N/A',
			URL: repo.links?.html?.href
				? formatUrl(repo.links.html.href, repo.full_name)
				: 'N/A',
		};

		// Format as a bullet list
		itemLines.push(formatBulletList(properties, (key) => key));

		return itemLines.join('\n');
	});

	lines.push(formattedList);

	// Add standard footer with timestamp
	lines.push('\n\n' + formatSeparator());
	lines.push(`*Information retrieved at: ${formatDate(new Date())}*`);

	return lines.join('\n');
}

/**
 * Format detailed repository information for display.
 *
 * @param repositoryData - Raw repository data from the API
 * @param pullRequestsData - Optional pull requests data for this repository
 * @returns Markdown with basic info, owner, links, recent PRs, and a footer
 */
export function formatRepositoryDetails(
	repositoryData: Repository,
	pullRequestsData?: PullRequestsResponse | null,
): string {
	const repoUrl = repositoryData.links?.html?.href || '';

	const out: string[] = [
		formatHeading(`Repository: ${repositoryData.name}`, 1),
		'',
		`> A ${repositoryData.is_private ? 'private' : 'public'} repository in the \`${repositoryData.full_name}\` workspace.`,
		'',
		formatHeading('Basic Information', 2),
	];

	// Core repository attributes as a bullet list
	const basics: Record<string, unknown> = {
		Name: repositoryData.name,
		'Full Name': repositoryData.full_name,
		UUID: repositoryData.uuid,
		Description: repositoryData.description || 'No description provided',
		Language: repositoryData.language || 'Not specified',
		'Main Branch': repositoryData.mainbranch?.name || 'N/A',
		Private: repositoryData.is_private ? 'Yes' : 'No',
		Size: repositoryData.size
			? `${(repositoryData.size / 1024).toFixed(2)} KB`
			: 'Unknown',
		'Created On': repositoryData.created_on
			? formatDate(repositoryData.created_on)
			: 'N/A',
		'Updated On': repositoryData.updated_on
			? formatDate(repositoryData.updated_on)
			: 'N/A',
	};
	out.push(formatBulletList(basics, (key) => key));

	// Owner section (only when owner data is present)
	if (repositoryData.owner) {
		out.push('', formatHeading('Owner', 2));
		const ownerInfo: Record<string, unknown> = {
			Name:
				repositoryData.owner.display_name ||
				repositoryData.owner.username ||
				'Unknown',
			Type: repositoryData.owner.type || 'Not specified',
		};
		out.push(formatBulletList(ownerInfo, (key) => key));
	}

	// Links section
	out.push('', formatHeading('Links', 2));
	if (repoUrl) {
		out.push(`- ${formatUrl(repoUrl, 'Open in Bitbucket')}`);
	}

	// Recent pull requests: the section heading and the trailing "view all"
	// link are emitted in both cases, so only the middle differs.
	out.push('', formatHeading('Recent Pull Requests', 2));
	const prs = pullRequestsData?.values ?? [];
	if (prs.length > 0) {
		const recent = prs.slice(0, 25); // Ensure max 25
		out.push(
			formatNumberedList(
				recent,
				(pr) =>
					`**#${pr.id}**: [${pr.title}](${pr.links.html?.href || '#'}) - ${pr.state} by ${pr.author.display_name || 'Unknown'} (${formatDate(pr.updated_on)})`,
			),
		);
	} else {
		out.push('No open pull requests found for this repository.');
	}
	if (repoUrl) {
		out.push(
			`*View all pull requests in Bitbucket: [${repositoryData.full_name}/pull-requests](${repoUrl}/pull-requests)*`,
		);
	}

	// Standard footer with retrieval timestamp
	out.push('\n\n' + formatSeparator());
	out.push(`*Information retrieved at: ${formatDate(new Date())}*`);

	// Optionally keep the direct link
	if (repoUrl) {
		out.push(`*View this repository in Bitbucket: ${repoUrl}*`);
	}

	return out.join('\n');
}

/**
 * Format commit history for display.
 * @param commitsData - Raw paginated commits data from the API.
 * @param options - Filtering options used to retrieve the history.
 * @returns Formatted string with commit history in markdown format.
 */
export function formatCommitHistory(
	commitsData: PaginatedCommits,
	options: { revision?: string; path?: string } = {},
): string {
	const commits = commitsData.values || [];

	if (commits.length === 0) {
		return 'No commits found matching your criteria.';
	}

	const headerParts = ['Commit History'];
	if (options.revision) {
		headerParts.push(`for revision \`${options.revision}\``);
	}
	if (options.path) {
		headerParts.push(`on path \`${options.path}\``);
	}

	const lines: string[] = [formatHeading(headerParts.join(' '), 1), ''];

	const formattedList = formatNumberedList(commits, (commit: Commit) => {
		const commitLines: string[] = [];
		const author =
			commit.author?.user?.display_name ||
			commit.author?.raw ||
			'Unknown';
		const commitUrl = commit.links?.html?.href;
		const shortHash = commit.hash.substring(0, 7);

		// Header: Hash (linked) - Date
		commitLines.push(
			`**${commitUrl ? formatUrl(commitUrl, shortHash) : shortHash}** - ${formatDate(commit.date)}`,
		);

		// Author
		commitLines.push(` Author: ${author}`);

		// Message (indented blockquote)
		const message = commit.message.trim().replace(/\n/g, '\n > ');
		commitLines.push(' >');
		commitLines.push(` > ${message}`);

		return commitLines.join('\n');
	});

	lines.push(formattedList);

	// Add standard footer with timestamp
	lines.push('\n\n' + formatSeparator());
	lines.push(`*Information retrieved at: ${formatDate(new Date())}*`);

	return lines.join('\n');
}

```

--------------------------------------------------------------------------------
/src/utils/pagination.util.ts:
--------------------------------------------------------------------------------

```typescript
import { Logger } from './logger.util.js';
import { DATA_LIMITS } from './constants.util.js';
import { ResponsePagination } from '../types/common.types.js';

/**
 * Represents the possible pagination types.
 *
 * Chosen per endpoint by callers of extractPaginationInfo; each value maps
 * to a branch of the switch in that function.
 */
export enum PaginationType {
	CURSOR = 'cursor', // Confluence, Bitbucket (some endpoints)
	OFFSET = 'offset', // Jira
	PAGE = 'page', // Bitbucket (most endpoints)
}

/**
 * Interface representing the common structure of paginated data from APIs.
 * This union type covers properties used by offset, cursor, and page-based pagination.
 * All fields are optional because any given response only carries the subset
 * relevant to its own pagination scheme.
 */
interface PaginationData {
	// Shared
	results?: unknown[]; // Confluence result list
	values?: unknown[]; // Jira/Bitbucket result list
	count?: number;
	size?: number; // Total count in Bitbucket page responses
	hasMore?: boolean;
	_links?: { next?: string }; // Confluence cursor
	// Offset-based (Jira)
	startAt?: number;
	maxResults?: number;
	total?: number;
	nextPage?: string; // Alternative next indicator for offset
	// Page-based (Bitbucket)
	page?: number;
	pagelen?: number;
	next?: string; // Bitbucket page URL, or the 'available' placeholder used by some transformed responses
}

/**
 * Extract pagination information from API response
 *
 * Handles the three pagination schemes used by Atlassian APIs:
 * page-based (Bitbucket), offset-based (Jira), and cursor-based
 * (Confluence). The extracted `nextCursor` is always a string the caller
 * can pass back verbatim to request the following page.
 *
 * @param data The API response containing pagination information
 * @param paginationType The type of pagination mechanism used
 * @returns Object with nextCursor, hasMore, and count properties, or
 *          undefined when no pagination information can be derived
 */
export function extractPaginationInfo<T extends Partial<PaginationData>>(
	data: T,
	paginationType: PaginationType,
): ResponsePagination | undefined {
	if (!data) {
		return undefined;
	}

	let pagination: ResponsePagination | undefined;
	const methodLogger = Logger.forContext(
		'utils/pagination.util.ts',
		'extractPaginationInfo',
	);

	switch (paginationType) {
		case PaginationType.PAGE: {
			// Bitbucket page-based pagination (page, pagelen, size, next)
			if (data.page !== undefined && data.pagelen !== undefined) {
				const hasMore = !!data.next;
				let nextCursorValue: string | undefined = undefined;

				if (hasMore) {
					try {
						// First attempt to parse the full URL if it looks like one
						if (
							typeof data.next === 'string' &&
							data.next.includes('://')
						) {
							const nextUrl = new URL(data.next);
							nextCursorValue =
								nextUrl.searchParams.get('page') || undefined;
							if (nextCursorValue) {
								methodLogger.debug(
									`Successfully extracted page from URL: ${nextCursorValue}`,
								);
							} else {
								// Fix: the URL parsed cleanly but carried no
								// "page" query parameter. Previously nextCursor
								// was left undefined while hasMore stayed true,
								// so callers could never advance past this page.
								// Fall back to the computed next page number.
								nextCursorValue = String(Number(data.page) + 1);
								methodLogger.debug(
									`Next URL had no page param; calculated next page: ${nextCursorValue}`,
								);
							}
						} else if (data.next === 'available') {
							// Handle the 'available' placeholder used in some transformedResponses
							nextCursorValue = String(Number(data.page) + 1);
							methodLogger.debug(
								`Using calculated next page from 'available': ${nextCursorValue}`,
							);
						} else if (typeof data.next === 'string') {
							// Try to use data.next directly if it's not a URL but still a string
							nextCursorValue = data.next;
							methodLogger.debug(
								`Using next value directly: ${nextCursorValue}`,
							);
						}
					} catch (e) {
						// If URL parsing fails, calculate the next page based on current page
						nextCursorValue = String(Number(data.page) + 1);
						methodLogger.debug(
							`Calculated next page after URL parsing error: ${nextCursorValue}`,
						);
						methodLogger.warn(
							`Failed to parse next URL: ${data.next}`,
							e,
						);
					}
				}

				pagination = {
					hasMore,
					count: data.values?.length ?? 0,
					page: data.page,
					size: data.pagelen,
					total: data.size,
					nextCursor: nextCursorValue, // Store next page number as cursor
				};
			}
			break;
		}

		case PaginationType.OFFSET: {
			// Jira offset-based pagination (startAt / maxResults / total)
			const countOffset = data.values?.length;
			if (
				data.startAt !== undefined &&
				data.maxResults !== undefined &&
				data.total !== undefined &&
				data.startAt + data.maxResults < data.total
			) {
				pagination = {
					hasMore: true,
					count: countOffset,
					total: data.total,
					nextCursor: String(data.startAt + data.maxResults),
				};
			} else if (data.nextPage) {
				// Alternative next-page indicator returned by some endpoints.
				pagination = {
					hasMore: true,
					count: countOffset,
					nextCursor: data.nextPage,
				};
			}
			break;
		}

		case PaginationType.CURSOR: {
			// Confluence cursor-based pagination (_links.next embeds cursor=...)
			const countCursor = data.results?.length;
			if (data._links && data._links.next) {
				const nextUrl = data._links.next;
				const cursorMatch = nextUrl.match(/cursor=([^&]+)/);
				if (cursorMatch && cursorMatch[1]) {
					pagination = {
						hasMore: true,
						count: countCursor,
						nextCursor: decodeURIComponent(cursorMatch[1]),
					};
				}
			}
			break;
		}

		default:
			methodLogger.warn(`Unknown pagination type: ${paginationType}`);
	}

	// Ensure a default pagination object if none was created but data exists
	if (!pagination && (data.results || data.values)) {
		pagination = {
			hasMore: false,
			count: data.results?.length ?? data.values?.length ?? 0,
		};
	}

	return pagination;
}

/**
 * Validates and enforces page size limits to prevent excessive data exposure (CWE-770)
 * @param requestedPageSize The requested page size from the client
 * @param contextInfo Optional context for logging (e.g., endpoint name)
 * @returns The validated page size (clamped to maximum allowed)
 */
export function validatePageSize(
	requestedPageSize?: number,
	contextInfo?: string,
): number {
	const methodLogger = Logger.forContext(
		'utils/pagination.util.ts',
		'validatePageSize',
	);

	// Use default if not specified
	if (!requestedPageSize || requestedPageSize <= 0) {
		const defaultSize = DATA_LIMITS.DEFAULT_PAGE_SIZE;
		methodLogger.debug(
			`Using default page size: ${defaultSize}${contextInfo ? ` for ${contextInfo}` : ''}`,
		);
		return defaultSize;
	}

	// Enforce maximum page size limit
	if (requestedPageSize > DATA_LIMITS.MAX_PAGE_SIZE) {
		const clampedSize = DATA_LIMITS.MAX_PAGE_SIZE;
		methodLogger.warn(
			`Page size ${requestedPageSize} exceeds maximum limit. Clamped to ${clampedSize}${contextInfo ? ` for ${contextInfo}` : ''}`,
		);
		return clampedSize;
	}

	methodLogger.debug(
		`Using requested page size: ${requestedPageSize}${contextInfo ? ` for ${contextInfo}` : ''}`,
	);
	return requestedPageSize;
}

/**
 * Validates pagination data to ensure it doesn't exceed configured limits
 * @param paginationData The pagination data to validate
 * @param contextInfo Optional context for logging
 * @returns True if data is within limits, false otherwise
 */
export function validatePaginationLimits(
	paginationData: { count?: number; size?: number; pagelen?: number },
	contextInfo?: string,
): boolean {
	const methodLogger = Logger.forContext(
		'utils/pagination.util.ts',
		'validatePaginationLimits',
	);

	// Check if the response contains more items than our maximum allowed
	const itemCount = paginationData.count ?? 0;
	const pageSize = paginationData.size ?? paginationData.pagelen ?? 0;

	if (itemCount > DATA_LIMITS.MAX_PAGE_SIZE) {
		methodLogger.warn(
			`Response contains ${itemCount} items, exceeding maximum of ${DATA_LIMITS.MAX_PAGE_SIZE}${contextInfo ? ` for ${contextInfo}` : ''}`,
		);
		return false;
	}

	if (pageSize > DATA_LIMITS.MAX_PAGE_SIZE) {
		methodLogger.warn(
			`Response page size ${pageSize} exceeds maximum of ${DATA_LIMITS.MAX_PAGE_SIZE}${contextInfo ? ` for ${contextInfo}` : ''}`,
		);
		return false;
	}

	return true;
}

```

--------------------------------------------------------------------------------
/src/utils/error-handler.util.test.ts:
--------------------------------------------------------------------------------

```typescript
import { describe, expect, test } from '@jest/globals';
import {
	ErrorCode,
	buildErrorContext,
	detectErrorType,
	createUserFriendlyErrorMessage,
	handleControllerError,
} from './error-handler.util.js';
import { McpError, ErrorType, createApiError } from './error.util.js';

// Unit tests for the error-handler utilities: error-context construction,
// error-type detection, user-friendly message generation, and the shared
// controller error wrapper.
describe('Error Handler Utilities', () => {
	describe('buildErrorContext function', () => {
		test('builds a complete error context object', () => {
			const context = buildErrorContext(
				'Repository',
				'retrieving',
				'controllers/repositories.controller.ts@get',
				{ workspaceSlug: 'atlassian', repoSlug: 'bitbucket' },
				{ queryParams: { sort: 'name' } },
			);

			expect(context).toEqual({
				entityType: 'Repository',
				operation: 'retrieving',
				source: 'controllers/repositories.controller.ts@get',
				entityId: { workspaceSlug: 'atlassian', repoSlug: 'bitbucket' },
				additionalInfo: { queryParams: { sort: 'name' } },
			});
		});

		test('handles minimal required parameters', () => {
			const context = buildErrorContext(
				'Repository',
				'listing',
				'controllers/repositories.controller.ts@list',
			);

			expect(context).toEqual({
				entityType: 'Repository',
				operation: 'listing',
				source: 'controllers/repositories.controller.ts@list',
			});
			// Optional fields must be absent (undefined), not null, when omitted.
			expect(context.entityId).toBeUndefined();
			expect(context.additionalInfo).toBeUndefined();
		});
	});

	// Each case feeds a representative error message (or McpError) through
	// detectErrorType and checks the resulting { code, statusCode } pair.
	describe('detectErrorType function', () => {
		test('detects network errors', () => {
			const error = new Error('network error: connection refused');
			const result = detectErrorType(error);
			expect(result).toEqual({
				code: ErrorCode.NETWORK_ERROR,
				statusCode: 500,
			});
		});

		test('detects rate limit errors', () => {
			const error = new Error('too many requests');
			const result = detectErrorType(error);
			expect(result).toEqual({
				code: ErrorCode.RATE_LIMIT_ERROR,
				statusCode: 429,
			});
		});

		test('detects not found errors', () => {
			const error = new Error('resource not found');
			const result = detectErrorType(error);
			expect(result).toEqual({
				code: ErrorCode.NOT_FOUND,
				statusCode: 404,
			});
		});

		test('detects access denied errors', () => {
			const error = new Error('insufficient permissions');
			const result = detectErrorType(error);
			expect(result).toEqual({
				code: ErrorCode.ACCESS_DENIED,
				statusCode: 403,
			});
		});

		test('detects validation errors', () => {
			const error = new Error('validation failed: invalid input');
			const result = detectErrorType(error);
			expect(result).toEqual({
				code: ErrorCode.VALIDATION_ERROR,
				statusCode: 400,
			});
		});

		test('defaults to unexpected error', () => {
			const error = new Error('something unexpected happened');
			const result = detectErrorType(error);
			expect(result).toEqual({
				code: ErrorCode.UNEXPECTED_ERROR,
				statusCode: 500,
			});
		});

		test('respects explicit status code from error', () => {
			// An McpError with an explicit status (418) must win over any
			// message-based heuristic.
			const error = new McpError(
				'Custom error',
				ErrorType.API_ERROR,
				418,
			);
			const result = detectErrorType(error);
			expect(result.statusCode).toBe(418);
		});

		// Bitbucket wraps failures in an { error: { message } } payload passed
		// as originalError; detection must look inside that structure.
		test('detects Bitbucket-specific repository not found errors', () => {
			const bitbucketError = {
				error: {
					message: 'repository not found',
				},
			};
			const mcpError = createApiError('API Error', 404, bitbucketError);
			const result = detectErrorType(mcpError);
			expect(result).toEqual({
				code: ErrorCode.NOT_FOUND,
				statusCode: 404,
			});
		});

		test('detects Bitbucket-specific permission errors', () => {
			const bitbucketError = {
				error: {
					message: 'access denied for this repository',
				},
			};
			const mcpError = createApiError('API Error', 403, bitbucketError);
			const result = detectErrorType(mcpError);
			expect(result).toEqual({
				code: ErrorCode.ACCESS_DENIED,
				statusCode: 403,
			});
		});

		test('detects Bitbucket-specific validation errors', () => {
			const bitbucketError = {
				error: {
					message: 'invalid parameter: repository name',
				},
			};
			const mcpError = createApiError('API Error', 400, bitbucketError);
			const result = detectErrorType(mcpError);
			expect(result).toEqual({
				code: ErrorCode.VALIDATION_ERROR,
				statusCode: 400,
			});
		});
	});

	describe('createUserFriendlyErrorMessage function', () => {
		test('creates NOT_FOUND message with entityId string', () => {
			const message = createUserFriendlyErrorMessage(
				ErrorCode.NOT_FOUND,
				{
					entityType: 'Repository',
					entityId: 'atlassian/bitbucket',
				},
			);
			expect(message).toContain(
				'Repository atlassian/bitbucket not found',
			);
		});

		test('creates NOT_FOUND message with entityId object', () => {
			// Object-form entityId should render identically to the
			// "workspace/repo" string form.
			const message = createUserFriendlyErrorMessage(
				ErrorCode.NOT_FOUND,
				{
					entityType: 'Repository',
					entityId: {
						workspaceSlug: 'atlassian',
						repoSlug: 'bitbucket',
					},
				},
			);
			expect(message).toContain(
				'Repository atlassian/bitbucket not found',
			);
		});

		test('creates ACCESS_DENIED message', () => {
			const message = createUserFriendlyErrorMessage(
				ErrorCode.ACCESS_DENIED,
				{
					entityType: 'Repository',
					entityId: 'atlassian/bitbucket',
				},
			);
			expect(message).toContain(
				'Access denied for repository atlassian/bitbucket',
			);
		});

		test('creates VALIDATION_ERROR message', () => {
			const originalMessage = 'Invalid repository name';
			const message = createUserFriendlyErrorMessage(
				ErrorCode.VALIDATION_ERROR,
				{
					entityType: 'Repository',
					operation: 'creating',
				},
				originalMessage,
			);
			// NOTE(review): the expected string repeats originalMessage twice;
			// this pins current formatter output but looks unintentional —
			// verify against createUserFriendlyErrorMessage's implementation.
			expect(message).toBe(
				`${originalMessage} Error details: ${originalMessage}`,
			);
		});

		test('creates NETWORK_ERROR message', () => {
			const message = createUserFriendlyErrorMessage(
				ErrorCode.NETWORK_ERROR,
				{
					entityType: 'Repository',
					operation: 'retrieving',
				},
			);
			expect(message).toContain('Network error');
			expect(message).toContain('Bitbucket API');
		});

		test('creates RATE_LIMIT_ERROR message', () => {
			// Rate-limit messages need no context argument at all.
			const message = createUserFriendlyErrorMessage(
				ErrorCode.RATE_LIMIT_ERROR,
			);
			expect(message).toContain('Bitbucket API rate limit exceeded');
		});

		test('includes original message for non-specific errors', () => {
			const message = createUserFriendlyErrorMessage(
				ErrorCode.UNEXPECTED_ERROR,
				{
					entityType: 'Repository',
					operation: 'processing',
				},
				'Something went wrong',
			);
			expect(message).toContain('unexpected error');
			expect(message).toContain('Something went wrong');
		});
	});

	describe('handleControllerError function', () => {
		test('throws appropriate API error with user-friendly message', () => {
			const originalError = new Error('Repository not found');
			const context = buildErrorContext(
				'Repository',
				'retrieving',
				'controllers/repositories.controller.ts@get',
				'atlassian/bitbucket',
			);

			// First invocation: assert the call throws the right class.
			expect(() => {
				handleControllerError(originalError, context);
			}).toThrow(McpError);

			// Second invocation: catch the thrown error to inspect its
			// type, status code, message, and preserved original error.
			try {
				handleControllerError(originalError, context);
			} catch (error) {
				expect(error).toBeInstanceOf(McpError);
				expect((error as McpError).type).toBe(ErrorType.API_ERROR);
				expect((error as McpError).statusCode).toBe(404);
				expect((error as McpError).message).toContain(
					'Repository atlassian/bitbucket not found',
				);
				expect((error as McpError).originalError).toBe(originalError);
			}
		});
	});
});

```

--------------------------------------------------------------------------------
/src/controllers/atlassian.pullrequests.comments.controller.ts:
--------------------------------------------------------------------------------

```typescript
import { ControllerResponse } from '../types/common.types.js';
import {
	ListPullRequestCommentsToolArgsType,
	CreatePullRequestCommentToolArgsType,
} from '../tools/atlassian.pullrequests.types.js';
import {
	atlassianPullRequestsService,
	Logger,
	handleControllerError,
	extractPaginationInfo,
	PaginationType,
	formatPagination,
	formatPullRequestComments,
	DEFAULT_PAGE_SIZE,
	applyDefaults,
	enhanceCommentsWithSnippets,
	optimizeBitbucketMarkdown,
	getDefaultWorkspace,
	ListCommentsParams,
	CreateCommentParams,
} from './atlassian.pullrequests.base.controller.js';

/**
 * List comments on a Bitbucket pull request.
 *
 * Resolves a default workspace when none is supplied, fetches one page of
 * comments from the service, enhances inline comments with code snippets,
 * and returns the result as Markdown (with pagination details appended
 * when more pages exist).
 *
 * @param options - Options including workspace slug, repo slug, and pull request ID
 * @returns Promise with formatted pull request comments as Markdown content
 */
async function listComments(
	options: ListPullRequestCommentsToolArgsType,
): Promise<ControllerResponse> {
	const methodLogger = Logger.forContext(
		'controllers/atlassian.pullrequests.comments.controller.ts',
		'listComments',
	);

	try {
		// Merge caller options over the defaults (page size only).
		const mergedOptions =
			applyDefaults<ListPullRequestCommentsToolArgsType>(options, {
				limit: DEFAULT_PAGE_SIZE,
			});

		// Fall back to the account's default workspace when none was given.
		if (!mergedOptions.workspaceSlug) {
			methodLogger.debug(
				'No workspace provided, fetching default workspace',
			);
			const defaultWorkspace = await getDefaultWorkspace();
			if (!defaultWorkspace) {
				throw new Error(
					'Could not determine a default workspace. Please provide a workspaceSlug.',
				);
			}
			mergedOptions.workspaceSlug = defaultWorkspace;
			methodLogger.debug(
				`Using default workspace: ${mergedOptions.workspaceSlug}`,
			);
		}

		const { workspaceSlug, repoSlug, prId } = mergedOptions;

		// All three identifiers are needed to address a pull request.
		if (!workspaceSlug || !repoSlug || !prId) {
			throw new Error(
				'Workspace slug, repository slug, and pull request ID are required',
			);
		}

		methodLogger.debug(
			`Listing comments for PR ${workspaceSlug}/${repoSlug}/${prId}`,
			{ limit: mergedOptions.limit, cursor: mergedOptions.cursor },
		);

		// Translate controller-level options into service parameters.
		const serviceParams: ListCommentsParams = {
			workspace: workspaceSlug,
			repo_slug: repoSlug,
			pull_request_id: parseInt(prId, 10),
			pagelen: mergedOptions.limit,
			page: mergedOptions.cursor
				? parseInt(mergedOptions.cursor, 10)
				: undefined,
		};

		const commentsData =
			await atlassianPullRequestsService.getComments(serviceParams);

		methodLogger.debug(
			`Retrieved ${commentsData.values?.length || 0} comments`,
		);

		// Short-circuit with a friendly message when the PR has no comments.
		if (!commentsData.values || commentsData.values.length === 0) {
			return { content: 'No comments found on this pull request.' };
		}

		const pagination = extractPaginationInfo(
			commentsData,
			PaginationType.PAGE,
		);

		// Inline comments gain surrounding code snippets for readability.
		const enhancedComments = await enhanceCommentsWithSnippets(
			commentsData,
			'listComments',
		);

		const formattedComments = formatPullRequestComments(
			enhancedComments,
			prId,
		);

		// Append pagination details when any are available.
		let finalContent = formattedComments;
		if (
			pagination &&
			(pagination.hasMore || pagination.count !== undefined)
		) {
			finalContent += '\n\n' + formatPagination(pagination);
		}

		return { content: finalContent };
	} catch (error) {
		// Delegate to the shared controller error handler.
		throw handleControllerError(error, {
			entityType: 'Pull Request Comments',
			operation: 'listing',
			source: 'controllers/atlassian.pullrequests.comments.controller.ts@listComments',
			additionalInfo: { options },
		});
	}
}

/**
 * Add a comment to a Bitbucket pull request.
 *
 * Supports top-level comments, inline (file/line-anchored) comments, and
 * replies to an existing comment via `parentId`. The comment body is run
 * through Bitbucket-specific markdown optimizations before submission.
 *
 * @param options - Options including workspace slug, repo slug, PR ID, and comment content
 * @returns Promise with a success message as content
 */
async function addComment(
	options: CreatePullRequestCommentToolArgsType,
): Promise<ControllerResponse> {
	const methodLogger = Logger.forContext(
		'controllers/atlassian.pullrequests.comments.controller.ts',
		'addComment',
	);

	try {
		// Apply defaults if needed (none for this operation)
		const mergedOptions =
			applyDefaults<CreatePullRequestCommentToolArgsType>(options, {});

		// Handle optional workspaceSlug - get default if not provided
		if (!mergedOptions.workspaceSlug) {
			methodLogger.debug(
				'No workspace provided, fetching default workspace',
			);
			const defaultWorkspace = await getDefaultWorkspace();
			if (!defaultWorkspace) {
				throw new Error(
					'Could not determine a default workspace. Please provide a workspaceSlug.',
				);
			}
			mergedOptions.workspaceSlug = defaultWorkspace;
			methodLogger.debug(
				`Using default workspace: ${mergedOptions.workspaceSlug}`,
			);
		}

		const { workspaceSlug, repoSlug, prId, content, inline } =
			mergedOptions;

		// Validate required parameters
		if (!workspaceSlug || !repoSlug || !prId || !content) {
			throw new Error(
				'Workspace slug, repository slug, pull request ID, and comment content are required',
			);
		}

		// For inline comments, both file path and line number are required
		if (inline && (!inline.path || inline.line === undefined)) {
			throw new Error(
				'Both file path and line number are required for inline comments',
			);
		}

		// Guard against non-numeric PR IDs before hitting the API; parseInt
		// would otherwise silently send NaN in the request path.
		const pullRequestId = parseInt(prId, 10);
		if (Number.isNaN(pullRequestId)) {
			throw new Error(`Invalid pull request ID: ${prId}`);
		}

		// Prepare the raw content, applying any Bitbucket-specific markdown optimizations
		const optimizedContent = optimizeBitbucketMarkdown(content);

		methodLogger.debug(
			`Adding${
				inline ? ' inline' : ''
			} comment to PR ${workspaceSlug}/${repoSlug}/${prId}`,
			{
				contentLength: optimizedContent.length,
				isInline: !!inline,
				inlinePath: inline?.path,
				inlineLine: inline?.line,
			},
		);

		// Map controller options to service parameters
		const serviceParams: CreateCommentParams = {
			workspace: workspaceSlug,
			repo_slug: repoSlug,
			pull_request_id: pullRequestId,
			content: {
				raw: optimizedContent,
			},
		};

		// For inline comments, anchor the comment to a file and line.
		if (inline) {
			serviceParams.inline = {
				path: inline.path,
				to: inline.line,
			};
		}

		// For replies, link to the parent comment.
		if (mergedOptions.parentId) {
			serviceParams.parent = {
				id: parseInt(mergedOptions.parentId, 10),
			};
		}

		// Create the comment through the service
		const commentResult =
			await atlassianPullRequestsService.createComment(serviceParams);

		methodLogger.debug('Comment created successfully', {
			commentId: commentResult.id,
			isInline: !!inline,
		});

		// Fix: the previous template yielded a leading space for non-inline
		// comments (" Comment successfully added...") and inconsistent
		// capitalization for inline ones ("inline Comment..."). Build the
		// complete prefix instead of interpolating an optional word.
		const successPrefix = inline ? 'Inline comment' : 'Comment';
		return {
			content: `${successPrefix} successfully added to pull request #${prId}. Comment ID: ${commentResult.id}`,
		};
	} catch (error) {
		// Use the standardized error handler
		throw handleControllerError(error, {
			entityType: 'Pull Request Comment',
			operation: 'adding',
			source: 'controllers/atlassian.pullrequests.comments.controller.ts@addComment',
			additionalInfo: { options },
		});
	}
}

// Export the controller surface for pull request comments:
// listComments (read) and addComment (create/reply).
export default { listComments, addComment };

```

--------------------------------------------------------------------------------
/src/tools/atlassian.repositories.types.ts:
--------------------------------------------------------------------------------

```typescript
import { z } from 'zod';

/**
 * Base pagination arguments for all tools
 *
 * Plain object of Zod fields spread into each list-style schema below via
 * `...PaginationArgs`, so every listing tool shares the same limit/cursor
 * contract.
 */
const PaginationArgs = {
	limit: z
		.number()
		.int()
		.positive()
		.max(100) // hard cap on page size; presumably mirrors the server-side limit — TODO confirm
		.optional()
		.describe(
			'Maximum number of items to return (1-100). Controls the response size. Defaults to 25 if omitted.',
		),

	cursor: z
		.string()
		.optional()
		.describe(
			'Pagination cursor for retrieving the next set of results. Obtained from previous response when more results are available.',
		),
};

/**
 * Schema for list-repositories tool arguments
 */
export const ListRepositoriesToolArgs = z.object({
	/**
	 * Workspace slug containing the repositories
	 */
	workspaceSlug: z
		.string()
		.optional()
		.describe(
			'Workspace slug containing the repositories. If not provided, the system will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account). Example: "myteam"',
		),

	/**
	 * Optional query to filter repositories
	 */
	query: z
		.string()
		.optional()
		.describe(
			'Query string to filter repositories by name or other properties (text search). Example: "api" for repositories with "api" in the name/description. If omitted, returns all repositories.',
		),

	/**
	 * Optional sort parameter
	 */
	sort: z
		.string()
		.optional()
		.describe(
			'Field to sort results by. Common values: "name", "created_on", "updated_on". Prefix with "-" for descending order. Example: "-updated_on" for most recently updated first.',
		),

	/**
	 * Optional role filter
	 */
	role: z
		.string()
		.optional()
		.describe(
			'Filter repositories by the authenticated user\'s role. Common values: "owner", "admin", "contributor", "member". If omitted, returns repositories of all roles.',
		),

	/**
	 * Optional project key filter
	 */
	projectKey: z
		.string()
		.optional()
		.describe('Filter repositories by project key. Example: "project-api"'),

	/**
	 * Shared pagination arguments (limit, cursor); limit defaults to 25.
	 */
	...PaginationArgs,
});

export type ListRepositoriesToolArgsType = z.infer<
	typeof ListRepositoriesToolArgs
>;

/**
 * Schema for get-repository tool arguments
 */
export const GetRepositoryToolArgs = z.object({
	/**
	 * Workspace slug containing the repository
	 */
	workspaceSlug: z
		.string()
		.optional()
		.describe(
			'Workspace slug containing the repository. If not provided, the system will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account). Example: "myteam"',
		),

	/**
	 * Repository slug to retrieve
	 */
	repoSlug: z
		.string()
		.min(1, 'Repository slug is required')
		.describe(
			'Repository slug to retrieve. This must be a valid repository in the specified workspace. Example: "project-api"',
		),
});

export type GetRepositoryToolArgsType = z.infer<typeof GetRepositoryToolArgs>;

/**
 * Schema for get-commit-history tool arguments.
 */
export const GetCommitHistoryToolArgs = z.object({
	// Optional workspace; falls back to the account default when omitted.
	workspaceSlug: z
		.string()
		.optional()
		.describe(
			'Workspace slug containing the repository. If not provided, the system will use your default workspace. Example: "myteam"',
		),
	// Required repository identifier within the workspace.
	repoSlug: z
		.string()
		.min(1, 'Repository slug is required')
		.describe(
			'Repository slug whose commit history is to be retrieved. Example: "project-api"',
		),
	// Optional starting point (branch, tag, or commit hash).
	revision: z
		.string()
		.optional()
		.describe(
			'Optional branch name, tag, or commit hash to view history from. If omitted, uses the default branch.',
		),
	// Optional path filter restricting history to one file.
	path: z
		.string()
		.optional()
		.describe(
			'Optional file path to filter commit history. Only shows commits affecting this file.',
		),
	...PaginationArgs, // Includes limit and cursor
});

export type GetCommitHistoryToolArgsType = z.infer<
	typeof GetCommitHistoryToolArgs
>;

/**
 * Schema for create-branch tool arguments.
 */
export const CreateBranchToolArgsSchema = z.object({
	// Optional workspace; falls back to the account default when omitted.
	workspaceSlug: z
		.string()
		.optional()
		.describe(
			'Workspace slug containing the repository. If not provided, the system will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account). Example: "myteam"',
		),
	repoSlug: z
		.string()
		.min(1, 'Repository slug is required')
		.describe('Repository slug where the branch will be created.'),
	newBranchName: z
		.string()
		.min(1, 'New branch name is required')
		.describe('The name for the new branch.'),
	// Branch point: either an existing branch name or a commit hash.
	sourceBranchOrCommit: z
		.string()
		.min(1, 'Source branch or commit is required')
		.describe('The name of the branch or the commit hash to branch from.'),
});

export type CreateBranchToolArgsType = z.infer<
	typeof CreateBranchToolArgsSchema
>;

/**
 * Schema for clone-repository tool arguments.
 */
export const CloneRepositoryToolArgs = z.object({
	// Optional workspace; falls back to the account default when omitted.
	workspaceSlug: z
		.string()
		.optional()
		.describe(
			'Bitbucket workspace slug containing the repository. If not provided, the tool will use your default workspace (either configured via BITBUCKET_DEFAULT_WORKSPACE or the first workspace in your account). Example: "myteam"',
		),
	repoSlug: z
		.string()
		.min(1, 'Repository slug is required')
		.describe(
			'Repository name/slug to clone. This is the short name of the repository. Example: "project-api"',
		),
	// Destination directory; the clone lands in targetPath/repoSlug.
	targetPath: z
		.string()
		.min(1, 'Target path is required')
		.describe(
			'Directory path where the repository will be cloned. IMPORTANT: Absolute paths are strongly recommended (e.g., "/home/user/projects" or "C:\\Users\\name\\projects"). Relative paths will be resolved relative to the server\'s working directory, which may not be what you expect. The repository will be cloned into a subdirectory at targetPath/repoSlug. Make sure you have write permissions to this location.',
		),
});

export type CloneRepositoryToolArgsType = z.infer<
	typeof CloneRepositoryToolArgs
>;

/**
 * Schema for get-file-content tool arguments.
 */
export const GetFileContentToolArgs = z.object({
	// Optional workspace; falls back to the account default when omitted.
	workspaceSlug: z
		.string()
		.optional()
		.describe(
			'Workspace slug containing the repository. If not provided, the system will use your default workspace. Example: "myteam"',
		),
	repoSlug: z
		.string()
		.min(1, 'Repository slug is required')
		.describe(
			'Repository slug containing the file. Example: "project-api"',
		),
	// Repository-relative path of the file to fetch.
	filePath: z
		.string()
		.min(1, 'File path is required')
		.describe(
			'Path to the file within the repository. Example: "README.md" or "src/main.js"',
		),
	// Optional ref (branch/tag/commit) to read the file at.
	revision: z
		.string()
		.optional()
		.describe(
			'Optional branch name, tag, or commit hash to retrieve the file from. If omitted, uses the default branch.',
		),
});

export type GetFileContentToolArgsType = z.infer<typeof GetFileContentToolArgs>;

/**
 * Schema for list-branches tool arguments
 */
export const ListBranchesToolArgs = z.object({
	/**
	 * Workspace slug containing the repository
	 */
	workspaceSlug: z
		.string()
		.optional()
		.describe(
			'Workspace slug containing the repository. If not provided, the system will use your default workspace. Example: "myteam"',
		),

	/**
	 * Repository slug to list branches from
	 */
	repoSlug: z
		.string()
		.min(1, 'Repository slug is required')
		.describe(
			'Repository slug to list branches from. Must be a valid repository slug in the specified workspace. Example: "project-api"',
		),

	/**
	 * Optional query to filter branches
	 */
	query: z
		.string()
		.optional()
		.describe(
			'Query string to filter branches by name or other properties (text search).',
		),

	/**
	 * Optional sort parameter
	 */
	sort: z
		.string()
		.optional()
		.describe(
			'Field to sort branches by. Common values: "name" (default), "-name", "target.date". Prefix with "-" for descending order.',
		),

	/**
	 * Shared pagination arguments (limit, cursor); limit defaults to 25.
	 */
	...PaginationArgs,
});

export type ListBranchesToolArgsType = z.infer<typeof ListBranchesToolArgs>;

```

--------------------------------------------------------------------------------
/src/utils/error.util.ts:
--------------------------------------------------------------------------------

```typescript
import { Logger } from './logger.util.js';
import { formatSeparator } from './formatter.util.js';

/**
 * Error types for MCP errors
 *
 * Coarse-grained categories surfaced to MCP clients; derived from ErrorType
 * plus the HTTP status code in the McpError constructor.
 */
export type McpErrorType =
	| 'AUTHENTICATION_REQUIRED' // credentials missing or rejected
	| 'NOT_FOUND' // API_ERROR with HTTP 404
	| 'VALIDATION_ERROR' // not assigned by McpError's constructor mapping
	| 'RATE_LIMIT_EXCEEDED' // API_ERROR with HTTP 429
	| 'API_ERROR' // any other upstream API failure
	| 'UNEXPECTED_ERROR'; // fallthrough for unclassified errors

/**
 * Error types for classification
 *
 * Internal classification assigned at error-creation time (see the factory
 * helpers below); mapped to McpErrorType in the McpError constructor.
 */
export enum ErrorType {
	AUTH_MISSING = 'AUTH_MISSING', // no credentials configured
	AUTH_INVALID = 'AUTH_INVALID', // credentials present but rejected
	API_ERROR = 'API_ERROR', // upstream API returned an error
	UNEXPECTED_ERROR = 'UNEXPECTED_ERROR', // anything else
}

/**
 * Custom error class with type classification
 *
 * Carries the internal ErrorType, an optional HTTP status code, and the
 * wrapped original error; the client-facing errorType is derived from those
 * at construction time.
 */
export class McpError extends Error {
	type: ErrorType;
	errorType?: McpErrorType; // Add errorType property used by error-handler.util.ts
	statusCode?: number;
	originalError?: unknown;

	constructor(
		message: string,
		type: ErrorType,
		statusCode?: number,
		originalError?: unknown,
	) {
		super(message);
		this.name = 'McpError';
		this.type = type;
		this.statusCode = statusCode;
		this.originalError = originalError;

		// Derive the client-facing category from the internal classification.
		if (type === ErrorType.AUTH_MISSING || type === ErrorType.AUTH_INVALID) {
			this.errorType = 'AUTHENTICATION_REQUIRED';
		} else if (type === ErrorType.API_ERROR) {
			// 404 and 429 get dedicated categories; everything else is generic.
			if (statusCode === 404) {
				this.errorType = 'NOT_FOUND';
			} else if (statusCode === 429) {
				this.errorType = 'RATE_LIMIT_EXCEEDED';
			} else {
				this.errorType = 'API_ERROR';
			}
		} else {
			this.errorType = 'UNEXPECTED_ERROR';
		}
	}
}

/**
 * Unwraps nested errors and returns the innermost original error.
 *
 * Walks the `originalError` chain (produced when McpErrors wrap other
 * McpErrors that in turn wrap the vendor error) until it reaches a value
 * that is not an Error carrying a truthy `originalError`, or the depth cap.
 */
export function getDeepOriginalError(error: unknown): unknown {
	if (!error) {
		return error;
	}

	// Cap traversal at 10 links to guard against cyclic chains.
	let current: unknown = error;
	for (let hops = 0; hops < 10; hops++) {
		if (
			current instanceof Error &&
			'originalError' in current &&
			(current as { originalError?: unknown }).originalError
		) {
			current = (current as { originalError?: unknown }).originalError;
		} else {
			return current;
		}
	}

	return current;
}

/**
 * Create an authentication-missing error (no credentials configured).
 */
export function createAuthMissingError(
	message: string = 'Authentication credentials are missing',
	originalError?: unknown,
): McpError {
	return new McpError(message, ErrorType.AUTH_MISSING, undefined, originalError);
}

/**
 * Create an authentication-invalid error (always carries HTTP 401).
 */
export function createAuthInvalidError(
	message: string = 'Authentication credentials are invalid',
	originalError?: unknown,
): McpError {
	return new McpError(message, ErrorType.AUTH_INVALID, 401, originalError);
}

/**
 * Create an API error with an optional upstream HTTP status code.
 */
export function createApiError(
	message: string,
	statusCode?: number,
	originalError?: unknown,
): McpError {
	return new McpError(message, ErrorType.API_ERROR, statusCode, originalError);
}

/**
 * Create a catch-all unexpected error.
 */
export function createUnexpectedError(
	message: string = 'An unexpected error occurred',
	originalError?: unknown,
): McpError {
	return new McpError(
		message,
		ErrorType.UNEXPECTED_ERROR,
		undefined,
		originalError,
	);
}

/**
 * Coerce any thrown value into an McpError.
 *
 * McpErrors pass through untouched; plain Errors are wrapped as unexpected
 * errors (keeping the original Error attached); any other value is
 * stringified into the message.
 */
export function ensureMcpError(error: unknown): McpError {
	if (error instanceof McpError) {
		return error;
	}
	const isError = error instanceof Error;
	const message = isError ? (error as Error).message : String(error);
	return createUnexpectedError(message, isError ? error : undefined);
}

/**
 * Format error for MCP tool response
 *
 * Logs the error, then produces the text content plus metadata
 * (classification, status code, and details extracted from the deepest
 * wrapped error).
 */
export function formatErrorForMcpTool(error: unknown): {
	content: Array<{ type: 'text'; text: string }>;
	metadata?: {
		errorType: ErrorType;
		statusCode?: number;
		errorDetails?: unknown;
	};
} {
	const log = Logger.forContext('utils/error.util.ts', 'formatErrorForMcpTool');
	const mcpError = ensureMcpError(error);
	log.error(`${mcpError.type} error`, mcpError);

	// Unwrap to the innermost cause and pull out a serialization-safe payload.
	const rootCause = getDeepOriginalError(mcpError.originalError);
	const errorDetails =
		rootCause instanceof Error ? { message: rootCause.message } : rootCause;

	const content: Array<{ type: 'text'; text: string }> = [
		{ type: 'text' as const, text: `Error: ${mcpError.message}` },
	];

	return {
		content,
		metadata: {
			errorType: mcpError.type,
			statusCode: mcpError.statusCode,
			errorDetails,
		},
	};
}

/**
 * Format error for MCP resource response
 *
 * Logs the error and wraps its message as a plain-text resource entry for
 * the given URI.
 */
export function formatErrorForMcpResource(
	error: unknown,
	uri: string,
): {
	contents: Array<{
		uri: string;
		text: string;
		mimeType: string;
		description?: string;
	}>;
} {
	const log = Logger.forContext(
		'utils/error.util.ts',
		'formatErrorForMcpResource',
	);
	const mcpError = ensureMcpError(error);
	log.error(`${mcpError.type} error`, mcpError);

	const entry = {
		uri,
		text: `Error: ${mcpError.message}`,
		mimeType: 'text/plain',
		description: `Error: ${mcpError.type}`,
	};
	return { contents: [entry] };
}

/**
 * Handle error in CLI context with improved user feedback
 *
 * Logs the error, prints a human-readable report to stderr (headline, HTTP
 * status, type-specific tips, and any vendor error payload inside a fenced
 * block), then terminates the process.
 *
 * @param error - Any thrown value; coerced to McpError for classification.
 * @returns Never returns; always exits the process with code 1.
 */
export function handleCliError(error: unknown): never {
	const methodLogger = Logger.forContext(
		'utils/error.util.ts',
		'handleCliError',
	);
	const mcpError = ensureMcpError(error);
	methodLogger.error(`${mcpError.type} error`, mcpError);

	// Get the deep original error for more context
	const originalError = getDeepOriginalError(mcpError.originalError);

	// Build a well-formatted CLI output using markdown-style helpers
	const cliLines: string[] = [];

	// Primary error headline
	cliLines.push(`❌  ${mcpError.message}`);

	// Status code (if any)
	if (mcpError.statusCode) {
		cliLines.push(`HTTP Status: ${mcpError.statusCode}`);
	}

	// Separator
	cliLines.push(formatSeparator());

	// Provide helpful context based on error type
	if (mcpError.type === ErrorType.AUTH_MISSING) {
		cliLines.push(
			'Tip: Make sure to set up your Atlassian credentials in the configuration file or environment variables:',
		);
		cliLines.push(
			'- ATLASSIAN_SITE_NAME, ATLASSIAN_USER_EMAIL, and ATLASSIAN_API_TOKEN; or',
		);
		cliLines.push(
			'- ATLASSIAN_BITBUCKET_USERNAME and ATLASSIAN_BITBUCKET_APP_PASSWORD',
		);
	} else if (mcpError.type === ErrorType.AUTH_INVALID) {
		cliLines.push(
			'Tip: Check that your Atlassian API token or app password is correct and has not expired.',
		);
		cliLines.push(
			'Also verify that the configured user has access to the requested resource.',
		);
	} else if (mcpError.type === ErrorType.API_ERROR) {
		// Only rate limiting gets a dedicated tip; other API errors rely on
		// the vendor payload printed below.
		if (mcpError.statusCode === 429) {
			cliLines.push(
				'Tip: You may have exceeded your Bitbucket API rate limits. Try again later.',
			);
		}
	}

	// Vendor error details (if available)
	if (originalError) {
		cliLines.push('Bitbucket API Error:');
		cliLines.push('```');
		if (typeof originalError === 'object' && originalError !== null) {
			// Try to extract the most useful parts of Bitbucket's error response
			const origErr = originalError as Record<string, unknown>;
			if (origErr.error && typeof origErr.error === 'object') {
				// Format {"error": {"message": "..."}} structure
				const bitbucketError = origErr.error as Record<string, unknown>;
				cliLines.push(
					`Message: ${bitbucketError.message || 'Unknown error'}`,
				);
				// NOTE(review): detail may be a non-string object, which would
				// render as "[object Object]" here — confirm upstream shape.
				if (bitbucketError.detail)
					cliLines.push(`Detail: ${bitbucketError.detail}`);
			} else if (origErr.message) {
				// Simple message
				cliLines.push(`${String(origErr.message)}`);
			} else {
				// Fall back to JSON representation for anything else
				cliLines.push(JSON.stringify(originalError, null, 2));
			}
		} else {
			cliLines.push(String(originalError).trim());
		}
		cliLines.push('```');
	}

	// Display DEBUG tip only when mcp debug logging is not already enabled
	if (!process.env.DEBUG || !process.env.DEBUG.includes('mcp:')) {
		cliLines.push(
			'For more detailed error information, run with DEBUG=mcp:* environment variable.',
		);
	}

	console.error(cliLines.join('\n'));
	process.exit(1);
}

```

--------------------------------------------------------------------------------
/src/controllers/atlassian.repositories.branch.controller.ts:
--------------------------------------------------------------------------------

```typescript
import atlassianRepositoriesService from '../services/vendor.atlassian.repositories.service.js';
import { Logger } from '../utils/logger.util.js';
import { handleControllerError } from '../utils/error-handler.util.js';
import { DEFAULT_PAGE_SIZE, applyDefaults } from '../utils/defaults.util.js';
import {
	extractPaginationInfo,
	PaginationType,
} from '../utils/pagination.util.js';
import { formatPagination } from '../utils/formatter.util.js';
import { ControllerResponse } from '../types/common.types.js';
import {
	CreateBranchToolArgsType,
	ListBranchesToolArgsType,
} from '../tools/atlassian.repositories.types.js';
import { CreateBranchParams } from '../services/vendor.atlassian.repositories.types.js';
import { getDefaultWorkspace } from '../utils/workspace.util.js';

// Logger instance for this module
const logger = Logger.forContext(
	'controllers/atlassian.repositories.branch.controller.ts',
);

/**
 * Creates a new branch in a repository.
 *
 * Resolves the workspace (falling back to the configured default when none
 * is given), checks whether the branch already exists via a filtered
 * list-branches call, and if not, creates it from the given source branch
 * or commit.
 *
 * @param options Options including workspace, repo, new branch name, and source target.
 * @returns Confirmation message.
 * @throws Error when required arguments are missing, no default workspace
 *   can be resolved, or the underlying API call fails.
 */
export async function handleCreateBranch(
	options: CreateBranchToolArgsType,
): Promise<ControllerResponse> {
	const { repoSlug, newBranchName, sourceBranchOrCommit } = options;
	let { workspaceSlug } = options;
	const methodLogger = logger.forMethod('handleCreateBranch');

	try {
		methodLogger.debug('Creating new branch with options:', options);

		// Handle optional workspaceSlug
		if (!workspaceSlug) {
			methodLogger.debug(
				'No workspace provided, fetching default workspace',
			);
			const defaultWorkspace = await getDefaultWorkspace();
			if (!defaultWorkspace) {
				throw new Error(
					'No default workspace found. Please provide a workspace slug.',
				);
			}
			workspaceSlug = defaultWorkspace;
			methodLogger.debug(`Using default workspace: ${defaultWorkspace}`);
		}

		if (!repoSlug) {
			throw new Error('Repository slug is required');
		}

		if (!newBranchName) {
			throw new Error('New branch name is required');
		}

		if (!sourceBranchOrCommit) {
			throw new Error(
				'Source branch or commit is required as the starting point',
			);
		}

		// First, check if branch already exists to avoid potential errors
		methodLogger.debug('Checking if branch already exists');
		try {
			// Call API to check if branch exists
			// Note: this is a simulation as the actual API might not have this specific endpoint
			// We'll make a call using the list branches endpoint with a filter
			// NOTE(review): newBranchName is interpolated into the filter
			// unescaped; a name containing a double quote would produce an
			// invalid query — confirm allowed branch-name characters.
			const existingBranches =
				await atlassianRepositoriesService.listBranches({
					workspace: workspaceSlug,
					repo_slug: repoSlug,
					q: `name="${newBranchName}"`,
				});

			// If we get matching branches, assume the branch exists
			if (existingBranches.values && existingBranches.values.length > 0) {
				methodLogger.warn(
					`Branch '${newBranchName}' already exists in ${workspaceSlug}/${repoSlug}`,
				);
				return {
					content: `⚠️ Branch \`${newBranchName}\` already exists in the repository.`,
				};
			}
		} catch (error) {
			// If error is 404, branch doesn't exist and we can proceed;
			// any other failure from the existence check is re-thrown.
			if ((error as { statusCode?: number }).statusCode !== 404) {
				throw error; // Other errors should be propagated
			}
			methodLogger.debug(
				`Branch '${newBranchName}' does not exist, proceeding with creation`,
			);
		}

		// Prepare the branch creation parameters
		const createParams: CreateBranchParams = {
			workspace: workspaceSlug,
			repo_slug: repoSlug,
			name: newBranchName,
			target: {
				hash: sourceBranchOrCommit,
			},
		};

		// Create the branch
		methodLogger.debug('Creating branch with params:', createParams);
		const result =
			await atlassianRepositoriesService.createBranch(createParams);

		// Confirm success with a meaningful message
		methodLogger.debug('Branch created successfully:', result);
		return {
			content: `✅ Successfully created branch \`${newBranchName}\` from \`${sourceBranchOrCommit}\` in ${workspaceSlug}/${repoSlug}.`,
		};
	} catch (error) {
		throw handleControllerError(error, {
			entityType: 'Branch',
			operation: 'create',
			source: 'controllers/atlassian.repositories.branch.controller.ts@handleCreateBranch',
			additionalInfo: options,
		});
	}
}

/**
 * Lists branches in a repository with optional filtering
 *
 * Resolves the workspace (defaulting when omitted), queries the branches
 * endpoint, renders the result as a Markdown document, and appends
 * pagination info when present.
 *
 * @param options - Options containing workspaceSlug, repoSlug, and filters
 * @returns Formatted list of branches and pagination information
 * @throws Error when no workspace can be resolved, repoSlug is missing, or
 *   the underlying API call fails.
 */
export async function handleListBranches(
	options: ListBranchesToolArgsType,
): Promise<ControllerResponse> {
	const methodLogger = logger.forMethod('handleListBranches');
	methodLogger.debug('Listing branches with options:', options);

	try {
		// Apply defaults
		const defaults: Partial<ListBranchesToolArgsType> = {
			limit: DEFAULT_PAGE_SIZE,
			sort: 'name', // Default sort by name
		};
		const params = applyDefaults<ListBranchesToolArgsType>(
			options,
			defaults,
		);

		// Handle optional workspaceSlug
		if (!params.workspaceSlug) {
			methodLogger.debug(
				'No workspace provided, fetching default workspace',
			);
			const defaultWorkspace = await getDefaultWorkspace();
			if (!defaultWorkspace) {
				throw new Error(
					'No default workspace found. Please provide a workspace slug.',
				);
			}
			params.workspaceSlug = defaultWorkspace;
			methodLogger.debug(`Using default workspace: ${defaultWorkspace}`);
		}

		// Required parameters check
		if (!params.repoSlug) {
			throw new Error('Repository slug is required');
		}

		// Call the service to list branches.
		// The free-text query is wrapped into a `name ~ "…"` filter, and the
		// cursor is interpreted as a numeric page number (page-based API).
		methodLogger.debug('Listing branches with params:', {
			workspace: params.workspaceSlug,
			repo_slug: params.repoSlug,
			q: params.query ? `name ~ "${params.query}"` : undefined,
			sort: params.sort,
			pagelen: params.limit,
			page: params.cursor ? parseInt(params.cursor, 10) : undefined,
		});

		const branchesData = await atlassianRepositoriesService.listBranches({
			workspace: params.workspaceSlug,
			repo_slug: params.repoSlug,
			q: params.query ? `name ~ "${params.query}"` : undefined,
			sort: params.sort,
			pagelen: params.limit,
			page: params.cursor ? parseInt(params.cursor, 10) : undefined,
		});

		methodLogger.debug(
			`Retrieved ${branchesData.values?.length || 0} branches`,
		);

		// Extract pagination information
		const pagination = extractPaginationInfo(
			branchesData,
			PaginationType.PAGE,
		);

		// Format branches data into Markdown
		let content = '';

		if (!branchesData.values || branchesData.values.length === 0) {
			content = 'No branches found in this repository.';
		} else {
			content = `# Branches in \`${params.workspaceSlug}/${params.repoSlug}\`\n\n`;

			if (params.query) {
				content += `Filtered by query: "${params.query}"\n\n`;
			}

			branchesData.values.forEach((branch) => {
				// Using target.hash to get the commit hash this branch points to
				// (shortened to 8 characters for display).
				const commitHash =
					branch.target?.hash?.substring(0, 8) || 'N/A';
				content += `## ${branch.name}\n\n`;
				content += `- **Latest Commit**: ${commitHash}\n`;
				content += `- **Type**: ${branch.type || 'branch'}\n`;

				if (branch.default_merge_strategy) {
					content += `- **Default Merge Strategy**: ${branch.default_merge_strategy}\n`;
				}

				if (branch.merge_strategies && branch.merge_strategies.length) {
					content += `- **Available Merge Strategies**: ${branch.merge_strategies.join(
						', ',
					)}\n`;
				}

				content += '\n';
			});
		}

		// Add pagination information if available
		if (
			pagination &&
			(pagination.hasMore || pagination.count !== undefined)
		) {
			const paginationString = formatPagination(pagination);
			content += paginationString;
		}

		return { content };
	} catch (error) {
		throw handleControllerError(error, {
			entityType: 'Branches',
			operation: 'listing',
			source: 'controllers/atlassian.repositories.branch.controller.ts@handleListBranches',
			additionalInfo: options,
		});
	}
}

```

--------------------------------------------------------------------------------
/src/services/vendor.atlassian.search.service.ts:
--------------------------------------------------------------------------------

```typescript
import { fetchAtlassian } from '../utils/transport.util.js';
import { Logger } from '../utils/logger.util.js';
import { NETWORK_TIMEOUTS } from '../utils/constants.util.js';
import { URLSearchParams } from 'url';
import { getAtlassianCredentials } from '../utils/transport.util.js';
import {
	ContentSearchParams,
	ContentSearchResponse,
} from './vendor.atlassian.search.types.js';

const logger = Logger.forContext('services/vendor.atlassian.search.service.ts');

/**
 * Search options for code search in a workspace
 */
export interface SearchCodeParams {
	workspaceSlug: string; // workspace to search in
	searchQuery: string; // raw search terms (Bitbucket search syntax)
	page?: number; // results page number
	pageLen?: number; // results per page
	repoSlug?: string; // when set, scopes the search via "repo:" syntax
	fields?: string; // optional Bitbucket partial-response fields selector
	language?: string; // appended as a "lang:" filter (see mapping in searchCode)
	extension?: string; // appended as an "ext:" filter
}

/**
 * Search options for commit search in a repository
 */
export interface SearchCommitsParams {
	workspaceSlug: string; // workspace containing the repository
	repoSlug: string; // repository whose commits are searched
	searchQuery: string; // matched against commit messages (message ~ "…")
	page?: number; // results page number
	pageLen?: number; // results per page
	fields?: string; // optional Bitbucket partial-response fields selector
}

/**
 * Response type for code search API
 */
export interface CodeSearchResponse {
	size: number; // total number of matches
	page: number; // current page number
	pagelen: number; // page size used by the server
	query_substituted: boolean; // presumably set when Bitbucket rewrote the query — see API docs
	values: CodeSearchResult[];
}

/**
 * Response type for commits API (paginated)
 */
export interface CommitsResponse {
	size: number; // total number of commits matched
	page: number; // current page number
	pagelen: number; // page size used by the server
	next?: string; // URL of the next page, when more results exist
	previous?: string; // URL of the previous page
	values: CommitResult[];
}

/**
 * Commit result type — a single commit as returned by the commits endpoint.
 */
export interface CommitResult {
	hash: string; // full commit SHA
	date: string; // commit date string
	message: string; // full commit message
	type: string;
	author: {
		raw: string; // raw "Name <email>" author string
		type: string;
		user?: {
			display_name: string;
			account_id: string;
			links: {
				self: { href: string };
				avatar: { href: string };
			};
		};
	};
	links: {
		self: { href: string };
		html: { href: string };
	};
	repository?: {
		name: string;
		full_name: string;
		links: {
			self: { href: string };
			html: { href: string };
		};
	};
}

/**
 * Code search result type — one matching file with its content/path matches.
 */
export interface CodeSearchResult {
	type: string;
	content_match_count: number; // number of content matches in this file
	content_matches: ContentMatch[]; // matched line snippets
	path_matches: PathMatch[]; // segments of the file path that matched
	file: {
		path: string; // path of the matching file within the repository
		type: string;
		links: {
			self: {
				href: string;
			};
		};
	};
}

/**
 * Content match type — lines around a match, split into segments where
 * `match: true` marks the matching text.
 */
export interface ContentMatch {
	lines: {
		line: number; // line number within the file
		segments: {
			text: string;
			match?: boolean; // true for the segment(s) that matched the query
		}[];
	}[];
}

/**
 * Path match type — one segment of a file path; `match` marks matching parts.
 */
export interface PathMatch {
	text: string;
	match?: boolean;
}

/**
 * Search for commits in a repository using the Bitbucket API
 *
 * Builds a BBQL `message ~ "<term>"` filter and queries the repository's
 * commits endpoint with optional pagination and field selection.
 *
 * @param {SearchCommitsParams} params - Parameters for the commit search
 * @returns {Promise<CommitsResponse>} The search results from the Bitbucket API
 * @throws {Error} When no Atlassian credentials are available
 */
export async function searchCommits(
	params: SearchCommitsParams,
): Promise<CommitsResponse> {
	// Build the query parameters - the Bitbucket API allows searching commits by message
	const queryParams = new URLSearchParams();

	// If search query is provided, add it as a q parameter
	if (params.searchQuery) {
		// Escape backslashes and double quotes so user input cannot terminate
		// the quoted BBQL term early (previously a query containing `"`
		// produced a malformed/injected filter expression).
		const escapedQuery = params.searchQuery.replace(/([\\"])/g, '\\$1');
		queryParams.append('q', `message ~ "${escapedQuery}"`);
	}

	// Add optional pagination parameters
	if (params.page) {
		queryParams.append('page', params.page.toString());
	}

	if (params.pageLen) {
		queryParams.append('pagelen', params.pageLen.toString());
	}

	// Add optional fields parameter for enhanced responses
	if (params.fields) {
		queryParams.append('fields', params.fields);
	}

	// Get credentials for API call
	const credentials = getAtlassianCredentials();
	if (!credentials) {
		throw new Error('No Atlassian credentials available');
	}

	// Set useBitbucketAuth to true since we're calling the Bitbucket API
	credentials.useBitbucketAuth = true;

	// Create API path for Bitbucket commits
	const path = `/2.0/repositories/${params.workspaceSlug}/${params.repoSlug}/commits${
		queryParams.toString() ? '?' + queryParams.toString() : ''
	}`;

	// Track searching commits in repository
	logger.debug(
		`Searching commits in repository: ${params.workspaceSlug}/${params.repoSlug}`,
		{
			searchQuery: params.searchQuery,
			path,
		},
	);

	// Call Bitbucket API with credentials and path
	return fetchAtlassian(credentials, path, {
		timeout: NETWORK_TIMEOUTS.SEARCH_REQUEST_TIMEOUT,
	});
}

/**
 * Search for code in a workspace using the Bitbucket API
 *
 * Assembles the final search expression (optionally scoped with "repo:",
 * "lang:", and "ext:" modifiers) and queries the workspace code-search
 * endpoint.
 *
 * @param {SearchCodeParams} params - Parameters for the code search
 * @returns {Promise<CodeSearchResponse>} The search results from the Bitbucket API
 * @throws {Error} When no Atlassian credentials are available
 */
export async function searchCode(
	params: SearchCodeParams,
): Promise<CodeSearchResponse> {
	// If repoSlug is provided, enhance the search query with repo: syntax
	const searchQuery = params.repoSlug
		? `${params.searchQuery} repo:${params.repoSlug}`
		: params.searchQuery;

	// Language mapping to handle common alternative names.
	// NOTE(review): assumes Bitbucket's "lang:" tokens are e.g. "ts"/"js"/
	// "py"/"terraform" — confirm against the code-search documentation.
	const languageMapping: Record<string, string> = {
		hcl: 'terraform',
		tf: 'terraform',
		typescript: 'ts',
		javascript: 'js',
		python: 'py',
		// Add more mappings as needed
	};

	// Append language and extension filters if provided
	let finalSearchQuery = searchQuery;
	if (params.language) {
		// Use the mapped language name if available, otherwise use the original
		const mappedLanguage = params.language.toLowerCase();
		const apiLanguage = languageMapping[mappedLanguage] || mappedLanguage;

		logger.debug(
			`Language mapping: "${mappedLanguage}" -> "${apiLanguage}"`,
		);
		finalSearchQuery += ` lang:${apiLanguage}`;
	}
	if (params.extension) {
		finalSearchQuery += ` ext:${params.extension}`;
	}

	// Build the query parameters (URLSearchParams handles URL encoding)
	const queryParams = new URLSearchParams({
		search_query: finalSearchQuery,
	});

	// Add optional pagination parameters
	if (params.page) {
		queryParams.append('page', params.page.toString());
	}

	if (params.pageLen) {
		queryParams.append('pagelen', params.pageLen.toString());
	}

	// Add optional fields parameter for enhanced responses
	if (params.fields) {
		queryParams.append('fields', params.fields);
	}

	// Get credentials for API call
	const credentials = getAtlassianCredentials();
	if (!credentials) {
		throw new Error('No Atlassian credentials available');
	}

	// Set useBitbucketAuth to true since we're calling the Bitbucket API
	credentials.useBitbucketAuth = true;

	// Create API path for Bitbucket code search
	const path = `/2.0/workspaces/${params.workspaceSlug}/search/code?${queryParams.toString()}`;

	// Track searching code in workspace
	logger.debug(`Searching code in workspace: ${params.workspaceSlug}`, {
		searchQuery: finalSearchQuery,
		path,
	});

	// Call Bitbucket API with credentials and path
	return fetchAtlassian(credentials, path, {
		timeout: NETWORK_TIMEOUTS.SEARCH_REQUEST_TIMEOUT,
	});
}

/**
 * Search for content in Bitbucket
 *
 * Issues a content search against the workspace (or a single repository when
 * repoSlug is given), with pagination and an optional content-type filter.
 * Not exported directly; reachable only through the default export below.
 *
 * @param params Search parameters
 * @returns Content search response
 * @throws Error when no Atlassian credentials are available
 */
async function searchContent(
	params: ContentSearchParams,
): Promise<ContentSearchResponse> {
	// Method-scoped logger intentionally shadows the module-level `logger`.
	const logger = Logger.forContext(
		'services/vendor.atlassian.search.service.ts',
		'searchContent',
	);

	try {
		const credentials = getAtlassianCredentials();
		if (!credentials) {
			throw new Error(
				'Atlassian credentials are required for content search',
			);
		}

		// Build query parameters
		const queryParams = new URLSearchParams();

		// Format the query
		queryParams.set('q', params.query);

		// Add pagination parameters (defaults: 25 per page, first page)
		queryParams.set('pagelen', String(params.limit || 25));
		queryParams.set('page', String(params.page || 1));

		// Add content type filter if specified
		if (params.contentType) {
			queryParams.set('content_type', params.contentType);
		}

		// Construct URL based on whether a repository is specified.
		// NOTE(review): the `/2.0/search/{workspace}` path differs from the
		// code-search endpoint used above — confirm it matches the current
		// Bitbucket Cloud API.
		let url = `/2.0/search/${params.workspaceSlug}`;
		if (params.repoSlug) {
			url = `/2.0/search/${params.workspaceSlug}/${params.repoSlug}`;
		}

		// Add query parameters
		url += `?${queryParams.toString()}`;

		logger.debug(`Content search request URL: ${url}`);

		// Make the request
		const response = await fetchAtlassian<ContentSearchResponse>(
			credentials,
			url,
			{
				timeout: NETWORK_TIMEOUTS.SEARCH_REQUEST_TIMEOUT,
			},
		);

		return response;
	} catch (error) {
		// Log with context before propagating to the caller.
		logger.error('Content search failed:', error);
		throw error;
	}
}

// Default export bundling the three search operations. Note that
// searchContent has no named export and is reachable only via this object.
export default {
	searchCode,
	searchCommits,
	searchContent,
};

```

--------------------------------------------------------------------------------
/src/services/vendor.atlassian.repositories.types.ts:
--------------------------------------------------------------------------------

```typescript
import { z } from 'zod';

/**
 * Types for Atlassian Bitbucket Repositories API
 */

// Link href schema — a single hyperlink entry as returned by Bitbucket
// ("href" plus an optional "name").
const LinkSchema = z.object({
	href: z.string(),
	name: z.string().optional(),
});

/**
 * Repository SCM type ("git" or legacy "hg")
 */
export const RepositorySCMSchema = z.enum(['git', 'hg']);

/**
 * Repository fork policy
 */
export const RepositoryForkPolicySchema = z.enum([
	'allow_forks',
	'no_public_forks',
	'no_forks',
]);

/**
 * Repository links object
 *
 * Every entry is optional; "clone" is an array of links.
 */
export const RepositoryLinksSchema = z.object({
	self: LinkSchema.optional(),
	html: LinkSchema.optional(),
	avatar: LinkSchema.optional(),
	pullrequests: LinkSchema.optional(),
	commits: LinkSchema.optional(),
	forks: LinkSchema.optional(),
	watchers: LinkSchema.optional(),
	downloads: LinkSchema.optional(),
	clone: z.array(LinkSchema).optional(),
	hooks: LinkSchema.optional(),
	issues: LinkSchema.optional(),
});

/**
 * Repository owner links schema (subset of links relevant to a user/team)
 */
const OwnerLinksSchema = z.object({
	self: LinkSchema.optional(),
	html: LinkSchema.optional(),
	avatar: LinkSchema.optional(),
});

/**
 * Repository owner object (a user or a team)
 */
export const RepositoryOwnerSchema = z.object({
	type: z.enum(['user', 'team']),
	username: z.string().optional(),
	display_name: z.string().optional(),
	uuid: z.string().optional(),
	links: OwnerLinksSchema.optional(),
});

/**
 * Repository branch object (as embedded in repository payloads, e.g.
 * mainbranch)
 */
export const RepositoryBranchSchema = z.object({
	type: z.literal('branch'),
	name: z.string(),
});

/**
 * Repository project links schema
 */
const ProjectLinksSchema = z.object({
	self: LinkSchema.optional(),
	html: LinkSchema.optional(),
});

/**
 * Repository project object (the Bitbucket project a repository belongs to)
 */
export const RepositoryProjectSchema = z.object({
	type: z.literal('project'),
	key: z.string(),
	uuid: z.string(),
	name: z.string(),
	links: ProjectLinksSchema.optional(),
});

/**
 * Repository object returned from the API
 */
export const RepositorySchema = z.object({
	type: z.literal('repository'),
	uuid: z.string(),
	full_name: z.string(), // "workspace/repo_slug"
	name: z.string(),
	description: z.string().optional(),
	is_private: z.boolean(),
	fork_policy: RepositoryForkPolicySchema.optional(),
	created_on: z.string().optional(),
	updated_on: z.string().optional(),
	size: z.number().optional(),
	language: z.string().optional(),
	has_issues: z.boolean().optional(),
	has_wiki: z.boolean().optional(),
	scm: RepositorySCMSchema,
	owner: RepositoryOwnerSchema,
	mainbranch: RepositoryBranchSchema.optional(),
	project: RepositoryProjectSchema.optional(),
	links: RepositoryLinksSchema,
});
export type Repository = z.infer<typeof RepositorySchema>;

/**
 * Parameters for listing repositories
 *
 * q/sort/role/page/pagelen are passed through to the Bitbucket repositories
 * endpoint as query parameters.
 */
export const ListRepositoriesParamsSchema = z.object({
	workspace: z.string(),
	q: z.string().optional(),
	sort: z.string().optional(),
	page: z.number().optional(),
	pagelen: z.number().optional(),
	role: z.string().optional(),
});
export type ListRepositoriesParams = z.infer<
	typeof ListRepositoriesParamsSchema
>;

/**
 * Parameters for getting a repository by identifier
 */
export const GetRepositoryParamsSchema = z.object({
	workspace: z.string(),
	repo_slug: z.string(),
});
export type GetRepositoryParams = z.infer<typeof GetRepositoryParamsSchema>;

/**
 * API response for listing repositories (page-based pagination)
 */
export const RepositoriesResponseSchema = z.object({
	pagelen: z.number(),
	page: z.number(),
	size: z.number(),
	next: z.string().optional(), // URL of the next page, when more results exist
	previous: z.string().optional(), // URL of the previous page
	values: z.array(RepositorySchema),
});
export type RepositoriesResponse = z.infer<typeof RepositoriesResponseSchema>;

// --- Commit History Types ---

/**
 * Parameters for listing commits.
 */
export const ListCommitsParamsSchema = z.object({
	workspace: z.string(),
	repo_slug: z.string(),
	include: z.string().optional(), // Branch, tag, or hash to include history from
	exclude: z.string().optional(), // Branch, tag, or hash to exclude history up to
	path: z.string().optional(), // File path to filter commits by
	page: z.number().optional(),
	pagelen: z.number().optional(),
});
export type ListCommitsParams = z.infer<typeof ListCommitsParamsSchema>;

/**
 * Commit author user links schema
 */
const CommitAuthorUserLinksSchema = z.object({
	self: LinkSchema.optional(),
	avatar: LinkSchema.optional(),
});

/**
 * Commit author user schema (the Bitbucket account behind the raw author
 * string, when resolvable)
 */
const CommitAuthorUserSchema = z.object({
	display_name: z.string().optional(),
	nickname: z.string().optional(),
	account_id: z.string().optional(),
	uuid: z.string().optional(),
	type: z.string(), // Usually 'user'
	links: CommitAuthorUserLinksSchema.optional(),
});

/**
 * Commit author schema
 */
export const CommitAuthorSchema = z.object({
	raw: z.string(), // raw "Name <email>" author string
	type: z.string(), // Usually 'author'
	user: CommitAuthorUserSchema.optional(), // absent for unmapped authors
});

/**
 * Commit links schema
 */
const CommitLinksSchema = z.object({
	self: LinkSchema.optional(),
	html: LinkSchema.optional(),
	diff: LinkSchema.optional(),
	approve: LinkSchema.optional(),
	comments: LinkSchema.optional(),
});

/**
 * Commit summary schema (rendered forms of the commit message)
 */
const CommitSummarySchema = z.object({
	raw: z.string().optional(),
	markup: z.string().optional(),
	html: z.string().optional(),
});

/**
 * Commit parent schema (reference to a parent commit)
 */
const CommitParentSchema = z.object({
	hash: z.string(),
	type: z.string(),
	links: z.unknown(),
});

/**
 * Represents a single commit in the history.
 */
export const CommitSchema = z.object({
	hash: z.string(),
	type: z.string(), // Usually 'commit'
	author: CommitAuthorSchema,
	date: z.string(), // ISO 8601 format date string
	message: z.string(),
	links: CommitLinksSchema,
	summary: CommitSummarySchema.optional(),
	parents: z.array(CommitParentSchema), // multiple parents for merge commits
});
export type Commit = z.infer<typeof CommitSchema>;

/**
 * API response for listing commits (paginated).
 */
export const PaginatedCommitsSchema = z.object({
	pagelen: z.number(),
	page: z.number().optional(),
	size: z.number().optional(),
	next: z.string().optional(), // URL of the next page, when more results exist
	previous: z.string().optional(),
	values: z.array(CommitSchema),
});
export type PaginatedCommits = z.infer<typeof PaginatedCommitsSchema>;

/**
 * Parameters for creating a branch.
 */
export const CreateBranchParamsSchema = z.object({
	workspace: z.string(),
	repo_slug: z.string(),
	name: z.string(), // New branch name
	target: z.object({
		hash: z.string(), // Source branch name or commit hash
	}),
});
export type CreateBranchParams = z.infer<typeof CreateBranchParamsSchema>;

/**
 * Response object when creating a branch.
 * Contains details about the newly created branch reference.
 */
export const BranchRefSchema = z.object({
	type: z.literal('branch'),
	name: z.string(),
	target: z.object({
		hash: z.string(),
		type: z.string(), // e.g., 'commit'
	}),
});
export type BranchRef = z.infer<typeof BranchRefSchema>;

/**
 * Parameters for getting a file's content from a repository.
 */
export const GetFileContentParamsSchema = z.object({
	workspace: z.string(),
	repo_slug: z.string(),
	commit: z.string(), // Branch name, tag, or commit hash
	path: z.string(), // File path within the repository
});
export type GetFileContentParams = z.infer<typeof GetFileContentParamsSchema>;

/**
 * Represents a branch target (usually a commit).
 */
export const BranchTargetSchema = z.object({
	hash: z.string(),
	type: z.string(), // Usually 'commit'
});

/**
 * Represents a branch in a Bitbucket repository.
 */
export const BranchSchema = z.object({
	name: z.string(),
	type: z.literal('branch'),
	target: BranchTargetSchema, // Commit the branch head points at
	merge_strategies: z.array(z.string()).optional(),
	default_merge_strategy: z.string().optional(),
	links: z.record(z.string(), z.unknown()).optional(), // Opaque link map; not validated further
});

/**
 * Parameters for listing branches in a repository.
 */
export const ListBranchesParamsSchema = z.object({
	workspace: z.string(),
	repo_slug: z.string(),
	page: z.number().optional(),
	pagelen: z.number().optional(),
	q: z.string().optional(), // Query for filtering branches
	sort: z.string().optional(), // Sort field
});
export type ListBranchesParams = z.infer<typeof ListBranchesParamsSchema>;

/**
 * API response for listing branches (paginated).
 * `next`/`previous` are full URLs to adjacent pages when available.
 */
export const BranchesResponseSchema = z.object({
	pagelen: z.number(),
	page: z.number().optional(),
	size: z.number().optional(),
	next: z.string().optional(),
	previous: z.string().optional(),
	values: z.array(BranchSchema),
});
export type BranchesResponse = z.infer<typeof BranchesResponseSchema>;

```

--------------------------------------------------------------------------------
/src/services/vendor.atlassian.pullrequests.types.ts:
--------------------------------------------------------------------------------

```typescript
/**
 * Types for Atlassian Bitbucket Pull Requests API
 */

import { Repository } from './vendor.atlassian.repositories.types.js';

/**
 * Lifecycle state of a pull request as reported by the Bitbucket API.
 */
export type PullRequestState = 'OPEN' | 'MERGED' | 'DECLINED' | 'SUPERSEDED';

/**
 * Pull request author or user reference.
 * Identity fields are optional; presumably the API may omit them depending
 * on privacy settings — verify against live responses if relied upon.
 */
export interface PullRequestUser {
	type: 'user' | 'team';
	uuid?: string;
	display_name?: string;
	nickname?: string;
	account_id?: string;
	links?: {
		self?: { href: string };
		html?: { href: string };
		avatar?: { href: string };
	};
}

/**
 * Content representation for rendering: the same text in raw source,
 * markup-language, and rendered HTML forms.
 */
export interface ContentRepresentation {
	raw: string;
	markup: string;
	html: string;
}

/**
 * Rendered content fields returned alongside a pull request.
 */
export interface RenderedContent {
	title?: ContentRepresentation;
	description?: ContentRepresentation;
	reason?: ContentRepresentation;
}

/**
 * Pull request summary (raw/markup/html forms of the description summary).
 */
export interface PullRequestSummary {
	raw: string;
	markup: string;
	html: string;
}

/**
 * Pull request links object — hypermedia links to related resources.
 * All optional; presence varies by endpoint and permissions.
 */
export interface PullRequestLinks {
	self?: { href: string; name?: string };
	html?: { href: string; name?: string };
	commits?: { href: string; name?: string };
	approve?: { href: string; name?: string };
	diff?: { href: string; name?: string };
	diffstat?: { href: string; name?: string };
	comments?: { href: string; name?: string };
	activity?: { href: string; name?: string };
	merge?: { href: string; name?: string };
	decline?: { href: string; name?: string };
}

/**
 * Pull request branch reference (source or destination side of a PR).
 */
export interface PullRequestBranchRef {
	repository: Partial<Repository>; // Repository the branch lives in; may be a trimmed object
	branch: {
		name: string;
		merge_strategies?: string[];
		default_merge_strategy?: string;
	};
	commit?: {
		hash: string; // Commit hash this ref points at
	};
}

/**
 * Pull request object returned from the API.
 */
export interface PullRequest {
	type: 'pullrequest';
	id: number;
	title: string;
	rendered?: RenderedContent;
	summary?: PullRequestSummary;
	state: PullRequestState;
	author: PullRequestUser;
	source: PullRequestBranchRef;
	destination: PullRequestBranchRef;
	merge_commit?: {
		hash: string; // Present only once the PR has been merged
	};
	comment_count?: number;
	task_count?: number;
	close_source_branch?: boolean; // Flag to close the source branch after merge
	closed_by?: PullRequestUser;
	reason?: string; // e.g. decline reason
	created_on: string; // ISO 8601 timestamp
	updated_on: string; // ISO 8601 timestamp
	reviewers?: PullRequestUser[];
	participants?: PullRequestUser[];
	links: PullRequestLinks;
}

/**
 * Extended pull request object with optional fields
 * @remarks Currently identical to PullRequest, but allows for future extension
 */
export type PullRequestDetailed = PullRequest;

/**
 * Parameters for listing pull requests.
 */
export interface ListPullRequestsParams {
	workspace: string;
	repo_slug: string;
	state?: PullRequestState | PullRequestState[]; // One or more states to filter by
	q?: string; // Bitbucket query expression
	sort?: string;
	page?: number;
	pagelen?: number;
}

/**
 * Parameters for getting a pull request by ID.
 */
export interface GetPullRequestParams {
	workspace: string;
	repo_slug: string;
	pull_request_id: number;
}

/**
 * API response for listing pull requests (paginated envelope).
 */
export interface PullRequestsResponse {
	pagelen: number;
	page: number;
	size: number;
	next?: string; // URL of the next page, if any
	previous?: string; // URL of the previous page, if any
	values: PullRequest[];
}

/**
 * Parameters for getting pull request comments
 */
export interface GetPullRequestCommentsParams {
	/**
	 * The workspace slug or UUID
	 */
	workspace: string;

	/**
	 * The repository slug or UUID
	 */
	repo_slug: string;

	/**
	 * The pull request ID
	 */
	pull_request_id: number;

	/**
	 * Page number for pagination
	 */
	page?: number;

	/**
	 * Number of items per page
	 */
	pagelen?: number;

	/**
	 * Property to sort by; a leading '-' reverses order (e.g., 'created_on', '-updated_on')
	 */
	sort?: string;
}

/**
 * Parameters for creating a comment on a pull request
 */
export interface CreatePullRequestCommentParams {
	/**
	 * The workspace slug or UUID
	 */
	workspace: string;

	/**
	 * The repository slug or UUID
	 */
	repo_slug: string;

	/**
	 * The pull request ID
	 */
	pull_request_id: number;

	/**
	 * The content of the comment
	 */
	content: {
		/**
		 * Raw comment text (can contain markdown)
		 */
		raw: string;
	};

	/**
	 * Optional inline comment location; omit for a general PR comment
	 */
	inline?: {
		/**
		 * The file path for the inline comment
		 */
		path: string;

		/**
		 * The line number in the destination file the comment anchors to
		 */
		to?: number;
	};

	/**
	 * For threaded comments, ID of the parent comment
	 */
	parent?: {
		id: number;
	};
}

/**
 * Inline comment position information
 */
export interface InlineCommentPosition {
	/**
	 * The file path the comment is on
	 */
	path: string;

	/**
	 * The original file path if renamed/moved
	 */
	from_path?: string;

	/**
	 * Line number in the "from" (old) side of the diff
	 */
	from?: number;

	/**
	 * Line number in the "to" (new) side of the diff
	 */
	to?: number;
}

/**
 * Pull request comment object
 */
export interface PullRequestComment {
	/**
	 * Comment ID
	 */
	id: number;

	/**
	 * Comment content in raw and (optionally) rendered forms
	 */
	content: {
		raw: string;
		markup?: string;
		html?: string;
		type?: string;
	};

	/**
	 * User who created the comment
	 */
	user: PullRequestUser;

	/**
	 * When the comment was created (ISO 8601)
	 */
	created_on: string;

	/**
	 * When the comment was last updated (ISO 8601)
	 */
	updated_on: string;

	/**
	 * Whether the comment has been deleted
	 */
	deleted?: boolean;

	/**
	 * For inline comments, contains file and line information
	 */
	inline?: InlineCommentPosition;

	/**
	 * For threaded comments, ID of the parent comment
	 */
	parent?: {
		id: number;
	};

	/**
	 * Links related to this comment
	 */
	links?: {
		self?: { href: string };
		html?: { href: string };
		code?: { href: string };
	};

	/**
	 * Type of the object
	 */
	type: 'pullrequest_comment';
}

/**
 * API response for listing pull request comments (paginated envelope)
 */
export interface PullRequestCommentsResponse {
	/**
	 * Number of items per page
	 */
	pagelen: number;

	/**
	 * Current page number
	 */
	page: number;

	/**
	 * Total number of items
	 */
	size: number;

	/**
	 * URL for the next page, if available
	 */
	next?: string;

	/**
	 * URL for the previous page, if available
	 */
	previous?: string;

	/**
	 * Array of comment objects
	 */
	values: PullRequestComment[];

	/**
	 * Reference to the pull request these comments belong to
	 */
	pullrequest?: {
		id: number;
		title?: string;
	};
}

/**
 * Parameters for creating a pull request
 */
export interface CreatePullRequestParams {
	/**
	 * The workspace slug or UUID
	 */
	workspace: string;

	/**
	 * The repository slug or UUID
	 */
	repo_slug: string;

	/**
	 * Title of the pull request
	 */
	title: string;

	/**
	 * Source branch information (the branch with the changes)
	 */
	source: {
		branch: {
			name: string;
		};
	};

	/**
	 * Destination branch information (the branch to merge into)
	 */
	destination: {
		branch: {
			name: string;
		};
	};

	/**
	 * Optional description for the pull request
	 */
	description?: string;

	/**
	 * Whether to close the source branch after merge
	 */
	close_source_branch?: boolean;
}

/**
 * Diffstat response representing changes in a pull request (paginated)
 */
export interface DiffstatResponse {
	pagelen?: number; // Items per page
	values: DiffstatFileChange[]; // One entry per changed file
	page?: number; // Current page number
	size?: number; // Total number of changed files
}

/**
 * Individual file change in a diffstat
 */
export interface DiffstatFileChange {
	status: string; // Change kind, e.g. 'added'/'modified' — confirm full value set against Bitbucket docs
	old?: {
		path: string; // Path before the change (absent for newly added files)
		type?: string;
	};
	new?: {
		path: string; // Path after the change (absent for removed files)
		type?: string;
	};
	lines_added?: number;
	lines_removed?: number;
}

/**
 * Parameters for updating a pull request
 */
export interface UpdatePullRequestParams {
	/**
	 * The workspace slug or UUID
	 */
	workspace: string;

	/**
	 * The repository slug or UUID
	 */
	repo_slug: string;

	/**
	 * The pull request ID
	 */
	pull_request_id: number;

	/**
	 * Updated title of the pull request (omit to leave unchanged)
	 */
	title?: string;

	/**
	 * Updated description for the pull request (omit to leave unchanged)
	 */
	description?: string;
}

/**
 * Parameters for approving a pull request
 */
export interface ApprovePullRequestParams {
	/**
	 * The workspace slug or UUID
	 */
	workspace: string;

	/**
	 * The repository slug or UUID
	 */
	repo_slug: string;

	/**
	 * The pull request ID
	 */
	pull_request_id: number;
}

/**
 * Parameters for requesting changes on a pull request
 */
export interface RejectPullRequestParams {
	/**
	 * The workspace slug or UUID
	 */
	workspace: string;

	/**
	 * The repository slug or UUID
	 */
	repo_slug: string;

	/**
	 * The pull request ID
	 */
	pull_request_id: number;
}

/**
 * Pull request participant representing approval/rejection status
 */
export interface PullRequestParticipant {
	/**
	 * Type of the object
	 */
	type: 'participant';

	/**
	 * User information
	 */
	user: PullRequestUser;

	/**
	 * Participant role
	 */
	role: 'PARTICIPANT' | 'REVIEWER';

	/**
	 * Whether the participant has approved the PR
	 */
	approved: boolean;

	/**
	 * Participant state (null when the participant has taken no action)
	 */
	state: 'approved' | 'changes_requested' | null;

	/**
	 * When the participant last participated (ISO 8601)
	 */
	participated_on: string;
}

```

--------------------------------------------------------------------------------
/src/controllers/atlassian.repositories.list.controller.ts:
--------------------------------------------------------------------------------

```typescript
import atlassianRepositoriesService from '../services/vendor.atlassian.repositories.service.js';
import { Logger } from '../utils/logger.util.js';
import { handleControllerError } from '../utils/error-handler.util.js';
import { DEFAULT_PAGE_SIZE, applyDefaults } from '../utils/defaults.util.js';
import {
	extractPaginationInfo,
	PaginationType,
} from '../utils/pagination.util.js';
import { formatPagination } from '../utils/formatter.util.js';
import { ControllerResponse } from '../types/common.types.js';
import { ListRepositoriesToolArgsType } from '../tools/atlassian.repositories.types.js';
import { formatRepositoriesList } from './atlassian.repositories.formatter.js';
import { ListRepositoriesParams } from '../services/vendor.atlassian.repositories.types.js';
import { getDefaultWorkspace } from '../utils/workspace.util.js';
import { formatBitbucketQuery } from '../utils/query.util.js';

// Contextualized logger scoped to this controller file; methods derive
// per-method loggers from it via logger.forMethod(...).
const logger = Logger.forContext(
	'controllers/atlassian.repositories.list.controller.ts',
);

/**
 * Lists repositories for a specific workspace with pagination and filtering options.
 *
 * Resolves the workspace (explicit option or configured default), builds the
 * Bitbucket `q` expression (combining free-text `query` and `projectKey`),
 * fetches a page from the repositories service, then post-filters by project
 * key client-side. Because the Bitbucket list API is not relied on to filter
 * by project key, filtering may leave fewer items than requested; in that
 * case additional pages are fetched to top the result back up to the limit.
 *
 * @param options - Options for listing repositories including workspaceSlug,
 *   query, projectKey, role, sort, limit and cursor.
 * @returns Formatted list of repositories with pagination information.
 * @throws Error when no workspace is provided and no default can be
 *   determined; otherwise a standardized controller error on failure.
 */
export async function handleRepositoriesList(
	options: ListRepositoriesToolArgsType,
): Promise<ControllerResponse> {
	const methodLogger = logger.forMethod('handleRepositoriesList');
	methodLogger.debug('Listing Bitbucket repositories...', options);

	try {
		// Resolve the workspace: explicit option wins, otherwise fall back
		// to the configured default workspace.
		let workspaceSlug = options.workspaceSlug;
		if (!workspaceSlug) {
			methodLogger.debug(
				'No workspace slug provided, using default workspace',
			);
			const defaultWorkspace = await getDefaultWorkspace();

			if (!defaultWorkspace) {
				throw new Error(
					'No workspace slug provided and no default workspace could be determined. Please provide a workspace slug or configure a default workspace.',
				);
			}

			workspaceSlug = defaultWorkspace;
			methodLogger.debug(`Using default workspace: ${workspaceSlug}`);
		}

		// Create defaults object with proper typing
		const defaults: Partial<ListRepositoriesToolArgsType> = {
			limit: DEFAULT_PAGE_SIZE,
			sort: '-updated_on',
		};

		// Apply defaults
		const mergedOptions = applyDefaults<ListRepositoriesToolArgsType>(
			{ ...options, workspaceSlug },
			defaults,
		);

		// Format the query for Bitbucket API if provided.
		// Combine query and projectKey with AND if both are present.
		const queryParts: string[] = [];
		if (mergedOptions.query) {
			// formatBitbucketQuery handles basic name/description search
			queryParts.push(formatBitbucketQuery(mergedOptions.query));
		}
		if (mergedOptions.projectKey) {
			queryParts.push(`project.key = "${mergedOptions.projectKey}"`);
		}
		const combinedQuery = queryParts.join(' AND ');

		if (combinedQuery) {
			// Use the method-scoped logger for consistency with the rest of
			// this function (previously logged via the file-level logger).
			methodLogger.info(
				`Searching repositories with query: ${combinedQuery}`,
			);
		}

		// Map cursor to page, guarding against a non-numeric cursor so we
		// never send `page=NaN` to the API (invalid cursors fall back to
		// the first page).
		const parsedCursor = mergedOptions.cursor
			? parseInt(mergedOptions.cursor, 10)
			: undefined;
		const pageParam =
			parsedCursor !== undefined && Number.isFinite(parsedCursor)
				? parsedCursor
				: undefined;

		// Map controller options to service parameters
		const serviceParams: ListRepositoriesParams = {
			// Required workspace
			workspace: workspaceSlug,
			// Handle limit with default value
			pagelen: mergedOptions.limit,
			// Page-based pagination (undefined when cursor absent/invalid)
			page: pageParam,
			// Default sort (-updated_on) was applied via defaults above
			sort: mergedOptions.sort,
			// Optional filter parameters
			...(combinedQuery && { q: combinedQuery }),
			...(mergedOptions.role && { role: mergedOptions.role }),
		};

		methodLogger.debug('Using service parameters:', serviceParams);

		const repositoriesData =
			await atlassianRepositoriesService.list(serviceParams);
		// Log only the count of repositories returned instead of the entire response
		methodLogger.debug(
			`Retrieved ${repositoriesData.values?.length || 0} repositories`,
		);

		// Post-filter by project key if provided and Bitbucket API returned extra results
		if (mergedOptions.projectKey && repositoriesData.values) {
			const originalCount = repositoriesData.values.length;

			// Only keep repositories with exact project key match.
			// NOTE: This filtering is done client-side since Bitbucket API doesn't directly support
			// filtering by project key in its query parameters. This means all repositories are first
			// fetched and then filtered locally, which may result in fewer results than expected
			// if the limit parameter is also used.
			repositoriesData.values = repositoriesData.values.filter(
				(repo) => repo.project?.key === mergedOptions.projectKey,
			);

			const filteredCount = repositoriesData.values.length;

			// Log filtering results to help with debugging
			if (filteredCount !== originalCount) {
				methodLogger.debug(
					`Post-filtered repositories by projectKey=${mergedOptions.projectKey}: ${filteredCount} of ${originalCount} matched.`,
				);

				// Adjust the size to reflect the actual filtered count (matters for pagination)
				if (repositoriesData.size) {
					// Capture the original size BEFORE mutating it so the
					// debug log below reports accurate old/new values.
					// (Previously the log printed the already-mutated size
					// as the "from" value.)
					const originalSize = repositoriesData.size;
					// Adjust total size proportionally based on how many were filtered out
					const filterRatio = filteredCount / originalCount;
					const estimatedTotalSize = Math.ceil(
						originalSize * filterRatio,
					);
					repositoriesData.size = Math.max(
						filteredCount,
						estimatedTotalSize,
					);

					methodLogger.debug(
						`Adjusted size from ${originalSize} to ${repositoriesData.size} based on filtering ratio`,
					);
				}

				// If this is the first page and we have fewer results than requested, try to fetch more
				if (
					filteredCount <
						(serviceParams.pagelen || DEFAULT_PAGE_SIZE) &&
					repositoriesData.next
				) {
					methodLogger.debug(
						`After filtering, only ${filteredCount} items remain. Fetching more pages to supplement...`,
					);

					// Keep fetching next pages until we have enough items or no more pages
					let nextPageUrl: string | undefined = repositoriesData.next;
					let totalItemsNeeded =
						(serviceParams.pagelen || DEFAULT_PAGE_SIZE) -
						filteredCount;

					while (nextPageUrl && totalItemsNeeded > 0) {
						try {
							// Extract the next page number from the URL
							let nextPage: number | undefined;
							try {
								const nextUrl = new URL(nextPageUrl);
								const pageParamValue =
									nextUrl.searchParams.get('page');
								if (pageParamValue) {
									nextPage = parseInt(pageParamValue, 10);
								}
							} catch (e) {
								methodLogger.warn(
									`Could not extract next page from URL: ${nextPageUrl}`,
									e,
								);
								break;
							}

							if (!nextPage) break;

							// Fetch the next page
							const nextPageParams = {
								...serviceParams,
								page: nextPage,
							};

							const nextPageData =
								await atlassianRepositoriesService.list(
									nextPageParams,
								);

							// Filter the next page results with the same predicate
							if (nextPageData.values) {
								const nextPageFiltered =
									nextPageData.values.filter(
										(repo) =>
											repo.project?.key ===
											mergedOptions.projectKey,
									);

								// Add items to reach the requested limit
								const itemsToAdd = nextPageFiltered.slice(
									0,
									totalItemsNeeded,
								);

								if (itemsToAdd.length > 0) {
									repositoriesData.values = [
										...repositoriesData.values,
										...itemsToAdd,
									];

									totalItemsNeeded -= itemsToAdd.length;

									methodLogger.debug(
										`Added ${itemsToAdd.length} items from page ${nextPage} to reach requested limit. ${totalItemsNeeded} more needed.`,
									);
								}

								// Update next page URL for the loop
								nextPageUrl = nextPageData.next || undefined;

								// If we've fetched all filtered items from this page but there are more pages
								// and we still need more items, continue to the next page
								if (
									nextPageFiltered.length <=
										itemsToAdd.length &&
									totalItemsNeeded > 0
								) {
									continue;
								}

								// If we got all the items we need, update pagination accordingly
								if (totalItemsNeeded <= 0) {
									// We have enough items now, but there are more available
									if (nextPageData.next) {
										repositoriesData.next =
											nextPageData.next;
									}
									break;
								}
							} else {
								// No values in the response, stop fetching
								nextPageUrl = undefined;
							}
						} catch (fetchError) {
							// Best-effort supplement: log and keep what we have
							methodLogger.warn(
								`Error fetching page to supplement filtered results:`,
								fetchError,
							);
							break;
						}
					}
				}
			}
		}

		// Extract pagination information using the utility
		const pagination = extractPaginationInfo(
			repositoriesData,
			PaginationType.PAGE,
		);

		// Format the repositories data for display using the formatter
		const formattedRepositories = formatRepositoriesList(repositoriesData);

		// Create the final content by combining the formatted repositories with pagination information
		let finalContent = formattedRepositories;

		// Add pagination information if available
		if (
			pagination &&
			(pagination.hasMore || pagination.count !== undefined)
		) {
			const paginationString = formatPagination(pagination);
			finalContent += '\n\n' + paginationString;
		}

		return {
			content: finalContent,
		};
	} catch (error) {
		// Use the standardized error handler
		throw handleControllerError(error, {
			entityType: 'Repositories',
			operation: 'listing',
			source: 'controllers/atlassian.repositories.list.controller.ts@handleRepositoriesList',
			additionalInfo: { options },
		});
	}
}

```

--------------------------------------------------------------------------------
/src/controllers/atlassian.diff.controller.ts:
--------------------------------------------------------------------------------

```typescript
import { Logger } from '../utils/logger.util.js';
import { handleControllerError } from '../utils/error-handler.util.js';
import { DEFAULT_PAGE_SIZE, applyDefaults } from '../utils/defaults.util.js';
import { ControllerResponse } from '../types/common.types.js';
import {
	extractPaginationInfo,
	PaginationType,
} from '../utils/pagination.util.js';
import { formatPagination } from '../utils/formatter.util.js';
import * as diffService from '../services/vendor.atlassian.repositories.diff.service.js';
import { formatDiffstat, formatFullDiff } from './atlassian.diff.formatter.js';
import { getDefaultWorkspace } from '../utils/workspace.util.js';

// File-scoped logger; per-function loggers are derived via forMethod(...).
const controllerLogger = Logger.forContext(
	'controllers/atlassian.diff.controller.ts',
);
controllerLogger.debug('Bitbucket diff controller initialized');

/**
 * Base interface that extends Record<string, unknown> for error handling compatibility
 * (handleControllerError's additionalInfo accepts a generic record).
 */
interface BaseDiffOptions extends Record<string, unknown> {
	workspaceSlug?: string; // Optional; falls back to the default workspace when omitted
	repoSlug: string;
	includeFullDiff?: boolean; // When true, also fetch the raw diff text (not just diffstat)
	limit?: number; // Page size for diffstat pagination
	cursor?: number; // Page cursor for diffstat pagination
	topic?: boolean; // Bitbucket 'topic' diff mode flag, forwarded to the diffstat API
}

/**
 * Interface for branch diff options
 */
interface BranchDiffOptions extends BaseDiffOptions {
	sourceBranch: string; // Branch containing the changes
	destinationBranch?: string; // Defaults to 'main' when omitted
}

/**
 * Interface for commit diff options
 */
interface CommitDiffOptions extends BaseDiffOptions {
	sinceCommit: string;
	untilCommit: string;
}

/**
 * Compare two branches and return the differences
 *
 * Fetches a diffstat (and optionally the raw diff) for the spec
 * `destinationBranch..sourceBranch`, formats the result, and appends
 * pagination info when present.
 *
 * @param options - Options for branch comparison
 * @returns Promise with formatted diff content and pagination
 * @throws A user-friendly Error when one of the branches cannot be found;
 *   otherwise a standardized controller error.
 */
async function branchDiff(
	options: BranchDiffOptions,
): Promise<ControllerResponse> {
	const methodLogger = controllerLogger.forMethod('branchDiff');

	try {
		methodLogger.debug('Comparing branches', options);

		// Apply defaults
		const defaults = {
			limit: DEFAULT_PAGE_SIZE,
			includeFullDiff: true,
			destinationBranch: 'main', // Default to main if not provided
			topic: false, // Default to topic=false which shows all changes between branches
		};

		// Explicitly cast the result of applyDefaults to preserve the original types
		const params = applyDefaults(options, defaults) as typeof options &
			typeof defaults;

		// Handle optional workspaceSlug — resolve the default workspace lazily
		if (!params.workspaceSlug) {
			methodLogger.debug(
				'No workspace provided, fetching default workspace',
			);
			const defaultWorkspace = await getDefaultWorkspace();
			if (!defaultWorkspace) {
				throw new Error(
					'Could not determine a default workspace. Please provide a workspaceSlug.',
				);
			}
			params.workspaceSlug = defaultWorkspace;
			methodLogger.debug(
				`Using default workspace: ${params.workspaceSlug}`,
			);
		}

		// Construct the spec (e.g., "main..feature")
		// NOTE: Bitbucket API expects the destination branch first, then the source branch
		// This is the opposite of what some Git tools use (e.g., git diff source..destination)
		// The diff shows changes that would need to be applied to destination to match source
		//
		// IMPORTANT: This behavior is counterintuitive in two ways:
		// 1. The parameter names "sourceBranch" and "destinationBranch" suggest a certain direction,
		//    but the output is displayed as "destinationBranch → sourceBranch"
		// 2. When comparing branches with newer content in the feature branch (source), full diffs
		//    might only show when using parameters in one order, and only summaries in the other order
		//
		// We document this behavior clearly in the CLI and Tool interfaces
		const spec = `${params.destinationBranch}..${params.sourceBranch}`;

		methodLogger.debug(`Using diff spec: ${spec}`);

		try {
			// Fetch diffstat for the branches
			const diffstat = await diffService.getDiffstat({
				workspace: params.workspaceSlug,
				repo_slug: params.repoSlug,
				spec,
				pagelen: params.limit,
				cursor: params.cursor,
				topic: params.topic,
			});

			// Extract pagination info (page-based scheme)
			const pagination = extractPaginationInfo(
				diffstat,
				PaginationType.PAGE,
			);

			// Fetch full diff if requested (second, heavier API call)
			let rawDiff: string | null = null;
			if (params.includeFullDiff) {
				rawDiff = await diffService.getRawDiff({
					workspace: params.workspaceSlug,
					repo_slug: params.repoSlug,
					spec,
				});
			}

			// Format the results: full diff only when requested AND non-empty
			let content =
				params.includeFullDiff && rawDiff
					? formatFullDiff(
							diffstat,
							rawDiff,
							params.destinationBranch,
							params.sourceBranch,
						)
					: formatDiffstat(
							diffstat,
							params.destinationBranch,
							params.sourceBranch,
						);

			// Add pagination information if available
			if (
				pagination &&
				(pagination.hasMore || pagination.count !== undefined)
			) {
				const paginationString = formatPagination(pagination);
				content += '\n\n' + paginationString;
			}

			return {
				content,
			};
		} catch (error) {
			// Enhance error handling for common diff-specific errors
			if (
				error instanceof Error &&
				error.message.includes(
					'source or destination could not be found',
				)
			) {
				// Create a more user-friendly error message
				throw new Error(
					`Unable to generate diff between '${params.sourceBranch}' and '${params.destinationBranch}'. ` +
						`One or both of these branches may not exist in the repository. ` +
						`Please verify both branch names and ensure you have access to view them.`,
				);
			}
			// Re-throw other errors to be handled by the outer catch block
			throw error;
		}
	} catch (error) {
		throw handleControllerError(error, {
			entityType: 'Branch Diff',
			operation: 'comparing branches',
			source: 'controllers/atlassian.diff.controller.ts@branchDiff',
			additionalInfo: options,
		});
	}
}

/**
 * Compare two commits and return the differences
 *
 * Fetches a diffstat (and optionally the raw diff) for the spec
 * `sinceCommit..untilCommit`, formats the result, and appends pagination
 * info when present.
 *
 * @param options - Options for commit comparison
 * @returns Promise with formatted diff content and pagination
 * @throws A user-friendly Error when one of the commits cannot be found;
 *   otherwise a standardized controller error.
 */
async function commitDiff(
	options: CommitDiffOptions,
): Promise<ControllerResponse> {
	const methodLogger = controllerLogger.forMethod('commitDiff');

	try {
		methodLogger.debug('Comparing commits', options);

		// Apply defaults
		const defaults = {
			limit: DEFAULT_PAGE_SIZE,
			includeFullDiff: true,
			topic: false, // Default to topic=false which shows all changes between commits
		};

		// Explicitly cast the result of applyDefaults to preserve the original types
		const params = applyDefaults(options, defaults) as typeof options &
			typeof defaults;

		// Handle optional workspaceSlug — resolve the default workspace lazily
		if (!params.workspaceSlug) {
			methodLogger.debug(
				'No workspace provided, fetching default workspace',
			);
			const defaultWorkspace = await getDefaultWorkspace();
			if (!defaultWorkspace) {
				throw new Error(
					'Could not determine a default workspace. Please provide a workspaceSlug.',
				);
			}
			params.workspaceSlug = defaultWorkspace;
			methodLogger.debug(
				`Using default workspace: ${params.workspaceSlug}`,
			);
		}

		// Construct the spec (e.g., "a1b2c3d..e4f5g6h")
		// NOTE: Bitbucket API expects the base/since commit first, then the target/until commit
		// The diff shows changes that would need to be applied to base to match target
		//
		// IMPORTANT: The parameter names are counterintuitive to how they must be used:
		// 1. For proper results with full code changes, sinceCommit should be the NEWER commit,
		//    and untilCommit should be the OLDER commit (reverse chronological order)
		// 2. If used with chronological order (older → newer), the result may show "No changes detected"
		//
		// We document this behavior clearly in the CLI and Tool interfaces
		const spec = `${params.sinceCommit}..${params.untilCommit}`;

		methodLogger.debug(`Using diff spec: ${spec}`);

		try {
			// Fetch diffstat for the commits.
			// FIX: forward `topic` to the service. The defaults above set it
			// and branchDiff already forwards it, but it was previously
			// dropped here, so the option had no effect for commit diffs.
			const diffstat = await diffService.getDiffstat({
				workspace: params.workspaceSlug,
				repo_slug: params.repoSlug,
				spec,
				pagelen: params.limit,
				cursor: params.cursor,
				topic: params.topic,
			});

			// Extract pagination info (page-based scheme)
			const pagination = extractPaginationInfo(
				diffstat,
				PaginationType.PAGE,
			);

			// Fetch full diff if requested (second, heavier API call)
			let rawDiff: string | null = null;
			if (params.includeFullDiff) {
				rawDiff = await diffService.getRawDiff({
					workspace: params.workspaceSlug,
					repo_slug: params.repoSlug,
					spec,
				});
			}

			// Format the results: full diff only when requested AND non-empty
			let content =
				params.includeFullDiff && rawDiff
					? formatFullDiff(
							diffstat,
							rawDiff,
							params.sinceCommit,
							params.untilCommit,
						)
					: formatDiffstat(
							diffstat,
							params.sinceCommit,
							params.untilCommit,
						);

			// Add pagination information if available
			if (
				pagination &&
				(pagination.hasMore || pagination.count !== undefined)
			) {
				const paginationString = formatPagination(pagination);
				content += '\n\n' + paginationString;
			}

			return {
				content,
			};
		} catch (error) {
			// Enhance error handling for common diff-specific errors
			if (
				error instanceof Error &&
				error.message.includes(
					'source or destination could not be found',
				)
			) {
				// Create a more user-friendly error message
				throw new Error(
					`Unable to generate diff between commits '${params.sinceCommit}' and '${params.untilCommit}'. ` +
						`One or both of these commits may not exist in the repository or may be in the wrong order. ` +
						`Please verify both commit hashes and ensure you have access to view them.`,
				);
			}
			// Re-throw other errors to be handled by the outer catch block
			throw error;
		}
	} catch (error) {
		throw handleControllerError(error, {
			entityType: 'Commit Diff',
			operation: 'comparing commits',
			source: 'controllers/atlassian.diff.controller.ts@commitDiff',
			additionalInfo: options,
		});
	}
}

export default { branchDiff, commitDiff };

```

--------------------------------------------------------------------------------
/src/controllers/atlassian.search.controller.test.ts:
--------------------------------------------------------------------------------

```typescript
import atlassianSearchController from './atlassian.search.controller.js';
import { getAtlassianCredentials } from '../utils/transport.util.js';
import { config } from '../utils/config.util.js';
import { handleRepositoriesList } from './atlassian.repositories.list.controller.js';
import atlassianWorkspacesController from './atlassian.workspaces.controller.js';

describe('Atlassian Search Controller', () => {
	// NOTE: these are live integration tests — they call the real Bitbucket
	// API using credentials from the environment, and each test returns
	// early (silently skipping) when no credentials are configured.

	// Load configuration and check for credentials before all tests
	beforeAll(() => {
		config.load(); // Ensure config is loaded
		const credentials = getAtlassianCredentials();
		if (!credentials) {
			console.warn(
				'Skipping Atlassian Search Controller tests: No credentials available',
			);
		}
	});

	// Helper function to skip tests when credentials are missing
	const skipIfNoCredentials = () => !getAtlassianCredentials();

	// Helper to get valid repository information for testing.
	// It parses the Markdown content returned by the workspace/repository
	// controllers, so it is coupled to the formatter output (the
	// `**Slug**:` lines). Returns null when nothing suitable is found.
	async function getRepositoryInfo(): Promise<{
		workspaceSlug: string;
		repoSlug: string;
	} | null> {
		if (skipIfNoCredentials()) return null;

		try {
			// First get a workspace
			const workspacesResult = await atlassianWorkspacesController.list({
				limit: 1,
			});

			if (workspacesResult.content === 'No Bitbucket workspaces found.') {
				return null;
			}

			// Extract workspace slug
			const workspaceMatch = workspacesResult.content.match(
				/\*\*Slug\*\*:\s+([^\s\n]+)/,
			);
			const workspaceSlug = workspaceMatch ? workspaceMatch[1] : null;

			if (!workspaceSlug) return null;

			// Get a repository from this workspace
			const reposResult = await handleRepositoriesList({
				workspaceSlug,
				limit: 1,
			});

			if (
				reposResult.content ===
				'No repositories found in this workspace.'
			) {
				return null;
			}

			// Extract repo slug
			const repoSlugMatch = reposResult.content.match(
				/\*\*Slug\*\*:\s+([^\s\n]+)/,
			);
			const repoSlug = repoSlugMatch ? repoSlugMatch[1] : null;

			if (!repoSlug) return null;

			return { workspaceSlug, repoSlug };
		} catch (error) {
			console.warn(
				'Could not fetch repository info for search tests:',
				error,
			);
			return null;
		}
	}

	describe('search', () => {
		it('should search across all scopes when type=code', async () => {
			if (skipIfNoCredentials()) return;

			const repoInfo = await getRepositoryInfo();
			if (!repoInfo) {
				return; // Skip silently - no repository info available for testing
			}

			const result = await atlassianSearchController.search({
				workspace: repoInfo.workspaceSlug,
				repo: repoInfo.repoSlug,
				type: 'code',
				query: 'initial commit',
			});

			// Verify the response structure
			expect(result).toHaveProperty('content');
			expect(typeof result.content).toBe('string');

			// Should include code search results header
			expect(result.content).toContain('Code Search Results');
		}, 30000);

		it('should search only repositories when type=repositories', async () => {
			if (skipIfNoCredentials()) return;

			const repoInfo = await getRepositoryInfo();
			if (!repoInfo) {
				return; // Skip silently - no repository info available for testing
			}

			const result = await atlassianSearchController.search({
				workspace: repoInfo.workspaceSlug,
				type: 'repositories',
				query: repoInfo.repoSlug,
			});

			// Verify the response structure
			expect(result).toHaveProperty('content');
			expect(typeof result.content).toBe('string');

			// Should include only repository section
			expect(result.content).toContain('Repository Search Results');
			expect(result.content).not.toContain('Pull Request Search Results');
		}, 30000);

		it('should search only pull requests when type=pullrequests', async () => {
			if (skipIfNoCredentials()) return;

			const repoInfo = await getRepositoryInfo();
			if (!repoInfo) {
				return; // Skip silently - no repository info available for testing
			}

			const result = await atlassianSearchController.search({
				workspace: repoInfo.workspaceSlug,
				repo: repoInfo.repoSlug,
				type: 'pullrequests',
				query: 'test',
			});

			// Verify the response structure
			expect(result).toHaveProperty('content');
			expect(typeof result.content).toBe('string');

			// Should include only PR section
			expect(result.content).not.toContain('Repository Search Results');
			expect(result.content).toContain('Pull Request Search Results');
		}, 30000);

		it('should filter results with query parameter', async () => {
			if (skipIfNoCredentials()) return;

			const repoInfo = await getRepositoryInfo();
			if (!repoInfo) {
				return; // Skip silently - no repository info available for testing
			}

			// Use a query that might match something (repository name itself often works)
			const result = await atlassianSearchController.search({
				workspace: repoInfo.workspaceSlug,
				query: repoInfo.repoSlug,
				type: 'repositories',
			});

			// Verify the response structure
			expect(result).toHaveProperty('content');
			expect(typeof result.content).toBe('string');

			// If results are found, content should include the query term
			const resultsFound = !result.content.includes('No results found');
			if (resultsFound) {
				expect(result.content.toLowerCase()).toContain(
					repoInfo.repoSlug.toLowerCase(),
				);
			}
		}, 30000);

		it('should handle pagination options (limit/cursor)', async () => {
			if (skipIfNoCredentials()) return;

			const repoInfo = await getRepositoryInfo();
			if (!repoInfo) {
				return; // Skip silently - no repository info available for testing
			}

			// Fetch first page with limit 1
			const result1 = await atlassianSearchController.search({
				workspace: repoInfo.workspaceSlug,
				type: 'repositories',
				limit: 1,
				query: repoInfo.repoSlug,
			});

			// Extract pagination information from content — the cursor is
			// scraped from the formatted Markdown footer, not a structured field.
			const hasMoreResults = result1.content.includes(
				'More results are available.',
			);
			const cursorMatch = result1.content.match(
				/\*Next cursor: `([^`]+)`\*/,
			);
			const nextCursor = cursorMatch ? cursorMatch[1] : null;

			// If pagination is possible, test cursor-based pagination
			if (hasMoreResults && nextCursor) {
				const result2 = await atlassianSearchController.search({
					workspace: repoInfo.workspaceSlug,
					type: 'repositories',
					limit: 1,
					cursor: nextCursor,
					query: repoInfo.repoSlug,
				});

				// Both responses should have proper structure
				expect(result2).toHaveProperty('content');

				// The content should be different
				expect(result1.content).not.toEqual(result2.content);
			} else {
				console.warn(
					'Skipping cursor part of pagination test: Either no second page available or no items found.',
				);
			}
		}, 30000);

		it('should give an error when workspace is missing or empty', async () => {
			if (skipIfNoCredentials()) return;

			// Empty workspace should return an error message
			const result = await atlassianSearchController.search({
				type: 'repositories',
				workspace: '', // Empty workspace should trigger error
				query: 'test',
			});

			// Verify the response structure
			expect(result).toHaveProperty('content');
			expect(typeof result.content).toBe('string');

			// Content should include error message
			expect(result.content).toContain('Error:');
			expect(result.content).toContain('workspace');
		}, 30000);

		it('should work without a repo when type=repositories', async () => {
			if (skipIfNoCredentials()) return;

			const repoInfo = await getRepositoryInfo();
			if (!repoInfo) {
				return; // Skip silently - no repository info available for testing
			}

			// Should not throw an error when repo is missing but type is repositories
			const result = await atlassianSearchController.search({
				workspace: repoInfo.workspaceSlug,
				type: 'repositories',
				query: repoInfo.repoSlug,
			});

			// Verify the response structure
			expect(result).toHaveProperty('content');
			expect(typeof result.content).toBe('string');
		}, 30000);

		it('should require repo when type=pullrequests', async () => {
			if (skipIfNoCredentials()) return;

			const repoInfo = await getRepositoryInfo();
			if (!repoInfo) {
				return; // Skip silently - no repository info available for testing
			}

			// When searching pull requests without a repo, should return an error message
			const result = await atlassianSearchController.search({
				workspace: repoInfo.workspaceSlug,
				type: 'pullrequests',
				query: 'test',
				// Intentionally omit repo
			});

			// Content should include an error message
			expect(result.content).toContain('Error:');
			expect(result.content).toContain('required');
		}, 30000);

		it('should handle no results scenario', async () => {
			if (skipIfNoCredentials()) return;

			const repoInfo = await getRepositoryInfo();
			if (!repoInfo) {
				return; // Skip silently - no repository info available for testing
			}

			// Use a query string that will definitely not match anything
			const noMatchQuery = 'xzqwxtrv12345xyz987nonexistentstring';

			const result = await atlassianSearchController.search({
				workspace: repoInfo.workspaceSlug,
				query: noMatchQuery,
				type: 'code',
				repo: repoInfo.repoSlug,
			});

			// Verify the response structure
			expect(result).toHaveProperty('content');
			expect(typeof result.content).toBe('string');

			// Content should show no results
			expect(result.content).toContain('No code matches found');
		}, 30000);

		it('should handle errors for invalid workspace', async () => {
			if (skipIfNoCredentials()) return;

			const invalidWorkspace =
				'this-workspace-definitely-does-not-exist-12345';

			// Expect the controller call to reject when underlying controllers fail
			await expect(
				atlassianSearchController.search({
					workspace: invalidWorkspace,
					type: 'repositories',
					query: 'test-query', // Add a query to avoid the query validation error
				}),
			).rejects.toThrow();
		}, 30000);
	});
});

```

--------------------------------------------------------------------------------
/src/controllers/atlassian.repositories.content.controller.ts:
--------------------------------------------------------------------------------

```typescript
import atlassianRepositoriesService from '../services/vendor.atlassian.repositories.service.js';
import { Logger } from '../utils/logger.util.js';
import { handleControllerError } from '../utils/error-handler.util.js';
import { ControllerResponse } from '../types/common.types.js';
import { CloneRepositoryToolArgsType } from '../tools/atlassian.repositories.types.js';
import { getDefaultWorkspace } from '../utils/workspace.util.js';
import { executeShellCommand } from '../utils/shell.util.js';
import * as path from 'path';
import * as fs from 'fs/promises';
import { constants } from 'fs';

// Logger instance for this module
const logger = Logger.forContext(
	'controllers/atlassian.repositories.content.controller.ts',
);

/**
 * Clones a Bitbucket repository to the local filesystem.
 *
 * Resolves the workspace (falling back to the default workspace when
 * omitted), validates the target directory (existence and write
 * permission), prefers an SSH clone URL over HTTPS, and shells out to
 * `git clone`.
 *
 * @param options Options including repository identifiers and target path
 * @returns Information about the cloned repository
 * @throws Error when required parameters are missing, the target path is
 *         not writable, no clone URL is available, or the git command fails
 */
export async function handleCloneRepository(
	options: CloneRepositoryToolArgsType,
): Promise<ControllerResponse> {
	const methodLogger = logger.forMethod('handleCloneRepository');
	methodLogger.debug('Cloning repository with options:', options);

	try {
		// Handle optional workspaceSlug
		let { workspaceSlug } = options;
		if (!workspaceSlug) {
			methodLogger.debug(
				'No workspace provided, fetching default workspace',
			);
			const defaultWorkspace = await getDefaultWorkspace();
			if (!defaultWorkspace) {
				throw new Error(
					'No default workspace found. Please provide a workspace slug.',
				);
			}
			workspaceSlug = defaultWorkspace;
			methodLogger.debug(`Using default workspace: ${defaultWorkspace}`);
		}

		// Required parameters check
		const { repoSlug, targetPath } = options;
		if (!repoSlug) {
			throw new Error('Repository slug is required');
		}
		if (!targetPath) {
			throw new Error('Target path is required');
		}

		// Normalize and resolve the target path.
		// If it's a relative path, convert it to absolute based on current working directory
		const processedTargetPath = path.isAbsolute(targetPath)
			? targetPath
			: path.resolve(process.cwd(), targetPath);

		methodLogger.debug(
			`Normalized target path: ${processedTargetPath} (original: ${targetPath})`,
		);

		// Validate directory access and permissions before proceeding.
		// Existence and writability are checked as two SEPARATE steps so a
		// permission failure on an existing directory is reported as such.
		// (Previously the permission error was caught by the "doesn't exist"
		// handler, whose recursive mkdir succeeds on an existing directory,
		// silently bypassing the write-permission check.)
		let targetDirExists = true;
		try {
			await fs.access(processedTargetPath, constants.F_OK);
			methodLogger.debug(
				`Target directory exists: ${processedTargetPath}`,
			);
		} catch {
			targetDirExists = false;
		}

		if (targetDirExists) {
			// Directory exists: verify we have write permission to it.
			try {
				await fs.access(processedTargetPath, constants.W_OK);
				methodLogger.debug(
					`Have write permission to: ${processedTargetPath}`,
				);
			} catch {
				throw new Error(
					`Permission denied: You don't have write access to the target directory: ${processedTargetPath}`,
				);
			}
		} else {
			// Directory doesn't exist, try to create it
			methodLogger.debug(
				`Target directory doesn't exist, creating: ${processedTargetPath}`,
			);
			try {
				await fs.mkdir(processedTargetPath, { recursive: true });
				methodLogger.debug(
					`Successfully created directory: ${processedTargetPath}`,
				);
			} catch (mkdirError) {
				throw new Error(
					`Failed to create target directory ${processedTargetPath}: ${(mkdirError as Error).message}. Please ensure you have write permissions to the parent directory.`,
				);
			}
		}

		// Get repository details to determine clone URL
		methodLogger.debug(
			`Getting repository details for ${workspaceSlug}/${repoSlug}`,
		);
		const repoDetails = await atlassianRepositoriesService.get({
			workspace: workspaceSlug,
			repo_slug: repoSlug,
		});

		// Find SSH clone URL (preferred) or fall back to HTTPS
		let cloneUrl: string | undefined;
		let cloneProtocol: string = 'SSH'; // Default to SSH

		if (repoDetails.links?.clone) {
			// First try to find SSH clone URL
			const sshClone = repoDetails.links.clone.find(
				(link) => link.name === 'ssh',
			);

			if (sshClone) {
				cloneUrl = sshClone.href;
			} else {
				// Fall back to HTTPS if SSH is not available
				const httpsClone = repoDetails.links.clone.find(
					(link) => link.name === 'https',
				);

				if (httpsClone) {
					cloneUrl = httpsClone.href;
					cloneProtocol = 'HTTPS';
					methodLogger.warn(
						'SSH clone URL not found, falling back to HTTPS',
					);
				}
			}
		}

		if (!cloneUrl) {
			throw new Error(
				'Could not find a valid clone URL for the repository',
			);
		}

		// Determine full target directory path.
		// Clone into a subdirectory named after the repo slug
		const targetDir = path.join(processedTargetPath, repoSlug);
		methodLogger.debug(`Will clone to: ${targetDir}`);

		// Check if the clone destination already exists; bail out early with
		// a warning instead of letting `git clone` fail on a non-empty dir.
		try {
			const stats = await fs.stat(targetDir);
			if (stats.isDirectory()) {
				methodLogger.warn(
					`Target directory already exists: ${targetDir}`,
				);
				return {
					content: `⚠️ Target directory \`${targetDir}\` already exists. Please choose a different target path or remove the existing directory.`,
				};
			}
		} catch {
			// Error means directory doesn't exist, which is what we want
			methodLogger.debug(
				`Target directory doesn't exist, proceeding with clone`,
			);
		}

		// Execute git clone command. Quote BOTH the URL and the target
		// directory so values containing spaces or shell metacharacters are
		// passed to the shell intact (previously the URL was unquoted).
		methodLogger.debug(`Cloning from URL (${cloneProtocol}): ${cloneUrl}`);
		const command = `git clone "${cloneUrl}" "${targetDir}"`;

		try {
			const result = await executeShellCommand(
				command,
				'cloning repository',
			);

			// Return success message with more detailed information
			return {
				content:
					`✅ Successfully cloned repository \`${workspaceSlug}/${repoSlug}\` to \`${targetDir}\` using ${cloneProtocol}.\n\n` +
					`**Details:**\n` +
					`- **Repository**: ${workspaceSlug}/${repoSlug}\n` +
					`- **Clone Protocol**: ${cloneProtocol}\n` +
					`- **Target Location**: ${targetDir}\n\n` +
					`**Output:**\n\`\`\`\n${result}\n\`\`\`\n\n` +
					`**Note**: If this is your first time cloning with SSH, ensure your SSH keys are set up correctly.`,
			};
		} catch (cloneError) {
			// Enhanced error message with protocol-specific troubleshooting steps
			const errorMsg = `Failed to clone repository: ${(cloneError as Error).message}`;
			let troubleshooting = '';

			if (cloneProtocol === 'SSH') {
				troubleshooting =
					`\n\n**Troubleshooting SSH Clone Issues:**\n` +
					`1. Ensure you have SSH keys set up with Bitbucket\n` +
					`2. Check if your SSH agent is running: \`eval "$(ssh-agent -s)"; ssh-add\`\n` +
					`3. Verify connectivity: \`ssh -T [email protected]\`\n` +
					`4. Try using HTTPS instead (modify your tool call with a different repository URL)`;
			} else {
				troubleshooting =
					`\n\n**Troubleshooting HTTPS Clone Issues:**\n` +
					`1. Check your Bitbucket credentials\n` +
					`2. Ensure the target directory is writable\n` +
					`3. Try running the command manually to see detailed errors`;
			}

			throw new Error(errorMsg + troubleshooting);
		}
	} catch (error) {
		throw handleControllerError(error, {
			entityType: 'Repository',
			operation: 'clone',
			source: 'controllers/atlassian.repositories.content.controller.ts@handleCloneRepository',
			additionalInfo: options,
		});
	}
}

/**
 * Retrieves file content from a repository.
 *
 * When no `ref` is supplied, the repository's default branch is looked up;
 * if that lookup fails (or the repo has no default branch), 'main' is used.
 *
 * @param options Options including repository identifiers and file path
 * @returns The file content as text
 * @throws Error when the repository slug or file path is missing, or no
 *         workspace can be resolved
 */
export async function handleGetFileContent(options: {
	workspaceSlug?: string;
	repoSlug: string;
	path: string;
	ref?: string;
}): Promise<ControllerResponse> {
	const methodLogger = logger.forMethod('handleGetFileContent');
	methodLogger.debug('Getting file content with options:', options);

	try {
		const { repoSlug, path: filePath } = options;
		let { workspaceSlug } = options;

		// Resolve the workspace, falling back to the configured default.
		if (!workspaceSlug) {
			methodLogger.debug(
				'No workspace provided, fetching default workspace',
			);
			const defaultWorkspace = await getDefaultWorkspace();
			if (!defaultWorkspace) {
				throw new Error(
					'No default workspace found. Please provide a workspace slug.',
				);
			}
			workspaceSlug = defaultWorkspace;
			methodLogger.debug(`Using default workspace: ${defaultWorkspace}`);
		}

		// Guard required parameters.
		if (!repoSlug) {
			throw new Error('Repository slug is required');
		}
		if (!filePath) {
			throw new Error('File path is required');
		}

		// Resolve which ref to read from: caller-supplied ref wins; otherwise
		// the repository's default branch; otherwise the 'main' fallback.
		let commitRef = options.ref;
		if (!commitRef) {
			methodLogger.debug(
				`No ref provided, fetching repository details to get default branch`,
			);
			commitRef = 'main';
			try {
				const repoDetails = await atlassianRepositoriesService.get({
					workspace: workspaceSlug,
					repo_slug: repoSlug,
				});

				const defaultBranch = repoDetails.mainbranch?.name;
				if (defaultBranch) {
					commitRef = defaultBranch;
					methodLogger.debug(
						`Using repository default branch: ${commitRef}`,
					);
				} else {
					methodLogger.debug(
						`No default branch found, falling back to: ${commitRef}`,
					);
				}
			} catch (repoError) {
				methodLogger.warn(
					'Failed to get repository details, using fallback branch',
					repoError,
				);
			}
		}

		// Fetch the raw file content from the service layer.
		methodLogger.debug(
			`Fetching file content for ${workspaceSlug}/${repoSlug}/${filePath}`,
			{ ref: commitRef },
		);
		const fileContent = await atlassianRepositoriesService.getFileContent({
			workspace: workspaceSlug,
			repo_slug: repoSlug,
			path: filePath,
			commit: commitRef,
		});

		methodLogger.debug(
			`Retrieved file content (${fileContent.length} bytes)`,
		);
		// The content is returned verbatim, without any formatting.
		return {
			content: fileContent,
		};
	} catch (error) {
		throw handleControllerError(error, {
			entityType: 'File Content',
			operation: 'get',
			source: 'controllers/atlassian.repositories.content.controller.ts@handleGetFileContent',
			additionalInfo: options,
		});
	}
}

```

--------------------------------------------------------------------------------
/src/utils/logger.util.ts:
--------------------------------------------------------------------------------

```typescript
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
import * as crypto from 'crypto';

/**
 * Produce the current wall-clock time as a bracketed stamp for log lines.
 * @returns Formatted timestamp [HH:MM:SS] (UTC, from the ISO-8601 string)
 */
function getTimestamp(): string {
	// toISOString() has a fixed layout, so the HH:MM:SS portion is always
	// at character offsets 11..18.
	const timePart = new Date().toISOString().slice(11, 19);
	return `[${timePart}]`;
}

/**
 * Safely convert a value to a JSON string with a size cap.
 *
 * Values that cannot be serialized (circular references, or inputs for
 * which JSON.stringify returns undefined) yield a placeholder string.
 *
 * @param obj Object to stringify
 * @param maxLength Maximum length of the resulting string
 * @returns Safely stringified object, truncated when over the cap
 */
function safeStringify(obj: unknown, maxLength = 1000): string {
	try {
		// Note: `.length` is read inside the try so that inputs for which
		// JSON.stringify returns undefined also fall into the catch path.
		const serialized = JSON.stringify(obj);
		return serialized.length > maxLength
			? `${serialized.substring(0, maxLength)}... (truncated, ${serialized.length} chars total)`
			: serialized;
	} catch {
		return '[Object cannot be stringified]';
	}
}

/**
 * Extract a subset of keys from a larger object for compact logging.
 *
 * Only keys the object actually owns (not inherited ones) are copied.
 *
 * @param obj The object to extract values from
 * @param keys Keys to extract (if available)
 * @returns Object containing only the specified keys
 */
function extractEssentialValues(
	obj: Record<string, unknown>,
	keys: string[],
): Record<string, unknown> {
	const ownedKeys = keys.filter((key) =>
		Object.prototype.hasOwnProperty.call(obj, key),
	);
	return Object.fromEntries(ownedKeys.map((key) => [key, obj[key]]));
}

/**
 * Format a source path for log prefixes using the standardized pattern:
 * [module/file.ts@function] or [module/file.ts]
 *
 * @param filePath File path (with or without src/ prefix)
 * @param functionName Optional function name
 * @returns Formatted source path according to standard pattern
 */
function formatSourcePath(filePath: string, functionName?: string): string {
	// Drop a leading 'src/' so log contexts are uniform regardless of
	// whether callers pass the prefixed path.
	const trimmedPath = filePath.replace(/^src\//, '');
	if (functionName) {
		return `[${trimmedPath}@${functionName}]`;
	}
	return `[${trimmedPath}]`;
}

/**
 * Check if debug logging is enabled for a specific module.
 *
 * This function parses the DEBUG environment variable to determine if a
 * specific module should have debug logging enabled. The DEBUG variable can be:
 * - 'true' or '1': Enable all debug logging
 * - Comma-separated list of modules: Enable debug only for those modules
 * - Module patterns with wildcards: '*' matches any sequence of characters
 *   (including across '/' path segments), '?' matches a single character
 *
 * Examples:
 * - DEBUG=true
 * - DEBUG=controllers/*,services/aws.sso.auth.service.ts
 * - DEBUG=transport,utils/formatter*
 *
 * @param modulePath The module path to check against DEBUG patterns
 * @returns true if debug is enabled for this module, false otherwise
 */
function isDebugEnabledForModule(modulePath: string): boolean {
	const debugEnv = process.env.DEBUG;

	if (!debugEnv) {
		return false;
	}

	// If debug is set to true or 1, enable all debug logging
	if (debugEnv === 'true' || debugEnv === '1') {
		return true;
	}

	// Parse comma-separated debug patterns
	const debugPatterns = debugEnv.split(',').map((p) => p.trim());

	// Check if the module matches any pattern
	return debugPatterns.some((pattern) => {
		// Escape regex metacharacters FIRST, then translate the glob
		// wildcards ('*' -> '.*', '?' -> '.'). Without escaping, a literal
		// '.' in a pattern (e.g. 'services/foo.service.ts') would match any
		// character, and characters such as '(' or '+' would make the
		// RegExp constructor throw at runtime.
		const regexPattern = pattern
			.replace(/[.+^${}()|[\]\\]/g, '\\$&')
			.replace(/\*/g, '.*')
			.replace(/\?/g, '.');

		const regex = new RegExp(`^${regexPattern}$`);
		return (
			regex.test(modulePath) ||
			// Also try with the 'src/' prefix stripped, so patterns work for
			// both prefixed and unprefixed module paths.
			regex.test(modulePath.replace(/^src\//, ''))
		);
	});
}

// Generate a unique session ID for this process
// (used to give each process its own log file, so concurrent runs never
// interleave writes into the same file)
const SESSION_ID = crypto.randomUUID();

// Resolve a short package name for log-file naming.
// Precedence: the `name` field of ./package.json (scope prefix stripped),
// then the PACKAGE_NAME environment variable, then 'mcp-server'.
const getPkgName = (): string => {
	try {
		const packageJsonPath = path.resolve(process.cwd(), 'package.json');
		if (fs.existsSync(packageJsonPath)) {
			const parsed = JSON.parse(
				fs.readFileSync(packageJsonPath, 'utf8'),
			);
			if (parsed.name) {
				// For scoped names like '@scope/pkg', keep only the tail.
				const match = parsed.name.match(/(@[\w-]+\/)?(.+)/);
				return match ? match[2] : parsed.name;
			}
		}
	} catch {
		// Ignore read/parse failures and fall through to the defaults.
	}
	return process.env.PACKAGE_NAME || 'mcp-server';
};

// MCP logs directory setup
// Logs live under ~/.mcp/data, shared by all MCP servers on this machine.
const HOME_DIR = os.homedir();
const MCP_DATA_DIR = path.join(HOME_DIR, '.mcp', 'data');
const CLI_NAME = getPkgName();

// Ensure the MCP data directory exists
if (!fs.existsSync(MCP_DATA_DIR)) {
	fs.mkdirSync(MCP_DATA_DIR, { recursive: true });
}

// Create the log file path with session ID
// (one file per process, so concurrent processes never interleave writes)
const LOG_FILENAME = `${CLI_NAME}.${SESSION_ID}.log`;
const LOG_FILEPATH = path.join(MCP_DATA_DIR, LOG_FILENAME);

// Write initial log header
// NOTE: this runs at import time — merely importing this module creates
// the log file and records the process's invocation details.
fs.writeFileSync(
	LOG_FILEPATH,
	`# ${CLI_NAME} Log Session\n` +
		`Session ID: ${SESSION_ID}\n` +
		`Started: ${new Date().toISOString()}\n` +
		`Process ID: ${process.pid}\n` +
		`Working Directory: ${process.cwd()}\n` +
		`Command: ${process.argv.join(' ')}\n\n` +
		`## Log Entries\n\n`,
	'utf8',
);

// Logger singleton to track initialization
// Flipped to true after the first Logger instance logs its banner,
// so the banner appears exactly once per process.
let isLoggerInitialized = false;

/**
 * Logger class for consistent logging across the application.
 *
 * RECOMMENDED USAGE:
 *
 * 1. Create a file-level logger using the static forContext method:
 *    ```
 *    const logger = Logger.forContext('controllers/myController.ts');
 *    ```
 *
 * 2. For method-specific logging, create a method logger:
 *    ```
 *    const methodLogger = Logger.forContext('controllers/myController.ts', 'myMethod');
 *    ```
 *
 * 3. Avoid using raw string prefixes in log messages. Instead, use contextualized loggers.
 *
 * 4. For debugging objects, use the debugResponse method to log only essential properties.
 *
 * 5. Set DEBUG environment variable to control which modules show debug logs:
 *    - DEBUG=true (enable all debug logs)
 *    - DEBUG=controllers/*,services/* (enable for specific module groups)
 *    - DEBUG=transport,utils/formatter* (enable specific modules, supports wildcards)
 */
class Logger {
	// Bracketed context string prepended to every message, e.g. "[utils/x.ts@fn]".
	private context?: string;
	// Raw module path, used for DEBUG-pattern matching and forMethod().
	private modulePath: string;
	// Session ID and log file path are process-wide (static): every Logger
	// instance appends to the same per-process log file.
	private static sessionId = SESSION_ID;
	private static logFilePath = LOG_FILEPATH;

	constructor(context?: string, modulePath: string = '') {
		this.context = context;
		this.modulePath = modulePath;

		// Log initialization message only once
		if (!isLoggerInitialized) {
			this.info(
				`Logger initialized with session ID: ${Logger.sessionId}`,
			);
			this.info(`Logs will be saved to: ${Logger.logFilePath}`);
			isLoggerInitialized = true;
		}
	}

	/**
	 * Create a contextualized logger for a specific file or component.
	 * This is the preferred method for creating loggers.
	 *
	 * @param filePath The file path (e.g., 'controllers/aws.sso.auth.controller.ts')
	 * @param functionName Optional function name for more specific context
	 * @returns A new Logger instance with the specified context
	 *
	 * @example
	 * // File-level logger
	 * const logger = Logger.forContext('controllers/myController.ts');
	 *
	 * // Method-level logger
	 * const methodLogger = Logger.forContext('controllers/myController.ts', 'myMethod');
	 */
	static forContext(filePath: string, functionName?: string): Logger {
		return new Logger(formatSourcePath(filePath, functionName), filePath);
	}

	/**
	 * Create a method level logger from a context logger
	 * @param method Method name
	 * @returns A new logger with the method context
	 */
	forMethod(method: string): Logger {
		return Logger.forContext(this.modulePath, method);
	}

	// Prefix the message with the bracketed context, when one is set.
	private _formatMessage(message: string): string {
		return this.context ? `${this.context} ${message}` : message;
	}

	// NOTE: mutates `args` in place — a leading plain object (but not an
	// Error) is replaced by its size-capped string form so large payloads
	// don't flood the log.
	private _formatArgs(args: unknown[]): unknown[] {
		// If the first argument is an object and not an Error, safely stringify it
		if (
			args.length > 0 &&
			typeof args[0] === 'object' &&
			args[0] !== null &&
			!(args[0] instanceof Error)
		) {
			args[0] = safeStringify(args[0]);
		}
		return args;
	}

	// Core sink: builds the log line, appends it to the session log file,
	// and echoes it to the console. Debug messages are dropped unless the
	// DEBUG env var enables this module (see isDebugEnabledForModule).
	_log(
		level: 'info' | 'warn' | 'error' | 'debug',
		message: string,
		...args: unknown[]
	) {
		// Skip debug messages if not enabled for this module
		if (level === 'debug' && !isDebugEnabledForModule(this.modulePath)) {
			return;
		}

		const timestamp = getTimestamp();
		const prefix = `${timestamp} [${level.toUpperCase()}]`;
		let logMessage = `${prefix} ${this._formatMessage(message)}`;

		const formattedArgs = this._formatArgs(args);
		if (formattedArgs.length > 0) {
			// Handle errors specifically: include message and stack trace,
			// then append any remaining args after the error.
			if (formattedArgs[0] instanceof Error) {
				const error = formattedArgs[0] as Error;
				logMessage += ` Error: ${error.message}`;
				if (error.stack) {
					logMessage += `\n${error.stack}`;
				}
				// If there are more args, add them after the error
				if (formattedArgs.length > 1) {
					logMessage += ` ${formattedArgs
						.slice(1)
						.map((arg) =>
							typeof arg === 'string' ? arg : safeStringify(arg),
						)
						.join(' ')}`;
				}
			} else {
				logMessage += ` ${formattedArgs
					.map((arg) =>
						typeof arg === 'string' ? arg : safeStringify(arg),
					)
					.join(' ')}`;
			}
		}

		// Write to log file
		try {
			fs.appendFileSync(Logger.logFilePath, `${logMessage}\n`, 'utf8');
		} catch (err) {
			// If we can't write to the log file, log the error to console
			console.error(`Failed to write to log file: ${err}`);
		}

		// Under test, use the matching console method so the runner can
		// capture per-level output; otherwise everything goes to stderr —
		// presumably to keep stdout free for protocol traffic (confirm).
		if (process.env.NODE_ENV === 'test') {
			console[level](logMessage);
		} else {
			console.error(logMessage);
		}
	}

	info(message: string, ...args: unknown[]) {
		this._log('info', message, ...args);
	}

	warn(message: string, ...args: unknown[]) {
		this._log('warn', message, ...args);
	}

	error(message: string, ...args: unknown[]) {
		this._log('error', message, ...args);
	}

	debug(message: string, ...args: unknown[]) {
		this._log('debug', message, ...args);
	}

	/**
	 * Log essential information about an API response
	 * (only the listed keys are extracted, keeping the log entry compact)
	 * @param message Log message
	 * @param response API response object
	 * @param essentialKeys Keys to extract from the response
	 */
	debugResponse(
		message: string,
		response: Record<string, unknown>,
		essentialKeys: string[],
	) {
		const essentialInfo = extractEssentialValues(response, essentialKeys);
		this.debug(message, essentialInfo);
	}

	/**
	 * Get the current session ID
	 * @returns The UUID for the current logging session
	 */
	static getSessionId(): string {
		return Logger.sessionId;
	}

	/**
	 * Get the current log file path
	 * @returns The path to the current log file
	 */
	static getLogFilePath(): string {
		return Logger.logFilePath;
	}
}

// Only export the Logger class to enforce contextual logging via Logger.forContext
export { Logger };

```

--------------------------------------------------------------------------------
/src/utils/transport.util.ts:
--------------------------------------------------------------------------------

```typescript
import { Logger } from './logger.util.js';
import { config } from './config.util.js';
import { NETWORK_TIMEOUTS, DATA_LIMITS } from './constants.util.js';
import {
	createAuthInvalidError,
	createApiError,
	createUnexpectedError,
	McpError,
} from './error.util.js';

/**
 * Interface for Atlassian API credentials
 *
 * Two alternative auth shapes are supported; `useBitbucketAuth` records
 * which one was populated by getAtlassianCredentials():
 * - standard Atlassian credentials (siteName/userEmail/apiToken), or
 * - Bitbucket-specific username + app password.
 */
export interface AtlassianCredentials {
	// Standard Atlassian credentials
	siteName?: string;
	userEmail?: string;
	apiToken?: string;
	// Bitbucket-specific credentials (alternative approach)
	bitbucketUsername?: string;
	bitbucketAppPassword?: string;
	// Indicates which auth method to use
	useBitbucketAuth?: boolean;
}

/**
 * Options for a single HTTP request issued by the transport layer.
 */
export interface RequestOptions {
	// HTTP method; defaults to 'GET' when omitted
	method?: 'GET' | 'POST' | 'PUT' | 'DELETE';
	// Extra headers merged over (and able to override) the defaults
	headers?: Record<string, string>;
	// Request payload; JSON-stringified before sending when present
	body?: unknown;
	// Per-request timeout in milliseconds; overrides the configured default
	timeout?: number;
}

// Contextualized logger for this module (created once, reused by helpers)
const transportLogger = Logger.forContext('utils/transport.util.ts');

// Emit a debug entry so module load order is visible in the logs
transportLogger.debug('Transport utility initialized');

/**
 * Read Atlassian credentials from the environment/configuration.
 *
 * Prefers the standard Atlassian credential set (email + API token); falls
 * back to Bitbucket-specific username/app-password credentials. Returns
 * `null` (after logging a warning) when neither set is fully present.
 *
 * @returns AtlassianCredentials object or null if credentials are missing
 */
export function getAtlassianCredentials(): AtlassianCredentials | null {
	const methodLogger = Logger.forContext(
		'utils/transport.util.ts',
		'getAtlassianCredentials',
	);

	// Standard Atlassian credentials take precedence for consistency.
	const siteName = config.get('ATLASSIAN_SITE_NAME');
	const userEmail = config.get('ATLASSIAN_USER_EMAIL');
	const apiToken = config.get('ATLASSIAN_API_TOKEN');

	if (userEmail && apiToken) {
		methodLogger.debug('Using standard Atlassian credentials');
		return { siteName, userEmail, apiToken, useBitbucketAuth: false };
	}

	// Otherwise try the Bitbucket-specific app-password pair.
	const username = config.get('ATLASSIAN_BITBUCKET_USERNAME');
	const appPassword = config.get('ATLASSIAN_BITBUCKET_APP_PASSWORD');

	if (username && appPassword) {
		methodLogger.debug('Using Bitbucket-specific credentials');
		return {
			bitbucketUsername: username,
			bitbucketAppPassword: appPassword,
			useBitbucketAuth: true,
		};
	}

	// Neither credential set is complete.
	methodLogger.warn(
		'Missing Atlassian credentials. Please set either ATLASSIAN_SITE_NAME, ATLASSIAN_USER_EMAIL, and ATLASSIAN_API_TOKEN environment variables, or ATLASSIAN_BITBUCKET_USERNAME and ATLASSIAN_BITBUCKET_APP_PASSWORD for Bitbucket-specific auth.',
	);
	return null;
}

/**
 * Fetch data from the Atlassian (Bitbucket) REST API.
 *
 * Builds the Basic auth header for either credential style, enforces a
 * configurable request timeout via AbortController, rejects oversized
 * responses, and translates Bitbucket error payloads into McpError
 * instances with the vendor message preserved.
 *
 * @param credentials Atlassian API credentials (standard or Bitbucket-specific)
 * @param path API endpoint path (without base URL), e.g. '/2.0/repositories'
 * @param options Request options (method, headers, body, timeout)
 * @returns Parsed JSON response, or the raw text for text/plain responses
 * @throws {McpError} On missing/invalid auth, API error statuses, timeouts,
 *         network failures, oversized responses, or malformed JSON bodies
 */
export async function fetchAtlassian<T>(
	credentials: AtlassianCredentials,
	path: string,
	options: RequestOptions = {},
): Promise<T> {
	const methodLogger = Logger.forContext(
		'utils/transport.util.ts',
		'fetchAtlassian',
	);

	const baseUrl = 'https://api.bitbucket.org';

	// Set up auth headers based on credential type
	let authHeader: string;

	if (credentials.useBitbucketAuth) {
		// Bitbucket auth: username + app password, Basic-encoded
		if (
			!credentials.bitbucketUsername ||
			!credentials.bitbucketAppPassword
		) {
			throw createAuthInvalidError(
				'Missing Bitbucket username or app password',
			);
		}
		authHeader = `Basic ${Buffer.from(
			`${credentials.bitbucketUsername}:${credentials.bitbucketAppPassword}`,
		).toString('base64')}`;
	} else {
		// Standard Atlassian auth: email + API token, Basic-encoded
		if (!credentials.userEmail || !credentials.apiToken) {
			throw createAuthInvalidError('Missing Atlassian credentials');
		}
		authHeader = `Basic ${Buffer.from(
			`${credentials.userEmail}:${credentials.apiToken}`,
		).toString('base64')}`;
	}

	// Ensure path starts with a slash
	const normalizedPath = path.startsWith('/') ? path : `/${path}`;

	// Construct the full URL
	const url = `${baseUrl}${normalizedPath}`;

	// Default headers; caller-supplied headers may override any of them
	const headers = {
		Authorization: authHeader,
		'Content-Type': 'application/json',
		Accept: 'application/json',
		...options.headers,
	};

	// Prepare request options
	const requestOptions: RequestInit = {
		method: options.method || 'GET',
		headers,
		body: options.body ? JSON.stringify(options.body) : undefined,
	};

	methodLogger.debug(`Calling Atlassian API: ${url}`);

	// Set up timeout handling with configurable values
	const defaultTimeout = config.getNumber(
		'ATLASSIAN_REQUEST_TIMEOUT',
		NETWORK_TIMEOUTS.DEFAULT_REQUEST_TIMEOUT,
	);
	const timeoutMs = options.timeout ?? defaultTimeout;
	const controller = new AbortController();
	const timeoutId = setTimeout(() => {
		methodLogger.warn(`Request timeout after ${timeoutMs}ms: ${url}`);
		controller.abort();
	}, timeoutMs);

	// Add abort signal to request options
	requestOptions.signal = controller.signal;

	try {
		const response = await fetch(url, requestOptions);
		clearTimeout(timeoutId);

		// Log the raw response status and headers
		methodLogger.debug(
			`Raw response received: ${response.status} ${response.statusText}`,
			{
				url,
				status: response.status,
				statusText: response.statusText,
				headers: Object.fromEntries(response.headers.entries()),
			},
		);

		// Validate response size to prevent excessive memory usage (CWE-770)
		// Note: a malformed Content-Length yields NaN, which fails the
		// comparison below, so such responses are allowed through unchecked.
		const contentLength = response.headers.get('content-length');
		if (contentLength) {
			const responseSize = parseInt(contentLength, 10);
			if (responseSize > DATA_LIMITS.MAX_RESPONSE_SIZE) {
				methodLogger.warn(
					`Response size ${responseSize} bytes exceeds limit of ${DATA_LIMITS.MAX_RESPONSE_SIZE} bytes`,
				);
				throw createApiError(
					`Response size (${Math.round(responseSize / (1024 * 1024))}MB) exceeds maximum limit of ${Math.round(DATA_LIMITS.MAX_RESPONSE_SIZE / (1024 * 1024))}MB`,
					413,
					{ responseSize, limit: DATA_LIMITS.MAX_RESPONSE_SIZE },
				);
			}
		}

		if (!response.ok) {
			// Error path: the body is consumed exactly once here as text.
			const errorText = await response.text();
			methodLogger.error(
				`API error: ${response.status} ${response.statusText}`,
				errorText,
			);

			// Try to parse the error response
			let errorMessage = `${response.status} ${response.statusText}`;
			let parsedBitbucketError = null;

			try {
				if (
					errorText &&
					(errorText.startsWith('{') || errorText.startsWith('['))
				) {
					const parsedError = JSON.parse(errorText);

					// Extract specific error details from various Bitbucket API response formats
					if (
						parsedError.type === 'error' &&
						parsedError.error &&
						parsedError.error.message
					) {
						// Format: {"type":"error", "error":{"message":"...", "detail":"..."}}
						parsedBitbucketError = parsedError.error;
						errorMessage = parsedBitbucketError.message;
						if (parsedBitbucketError.detail) {
							errorMessage += ` Detail: ${parsedBitbucketError.detail}`;
						}
					} else if (parsedError.error && parsedError.error.message) {
						// Alternative error format: {"error": {"message": "..."}}
						parsedBitbucketError = parsedError.error;
						errorMessage = parsedBitbucketError.message;
					} else if (
						parsedError.errors &&
						Array.isArray(parsedError.errors) &&
						parsedError.errors.length > 0
					) {
						// Format: {"errors":[{"status":400,"code":"INVALID_REQUEST_PARAMETER","title":"..."}]}
						const atlassianError = parsedError.errors[0];
						if (atlassianError.title) {
							errorMessage = atlassianError.title;
							parsedBitbucketError = atlassianError;
						}
					} else if (parsedError.message) {
						// Format: {"message":"Some error message"}
						errorMessage = parsedError.message;
						parsedBitbucketError = parsedError;
					}
				}
			} catch (parseError) {
				methodLogger.debug(`Error parsing error response:`, parseError);
				// Fall back to the default error message
			}

			// Log the parsed error or raw error text
			methodLogger.debug(
				'Parsed Bitbucket error:',
				parsedBitbucketError || errorText,
			);

			// Use parsedBitbucketError (or errorText if parsing failed) as originalError
			const originalErrorForMcp = parsedBitbucketError || errorText;

			// Handle common Bitbucket API error status codes
			if (response.status === 401) {
				throw createAuthInvalidError(
					`Bitbucket API: Authentication failed - ${errorMessage}`,
					originalErrorForMcp,
				);
			}

			if (response.status === 403) {
				throw createApiError(
					`Bitbucket API: Permission denied - ${errorMessage}`,
					403,
					originalErrorForMcp,
				);
			}

			if (response.status === 404) {
				throw createApiError(
					`Bitbucket API: Resource not found - ${errorMessage}`,
					404,
					originalErrorForMcp,
				);
			}

			if (response.status === 429) {
				throw createApiError(
					`Bitbucket API: Rate limit exceeded - ${errorMessage}`,
					429,
					originalErrorForMcp,
				);
			}

			if (response.status >= 500) {
				throw createApiError(
					`Bitbucket API: Service error - ${errorMessage}`,
					response.status,
					originalErrorForMcp,
				);
			}

			// For other API errors, preserve the original vendor message
			throw createApiError(
				`Bitbucket API Error: ${errorMessage}`,
				response.status,
				originalErrorForMcp,
			);
		}

		// Check if the response is expected to be plain text
		const contentType = response.headers.get('content-type') || '';
		if (contentType.includes('text/plain')) {
			// If we're expecting text (like a diff), return the raw text
			const textResponse = await response.text();
			methodLogger.debug(
				`Text response received (truncated)`,
				textResponse.substring(0, 200) + '...',
			);
			return textResponse as unknown as T;
		}

		// For JSON responses, proceed as before
		// Clone the response to log its content without consuming it
		const clonedResponse = response.clone();
		try {
			const responseJson = await clonedResponse.json();
			methodLogger.debug(`Response body:`, responseJson);
		} catch {
			methodLogger.debug(
				`Could not parse response as JSON, returning raw content`,
			);
		}

		// Parse the (still unconsumed) original response; the clone above
		// was read only for logging.
		return response.json() as Promise<T>;
	} catch (error) {
		// Clear the timer again: this path is reached when fetch itself (or
		// body parsing) throws before the clearTimeout inside the try block.
		clearTimeout(timeoutId);
		methodLogger.error(`Request failed`, error);

		// If it's already an McpError, just rethrow it
		if (error instanceof McpError) {
			throw error;
		}

		// Handle timeout errors
		if (error instanceof Error && error.name === 'AbortError') {
			methodLogger.error(
				`Request timed out after ${timeoutMs}ms: ${url}`,
			);
			throw createApiError(
				`Request timeout: Bitbucket API did not respond within ${timeoutMs / 1000} seconds`,
				408,
				error,
			);
		}

		// Handle network errors more explicitly
		if (error instanceof TypeError) {
			// TypeError is typically a network/fetch error in this context
			const errorMessage = error.message || 'Network error occurred';
			methodLogger.debug(`Network error details: ${errorMessage}`);

			throw createApiError(
				`Network error while calling Bitbucket API: ${errorMessage}`,
				500, // This will be classified as NETWORK_ERROR by detectErrorType
				error,
			);
		}

		// Handle JSON parsing errors
		if (error instanceof SyntaxError) {
			methodLogger.debug(`JSON parsing error: ${error.message}`);

			throw createApiError(
				`Invalid response format from Bitbucket API: ${error.message}`,
				500,
				error,
			);
		}

		// Generic error handler for any other types of errors
		throw createUnexpectedError(
			`Unexpected error while calling Bitbucket API: ${error instanceof Error ? error.message : String(error)}`,
			error,
		);
	}
}

```
Page 2/4 | First | Prev | Next | Last