This is page 3 of 11. Use http://codebase.md/sapientpants/sonarqube-mcp-server?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .adr-dir
├── .changeset
│ ├── config.json
│ └── README.md
├── .claude
│ ├── commands
│ │ ├── analyze-and-fix-github-issue.md
│ │ ├── fix-sonarqube-issues.md
│ │ ├── implement-github-issue.md
│ │ ├── release.md
│ │ ├── spec-feature.md
│ │ └── update-dependencies.md
│ ├── hooks
│ │ └── block-git-no-verify.ts
│ └── settings.json
├── .dockerignore
├── .github
│ ├── actionlint.yaml
│ ├── changeset.yml
│ ├── dependabot.yml
│ ├── ISSUE_TEMPLATE
│ │ ├── bug_report.md
│ │ └── feature_request.md
│ ├── pull_request_template.md
│ ├── scripts
│ │ ├── determine-artifact.sh
│ │ └── version-and-release.js
│ ├── workflows
│ │ ├── codeql.yml
│ │ ├── main.yml
│ │ ├── pr.yml
│ │ ├── publish.yml
│ │ ├── reusable-docker.yml
│ │ ├── reusable-security.yml
│ │ └── reusable-validate.yml
│ └── WORKFLOWS.md
├── .gitignore
├── .husky
│ ├── commit-msg
│ └── pre-commit
├── .markdownlint.yaml
├── .markdownlintignore
├── .npmrc
├── .prettierignore
├── .prettierrc
├── .trivyignore
├── .yaml-lint.yml
├── .yamllintignore
├── CHANGELOG.md
├── changes.md
├── CLAUDE.md
├── CODE_OF_CONDUCT.md
├── commitlint.config.js
├── COMPATIBILITY.md
├── CONTRIBUTING.md
├── docker-compose.yml
├── Dockerfile
├── docs
│ ├── architecture
│ │ └── decisions
│ │ ├── 0001-record-architecture-decisions.md
│ │ ├── 0002-use-node-js-with-typescript.md
│ │ ├── 0003-adopt-model-context-protocol-for-sonarqube-integration.md
│ │ ├── 0004-use-sonarqube-web-api-client-for-all-sonarqube-interactions.md
│ │ ├── 0005-domain-driven-design-of-sonarqube-modules.md
│ │ ├── 0006-expose-sonarqube-features-as-mcp-tools.md
│ │ ├── 0007-support-multiple-authentication-methods-for-sonarqube.md
│ │ ├── 0008-use-environment-variables-for-configuration.md
│ │ ├── 0009-file-based-logging-to-avoid-stdio-conflicts.md
│ │ ├── 0010-use-stdio-transport-for-mcp-communication.md
│ │ ├── 0011-docker-containerization-for-deployment.md
│ │ ├── 0012-add-elicitation-support-for-interactive-user-input.md
│ │ ├── 0014-current-security-model-and-future-oauth2-considerations.md
│ │ ├── 0015-transport-architecture-refactoring.md
│ │ ├── 0016-http-transport-with-oauth-2-0-metadata-endpoints.md
│ │ ├── 0017-comprehensive-audit-logging-system.md
│ │ ├── 0018-add-comprehensive-monitoring-and-observability.md
│ │ ├── 0019-simplify-to-stdio-only-transport-for-mcp-gateway-deployment.md
│ │ ├── 0020-testing-framework-and-strategy-vitest-with-property-based-testing.md
│ │ ├── 0021-code-quality-toolchain-eslint-prettier-strict-typescript.md
│ │ ├── 0022-package-manager-choice-pnpm.md
│ │ ├── 0023-release-management-with-changesets.md
│ │ ├── 0024-ci-cd-platform-github-actions.md
│ │ ├── 0025-container-and-security-scanning-strategy.md
│ │ ├── 0026-circuit-breaker-pattern-with-opossum.md
│ │ ├── 0027-docker-image-publishing-strategy-ghcr-to-docker-hub.md
│ │ └── 0028-session-based-http-transport-with-server-sent-events.md
│ ├── architecture.md
│ ├── security.md
│ └── troubleshooting.md
├── eslint.config.js
├── examples
│ └── http-client.ts
├── jest.config.js
├── LICENSE
├── LICENSES.md
├── osv-scanner.toml
├── package.json
├── pnpm-lock.yaml
├── README.md
├── scripts
│ ├── actionlint.sh
│ ├── ci-local.sh
│ ├── load-test.sh
│ ├── README.md
│ ├── run-all-tests.sh
│ ├── scan-container.sh
│ ├── security-scan.sh
│ ├── setup.sh
│ ├── test-monitoring-integration.sh
│ └── validate-docs.sh
├── SECURITY.md
├── sonar-project.properties
├── src
│ ├── __tests__
│ │ ├── additional-coverage.test.ts
│ │ ├── advanced-index.test.ts
│ │ ├── assign-issue.test.ts
│ │ ├── auth-methods.test.ts
│ │ ├── boolean-string-transform.test.ts
│ │ ├── components.test.ts
│ │ ├── config
│ │ │ └── service-accounts.test.ts
│ │ ├── dependency-injection.test.ts
│ │ ├── direct-handlers.test.ts
│ │ ├── direct-lambdas.test.ts
│ │ ├── direct-schema-validation.test.ts
│ │ ├── domains
│ │ │ ├── components-domain-full.test.ts
│ │ │ ├── components-domain.test.ts
│ │ │ ├── hotspots-domain.test.ts
│ │ │ └── source-code-domain.test.ts
│ │ ├── environment-validation.test.ts
│ │ ├── error-handler.test.ts
│ │ ├── error-handling.test.ts
│ │ ├── errors.test.ts
│ │ ├── function-tests.test.ts
│ │ ├── handlers
│ │ │ ├── components-handler-integration.test.ts
│ │ │ └── projects-authorization.test.ts
│ │ ├── handlers.test.ts
│ │ ├── handlers.test.ts.skip
│ │ ├── index.test.ts
│ │ ├── issue-resolution-elicitation.test.ts
│ │ ├── issue-resolution.test.ts
│ │ ├── issue-transitions.test.ts
│ │ ├── issues-enhanced-search.test.ts
│ │ ├── issues-new-parameters.test.ts
│ │ ├── json-array-transform.test.ts
│ │ ├── lambda-functions.test.ts
│ │ ├── lambda-handlers.test.ts.skip
│ │ ├── logger.test.ts
│ │ ├── mapping-functions.test.ts
│ │ ├── mocked-environment.test.ts
│ │ ├── null-to-undefined.test.ts
│ │ ├── parameter-transformations-advanced.test.ts
│ │ ├── parameter-transformations.test.ts
│ │ ├── protocol-version.test.ts
│ │ ├── pull-request-transform.test.ts
│ │ ├── quality-gates.test.ts
│ │ ├── schema-parameter-transforms.test.ts
│ │ ├── schema-transformation-mocks.test.ts
│ │ ├── schema-transforms.test.ts
│ │ ├── schema-validators.test.ts
│ │ ├── schemas
│ │ │ ├── components-schema.test.ts
│ │ │ ├── hotspots-tools-schema.test.ts
│ │ │ └── issues-schema.test.ts
│ │ ├── sonarqube-elicitation.test.ts
│ │ ├── sonarqube.test.ts
│ │ ├── source-code.test.ts
│ │ ├── standalone-handlers.test.ts
│ │ ├── string-to-number-transform.test.ts
│ │ ├── tool-handler-lambdas.test.ts
│ │ ├── tool-handlers.test.ts
│ │ ├── tool-registration-schema.test.ts
│ │ ├── tool-registration-transforms.test.ts
│ │ ├── transformation-util.test.ts
│ │ ├── transports
│ │ │ ├── base.test.ts
│ │ │ ├── factory.test.ts
│ │ │ ├── http.test.ts
│ │ │ ├── session-manager.test.ts
│ │ │ └── stdio.test.ts
│ │ ├── utils
│ │ │ ├── retry.test.ts
│ │ │ └── transforms.test.ts
│ │ ├── zod-boolean-transform.test.ts
│ │ ├── zod-schema-transforms.test.ts
│ │ └── zod-transforms.test.ts
│ ├── config
│ │ ├── service-accounts.ts
│ │ └── versions.ts
│ ├── domains
│ │ ├── base.ts
│ │ ├── components.ts
│ │ ├── hotspots.ts
│ │ ├── index.ts
│ │ ├── issues.ts
│ │ ├── measures.ts
│ │ ├── metrics.ts
│ │ ├── projects.ts
│ │ ├── quality-gates.ts
│ │ ├── source-code.ts
│ │ └── system.ts
│ ├── errors.ts
│ ├── handlers
│ │ ├── components.ts
│ │ ├── hotspots.ts
│ │ ├── index.ts
│ │ ├── issues.ts
│ │ ├── measures.ts
│ │ ├── metrics.ts
│ │ ├── projects.ts
│ │ ├── quality-gates.ts
│ │ ├── source-code.ts
│ │ └── system.ts
│ ├── index.ts
│ ├── monitoring
│ │ ├── __tests__
│ │ │ └── circuit-breaker.test.ts
│ │ ├── circuit-breaker.ts
│ │ ├── health.ts
│ │ └── metrics.ts
│ ├── schemas
│ │ ├── common.ts
│ │ ├── components.ts
│ │ ├── hotspots-tools.ts
│ │ ├── hotspots.ts
│ │ ├── index.ts
│ │ ├── issues.ts
│ │ ├── measures.ts
│ │ ├── metrics.ts
│ │ ├── projects.ts
│ │ ├── quality-gates.ts
│ │ ├── source-code.ts
│ │ └── system.ts
│ ├── sonarqube.ts
│ ├── transports
│ │ ├── base.ts
│ │ ├── factory.ts
│ │ ├── http.ts
│ │ ├── index.ts
│ │ ├── session-manager.ts
│ │ └── stdio.ts
│ ├── types
│ │ ├── common.ts
│ │ ├── components.ts
│ │ ├── hotspots.ts
│ │ ├── index.ts
│ │ ├── issues.ts
│ │ ├── measures.ts
│ │ ├── metrics.ts
│ │ ├── projects.ts
│ │ ├── quality-gates.ts
│ │ ├── source-code.ts
│ │ └── system.ts
│ └── utils
│ ├── __tests__
│ │ ├── elicitation.test.ts
│ │ ├── pattern-matcher.test.ts
│ │ └── structured-response.test.ts
│ ├── client-factory.ts
│ ├── elicitation.ts
│ ├── error-handler.ts
│ ├── logger.ts
│ ├── parameter-mappers.ts
│ ├── pattern-matcher.ts
│ ├── retry.ts
│ ├── structured-response.ts
│ └── transforms.ts
├── test-http-transport.sh
├── tmp
│ └── .gitkeep
├── tsconfig.build.json
├── tsconfig.json
├── vitest.config.d.ts
├── vitest.config.js
├── vitest.config.js.map
└── vitest.config.ts
```
# Files
--------------------------------------------------------------------------------
/.claude/commands/spec-feature.md:
--------------------------------------------------------------------------------
```markdown
1 | # Spec a Feature
2 |
3 | You are about to create a feature specification in Gherkin format and turn it into a GitHub issue ready for implementation.
4 |
5 | ## Process
6 |
7 | 1. **Gather Requirements**
8 | - Ask the user for the feature name and description if not provided
9 | - Understand the business value and user needs
10 | - Identify scope, non-goals, and risks
11 |
12 | 2. **Write Gherkin Specification**
13 | Create a comprehensive specification including:
14 | - **Feature** name and description
15 | - **Background** (if needed)
16 | - **Scenarios** using Given/When/Then format
17 | - **Examples** with data tables where appropriate
18 | - **Acceptance Criteria**
19 | - **Non-Goals** (what this feature won't do)
20 | - **Risks & Mitigations**
21 | - **Technical Considerations**
22 |
23 | 3. **Format as GitHub Issue**
24 | Structure the issue with:
25 | - Clear title: `feat: [Feature Name]`
26 | - Labels: `enhancement`, `needs-implementation`
27 | - Milestone (if applicable)
28 | - Complete Gherkin specification in the body
29 | - Testing requirements
30 |
31 | 4. **Create the Issue**
32 | Use the `gh` CLI to create the issue:
33 | ```bash
34 | gh issue create --title "feat: [Feature Name]" \
35 | --body "[Full specification]" \
36 | --label enhancement \
37 | --label needs-implementation
38 | ```
39 |
40 | ## Template for Issue Body
41 |
42 | ````markdown
43 | ## Feature: [Feature Name]
44 |
45 | ### Business Value
46 |
47 | [Describe the business value and user benefit]
48 |
49 | ### User Story
50 |
51 | As a [type of user]
52 | I want [goal/desire]
53 | So that [benefit/value]
54 |
55 | ### Gherkin Specification
56 |
57 | ```gherkin
58 | Feature: [Feature Name]
59 | [Feature description explaining the feature's purpose]
60 |
61 | Background:
62 | Given [common preconditions for all scenarios]
63 |
64 | Scenario: [Happy path scenario]
65 | Given [initial context]
66 | When [action/event]
67 | Then [expected outcome]
68 | And [additional outcomes]
69 |
70 | Scenario: [Edge case or error scenario]
71 | Given [initial context]
72 | When [action/event]
73 | Then [expected outcome]
74 |
75 | Scenario Outline: [Parameterized scenario if needed]
76 | Given [context with <parameter>]
77 | When [action with <parameter>]
78 | Then [outcome with <expected>]
79 |
80 | Examples:
81 | | parameter | expected |
82 | | value1 | result1 |
83 | | value2 | result2 |
84 | ```
 85 |
86 |
87 | ### Acceptance Criteria
88 |
89 | - [ ] Criterion 1
90 | - [ ] Criterion 2
91 | - [ ] Criterion 3
92 |
93 | ### Non-Goals
94 |
95 | - This feature will NOT [explicitly excluded functionality]
96 | - Out of scope: [related but excluded items]
97 |
98 | ### Risks & Mitigations
99 |
100 | - **Risk**: [Potential risk]
101 | **Mitigation**: [How to address it]
102 |
103 | ### Technical Considerations
104 |
105 | - Architecture impact: [if any]
106 | - Performance considerations: [if any]
107 | - Security considerations: [if any]
108 | - Dependencies: [external dependencies or prerequisites]
109 |
110 | ### Testing Requirements
111 |
112 | - Unit test coverage for all new functions
113 | - Property-based tests for business logic invariants
114 | - Integration tests for external interactions
115 | - Edge cases and error scenarios covered
116 |
117 | ### Definition of Done
118 |
119 | - [ ] All acceptance criteria met
120 | - [ ] All tests passing
121 | - [ ] Documentation updated
122 | - [ ] Code reviewed and approved
123 | - [ ] Changeset added
124 | - [ ] No security vulnerabilities
125 | - [ ] Performance requirements met
126 |
127 | ````
128 |
129 | ## Important Notes
130 |
131 | 1. **Be Specific**: Write clear, unambiguous scenarios
132 | 2. **Focus on Behavior**: Describe WHAT, not HOW
133 | 3. **Keep it Testable**: Each scenario should be verifiable
134 | 4. **Consider Edge Cases**: Include error and boundary scenarios
135 | 5. **Make it Implementable**: Provide enough detail for the `implement-github-issue` command
136 |
137 | ## Example Output
138 |
139 | After gathering requirements, create an issue like:
140 |
141 | ```bash
142 | gh issue create --title "feat: Add user authentication with JWT" \
143 | --body "## Feature: User Authentication with JWT
144 |
145 | ### Business Value
146 | Enable secure user authentication to protect user data and provide personalized experiences.
147 |
148 | ### User Story
149 | As a user
150 | I want to securely log in to the application
151 | So that I can access my personal data and features
152 |
153 | ### Gherkin Specification
154 | [... full specification ...]" \
155 | --label enhancement \
156 | --label needs-implementation
157 | ````
158 |
159 | The created issue will be ready for implementation using the `/implement-github-issue` command.
160 |
```
--------------------------------------------------------------------------------
/src/__tests__/transformation-util.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { describe, it, expect } from 'vitest';
2 | describe('Field transformation utilities', () => {
3 | it('should transform array parameters correctly', () => {
4 | // Simulate the transformation logic in the tool registration
5 | function transformToArray(value: unknown): string[] {
6 | return Array.isArray(value) ? value : [value as string];
7 | }
8 | // Test with string input
9 | expect(transformToArray('single')).toEqual(['single']);
10 | // Test with array input
11 | expect(transformToArray(['one', 'two'])).toEqual(['one', 'two']);
12 | // Test with empty array
13 | expect(transformToArray([])).toEqual([]);
14 | });
15 | it('should transform page parameters correctly', () => {
16 | // Simulate the page transform logic
17 | function transformPage(val: string | undefined | null): number | null | undefined {
18 | return val ? parseInt(val, 10) || null : null;
19 | }
20 | // Valid number
21 | expect(transformPage('10')).toBe(10);
22 | // Invalid number
23 | expect(transformPage('not-a-number')).toBe(null);
24 | // Empty string
25 | expect(transformPage('')).toBe(null);
26 | // Undefined or null
27 | expect(transformPage(undefined)).toBe(null);
28 | expect(transformPage(null)).toBe(null);
29 | });
30 | it('should correctly transform page and page_size in tool handlers', () => {
31 | // Simulate the transform in tool handler
32 | function transformPageParams(params: Record<string, unknown>): {
33 | page?: number;
34 | pageSize?: number;
35 | } {
36 | function nullToUndefined<T>(value: T | null | undefined): T | undefined {
37 | return value === null ? undefined : value;
38 | }
39 | const page = nullToUndefined(params.page) as number | undefined;
40 | const pageSize = nullToUndefined(params.page_size) as number | undefined;
41 | return {
42 | ...(page !== undefined && { page }),
43 | ...(pageSize !== undefined && { pageSize }),
44 | };
45 | }
46 | // Test with numbers
47 | expect(transformPageParams({ page: 5, page_size: 20 })).toEqual({ page: 5, pageSize: 20 });
48 | // Test with strings
49 | expect(transformPageParams({ page: '5', page_size: '20' })).toEqual({
50 | page: '5',
51 | pageSize: '20',
52 | });
53 | // Test with null
54 | expect(transformPageParams({ page: null, page_size: null })).toEqual({
55 | page: undefined,
56 | pageSize: undefined,
57 | });
58 | // Test with mixed
59 | expect(transformPageParams({ page: 5, page_size: null })).toEqual({
60 | page: 5,
61 | pageSize: undefined,
62 | });
63 | // Test with undefined
64 | expect(transformPageParams({ page: undefined, page_size: undefined })).toEqual({
65 | page: undefined,
66 | pageSize: undefined,
67 | });
68 | // Test with empty object
69 | expect(transformPageParams({})).toEqual({ page: undefined, pageSize: undefined });
70 | });
71 | it('should handle component key transformation correctly', () => {
72 | // Simulate the component key transformation in the getComponentsMeasures handler
73 | function transformComponentKeys(componentKeys: string | string[]): string {
74 | return Array.isArray(componentKeys) ? componentKeys.join(',') : componentKeys;
75 | }
76 | // Test with string
77 | expect(transformComponentKeys('single-component')).toBe('single-component');
78 | // Test with array
79 | expect(transformComponentKeys(['component1', 'component2'])).toBe('component1,component2');
80 | // Test with single item array
81 | expect(transformComponentKeys(['component1'])).toBe('component1');
82 | // Test with empty array
83 | expect(transformComponentKeys([])).toBe('');
84 | });
85 | it('should handle metric keys transformation correctly', () => {
86 | // Simulate the metric keys transformation in the getComponentMeasures handler
87 | function transformMetricKeys(metricKeys: string | string[]): string {
88 | return Array.isArray(metricKeys) ? metricKeys.join(',') : metricKeys;
89 | }
90 | // Test with string
91 | expect(transformMetricKeys('single-metric')).toBe('single-metric');
92 | // Test with array
93 | expect(transformMetricKeys(['metric1', 'metric2'])).toBe('metric1,metric2');
94 | // Test with single item array
95 | expect(transformMetricKeys(['metric1'])).toBe('metric1');
96 | // Test with empty array
97 | expect(transformMetricKeys([])).toBe('');
98 | });
99 | });
100 |
```
--------------------------------------------------------------------------------
/src/__tests__/protocol-version.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { describe, it, expect, beforeEach, vi } from 'vitest';
2 | import { createLogger } from '../utils/logger.js';
3 | import {
4 | SDK_VERSION,
5 | SUPPORTED_PROTOCOL_VERSIONS,
6 | LATEST_PROTOCOL_VERSION,
7 | DEFAULT_NEGOTIATED_PROTOCOL_VERSION,
8 | VERSION_INFO,
9 | } from '../config/versions.js';
10 | describe('Protocol Version Support', () => {
11 | beforeEach(() => {
12 | vi.clearAllMocks();
13 | });
14 | describe('Server Initialization', () => {
15 | it('should log supported protocol versions on startup', () => {
16 | const logger = createLogger('test');
17 | const infoSpy = vi.spyOn(logger, 'info');
18 | // Simulate server startup logging
19 | logger.info('Starting SonarQube MCP server', {
20 | ...VERSION_INFO,
21 | logFile: 'not configured',
22 | logLevel: 'DEBUG',
23 | elicitation: 'disabled',
24 | });
25 | expect(infoSpy).toHaveBeenCalledWith('Starting SonarQube MCP server', {
26 | ...VERSION_INFO,
27 | logFile: 'not configured',
28 | logLevel: 'DEBUG',
29 | elicitation: 'disabled',
30 | });
31 | });
32 | it('should log protocol negotiation info on successful connection', () => {
33 | const logger = createLogger('test');
34 | const infoSpy = vi.spyOn(logger, 'info');
35 | // Simulate successful connection logging
36 | logger.info('SonarQube MCP server started successfully', {
37 | mcpProtocolInfo: 'Protocol version will be negotiated with client during initialization',
38 | });
39 | expect(infoSpy).toHaveBeenCalledWith('SonarQube MCP server started successfully', {
40 | mcpProtocolInfo: 'Protocol version will be negotiated with client during initialization',
41 | });
42 | });
43 | });
44 | describe('Protocol Version Constants', () => {
45 | it('should support all documented protocol versions', () => {
46 | // Verify the versions match our documentation
47 | expect(SUPPORTED_PROTOCOL_VERSIONS).toContain(LATEST_PROTOCOL_VERSION);
48 | expect(SUPPORTED_PROTOCOL_VERSIONS).toContain(DEFAULT_NEGOTIATED_PROTOCOL_VERSION);
49 | expect(SUPPORTED_PROTOCOL_VERSIONS.length).toBe(4);
50 | expect(SUPPORTED_PROTOCOL_VERSIONS).toEqual([
51 | '2025-06-18',
52 | '2025-03-26',
53 | '2024-11-05',
54 | '2024-10-07',
55 | ]);
56 | });
57 | it('should use semantic versioning for SDK', () => {
58 | const versionParts = SDK_VERSION.split('.');
59 | expect(versionParts).toHaveLength(3);
60 | expect(parseInt(versionParts[0]!, 10)).toBeGreaterThanOrEqual(1);
61 | expect(parseInt(versionParts[1]!, 10)).toBeGreaterThanOrEqual(13);
62 | expect(parseInt(versionParts[2]!, 10)).toBeGreaterThanOrEqual(0);
63 | });
64 | });
65 | describe('Protocol Compatibility', () => {
66 | it('should maintain backward compatibility with older protocol versions', () => {
67 | const oldestSupportedVersion = '2024-10-07';
68 | // Ensure we still support the oldest protocol version
69 | expect(SUPPORTED_PROTOCOL_VERSIONS).toContain(oldestSupportedVersion);
70 | });
71 | it('should document protocol version support in COMPATIBILITY.md', () => {
72 | // This test verifies that we have proper documentation
73 | // The actual file content is maintained separately
74 | const expectedSections = [
75 | 'Protocol Version Support',
76 | 'Version Negotiation',
77 | 'Current SDK Version',
78 | 'Feature Compatibility',
79 | 'Client Compatibility',
80 | 'SDK Update Process',
81 | ];
 82 |     // This is a documentation reminder test - it only asserts that the expected section names are non-empty
83 | expectedSections.forEach((section) => {
84 | expect(section).toBeTruthy();
85 | });
86 | });
87 | });
88 | describe('SDK Version Management', () => {
89 | it('should have consistent SDK version references', () => {
90 | // Verify SDK version is correctly set
91 | expect(SDK_VERSION).toBe('1.13.0');
92 | });
93 | it('should follow SDK update process as documented', () => {
94 | // This test serves as a reminder of the update process
95 | const updateSteps = [
96 | 'Check SDK release notes',
97 | 'Review changelog for breaking changes',
98 | 'Update dependency in package.json',
99 | 'Run tests',
100 | 'Update COMPATIBILITY.md',
101 | 'Test with multiple clients',
102 | ];
103 | updateSteps.forEach((step) => {
104 | expect(step).toBeTruthy();
105 | });
106 | });
107 | });
108 | });
109 |
```
--------------------------------------------------------------------------------
/src/__tests__/handlers/projects-authorization.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { describe, it, expect, vi, type MockedFunction } from 'vitest';
2 | import { handleSonarQubeProjects } from '../../handlers/projects.js';
3 | import type { ISonarQubeClient } from '../../types/index.js';
4 |
5 | describe('Projects Handler Authorization Error', () => {
6 | // Mock client
7 | const mockClient: ISonarQubeClient = {
8 | webApiClient: {} as any,
9 | listProjects: vi.fn() as any,
10 | getIssues: vi.fn() as any,
11 | getMetrics: vi.fn() as any,
12 | getHealth: vi.fn() as any,
13 | getStatus: vi.fn() as any,
14 | ping: vi.fn() as any,
15 | getComponentMeasures: vi.fn() as any,
16 | getComponentsMeasures: vi.fn() as any,
17 | getMeasuresHistory: vi.fn() as any,
18 | listQualityGates: vi.fn() as any,
19 | getQualityGate: vi.fn() as any,
20 | getProjectQualityGateStatus: vi.fn() as any,
21 | getSourceCode: vi.fn() as any,
22 | getScmBlame: vi.fn() as any,
23 | hotspots: vi.fn() as any,
24 | hotspot: vi.fn() as any,
25 | updateHotspotStatus: vi.fn() as any,
26 | markIssueFalsePositive: vi.fn() as any,
27 | markIssueWontFix: vi.fn() as any,
28 | markIssuesFalsePositive: vi.fn() as any,
29 | markIssuesWontFix: vi.fn() as any,
30 | addCommentToIssue: vi.fn() as any,
31 | assignIssue: vi.fn() as any,
32 | confirmIssue: vi.fn() as any,
33 | unconfirmIssue: vi.fn() as any,
34 | resolveIssue: vi.fn() as any,
35 | reopenIssue: vi.fn() as any,
36 | };
37 |
38 | it('should provide helpful error message when authorization fails', async () => {
39 | // Mock the listProjects method to throw an authorization error
40 | const authError = new Error('Insufficient privileges');
41 | (mockClient.listProjects as MockedFunction<typeof mockClient.listProjects>).mockRejectedValue(
42 | authError
43 | );
44 |
45 | await expect(handleSonarQubeProjects({}, mockClient)).rejects.toThrow(
46 | /Note: The 'projects' tool requires admin permissions/
47 | );
48 | });
49 |
50 | it('should provide helpful error message for error containing "403"', async () => {
51 | vi.clearAllMocks();
52 | const authError = new Error('Error 403 Forbidden');
53 | (mockClient.listProjects as MockedFunction<typeof mockClient.listProjects>).mockRejectedValue(
54 | authError
55 | );
56 |
57 | await expect(handleSonarQubeProjects({}, mockClient)).rejects.toThrow(
58 | /Note: The 'projects' tool requires admin permissions/
59 | );
60 | });
61 |
62 | it('should provide helpful error message for "Insufficient privileges" error', async () => {
63 | vi.clearAllMocks();
64 | const authError = new Error('Insufficient privileges');
65 | (mockClient.listProjects as MockedFunction<typeof mockClient.listProjects>).mockRejectedValue(
66 | authError
67 | );
68 |
69 | await expect(handleSonarQubeProjects({}, mockClient)).rejects.toThrow(
70 | /Note: The 'projects' tool requires admin permissions/
71 | );
72 | });
73 |
74 | it('should not modify error message for non-authorization errors', async () => {
75 | // Mock a different type of error
76 | const serverError = new Error('Internal server error');
77 | (mockClient.listProjects as MockedFunction<typeof mockClient.listProjects>).mockRejectedValue(
78 | serverError
79 | );
80 |
81 | await expect(handleSonarQubeProjects({}, mockClient)).rejects.toThrow('Internal server error');
82 | await expect(handleSonarQubeProjects({}, mockClient)).rejects.not.toThrow(
83 | /Note: The 'projects' tool requires admin permissions/
84 | );
85 | });
86 |
87 | it('should handle successful response without error', async () => {
88 | const mockResponse = {
89 | projects: [
90 | {
91 | key: 'test-project',
92 | name: 'Test Project',
93 | qualifier: 'TRK',
94 | visibility: 'public',
95 | lastAnalysisDate: '2023-01-01',
96 | revision: 'abc123',
97 | managed: false,
98 | },
99 | ],
100 | paging: {
101 | pageIndex: 1,
102 | pageSize: 10,
103 | total: 1,
104 | },
105 | };
106 |
107 | (mockClient.listProjects as MockedFunction<typeof mockClient.listProjects>).mockResolvedValue(
108 | mockResponse
109 | );
110 |
111 | const result = await handleSonarQubeProjects({}, mockClient);
112 | const firstContent = result.content[0]!;
113 | if ('text' in firstContent && typeof firstContent.text === 'string') {
114 | const data = JSON.parse(firstContent.text);
115 | expect(data.projects).toHaveLength(1);
116 | expect(data.projects[0].key).toBe('test-project');
117 | } else {
118 | throw new Error('Expected text content');
119 | }
120 | });
121 | });
122 |
```
--------------------------------------------------------------------------------
/scripts/ci-local.sh:
--------------------------------------------------------------------------------
```bash
1 | #!/usr/bin/env bash
2 | set -euo pipefail
3 |
4 | # =============================================================================
5 | # Local CI Simulation Script
6 | # Purpose: Run the same checks as CI locally before pushing
7 | # Usage: ./scripts/ci-local.sh [--fast] [--no-security]
8 | # =============================================================================
9 |
10 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
11 | PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
12 | FAST_MODE=false
13 | SKIP_SECURITY=false
14 |
15 | # Colors for output
16 | RED='\033[0;31m'
17 | GREEN='\033[0;32m'
18 | YELLOW='\033[1;33m'
19 | BLUE='\033[0;34m'
20 | NC='\033[0m' # No Color
21 |
22 | log() {
23 | echo -e "${GREEN}[CI-LOCAL]${NC} $1"
24 | }
25 |
26 | warn() {
27 | echo -e "${YELLOW}[WARN]${NC} $1"
28 | }
29 |
30 | error() {
31 | echo -e "${RED}[ERROR]${NC} $1"
32 | exit 1
33 | }
34 |
35 | info() {
36 | echo -e "${BLUE}[INFO]${NC} $1"
37 | }
38 |
39 | # Parse command line arguments
40 | while [[ $# -gt 0 ]]; do
41 | case $1 in
42 | --fast)
43 | FAST_MODE=true
44 | shift
45 | ;;
46 | --no-security)
47 | SKIP_SECURITY=true
48 | shift
49 | ;;
50 | --help|-h)
51 | echo "Usage: $0 [--fast] [--no-security]"
52 | echo " --fast Skip slower checks (container scan, full coverage)"
53 | echo " --no-security Skip security scans"
54 | exit 0
55 | ;;
56 | *)
57 | error "Unknown option: $1"
58 | ;;
59 | esac
60 | done
61 |
62 | cd "$PROJECT_ROOT"
63 |
64 | log "Starting local CI simulation..."
65 | if [ "$FAST_MODE" = true ]; then
66 | info "Running in fast mode (some checks skipped)"
67 | fi
68 |
69 | # 1. Install dependencies (like CI setup)
70 | log "Installing dependencies..."
71 | pnpm install --frozen-lockfile
72 |
73 | # 2. Core validation (parallel in real CI)
74 | log "Running core validation checks..."
75 |
76 | echo " → Audit check..."
77 | pnpm audit --audit-level critical
78 |
79 | echo " → Type checking..."
80 | pnpm typecheck
81 |
82 | echo " → Linting..."
83 | pnpm lint
84 |
85 | echo " → Format checking..."
86 | pnpm format
87 |
88 | echo " → Running tests..."
89 | if [ "$FAST_MODE" = true ]; then
90 | pnpm test
91 | else
92 | pnpm test:coverage
93 | fi
94 |
95 | # 3. Build check
96 | log "Checking build..."
97 | pnpm build
98 |
99 | # 4. Security scans (if not skipped)
100 | if [ "$SKIP_SECURITY" = false ]; then
101 | log "Running security checks..."
102 |
103 | # Check if OSV scanner is available
104 | if command -v osv-scanner &> /dev/null; then
105 | echo " → OSV vulnerability scan..."
106 | if [ -r pnpm-lock.yaml ]; then
107 | osv-scanner --lockfile=pnpm-lock.yaml
108 | else
109 | warn "pnpm-lock.yaml not found or not readable. Skipping OSV scan."
110 | fi
111 | else
112 | warn "OSV scanner not installed. Run: go install github.com/google/osv-scanner/cmd/osv-scanner@latest"
113 | fi
114 |
115 | # Container scan (if not in fast mode)
116 | if [ "$FAST_MODE" = false ] && [ -f "./scripts/scan-container.sh" ]; then
117 | if command -v docker &> /dev/null && command -v trivy &> /dev/null; then
118 | echo " → Container security scan..."
119 | ./scripts/scan-container.sh
120 | else
121 | warn "Docker or Trivy not available. Skipping container scan."
122 | fi
123 | fi
124 | else
125 | info "Security scans skipped"
126 | fi
127 |
128 | # 5. Changeset validation (simulate CI check)
129 | log "Checking changesets..."
130 | if git rev-parse --verify HEAD~1 >/dev/null 2>&1; then
131 | # Check if we have any commits to compare
132 | if git diff --name-only HEAD~1 | grep -E '(src/|tests/)' > /dev/null; then
133 | # We have code changes, check for changesets
134 | if ! pnpm changeset:status > /dev/null 2>&1; then
135 | warn "Code changes detected but no changesets found"
136 | echo " Run 'pnpm changeset' to create one, or 'pnpm changeset --empty' for non-release changes"
137 | else
138 | info "Changesets validated"
139 | fi
140 | else
141 | info "No code changes detected, changeset check skipped"
142 | fi
143 | else
144 | info "No previous commit found, changeset check skipped"
145 | fi
146 |
147 | # 6. Final summary
148 | log "✅ Local CI simulation complete!"
149 | echo
150 | info "Summary:"
151 | echo " ✅ Dependencies installed"
152 | echo " ✅ Core validation passed"
153 | echo " ✅ Build successful"
154 | if [ "$SKIP_SECURITY" = false ]; then
155 | echo " ✅ Security checks completed"
156 | fi
157 | echo " ✅ Changesets validated"
158 | echo
159 | info "Your code is ready to push! 🚀"
160 | echo
161 | info "Next steps:"
162 | echo " 1. git add . && git commit -m 'your commit message'"
163 | echo " 2. git push origin your-branch"
164 | echo " 3. Create a pull request"
```
--------------------------------------------------------------------------------
/docs/architecture/decisions/0019-simplify-to-stdio-only-transport-for-mcp-gateway-deployment.md:
--------------------------------------------------------------------------------
```markdown
1 | # 19. Simplify to stdio-only transport for MCP gateway deployment
2 |
3 | Date: 2025-01-30
4 | Partially Superseded: 2025-10-12 by ADR-0028
5 |
6 | ## Status
7 |
8 | Partially Superseded by ADR-0028
9 |
10 | This decision was partially reversed on 2025-10-12. While the removal of OAuth-based HTTP transport and authentication complexity remains valid, the "stdio-only" decision was superseded by ADR-0028, which re-introduced HTTP transport in a simpler, session-based form without OAuth complexity.
11 |
12 | **What remains valid from this ADR:**
13 |
14 | - Removal of OAuth 2.0 authentication infrastructure (60+ files)
15 | - Removal of service account management and permission filtering
16 | - Delegation of enterprise features to MCP gateways
17 | - Simplified authentication model
18 |
19 | **What was superseded:**
20 |
21 | - "stdio-only" transport decision (HTTP transport re-added in ADR-0028)
22 | - Removal of all HTTP endpoints (HTTP re-added with session management)
23 | - Removal of SSE (SSE re-added for real-time notifications)
24 |
25 | See ADR-0028 for the current HTTP transport implementation (session-based without OAuth).
26 |
27 | ## Context
28 |
29 | The SonarQube MCP Server initially supported only stdio transport (ADR-0010). Later, HTTP transport with OAuth 2.0 support was added (ADR-0016) to enable enterprise features like multi-tenancy, authentication, and audit logging.
30 |
31 | However, this added significant complexity:
32 |
33 | - 60+ authentication/authorization files
34 | - Complex OAuth token validation
35 | - Service account management
36 | - Permission filtering system
37 | - Audit logging infrastructure
38 | - HTTP server configuration
39 | - External IdP integration
40 |
41 | Meanwhile, the MCP ecosystem has evolved with gateway solutions that handle these enterprise concerns:
42 |
43 | - Docker MCP Gateway
44 | - IBM Context Forge
45 | - SGNL
46 | - Operant
47 |
48 | These gateways provide authentication, multi-tenancy, monitoring, and other enterprise features at the gateway layer, making the HTTP transport implementation redundant.
49 |
50 | ## Decision
51 |
52 | We will simplify the MCP server to support only stdio transport, removing all HTTP, OAuth, SSE, and related enterprise infrastructure. Enterprise features will be handled by MCP gateways.
53 |
54 | This involves:
55 |
56 | 1. Removing HTTP transport and all OAuth/authentication code
57 | 2. Removing service account management and permission filtering
58 | 3. Removing audit logging (handled by gateways)
59 | 4. Removing Kubernetes/Helm/Terraform deployment configs
60 | 5. Simplifying configuration to core SonarQube settings
61 | 6. Reducing Docker image size and resource requirements
62 |
63 | ## Consequences
64 |
65 | ### Positive
66 |
67 | - **Reduced Complexity**: ~40% reduction in codebase size
68 | - **Improved Maintainability**: Focus on core SonarQube integration
69 | - **Better Separation of Concerns**: Business logic vs infrastructure
70 | - **Faster Startup**: No HTTP server or auth initialization
71 | - **Smaller Attack Surface**: No network exposure
72 | - **Easier Testing**: No auth/permission mocking needed
73 | - **Gateway Flexibility**: Users can choose their preferred gateway
74 |
75 | ### Negative
76 |
77 | - **Breaking Change**: Users of HTTP transport must migrate
78 | - **Feature Migration**: Enterprise users need to adopt MCP gateways
79 | - **Documentation Updates**: Significant documentation changes required
80 |
81 | ### Neutral
82 |
83 | - **Unix Philosophy**: Aligns with "do one thing well"
84 | - **Ecosystem Evolution**: Follows MCP community direction
85 | - **Gateway Pattern**: Standard in microservices architecture
86 |
87 | ## Implementation
88 |
89 | The simplification will be implemented in phases:
90 |
91 | 1. **Phase 1**: Remove HTTP/OAuth infrastructure files
92 | 2. **Phase 2**: Simplify configuration and environment variables
93 | 3. **Phase 3**: Update documentation for stdio-only approach
94 | 4. **Phase 4**: Optimize core functionality and startup time
95 | 5. **Phase 5**: Optimize Docker image for minimal footprint
96 | 6. **Phase 6**: Update tests and validate functionality
97 |
98 | ## Migration Path
99 |
100 | Users currently using HTTP transport should:
101 |
102 | 1. Deploy an MCP gateway (Docker MCP Gateway, IBM Context Forge, etc.)
103 | 2. Configure the stdio server behind the gateway
104 | 3. Move authentication/authorization to the gateway layer
105 | 4. Leverage gateway features for monitoring and audit
106 |
107 | ## References
108 |
109 | - GitHub Issue #243: Simplify to stdio-only transport
110 | - ADR-0010: Use stdio transport for MCP communication
111 | - ADR-0016: HTTP transport with OAuth 2.0 (being reverted)
112 | - **ADR-0028: Session-Based HTTP Transport with SSE (PARTIALLY SUPERSEDES THIS ADR)**
113 | - MCP Specification: Transport layer abstraction
114 |
```
--------------------------------------------------------------------------------
/src/domains/measures.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { MeasuresAdditionalField } from 'sonarqube-web-api-client';
2 | import type {
3 | ComponentMeasuresParams,
4 | ComponentsMeasuresParams,
5 | MeasuresHistoryParams,
6 | SonarQubeComponentMeasuresResult,
7 | SonarQubeComponentsMeasuresResult,
8 | SonarQubeMeasuresHistoryResult,
9 | } from '../types/index.js';
10 | import { BaseDomain } from './base.js';
11 | import { ensureStringArray } from '../utils/transforms.js';
12 |
13 | /**
14 | * Domain module for measures-related operations
15 | */
16 | export class MeasuresDomain extends BaseDomain {
17 |   /**
18 |    * Gets measures for a specific component
19 |    * @param params Parameters including component key and metric keys
20 |    * @returns Promise with the component measures
21 |    */
22 |   async getComponentMeasures(
23 |     params: ComponentMeasuresParams
24 |   ): Promise<SonarQubeComponentMeasuresResult> {
25 |     const { component, metricKeys, additionalFields, branch, pullRequest } = params;
26 |
27 |     const request: {
28 |       component: string;
29 |       metricKeys: string[];
30 |       additionalFields?: MeasuresAdditionalField[];
31 |       branch?: string;
32 |       pullRequest?: string;
33 |     } = {
34 |       component,
35 |       metricKeys: ensureStringArray(metricKeys),
36 |       ...(additionalFields && { additionalFields: additionalFields as MeasuresAdditionalField[] }), // cast to the web-api client's enum type
37 |       ...(branch && { branch }),
38 |       ...(pullRequest && { pullRequest }),
39 |     };
40 |
41 |     const response = await this.webApiClient.measures.component(request);
42 |
43 |     return response as SonarQubeComponentMeasuresResult;
44 |   }
45 |
46 |   /**
47 |    * Gets measures for multiple components
48 |    * @param params Parameters including component keys and metric keys
49 |    * @returns Promise with the components measures
50 |    */
51 |   async getComponentsMeasures(
52 |     params: ComponentsMeasuresParams
53 |   ): Promise<SonarQubeComponentsMeasuresResult> {
54 |     // The API only supports querying one component at a time for detailed measures
55 |     // We need to make multiple requests and aggregate the results
56 |     const componentKeys = ensureStringArray(params.componentKeys);
57 |     const metricKeys = ensureStringArray(params.metricKeys);
58 |
59 |     const results = await Promise.all( // one request per component, issued in parallel
60 |       componentKeys.map((componentKey) => {
61 |         const requestParams: ComponentMeasuresParams = {
62 |           component: componentKey,
63 |           metricKeys,
64 |           ...(params.additionalFields && { additionalFields: params.additionalFields }),
65 |           ...(params.branch && { branch: params.branch }),
66 |           ...(params.pullRequest && { pullRequest: params.pullRequest }),
67 |           ...(params.period && { period: params.period }), // NOTE(review): period is forwarded but getComponentMeasures never reads it — confirm
68 |         };
69 |         return this.getComponentMeasures(requestParams);
70 |       })
71 |     );
72 |
73 |     // Aggregate results with pagination
74 |     const allComponents = results.map((result) => result.component);
75 |     const page = params.page ?? 1;
76 |     const pageSize = params.pageSize ?? 100; // Default to 100 like SonarQube API
77 |
78 |     // Apply pagination
79 |     const startIndex = (page - 1) * pageSize;
80 |     const endIndex = startIndex + pageSize;
81 |     const paginatedComponents = allComponents.slice(startIndex, endIndex);
82 |
83 |     const response: SonarQubeComponentsMeasuresResult = {
84 |       components: paginatedComponents,
85 |       metrics: results[0]?.metrics ?? [], // metric metadata is the same across requests; take the first result's
86 |       paging: {
87 |         pageIndex: page,
88 |         pageSize: pageSize,
89 |         total: componentKeys.length, // total counts all requested components, not just the current page
90 |       },
91 |     };
92 |
93 |     // Only add period if it exists
94 |     if (results[0]?.period) {
95 |       response.period = results[0].period;
96 |     }
97 |
98 |     return response;
99 |   }
100 |
101 |   /**
102 |    * Gets measures history for a component
103 |    * @param params Parameters including component key and metrics
104 |    * @returns Promise with the measures history
105 |    */
106 |   async getMeasuresHistory(params: MeasuresHistoryParams): Promise<SonarQubeMeasuresHistoryResult> {
107 |     const { component, metrics, from, to, branch, pullRequest, page, pageSize } = params;
108 |
109 |     const builder = this.webApiClient.measures.searchHistory(component, ensureStringArray(metrics));
110 |
111 |     if (from) {
112 |       builder.from(from);
113 |     }
114 |     if (to) {
115 |       builder.to(to);
116 |     }
117 |     if (branch) {
118 |       builder.withBranch(branch);
119 |     }
120 |     if (pullRequest) {
121 |       builder.withPullRequest(pullRequest);
122 |     }
123 |     if (page !== undefined) { // explicit undefined check so falsy-but-valid values are not dropped
124 |       builder.page(page);
125 |     }
126 |     if (pageSize !== undefined) {
127 |       builder.pageSize(pageSize);
128 |     }
129 |
130 |     const response = await builder.execute();
131 |     return {
132 |       ...response,
133 |       paging: response.paging ?? { pageIndex: 1, pageSize: 100, total: 0 }, // fall back to defaults when the API omits paging
134 |     };
135 |   }
136 | }
137 |
```
--------------------------------------------------------------------------------
/docs/architecture/decisions/0020-testing-framework-and-strategy-vitest-with-property-based-testing.md:
--------------------------------------------------------------------------------
```markdown
1 | # 20. Testing Framework and Strategy: Vitest with Property-Based Testing
2 |
3 | Date: 2025-10-11
4 |
5 | ## Status
6 |
7 | Accepted
8 |
9 | ## Context
10 |
11 | The SonarQube MCP Server requires a comprehensive testing strategy to ensure reliability, maintainability, and code quality. The project needs:
12 |
13 | - Fast test execution for rapid feedback during development
14 | - Strong integration with TypeScript and ES modules
15 | - Property-based testing to uncover edge cases
16 | - High code coverage requirements (80% minimum)
17 | - Modern test tooling with good developer experience
18 | - Compatibility with CI/CD automation
19 |
20 | Jest, while popular, has challenges with ES modules and TypeScript, requiring additional configuration and workarounds. We needed a testing framework that provides first-class support for modern JavaScript/TypeScript patterns.
21 |
22 | ## Decision
23 |
24 | We will use **Vitest** as our primary testing framework, complemented by **fast-check** for property-based testing.
25 |
26 | ### Core Testing Stack
27 |
28 | 1. **Vitest** (v3.2.4+): Test framework and runner
29 | - Native ES modules support
30 | - Built-in TypeScript support
31 | - Compatible with Jest API for easier migration
32 | - Fast execution with smart watch mode
33 | - Integrated coverage reporting with V8
34 |
35 | 2. **fast-check** (v4.3.0+): Property-based testing library
36 | - Generate comprehensive test cases automatically
37 | - Uncover edge cases that unit tests might miss
38 | - Integrated with Vitest via @fast-check/vitest
39 |
40 | 3. **Coverage Requirements**:
41 | - Minimum 80% coverage for lines, functions, branches, and statements
42 | - Enforced in CI/CD pipeline
43 | - Configured via vitest.config.ts
44 |
45 | ### Test Organization
46 |
47 | ```
48 | src/__tests__/
49 | ├── [feature].test.ts # Unit tests
50 | ├── domains/ # Domain-specific tests
51 | │ └── [domain].test.ts
52 | ├── schemas/ # Schema validation tests
53 | │ └── [schema].test.ts
54 | └── transports/ # Transport layer tests
55 | └── [transport].test.ts
56 | ```
57 |
58 | ### Testing Approach
59 |
60 | 1. **Unit Tests**: Test individual functions and classes in isolation
61 | 2. **Integration Tests**: Test interactions between components
62 | 3. **Property-Based Tests**: Use fast-check to test properties that should hold for all inputs
63 | 4. **Schema Tests**: Validate Zod schema definitions with comprehensive inputs
64 |
65 | ## Consequences
66 |
67 | ### Positive
68 |
69 | - **Fast Execution**: Vitest is significantly faster than Jest (2-3x in our benchmarks)
70 | - **Better DX**: Native TypeScript support eliminates configuration complexity
71 | - **ES Modules**: First-class support for modern JavaScript patterns
72 | - **Property-Based Testing**: fast-check uncovers edge cases that traditional tests miss
73 | - **Coverage Enforcement**: Built-in V8 coverage ensures quality standards
74 | - **Familiar API**: Jest-compatible API reduces migration friction
75 | - **Watch Mode**: Intelligent test re-running speeds up development
76 | - **Type Safety**: Strong TypeScript integration catches errors early
77 |
78 | ### Negative
79 |
80 | - **Ecosystem Maturity**: Vitest is newer than Jest, with smaller community
81 | - **Learning Curve**: Property-based testing requires different thinking
82 | - **CI Time**: Property-based tests can be slower (mitigated with test timeouts)
83 | - **Documentation**: Less third-party documentation compared to Jest
84 |
85 | ### Neutral
86 |
87 | - **Migration Path**: Jest-compatible API makes future changes easier
88 | - **Property Test Complexity**: Requires careful generator design
89 | - **Coverage Tools**: V8 coverage differs slightly from Istanbul
90 |
91 | ## Implementation
92 |
93 | ### Configuration
94 |
95 | ```typescript
96 | // vitest.config.ts
97 | export default defineConfig({
98 | test: {
99 | environment: 'node',
100 | globals: true,
101 | coverage: {
102 | provider: 'v8',
103 | reporter: ['text', 'html', 'json-summary', 'lcov'],
104 | thresholds: {
105 | branches: 80,
106 | functions: 80,
107 | lines: 80,
108 | statements: 80,
109 | },
110 | },
111 | },
112 | });
113 | ```
114 |
115 | ### Example Tests
116 |
117 | **Unit Test**:
118 |
119 | ```typescript
120 | describe('nullToUndefined', () => {
121 | it('should convert null to undefined', () => {
122 | expect(nullToUndefined(null)).toBeUndefined();
123 | });
124 | });
125 | ```
126 |
127 | **Property-Based Test**:
128 |
129 | ```typescript
130 | import { fc, test } from '@fast-check/vitest';
131 |
132 | test.prop([fc.string()])('should handle any string input', (input) => {
133 | const result = processString(input);
134 | expect(typeof result).toBe('string');
135 | });
136 | ```
137 |
138 | ## References
139 |
140 | - Vitest Documentation: https://vitest.dev/
141 | - fast-check Documentation: https://fast-check.dev/
142 | - Coverage Configuration: vitest.config.ts
143 | - Test Examples: `src/__tests__/`
144 |
```
--------------------------------------------------------------------------------
/src/domains/hotspots.ts:
--------------------------------------------------------------------------------
```typescript
1 | import type {
2 | HotspotSearchParams,
3 | SonarQubeHotspotSearchResult,
4 | SonarQubeHotspotDetails,
5 | HotspotStatusUpdateParams,
6 | SonarQubeHotspot,
7 | SeverityLevel,
8 | } from '../types/index.js';
9 | import { BaseDomain } from './base.js';
10 |
11 | /**
12 | * Domain module for security hotspots operations
13 | */
14 | export class HotspotsDomain extends BaseDomain {
15 |   /**
16 |    * Search for security hotspots
17 |    * @param params Search parameters
18 |    * @returns Promise with the search results
19 |    */
20 |   async hotspots(params: HotspotSearchParams): Promise<SonarQubeHotspotSearchResult> {
21 |     const builder = this.webApiClient.hotspots.search();
22 |
23 |     if (params.projectKey) {
24 |       builder.projectKey(params.projectKey);
25 |     }
26 |     // Note: The hotspots API doesn't support branch/pullRequest filtering directly
27 |     // These parameters might be ignored or need to be handled differently
28 |     if (params.status) {
29 |       builder.status(params.status);
30 |     }
31 |     if (params.resolution) {
32 |       builder.resolution(params.resolution);
33 |     }
34 |     if (params.files) {
35 |       builder.files(params.files);
36 |     }
37 |     if (params.assignedToMe !== undefined) {
38 |       builder.onlyMine(params.assignedToMe); // the API exposes this flag as "onlyMine", not "assignedToMe"
39 |     }
40 |     if (params.sinceLeakPeriod !== undefined) {
41 |       builder.sinceLeakPeriod(params.sinceLeakPeriod);
42 |     }
43 |     if (params.inNewCodePeriod !== undefined) {
44 |       // inNewCodePeriod might not be available, use sinceLeakPeriod instead
45 |       if (params.inNewCodePeriod) {
46 |         builder.sinceLeakPeriod(true);
47 |       }
48 |     }
49 |     if (params.page !== undefined) {
50 |       builder.page(params.page);
51 |     }
52 |     if (params.pageSize !== undefined) {
53 |       builder.pageSize(params.pageSize);
54 |     }
55 |
56 |     const response = await builder.execute();
57 |
58 |     return {
59 |       hotspots: response.hotspots as SonarQubeHotspot[], // narrow the client's type to our local interface
60 |       components: response.components?.map((comp) => ({
61 |         key: comp.key,
62 |         qualifier: comp.qualifier,
63 |         name: comp.name,
64 |         longName: comp.longName,
65 |         path: comp.path,
66 |       })),
67 |       paging: response.paging ?? { pageIndex: 1, pageSize: 100, total: 0 }, // fall back to defaults when the API omits paging
68 |     };
69 |   }
70 |
71 |   /**
72 |    * Get details for a specific hotspot
73 |    * @param hotspotKey The hotspot key
74 |    * @returns Promise with the hotspot details
75 |    */
76 |   async hotspot(hotspotKey: string): Promise<SonarQubeHotspotDetails> {
77 |     const response = await this.webApiClient.hotspots.show({ hotspot: hotspotKey });
78 |     // Map the response to our interface
79 |     return {
80 |       key: response.key,
81 |       component: response.component.key,
82 |       project: response.project.key,
83 |       securityCategory: response.rule.securityCategory,
84 |       vulnerabilityProbability: response.rule.vulnerabilityProbability as SeverityLevel,
85 |       status: response.status,
86 |       ...(response.resolution && { resolution: response.resolution }),
87 |       line: response.line ?? 0, // 0 presumably marks a file-level hotspot with no specific line — TODO confirm
88 |       message: response.message,
89 |       ...(response.assignee?.login && { assignee: response.assignee.login }),
90 |       ...(response.author?.login && { author: response.author.login }),
91 |       creationDate: response.creationDate,
92 |       updateDate: response.updateDate,
93 |       rule: {
94 |         key: response.rule.key,
95 |         name: response.rule.name,
96 |         securityCategory: response.rule.securityCategory,
97 |         vulnerabilityProbability: response.rule.vulnerabilityProbability as SeverityLevel,
98 |       },
99 |       changelog: response.changelog?.map((change) => ({
100 |         user: change.user?.login,
101 |         userName: change.user?.name,
102 |         creationDate: change.creationDate,
103 |         diffs:
104 |           change.diffs?.map((diff) => ({
105 |             key: diff.key,
106 |             oldValue: diff.oldValue,
107 |             newValue: diff.newValue,
108 |           })) ?? [],
109 |       })),
110 |       comment: response.comment,
111 |       users: (response as { users?: SonarQubeHotspotDetails['users'] }).users, // "users" is absent from client typings; assumed present at runtime — verify
112 |     };
113 |   }
114 |
115 |   /**
116 |    * Update the status of a hotspot
117 |    * @param params Update parameters
118 |    * @returns Promise that resolves when the update is complete
119 |    */
120 |   async updateHotspotStatus(params: HotspotStatusUpdateParams): Promise<void> {
121 |     const request: {
122 |       hotspot: string;
123 |       status: 'TO_REVIEW' | 'REVIEWED';
124 |       resolution?: 'FIXED' | 'SAFE';
125 |       comment?: string;
126 |     } = {
127 |       hotspot: params.hotspot,
128 |       status: params.status,
129 |     };
130 |
131 |     if (params.resolution !== undefined) {
132 |       request.resolution = params.resolution;
133 |     }
134 |     if (params.comment !== undefined) {
135 |       request.comment = params.comment;
136 |     }
137 |
138 |     await this.webApiClient.hotspots.changeStatus(request);
139 |   }
140 | }
141 |
```
--------------------------------------------------------------------------------
/.github/scripts/version-and-release.js:
--------------------------------------------------------------------------------
```javascript
1 | #!/usr/bin/env node
2 |
3 | /**
4 | * =============================================================================
5 | * SCRIPT: Version and Release Manager
6 | * PURPOSE: Validate changesets and manage version bumps for releases
7 | * USAGE: Called by main.yml workflow after successful validation
8 | * OUTPUTS: Sets GitHub Actions outputs for version and changed status
9 | * =============================================================================
10 | */
11 |
12 | import { execSync } from 'child_process';
13 | import fs from 'fs';
14 |
15 | // Execute shell command and return trimmed output
16 | const exec = (cmd) => execSync(cmd, { encoding: 'utf-8', stdio: 'pipe' }).trim(); // 'pipe' captures output instead of echoing it to the workflow log
17 | // eslint-disable-next-line no-console
18 | const log = (msg) => console.log(msg);
19 |
20 | async function main() {
21 |   try {
22 |     // =============================================================================
23 |     // CHANGESET DETECTION
24 |     // Check if changesets exist in .changeset directory
25 |     // =============================================================================
26 |
27 |     // Look for changeset markdown files (excluding README.md)
28 |     const hasChangesets =
29 |       fs.existsSync('.changeset') &&
30 |       fs.readdirSync('.changeset').some((f) => f.endsWith('.md') && f !== 'README.md');
31 |
32 |     if (!hasChangesets) {
33 |       // =============================================================================
34 |       // VALIDATE COMMITS MATCH CHANGESETS
35 |       // Ensure feat/fix commits have corresponding changesets
36 |       // =============================================================================
37 |
38 |       // Find the last git tag to determine commit range
39 |       let lastTag = '';
40 |       try {
41 |         lastTag = exec('git describe --tags --abbrev=0');
42 |       } catch {
43 |         // No tags exist yet (first release)
44 |         lastTag = '';
45 |       }
46 |
47 |       // Get commits since last tag (or all commits if no tags)
48 |       const commitRange = lastTag ? `${lastTag}..HEAD` : 'HEAD';
49 |       const commits = exec(`git log ${commitRange} --pretty=format:"%s"`).split('\n'); // one commit subject per array entry
50 |
51 |       // Check if any commits require a release (feat, fix, perf, refactor)
52 |       const hasReleasableCommits = commits.some((c) =>
53 |         /^(feat|fix|perf|refactor)(\(.+\))?:/.test(c)
54 |       );
55 |
56 |       if (!hasReleasableCommits) {
57 |         // No commits that need a release
58 |         log('⏭️ No releasable commits found, skipping release');
59 |         process.exit(0);
60 |       }
61 |
62 |       // VALIDATION ERROR: Found releasable commits without changesets
63 |       // This enforces that all features/fixes are documented in changelog
64 |       log('❌ Found releasable commits but no changeset');
65 |       log('Commits that require a changeset:');
66 |       commits
67 |         .filter((c) => /^(feat|fix|perf|refactor)(\(.+\))?:/.test(c))
68 |         .forEach((c) => log(`  - ${c}`));
69 |       log('\nPlease add a changeset by running: pnpm changeset');
70 |       process.exit(1);
71 |     }
72 |
73 |     // =============================================================================
74 |     // VERSION MANAGEMENT
75 |     // Apply changesets to bump version and update CHANGELOG.md
76 |     // =============================================================================
77 |
78 |     // Get current version from package.json
79 |     const pkg = JSON.parse(fs.readFileSync('package.json', 'utf-8'));
80 |     const currentVersion = pkg.version;
81 |     log(`Current version: ${currentVersion}`);
82 |
83 |     // Apply all pending changesets
84 |     // This updates package.json version and CHANGELOG.md
85 |     exec('pnpm changeset version');
86 |
87 |     // Check if version actually changed
88 |     const updatedPkg = JSON.parse(fs.readFileSync('package.json', 'utf-8')); // re-read: the file on disk was rewritten by the previous step
89 |     const newVersion = updatedPkg.version;
90 |
91 |     if (currentVersion === newVersion) {
92 |       // No version bump needed (e.g., all changesets were --empty)
93 |       log('⏭️ No version change');
94 |       process.exit(0);
95 |     }
96 |
97 |     log(`📦 Version changed to: ${newVersion}`);
98 |
99 |     // =============================================================================
100 |     // GITHUB ACTIONS OUTPUT
101 |     // Set outputs for workflow to use in subsequent steps
102 |     // =============================================================================
103 |
104 |     // Output for GitHub Actions
105 |     // These values are used by main.yml to decide whether to create a release
106 |     if (process.env.GITHUB_OUTPUT) {
107 |       fs.appendFileSync(process.env.GITHUB_OUTPUT, `changed=true\n`);
108 |       fs.appendFileSync(process.env.GITHUB_OUTPUT, `version=${newVersion}\n`);
109 |     }
110 |   } catch (error) {
111 |     // Error handling with clear message
112 |     // Common errors: permission issues, git conflicts, invalid changesets
113 |     // eslint-disable-next-line no-console
114 |     console.error('Error:', error.message);
115 |     process.exit(1);
116 |   }
117 | }
118 |
119 | main();
120 |
```
--------------------------------------------------------------------------------
/.github/workflows/pr.yml:
--------------------------------------------------------------------------------
```yaml
1 | # =============================================================================
2 | # WORKFLOW: Pull Request Validation
3 | # PURPOSE: Ensure code quality and security before merging to main
4 | # TRIGGERS: Pull requests targeting main branch
5 | # REQUIREMENTS: All checks must pass, changesets required for features/fixes
6 | # =============================================================================
7 |
8 | name: PR
9 |
10 | on:
11 | pull_request:
12 | branches: [main]
13 |
14 | # Allow only one PR workflow per branch
15 | # cancel-in-progress: true cancels old runs when new commits are pushed
16 | # This speeds up feedback by focusing on the latest code
17 | concurrency:
18 | group: ${{ github.workflow }}-${{ github.ref }}
19 | cancel-in-progress: true
20 |
21 | # Minimal permissions for security
22 | # contents: read - Read code for analysis
23 | # security-events: write - Upload security findings
24 | # actions: read - Access workflow artifacts
25 | # packages: write - Required by reusable-docker.yml (not used in PR builds)
26 | permissions:
27 | contents: read
28 | security-events: write
29 | actions: read
30 | packages: write
31 |
32 | jobs:
33 | # =============================================================================
34 | # PARALLEL VALIDATION
35 | # All checks run simultaneously for faster feedback
36 | # =============================================================================
37 |
38 | # Core validation: audit, typecheck, lint, format, tests
39 | # upload-coverage: true generates coverage reports for visibility
40 | # FAILS IF: Any check fails or coverage drops below 80%
41 | validate:
42 | uses: ./.github/workflows/reusable-validate.yml
43 | secrets:
44 | SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
45 | with:
46 | validate-changesets: true
47 |
48 | # Security: Static security analysis for TypeScript/JavaScript
49 | # Scans for: XSS, injection attacks, insecure patterns
50 | # Results appear in Security tab of the PR
51 | security:
52 | uses: ./.github/workflows/reusable-security.yml
53 |
54 | # =============================================================================
55 | # DOCKER CONTAINER VALIDATION
56 | # Build and scan Docker image for vulnerabilities
57 | # =============================================================================
58 |
59 | # Docker: Build and security scan container image
60 | # Only runs when ENABLE_DOCKER_RELEASE is configured
61 | # Scans for: CVEs, misconfigurations, secrets in image layers
62 | docker:
63 | if: vars.ENABLE_DOCKER_RELEASE == 'true'
64 | uses: ./.github/workflows/reusable-docker.yml
65 | with:
66 | platforms: 'linux/amd64' # Single platform for faster PR validation
67 | save-artifact: false # Don't save artifact for PRs
68 | image-name: 'sonarqube-mcp-server-pr'
69 |
70 | # =============================================================================
71 | # FINAL STATUS CHECK
72 | # Single job to verify all parallel checks succeeded
73 | # =============================================================================
74 |
75 | # Final status check - ensures all jobs passed
76 | # Required for branch protection rules
77 | pr-status:
78 | needs: [validate, security, docker]
79 | if: always() # Run even if previous jobs failed
80 | runs-on: ubuntu-latest
81 | steps:
82 | - name: Check status
83 | # Aggregates results from all parallel jobs
84 | # This single check can be used as a required status check
85 | # FAILS IF: Any validation job failed
86 | # Common failures:
87 | # - validate: Tests fail, coverage below 80%, lint errors, workflow errors, missing changesets
88 | # - security: Security vulnerabilities, vulnerable dependencies, audit failures
89 | # - docker: Container vulnerabilities or build failures (when enabled)
90 | run: |
91 | # Check Docker job status
92 | # The job can be:
93 | # - success: Job ran and passed
94 | # - failure: Job ran and failed
95 | # - cancelled: Job was cancelled
96 | # - skipped: Job condition was not met (e.g., ENABLE_DOCKER_RELEASE != 'true')
97 | DOCKER_RESULT="${{ needs.docker.result }}"
98 |
99 | # Docker is acceptable if it succeeded or was skipped
100 | # It's a failure only if it actually ran and failed/was cancelled
101 | if [ "$DOCKER_RESULT" == "failure" ] || [ "$DOCKER_RESULT" == "cancelled" ]; then
102 | DOCKER_FAILED=true
103 | else
104 | DOCKER_FAILED=false
105 | fi
106 |
107 | if [ "${{ needs.validate.result }}" != "success" ] || \
108 | [ "${{ needs.security.result }}" != "success" ] || \
109 | [ "$DOCKER_FAILED" == "true" ]; then
110 | echo "❌ PR validation failed"
111 | # Check individual job results for debugging
112 | echo "Validate: ${{ needs.validate.result }}"
113 | echo "Security: ${{ needs.security.result }}"
114 | echo "Docker: ${{ needs.docker.result }}"
115 | exit 1
116 | fi
117 | echo "✅ All PR checks passed"
118 |
```
--------------------------------------------------------------------------------
/src/__tests__/additional-coverage.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
2 | import nock from 'nock';
3 |
4 | // Mock environment variables
5 | process.env.SONARQUBE_TOKEN = 'test-token';
6 | process.env.SONARQUBE_URL = 'http://localhost:9000';
7 |
8 | describe('Lambda Handlers Coverage Tests', () => { // NOTE(review): name looks legacy — these exercise MCP tool handlers, not AWS Lambda; confirm and rename
9 |   beforeEach(() => {
10 |     vi.resetModules();
11 |
12 |     // Setup nock to mock SonarQube API responses
13 |     nock('http://localhost:9000')
14 |       .persist() // keep the interceptor alive for multiple requests within a test
15 |       .get('/api/metrics/search')
16 |       .query(true)
17 |       .reply(200, {
18 |         metrics: [
19 |           {
20 |             key: 'test-metric',
21 |             name: 'Test Metric',
22 |             description: 'Test metric description',
23 |             domain: 'test',
24 |             type: 'INT',
25 |           },
26 |         ],
27 |         paging: {
28 |           pageIndex: 1,
29 |           pageSize: 10,
30 |           total: 1,
31 |         },
32 |       });
33 |
34 |     nock('http://localhost:9000')
35 |       .persist()
36 |       .get('/api/measures/component')
37 |       .query(true)
38 |       .reply(200, {
39 |         component: {
40 |           key: 'test-component',
41 |           name: 'Test Component',
42 |           qualifier: 'TRK',
43 |           measures: [
44 |             {
45 |               metric: 'coverage',
46 |               value: '85.4',
47 |             },
48 |           ],
49 |         },
50 |         metrics: [
51 |           {
52 |             key: 'coverage',
53 |             name: 'Coverage',
54 |             description: 'Test coverage',
55 |             domain: 'Coverage',
56 |             type: 'PERCENT',
57 |           },
58 |         ],
59 |       });
60 |
61 |     nock('http://localhost:9000')
62 |       .persist()
63 |       .get('/api/measures/components')
64 |       .query(true)
65 |       .reply(200, {
66 |         components: [
67 |           {
68 |             key: 'test-component-1',
69 |             name: 'Test Component 1',
70 |             qualifier: 'TRK',
71 |             measures: [
72 |               {
73 |                 metric: 'coverage',
74 |                 value: '85.4',
75 |               },
76 |             ],
77 |           },
78 |         ],
79 |         metrics: [
80 |           {
81 |             key: 'coverage',
82 |             name: 'Coverage',
83 |             description: 'Test coverage',
84 |             domain: 'Coverage',
85 |             type: 'PERCENT',
86 |           },
87 |         ],
88 |         paging: {
89 |           pageIndex: 1,
90 |           pageSize: 100,
91 |           total: 1,
92 |         },
93 |       });
94 |
95 |     nock('http://localhost:9000')
96 |       .persist()
97 |       .get('/api/measures/search_history')
98 |       .query(true)
99 |       .reply(200, {
100 |         measures: [
101 |           {
102 |             metric: 'coverage',
103 |             history: [
104 |               {
105 |                 date: '2023-01-01T00:00:00+0000',
106 |                 value: '80.0',
107 |               },
108 |             ],
109 |           },
110 |         ],
111 |         paging: {
112 |           pageIndex: 1,
113 |           pageSize: 100,
114 |           total: 1,
115 |         },
116 |       });
117 |
118 |     // No need for this now since we're importing directly in each test
119 |   });
120 |
121 |   afterEach(() => {
122 |     nock.cleanAll(); // remove persisted interceptors so mocks don't leak between tests
123 |   });
124 |
125 |   // Import the module directly in each test to ensure it's available
126 |   it('should call metricsHandler', async () => {
127 |     const module = await import('../index.js');
128 |     const result = await module.metricsHandler({ page: 1, page_size: 10 });
129 |     expect(result).toBeDefined();
130 |     expect(result.content).toBeDefined();
131 |     expect(result.content?.[0]?.text).toBeDefined();
132 |   });
133 |
134 |   it('should call componentMeasuresHandler', async () => {
135 |     const module = await import('../index.js');
136 |     const result = await module.componentMeasuresHandler({
137 |       component: 'test-component',
138 |       metric_keys: ['coverage'],
139 |       additional_fields: ['periods'],
140 |       branch: 'main',
141 |       pull_request: 'pr-123',
142 |       period: '1',
143 |     });
144 |     expect(result).toBeDefined();
145 |     expect(result.content).toBeDefined();
146 |     expect(result.content?.[0]?.text).toBeDefined();
147 |   });
148 |
149 |   it('should call componentsMeasuresHandler', async () => {
150 |     const module = await import('../index.js');
151 |     const result = await module.componentsMeasuresHandler({
152 |       component_keys: ['component1', 'component2'],
153 |       metric_keys: ['coverage', 'bugs'],
154 |       additional_fields: ['metrics'],
155 |       branch: 'develop',
156 |       pull_request: 'pr-456',
157 |       period: '2',
158 |       page: '1',
159 |       page_size: '20',
160 |     });
161 |     expect(result).toBeDefined();
162 |     expect(result.content).toBeDefined();
163 |     expect(result.content?.[0]?.text).toBeDefined();
164 |   });
165 |
166 |   it('should call measuresHistoryHandler', async () => {
167 |     const module = await import('../index.js');
168 |     const result = await module.measuresHistoryHandler({
169 |       component: 'test-component',
170 |       metrics: ['coverage', 'bugs'],
171 |       from: '2023-01-01',
172 |       to: '2023-12-31',
173 |       branch: 'feature',
174 |       pull_request: 'pr-789',
175 |       page: '1',
176 |       page_size: '30',
177 |     });
178 |     expect(result).toBeDefined();
179 |     expect(result.content).toBeDefined();
180 |     expect(result.content?.[0]?.text).toBeDefined();
181 |   });
182 | });
183 |
```
--------------------------------------------------------------------------------
/src/__tests__/utils/transforms.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { describe, test, expect } from 'vitest';
2 | import {
3 | ensureArray,
4 | ensureStringArray,
5 | nullToUndefined,
6 | stringToNumberTransform,
7 | numberOrStringToString,
8 | } from '../../utils/transforms.js';
9 |
10 | describe('transforms', () => {
11 | describe('nullToUndefined', () => {
12 | test('converts null to undefined', () => {
13 | expect(nullToUndefined(null)).toBeUndefined();
14 | });
15 |
16 | test('preserves undefined', () => {
17 | expect(nullToUndefined(undefined)).toBeUndefined();
18 | });
19 |
20 | test('preserves other values', () => {
21 | expect(nullToUndefined(0)).toBe(0);
22 | expect(nullToUndefined('')).toBe('');
23 | expect(nullToUndefined(false)).toBe(false);
24 | expect(nullToUndefined({ foo: 'bar' })).toEqual({ foo: 'bar' });
25 | });
26 | });
27 |
28 | describe('stringToNumberTransform', () => {
29 | test('converts valid string to number', () => {
30 | expect(stringToNumberTransform('123')).toBe(123);
31 | expect(stringToNumberTransform('0')).toBe(0);
32 | expect(stringToNumberTransform('-456')).toBe(-456);
33 | });
34 |
35 | test('returns null for invalid strings', () => {
36 | expect(stringToNumberTransform('abc')).toBeNull();
37 | expect(stringToNumberTransform('')).toBeNull();
38 | expect(stringToNumberTransform('12.34')).toBe(12); // parseInt behavior
39 | });
40 |
41 | test('preserves null and undefined', () => {
42 | expect(stringToNumberTransform(null)).toBeNull();
43 | expect(stringToNumberTransform(undefined)).toBeUndefined();
44 | });
45 | });
46 |
47 | describe('ensureArray', () => {
48 | test('returns empty array for undefined', () => {
49 | expect(ensureArray(undefined)).toEqual([]);
50 | });
51 |
52 | test('wraps single value in array', () => {
53 | expect(ensureArray('hello')).toEqual(['hello']);
54 | expect(ensureArray(123)).toEqual([123]);
55 | expect(ensureArray(true)).toEqual([true]);
56 | expect(ensureArray({ key: 'value' })).toEqual([{ key: 'value' }]);
57 | });
58 |
59 | test('returns array as-is', () => {
60 | expect(ensureArray(['a', 'b', 'c'])).toEqual(['a', 'b', 'c']);
61 | expect(ensureArray([1, 2, 3])).toEqual([1, 2, 3]);
62 | expect(ensureArray([])).toEqual([]);
63 | });
64 |
65 | test('handles mixed type arrays', () => {
66 | const mixed = [1, 'two', { three: 3 }];
67 | expect(ensureArray(mixed)).toEqual(mixed);
68 | });
69 |
70 | test('handles null as a value', () => {
71 | expect(ensureArray(null)).toEqual([null]);
72 | });
73 |
74 | test('handles zero and empty string', () => {
75 | expect(ensureArray(0)).toEqual([0]);
76 | expect(ensureArray('')).toEqual(['']);
77 | });
78 | });
79 |
80 | describe('ensureStringArray', () => {
81 | test('returns empty array for undefined', () => {
82 | expect(ensureStringArray(undefined)).toEqual([]);
83 | });
84 |
85 | test('wraps single string in array', () => {
86 | expect(ensureStringArray('hello')).toEqual(['hello']);
87 | expect(ensureStringArray('')).toEqual(['']);
88 | });
89 |
90 | test('returns string array as-is', () => {
91 | expect(ensureStringArray(['a', 'b', 'c'])).toEqual(['a', 'b', 'c']);
92 | expect(ensureStringArray([])).toEqual([]);
93 | });
94 |
95 | test('preserves array reference', () => {
96 | const arr = ['test'];
97 | expect(ensureStringArray(arr)).toBe(arr);
98 | });
99 |
100 | test('splits comma-separated strings', () => {
101 | expect(ensureStringArray('a,b,c')).toEqual(['a', 'b', 'c']);
102 | expect(ensureStringArray('comp1,comp2')).toEqual(['comp1', 'comp2']);
103 | expect(ensureStringArray('single,double,triple')).toEqual(['single', 'double', 'triple']);
104 | });
105 |
106 | test('handles strings with no commas', () => {
107 | expect(ensureStringArray('nocommas')).toEqual(['nocommas']);
108 | expect(ensureStringArray('single-value')).toEqual(['single-value']);
109 | });
110 | });
111 |
112 | describe('numberOrStringToString', () => {
113 | test('converts number to string', () => {
114 | expect(numberOrStringToString(123)).toBe('123');
115 | expect(numberOrStringToString(0)).toBe('0');
116 | expect(numberOrStringToString(-456)).toBe('-456');
117 | expect(numberOrStringToString(12.34)).toBe('12.34');
118 | });
119 |
120 | test('preserves string values', () => {
121 | expect(numberOrStringToString('123')).toBe('123');
122 | expect(numberOrStringToString('abc')).toBe('abc');
123 | expect(numberOrStringToString('')).toBe('');
124 | expect(numberOrStringToString('pr-123')).toBe('pr-123');
125 | });
126 |
127 | test('preserves null and undefined', () => {
128 | expect(numberOrStringToString(null)).toBeNull();
129 | expect(numberOrStringToString(undefined)).toBeUndefined();
130 | });
131 |
132 | test('handles edge cases', () => {
133 | expect(numberOrStringToString(0)).toBe('0');
134 | expect(numberOrStringToString('')).toBe('');
135 | expect(numberOrStringToString(NaN)).toBe('NaN');
136 | expect(numberOrStringToString(Infinity)).toBe('Infinity');
137 | expect(numberOrStringToString(-Infinity)).toBe('-Infinity');
138 | });
139 | });
140 | });
141 |
```
--------------------------------------------------------------------------------
/docs/architecture/decisions/0016-http-transport-with-oauth-2-0-metadata-endpoints.md:
--------------------------------------------------------------------------------
```markdown
1 | # 16. HTTP Transport with OAuth 2.0 Metadata Endpoints
2 |
3 | Date: 2025-06-22
4 | Superseded: 2026-01-30 by ADR-0019
5 |
6 | ## Status
7 |
8 | Superseded by ADR-0019
9 |
10 | This decision was reversed on 2026-01-30. The HTTP transport and OAuth 2.0 implementation was removed in favor of stdio-only transport with enterprise features delegated to MCP gateways (Docker MCP Gateway, IBM Context Forge, SGNL, Operant, etc.).
11 |
12 | **Rationale for reversal:**
13 |
14 | - HTTP transport added significant complexity (~60+ authentication/authorization files)
15 | - MCP gateway solutions now provide these enterprise features at the infrastructure layer
16 | - Better separation of concerns: business logic vs infrastructure concerns
17 | - Reduced attack surface and maintenance burden
18 | - Aligns with Unix philosophy of "do one thing well"
19 |
20 | See ADR-0019 for the current stdio-only transport approach.
21 |
22 | ## Context
23 |
24 | Following the transport architecture refactoring in ADR-0015, we need to implement HTTP transport to support enterprise deployment scenarios. The HTTP transport must provide authentication discovery mechanisms for MCP clients as outlined in the MCP specification.
25 |
26 | ### Requirements:
27 |
28 | 1. Implement HTTP transport as an alternative to STDIO
29 | 2. Support OAuth 2.0 metadata discovery endpoints (RFC9728 and RFC8414)
30 | 3. Enable enterprise authentication workflows
31 | 4. Maintain compatibility with existing transport architecture
32 | 5. Prepare for future OAuth 2.0 flow implementation
33 |
34 | ### Standards Compliance:
35 |
36 | - RFC9728: OAuth 2.0 Protected Resource Metadata
37 | - RFC8414: OAuth 2.0 Authorization Server Metadata
38 | - RFC6750: Bearer Token Usage
39 |
40 | ## Decision
41 |
42 | We will implement HTTP transport with OAuth 2.0 metadata endpoints:
43 |
44 | 1. **HTTP Transport Implementation**: Express-based HTTP server following the ITransport interface
45 | 2. **Metadata Endpoints**:
46 | - `/.well-known/oauth-protected-resource` (RFC9728)
47 | - `/.well-known/oauth-authorization-server` (RFC8414, optional)
48 | 3. **Authentication Structure**: WWW-Authenticate headers with resource metadata URLs
49 | 4. **Configuration**: Environment variable-based configuration consistent with existing patterns
50 |
51 | ### Architecture Details:
52 |
53 | ```typescript
54 | // HTTP Transport with OAuth metadata
55 | class HttpTransport implements ITransport {
56 | // Express server with CORS support
57 | // OAuth metadata endpoints
58 | // Bearer token authentication middleware
59 | // MCP HTTP transport integration
60 | }
61 |
62 | // Protected Resource Metadata response
63 | {
64 | "resource": "https://mcp.company.com",
65 | "authorization_servers": ["https://auth.company.com"],
66 | "bearer_methods_supported": ["header"],
67 | "resource_signing_alg_values_supported": ["RS256"]
68 | }
69 | ```
70 |
71 | ### Environment Variables:
72 |
73 | - `MCP_TRANSPORT=http`: Enable HTTP transport
74 | - `MCP_HTTP_PORT`: HTTP server port
75 | - `MCP_HTTP_HOST`: HTTP server host
76 | - `MCP_HTTP_PUBLIC_URL`: Public URL for metadata endpoints
77 | - `MCP_OAUTH_AUTH_SERVERS`: External authorization server URLs
78 | - `MCP_OAUTH_BUILTIN`: Enable built-in auth server metadata
79 |
80 | ## Consequences
81 |
82 | ### Positive:
83 |
84 | 1. **Enterprise Ready**: Supports enterprise authentication discovery workflows
85 | 2. **Standards Compliant**: Follows OAuth 2.0 RFCs for metadata discovery
86 | 3. **Extensible**: Structure ready for full OAuth 2.0 flow implementation
87 | 4. **Backward Compatible**: STDIO transport remains default
88 | 5. **Discovery Mechanism**: Clients can automatically discover authentication requirements
89 |
90 | ### Negative:
91 |
92 | 1. **Token Validation Pending**: Actual token validation not yet implemented
93 | 2. **Additional Dependencies**: Requires Express and CORS packages
94 |
95 | ### Neutral:
96 |
97 | 1. **Incremental Implementation**: Sets foundation for future OAuth stories
98 | 2. **Documentation Required**: New transport needs comprehensive documentation
99 |
100 | ## Implementation Notes
101 |
102 | 1. HTTP transport integrates with MCP SDK's HTTP server transport
103 | 2. Authentication middleware prepared for future token validation
104 | 3. Health check endpoint provided for monitoring
105 | 4. CORS enabled by default for cross-origin requests
106 | 5. All responses follow RFC specifications for JSON structure
107 |
108 | ## Related ADRs
109 |
110 | - ADR-0015: Transport Architecture Refactoring
111 | - ADR-0014: Current Security Model and Future OAuth2 Considerations
112 | - ADR-0008: Use Environment Variables for Configuration
113 | - **ADR-0019: Simplify to stdio-only transport (SUPERSEDES THIS ADR)**
114 |
115 | ## Historical Note
116 |
117 | This ADR documents the HTTP transport implementation that was later removed. The decision to implement HTTP transport with OAuth 2.0 was sound at the time (June 2025), but the rapid evolution of the MCP ecosystem with purpose-built gateway solutions made this approach redundant. The code and infrastructure described in this ADR were removed in January 2026 as part of a significant simplification effort that reduced the codebase by ~40%.
118 |
119 | This ADR is retained for historical context and to document the architectural exploration that led to the current stdio-only approach.
120 |
```
--------------------------------------------------------------------------------
/src/__tests__/pull-request-transform.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { describe, test, expect } from 'vitest';
2 | import { z } from 'zod';
3 | import { issuesToolSchema } from '../schemas/issues.js';
4 | import {
5 | componentMeasuresToolSchema,
6 | componentsMeasuresToolSchema,
7 | measuresHistoryToolSchema,
8 | } from '../schemas/measures.js';
9 | import { hotspotsToolSchema } from '../schemas/hotspots-tools.js';
10 | import { sourceCodeToolSchema, scmBlameToolSchema } from '../schemas/source-code.js';
11 | import { qualityGateStatusToolSchema } from '../schemas/quality-gates.js';
12 | import { componentsToolSchema } from '../schemas/components.js';
13 |
14 | describe('pull_request parameter transform', () => {
15 | describe('issues schema', () => {
16 | test('accepts string pull_request', () => {
17 | const schema = z.object(issuesToolSchema);
18 | const result = schema.parse({
19 | pull_request: '123',
20 | });
21 | expect(result.pull_request).toBe('123');
22 | });
23 |
24 | test('accepts number pull_request and converts to string', () => {
25 | const schema = z.object(issuesToolSchema);
26 | const result = schema.parse({
27 | pull_request: 123,
28 | });
29 | expect(result.pull_request).toBe('123');
30 | });
31 |
32 | test('preserves null values', () => {
33 | const schema = z.object(issuesToolSchema);
34 | const result = schema.parse({
35 | pull_request: null,
36 | });
37 | expect(result.pull_request).toBeNull();
38 | });
39 | });
40 |
41 | describe('measures schemas', () => {
42 | test('componentMeasuresToolSchema accepts number and converts to string', () => {
43 | const schema = z.object(componentMeasuresToolSchema);
44 | const result = schema.parse({
45 | component: 'test',
46 | metric_keys: ['coverage'],
47 | pull_request: 456,
48 | });
49 | expect(result.pull_request).toBe('456');
50 | });
51 |
52 | test('componentsMeasuresToolSchema accepts number and converts to string', () => {
53 | const schema = z.object(componentsMeasuresToolSchema);
54 | const result = schema.parse({
55 | component_keys: ['test'],
56 | metric_keys: ['coverage'],
57 | pull_request: 789,
58 | });
59 | expect(result.pull_request).toBe('789');
60 | });
61 |
62 | test('measuresHistoryToolSchema accepts number and converts to string', () => {
63 | const schema = z.object(measuresHistoryToolSchema);
64 | const result = schema.parse({
65 | component: 'test',
66 | metrics: ['coverage'],
67 | pull_request: 999,
68 | });
69 | expect(result.pull_request).toBe('999');
70 | });
71 | });
72 |
73 | describe('hotspots schema', () => {
74 | test('accepts number pull_request and converts to string', () => {
75 | const schema = z.object(hotspotsToolSchema);
76 | const result = schema.parse({
77 | pull_request: 111,
78 | });
79 | expect(result.pull_request).toBe('111');
80 | });
81 | });
82 |
83 | describe('source code schemas', () => {
84 | test('sourceCodeToolSchema accepts number and converts to string', () => {
85 | const schema = z.object(sourceCodeToolSchema);
86 | const result = schema.parse({
87 | key: 'test',
88 | pull_request: 222,
89 | });
90 | expect(result.pull_request).toBe('222');
91 | });
92 |
93 | test('scmBlameToolSchema accepts number and converts to string', () => {
94 | const schema = z.object(scmBlameToolSchema);
95 | const result = schema.parse({
96 | key: 'test',
97 | pull_request: 333,
98 | });
99 | expect(result.pull_request).toBe('333');
100 | });
101 | });
102 |
103 | describe('quality gates schema', () => {
104 | test('accepts number pull_request and converts to string', () => {
105 | const schema = z.object(qualityGateStatusToolSchema);
106 | const result = schema.parse({
107 | project_key: 'test',
108 | pull_request: 444,
109 | });
110 | expect(result.pull_request).toBe('444');
111 | });
112 | });
113 |
114 | describe('components schema', () => {
115 | test('accepts number pullRequest and converts to string', () => {
116 | const schema = z.object(componentsToolSchema);
117 | const result = schema.parse({
118 | pullRequest: 555,
119 | });
120 | expect(result.pullRequest).toBe('555');
121 | });
122 |
123 | test('accepts string pullRequest', () => {
124 | const schema = z.object(componentsToolSchema);
125 | const result = schema.parse({
126 | pullRequest: 'pr-666',
127 | });
128 | expect(result.pullRequest).toBe('pr-666');
129 | });
130 | });
131 |
132 | describe('edge cases', () => {
133 | test('handles decimal numbers', () => {
134 | const schema = z.object(issuesToolSchema);
135 | const result = schema.parse({
136 | pull_request: 123.456,
137 | });
138 | expect(result.pull_request).toBe('123.456');
139 | });
140 |
141 | test('handles negative numbers', () => {
142 | const schema = z.object(issuesToolSchema);
143 | const result = schema.parse({
144 | pull_request: -123,
145 | });
146 | expect(result.pull_request).toBe('-123');
147 | });
148 |
149 | test('handles zero', () => {
150 | const schema = z.object(issuesToolSchema);
151 | const result = schema.parse({
152 | pull_request: 0,
153 | });
154 | expect(result.pull_request).toBe('0');
155 | });
156 | });
157 | });
158 |
```
--------------------------------------------------------------------------------
/docs/architecture/decisions/0017-comprehensive-audit-logging-system.md:
--------------------------------------------------------------------------------
```markdown
1 | # 17. Comprehensive Audit Logging System
2 |
3 | Date: 2025-06-25
4 |
5 | ## Status
6 |
7 | Accepted
8 |
9 | ## Context
10 |
11 | The SonarQube MCP Server needs comprehensive audit logging to meet enterprise compliance requirements including SOC 2 Type II, ISO 27001, and GDPR. The system must log all security-relevant events including authentication attempts, tool invocations, permission checks, and configuration changes while protecting sensitive data and ensuring log integrity.
12 |
13 | Current logging infrastructure:
14 |
15 | - File-based logging system (to avoid STDIO conflicts with MCP protocol)
16 | - Service account auditor for tracking account usage
17 | - Basic error and debug logging
18 |
19 | Requirements:
20 |
21 | - Structured JSON audit logs with all required fields
22 | - Log all authentication attempts (success and failure)
23 | - Log all tool invocations with parameters
24 | - Log permission checks and denials
25 | - Configurable log retention policies
26 | - Log shipping to SIEM systems
27 | - PII redaction capabilities
28 | - Audit log integrity protection
29 |
30 | ## Decision
31 |
32 | We will implement a comprehensive audit logging system that builds on the existing logging infrastructure with the following components:
33 |
34 | ### 1. Audit Logger Service
35 |
36 | A dedicated service for handling audit events that:
37 |
38 | - Extends the existing logger utility
39 | - Provides structured JSON logging format
40 | - Implements PII redaction
41 | - Ensures log integrity with checksums
42 | - Manages log rotation and retention
43 |
44 | ### 2. Audit Event Schema
45 |
46 | Standardized schema for all audit events:
47 |
48 | ```typescript
49 | interface AuditEvent {
50 | // Core fields
51 | timestamp: string; // ISO 8601 format
52 | eventId: string; // Unique event identifier
53 | eventType: AuditEventType; // Enumerated event types
54 | eventCategory: AuditEventCategory; // auth, access, config, etc.
55 |
56 | // Actor information
57 | actor: {
58 | userId: string;
59 | userGroups?: string[];
60 | sessionId?: string;
61 | ipAddress?: string;
62 | userAgent?: string;
63 | };
64 |
65 | // Target information
66 | target: {
67 | type: string; // tool, project, issue, etc.
68 | id: string; // Resource identifier
69 | name?: string; // Human-readable name
70 | };
71 |
72 | // Action details
73 | action: {
74 | type: string; // read, write, delete, etc.
75 | parameters?: Record<string, unknown>;
76 | result: 'success' | 'failure' | 'partial';
77 | error?: string;
78 | };
79 |
80 | // Context
81 | context: {
82 | serviceAccount?: string;
83 | sonarqubeUrl?: string;
84 | environment?: string;
85 | traceId?: string; // For request correlation
86 | };
87 |
88 | // Security metadata
89 | security: {
90 | tokenAud?: string;
91 | tokenIss?: string;
92 | tokenJti?: string;
93 | tlsVersion?: string;
94 | permissionChecks?: Array<{
95 | permission: string;
96 | result: boolean;
97 | reason?: string;
98 | }>;
99 | };
100 |
101 | // Compliance fields
102 | compliance: {
103 | dataClassification?: string;
104 | piiRedacted?: boolean;
105 | retentionPeriod?: number;
106 | };
107 |
108 | // Integrity
109 | checksum?: string; // SHA-256 of event content
110 | }
111 | ```
112 |
113 | ### 3. Event Types
114 |
115 | Comprehensive event types covering all security-relevant actions:
116 |
117 | - Authentication: login, logout, token validation, MFA events
118 | - Authorization: permission checks, access grants/denials
119 | - Tool invocation: all MCP tool calls with parameters
120 | - Data access: project/issue/component queries
121 | - Configuration: service account changes, permission updates
122 | - System: health checks, errors, maintenance
123 |
124 | ### 4. PII Redaction
125 |
126 | Automatic redaction of sensitive data:
127 |
128 | - Email addresses (except domain)
129 | - IP addresses (configurable)
130 | - User names in free-text fields
131 | - Custom patterns via configuration
132 |
133 | ### 5. Log Storage and Rotation
134 |
135 | - Separate audit log files from application logs
136 | - Daily rotation with compression
137 | - Configurable retention periods
138 | - Archive to cold storage after retention period
139 |
140 | ### 6. SIEM Integration
141 |
142 | - JSON format compatible with major SIEM systems
143 | - Syslog forwarding support
144 | - Webhook delivery for real-time streaming
145 | - Batch export for bulk ingestion
146 |
147 | ### 7. Integrity Protection
148 |
149 | - SHA-256 checksums for each event
150 | - Optional HMAC signing with rotating keys
151 | - Tamper detection on log files
152 | - Chain of custody documentation
153 |
154 | ## Consequences
155 |
156 | ### Positive
157 |
158 | - Full compliance with SOC 2, ISO 27001, and GDPR requirements
159 | - Complete audit trail for security investigations
160 | - Ability to detect and investigate security incidents
161 | - Support for compliance reporting and audits
162 | - Integration with enterprise security tools
163 | - Protection of sensitive user data
164 |
165 | ### Negative
166 |
167 | - Increased storage requirements for audit logs
168 | - Performance overhead for logging and checksumming
169 | - Complexity in managing log retention and rotation
170 | - Additional configuration for SIEM integration
171 | - Potential for log files to contain sensitive data if redaction fails
172 |
173 | ### Implementation Notes
174 |
175 | 1. Build on existing `logger.ts` utility
176 | 2. Extend `service-account-auditor.ts` for broader audit coverage
177 | 3. Add audit hooks to permission wrapper and handlers
178 | 4. Implement as middleware for HTTP transport
179 | 5. Create separate audit log directory structure
180 | 6. Add configuration for retention, redaction, and SIEM
181 | 7. Ensure all audit logging is async and non-blocking
182 |
```
--------------------------------------------------------------------------------
/.claude/commands/update-dependencies.md:
--------------------------------------------------------------------------------
```markdown
1 | # Update Dependencies
2 |
3 | You are about to update the dependencies of the project. This command helps maintain the project's dependencies while adhering to the established CI/CD workflow and changeset requirements.
4 |
5 | ## Workflow Steps
6 |
7 | ### 1. Create a Feature Branch
8 |
9 | Create a new branch following the naming convention:
10 |
11 | ```bash
12 | git checkout -b chore/update-dependencies-<date>
13 | # Example: chore/update-dependencies-2024-01
14 | ```
15 |
16 | ### 2. Update Dependencies
17 |
18 | #### For Production Dependencies:
19 |
20 | ```bash
21 | # Check outdated packages
22 | pnpm outdated
23 |
24 | # Update all dependencies to latest
25 | pnpm update --latest
26 |
27 | # Or update specific packages
28 | pnpm update <package-name> --latest
29 | ```
30 |
31 | #### For Dev Dependencies:
32 |
33 | ```bash
34 | # Update dev dependencies
35 | pnpm update --latest --dev
36 | ```
37 |
38 | ### 3. Install and Lock Dependencies
39 |
40 | ```bash
41 | # Ensure pnpm-lock.yaml is updated
42 | pnpm install
43 |
44 | # Deduplicate dependencies if needed
45 | pnpm dedupe
46 | ```
47 |
48 | ### 4. Test the Updates
49 |
50 | Run the full verification suite to ensure compatibility:
51 |
52 | ```bash
53 | # Run all checks (audit, typecheck, lint, format, test)
54 | pnpm verify
55 |
56 | # Run specific checks if needed
57 | pnpm test
58 | pnpm typecheck
59 | pnpm lint
60 | ```
61 |
62 | ### 5. Create a Changeset
63 |
64 | **IMPORTANT**: The CI/CD pipeline requires changesets for all changes. Choose the right type based on impact:
65 |
66 | #### When to Use Empty Changeset
67 |
68 | Use `pnpm changeset --empty` for changes that don't affect users:
69 |
70 | - Dev dependency updates only
71 | - CI/CD configuration changes
72 | - Test-only improvements
73 | - Internal tooling updates
74 | - Documentation changes
75 |
76 | ```bash
77 | # Create an empty changeset (no version bump)
78 | pnpm changeset --empty
79 |
80 | # The changeset message should document:
81 | # - Which dependencies were updated
82 | # - Why they were updated
83 | # - Any tooling changes developers should know about
84 | ```
85 |
86 | #### When to Use Regular Changeset
87 |
88 | Use `pnpm changeset` for changes that affect the package/application:
89 |
90 | - Production dependency updates
91 | - Security fixes visible to users
92 | - Breaking changes in dependencies
93 | - Bug fixes from dependency updates
94 | - New features from dependency updates
95 |
96 | ```bash
97 | # Create a proper changeset (will bump version)
98 | pnpm changeset
99 |
100 | # Select version bump:
101 | # - patch: security updates, bug fixes, minor dependency updates
102 | # - minor: significant updates with new capabilities
103 | # - major: breaking changes requiring user action
104 |
105 | # Examples:
106 | # patch: "Update zod to fix validation edge case"
107 | # minor: "Update Pino with new structured logging features"
108 | # major: "Update to Node 24 (drops Node 22 support)"
109 | ```
110 |
111 | ### 6. Commit the Changes
112 |
113 | Follow conventional commit format:
114 |
115 | ```bash
116 | git add .
117 |
118 | # For routine updates
119 | git commit -m "chore: update dependencies
120 |
121 | - Updated production dependencies to latest versions
122 | - Updated dev dependencies to latest versions
123 | - No breaking changes identified"
124 |
125 | # For updates with notable changes
126 | git commit -m "chore: update dependencies with <notable-package> v<version>
127 |
128 | - Updated <package> from v<old> to v<new>
129 | - <List any important changes>
130 | - All tests passing"
131 | ```
132 |
133 | ### 7. Push and Create Pull Request
134 |
135 | ```bash
136 | # Push the branch
137 | git push -u origin chore/update-dependencies-<date>
138 |
139 | # Create PR with detailed description
140 | gh pr create \
141 | --title "chore: update dependencies" \
142 | --body "## Summary
143 | Updates all dependencies to their latest versions.
144 |
145 | ## Changes
146 | - Production dependencies updated
147 | - Dev dependencies updated
148 | - No breaking changes identified
149 |
150 | ## Testing
151 | - ✅ All tests passing
152 | - ✅ Type checking successful
153 | - ✅ Linting clean
154 | - ✅ Coverage maintained at 80%+
155 |
156 | ## Changeset
157 | - [x] Empty changeset added for dev dependency updates" \
158 | --assignee @me
159 | ```
160 |
161 | ### 8. Monitor CI/CD Pipeline
162 |
163 | ```bash
164 | # Watch the PR checks
165 | gh pr checks --watch
166 |
167 | # View detailed CI logs if needed
168 | gh run list
169 | gh run view <run-id>
170 | ```
171 |
172 | ### 9. Merge the Pull Request
173 |
174 | Once all checks pass:
175 |
176 | ```bash
177 | # Squash and merge (maintains clean history)
178 | gh pr merge --squash --delete-branch
179 |
180 | # Or merge through GitHub UI with "Squash and merge"
181 | ```
182 |
183 | ## Important Notes
184 |
185 | ### Changeset Requirements
186 |
187 | - **Dev dependencies only**: Use `pnpm changeset --empty` to satisfy CI requirements
188 | - **Production dependencies**: Create a proper changeset with appropriate version bump
189 | - **Mixed updates**: Use proper changeset and document both types
190 |
191 | ### Common Issues and Solutions
192 |
193 | #### CI Fails Due to Missing Changeset
194 |
195 | ```bash
196 | # Add an empty changeset if you forgot
197 | pnpm changeset --empty
198 | git add .
199 | git commit --amend
200 | git push --force-with-lease
201 | ```
202 |
203 | #### Breaking Changes in Dependencies
204 |
205 | 1. Review the changelog of the updated package
206 | 2. Update code to accommodate changes
207 | 3. Add tests for affected functionality
208 | 4. Use minor or major version bump in changeset
209 |
210 | #### Audit Vulnerabilities
211 |
212 | ```bash
213 | # Check for vulnerabilities
214 | pnpm audit
215 |
216 | # Fix automatically if possible
217 | pnpm audit --fix
218 |
219 | # For critical vulnerabilities that can't be auto-fixed,
220 | # document in PR and consider alternatives
221 | ```
222 |
223 | ### Security Considerations
224 |
225 | - Always run `pnpm audit` after updates
226 | - Review security advisories for updated packages
227 | - Be cautious with major version updates
228 | - Consider the security track record of new dependencies
229 |
```
--------------------------------------------------------------------------------
/src/__tests__/mapping-functions.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { describe, it, expect, beforeEach } from 'vitest';
2 | describe('Mapping Functions', () => {
3 | let mapToSonarQubeParams: any;
4 | beforeEach(async () => {
5 | // Import the function fresh for each test
6 | const module = await import('../index.js');
7 | mapToSonarQubeParams = module.mapToSonarQubeParams;
8 | });
9 | it('should properly map basic required parameters', () => {
10 | const params = mapToSonarQubeParams({ project_key: 'my-project' });
11 | expect(params.projectKey).toBe('my-project');
12 | expect(params.severity).toBeUndefined();
13 | expect(params.statuses).toBeUndefined();
14 | });
15 | it('should map pagination parameters', () => {
16 | const params = mapToSonarQubeParams({
17 | project_key: 'my-project',
18 | page: 2,
19 | page_size: 20,
20 | });
21 | expect(params.projectKey).toBe('my-project');
22 | expect(params.page).toBe(2);
23 | expect(params.pageSize).toBe(20);
24 | });
25 | it('should map severity parameter', () => {
26 | const params = mapToSonarQubeParams({
27 | project_key: 'my-project',
28 | severity: 'MAJOR',
29 | });
30 | expect(params.projectKey).toBe('my-project');
31 | expect(params.severity).toBe('MAJOR');
32 | });
33 | it('should map array parameters', () => {
34 | const params = mapToSonarQubeParams({
35 | project_key: 'my-project',
36 | statuses: ['OPEN', 'CONFIRMED'],
37 | types: ['BUG', 'VULNERABILITY'],
38 | rules: ['rule1', 'rule2'],
39 | tags: ['tag1', 'tag2'],
40 | });
41 | expect(params.projectKey).toBe('my-project');
42 | expect(params.statuses).toEqual(['OPEN', 'CONFIRMED']);
43 | expect(params.types).toEqual(['BUG', 'VULNERABILITY']);
44 | expect(params.rules).toEqual(['rule1', 'rule2']);
45 | expect(params.tags).toEqual(['tag1', 'tag2']);
46 | });
47 | it('should map boolean parameters', () => {
48 | const params = mapToSonarQubeParams({
49 | project_key: 'my-project',
50 | resolved: true,
51 | on_component_only: false,
52 | since_leak_period: true,
53 | in_new_code_period: false,
54 | });
55 | expect(params.projectKey).toBe('my-project');
56 | expect(params.resolved).toBe(true);
57 | expect(params.onComponentOnly).toBe(false);
58 | expect(params.sinceLeakPeriod).toBe(true);
59 | expect(params.inNewCodePeriod).toBe(false);
60 | });
61 | it('should map date parameters', () => {
62 | const params = mapToSonarQubeParams({
63 | project_key: 'my-project',
64 | created_after: '2023-01-01',
65 | created_before: '2023-12-31',
66 | created_at: '2023-06-15',
67 | created_in_last: '7d',
68 | });
69 | expect(params.projectKey).toBe('my-project');
70 | expect(params.createdAfter).toBe('2023-01-01');
71 | expect(params.createdBefore).toBe('2023-12-31');
72 | expect(params.createdAt).toBe('2023-06-15');
73 | expect(params.createdInLast).toBe('7d');
74 | });
75 | it('should map assignees and authors', () => {
76 | const params = mapToSonarQubeParams({
77 | project_key: 'my-project',
78 | assignees: ['user1', 'user2'],
79 | authors: ['author1', 'author2'],
80 | });
81 | expect(params.projectKey).toBe('my-project');
82 | expect(params.assignees).toEqual(['user1', 'user2']);
83 | expect(params.authors).toEqual(['author1', 'author2']);
84 | });
85 | it('should map security-related parameters', () => {
86 | const params = mapToSonarQubeParams({
87 | project_key: 'my-project',
88 | cwe: ['cwe1', 'cwe2'],
89 | languages: ['java', 'typescript'],
90 | owasp_top10: ['a1', 'a2'],
91 | sans_top25: ['sans1', 'sans2'],
92 | sonarsource_security: ['sec1', 'sec2'],
93 | });
94 | expect(params.projectKey).toBe('my-project');
95 | expect(params.cwe).toEqual(['cwe1', 'cwe2']);
96 | expect(params.languages).toEqual(['java', 'typescript']);
97 | expect(params.owaspTop10).toEqual(['a1', 'a2']);
98 | expect(params.sansTop25).toEqual(['sans1', 'sans2']);
99 | expect(params.sonarsourceSecurity).toEqual(['sec1', 'sec2']);
100 | });
101 | it('should map facets parameter', () => {
102 | const params = mapToSonarQubeParams({
103 | project_key: 'my-project',
104 | facets: ['facet1', 'facet2'],
105 | });
106 | expect(params.projectKey).toBe('my-project');
107 | expect(params.facets).toEqual(['facet1', 'facet2']);
108 | });
109 | it('should correctly handle null values', () => {
110 | const params = mapToSonarQubeParams({
111 | project_key: 'my-project',
112 | severity: null,
113 | statuses: null,
114 | rules: null,
115 | });
116 | expect(params.projectKey).toBe('my-project');
117 | expect(params.severity).toBeUndefined();
118 | expect(params.statuses).toBeUndefined();
119 | expect(params.rules).toBeUndefined();
120 | });
121 | it('should handle a mix of parameter types', () => {
122 | const params = mapToSonarQubeParams({
123 | project_key: 'my-project',
124 | severity: 'MAJOR',
125 | page: 2,
126 | statuses: ['OPEN'],
127 | resolved: true,
128 | created_after: '2023-01-01',
129 | assignees: ['user1'],
130 | cwe: ['cwe1'],
131 | facets: ['facet1'],
132 | });
133 | expect(params.projectKey).toBe('my-project');
134 | expect(params.severity).toBe('MAJOR');
135 | expect(params.page).toBe(2);
136 | expect(params.statuses).toEqual(['OPEN']);
137 | expect(params.resolved).toBe(true);
138 | expect(params.createdAfter).toBe('2023-01-01');
139 | expect(params.assignees).toEqual(['user1']);
140 | expect(params.cwe).toEqual(['cwe1']);
141 | expect(params.facets).toEqual(['facet1']);
142 | });
143 | });
144 |
```
--------------------------------------------------------------------------------
/src/__tests__/schema-transformation-mocks.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { describe, it, expect } from 'vitest';
2 | // These tests mock the transformations used in the tool registrations in index.ts
3 | describe('Schema Transformation Mocks', () => {
4 | describe('Page and PageSize Transformations in Tool Registrations', () => {
5 | it('should test page schema transformation - projects tool', () => {
6 | const pageTransform = (val: any) => (val ? parseInt(val, 10) || null : null);
7 | expect(pageTransform('10')).toBe(10);
8 | expect(pageTransform('invalid')).toBe(null);
9 | expect(pageTransform(undefined)).toBe(null);
10 | expect(pageTransform('')).toBe(null);
11 | });
12 | it('should test page_size schema transformation - projects tool', () => {
13 | const pageSizeTransform = (val: any) => (val ? parseInt(val, 10) || null : null);
14 | expect(pageSizeTransform('20')).toBe(20);
15 | expect(pageSizeTransform('invalid')).toBe(null);
16 | expect(pageSizeTransform(undefined)).toBe(null);
17 | expect(pageSizeTransform('')).toBe(null);
18 | });
19 | it('should test page schema transformation - metrics tool', () => {
20 | const pageTransform = (val: any) => (val ? parseInt(val, 10) || null : null);
21 | expect(pageTransform('10')).toBe(10);
22 | expect(pageTransform('invalid')).toBe(null);
23 | expect(pageTransform(undefined)).toBe(null);
24 | expect(pageTransform('')).toBe(null);
25 | });
26 | it('should test page_size schema transformation - metrics tool', () => {
27 | const pageSizeTransform = (val: any) => (val ? parseInt(val, 10) || null : null);
28 | expect(pageSizeTransform('20')).toBe(20);
29 | expect(pageSizeTransform('invalid')).toBe(null);
30 | expect(pageSizeTransform(undefined)).toBe(null);
31 | expect(pageSizeTransform('')).toBe(null);
32 | });
33 | it('should test page schema transformation - issues tool', () => {
34 | const pageTransform = (val: any) => (val ? parseInt(val, 10) || null : null);
35 | expect(pageTransform('10')).toBe(10);
36 | expect(pageTransform('invalid')).toBe(null);
37 | expect(pageTransform(undefined)).toBe(null);
38 | expect(pageTransform('')).toBe(null);
39 | });
40 | it('should test page_size schema transformation - issues tool', () => {
41 | const pageSizeTransform = (val: any) => (val ? parseInt(val, 10) || null : null);
42 | expect(pageSizeTransform('20')).toBe(20);
43 | expect(pageSizeTransform('invalid')).toBe(null);
44 | expect(pageSizeTransform(undefined)).toBe(null);
45 | expect(pageSizeTransform('')).toBe(null);
46 | });
47 | it('should test page schema transformation - measures_components tool', () => {
48 | const pageTransform = (val: any) => (val ? parseInt(val, 10) || null : null);
49 | expect(pageTransform('10')).toBe(10);
50 | expect(pageTransform('invalid')).toBe(null);
51 | expect(pageTransform(undefined)).toBe(null);
52 | expect(pageTransform('')).toBe(null);
53 | });
54 | it('should test page_size schema transformation - measures_components tool', () => {
55 | const pageSizeTransform = (val: any) => (val ? parseInt(val, 10) || null : null);
56 | expect(pageSizeTransform('20')).toBe(20);
57 | expect(pageSizeTransform('invalid')).toBe(null);
58 | expect(pageSizeTransform(undefined)).toBe(null);
59 | expect(pageSizeTransform('')).toBe(null);
60 | });
61 | it('should test page schema transformation - measures_history tool', () => {
62 | const pageTransform = (val: any) => (val ? parseInt(val, 10) || null : null);
63 | expect(pageTransform('10')).toBe(10);
64 | expect(pageTransform('invalid')).toBe(null);
65 | expect(pageTransform(undefined)).toBe(null);
66 | expect(pageTransform('')).toBe(null);
67 | });
68 | it('should test page_size schema transformation - measures_history tool', () => {
69 | const pageSizeTransform = (val: any) => (val ? parseInt(val, 10) || null : null);
70 | expect(pageSizeTransform('20')).toBe(20);
71 | expect(pageSizeTransform('invalid')).toBe(null);
72 | expect(pageSizeTransform(undefined)).toBe(null);
73 | expect(pageSizeTransform('')).toBe(null);
74 | });
75 | });
76 | describe('Boolean Parameter Transformations in Issues Tool Registration', () => {
77 | it('should test resolved parameter transformation', () => {
78 | const boolTransform = (val: any) => val === 'true';
79 | expect(boolTransform('true')).toBe(true);
80 | expect(boolTransform('false')).toBe(false);
81 | expect(boolTransform('something')).toBe(false);
82 | });
83 | it('should test on_component_only parameter transformation', () => {
84 | const boolTransform = (val: any) => val === 'true';
85 | expect(boolTransform('true')).toBe(true);
86 | expect(boolTransform('false')).toBe(false);
87 | expect(boolTransform('something')).toBe(false);
88 | });
89 | it('should test since_leak_period parameter transformation', () => {
90 | const boolTransform = (val: any) => val === 'true';
91 | expect(boolTransform('true')).toBe(true);
92 | expect(boolTransform('false')).toBe(false);
93 | expect(boolTransform('something')).toBe(false);
94 | });
95 | it('should test in_new_code_period parameter transformation', () => {
96 | const boolTransform = (val: any) => val === 'true';
97 | expect(boolTransform('true')).toBe(true);
98 | expect(boolTransform('false')).toBe(false);
99 | expect(boolTransform('something')).toBe(false);
100 | });
101 | });
102 | });
103 |
```
--------------------------------------------------------------------------------
/.github/WORKFLOWS.md:
--------------------------------------------------------------------------------
```markdown
1 | # GitHub Actions Workflows Documentation
2 |
3 | This document describes the GitHub Actions workflows used in this repository.
4 |
5 | ## Workflow Overview
6 |
7 | ```mermaid
8 | graph LR
9 | PR[Pull Request] --> VALIDATE[Validate]
10 | VALIDATE --> TEST[Test & Build]
11 |
12 | PUSH[Push to Main] --> MAIN[Main Workflow]
13 | MAIN --> VERSION[Version Packages]
14 | VERSION --> RELEASE[Create Release]
15 | RELEASE --> PUBLISH[Publish Workflow]
16 | PUBLISH --> NPM[NPM Package]
17 | PUBLISH --> DOCKER[Docker Image]
18 | ```
19 |
20 | ## Important Setup Requirements
21 |
22 | ### Personal Access Token for Release Workflow
23 |
24 | The Main workflow creates GitHub releases that should trigger the Publish workflow. However, due to GitHub's security features, workflows triggered by the default `GITHUB_TOKEN` cannot trigger other workflows (this prevents infinite loops).
25 |
26 | **To enable the Publish workflow to trigger automatically:**
27 |
28 | 1. Create a Personal Access Token (PAT) with the following permissions:
29 | - `contents:write` - To create releases
30 | - `actions:read` - To trigger workflows
31 |
32 | 2. Add the PAT as a repository secret named `RELEASE_TOKEN`:
33 | - Go to Settings → Secrets and variables → Actions
34 | - Click "New repository secret"
35 | - Name: `RELEASE_TOKEN`
36 | - Value: Your PAT
37 |
38 | 3. The Main workflow will automatically use `RELEASE_TOKEN` if available, falling back to `GITHUB_TOKEN` if not configured.
39 |
40 | ## Workflows
41 |
42 | ### 1. Pull Request Workflow (`pr.yml`)
43 |
44 | **Purpose**: Validates and tests all pull requests.
45 |
46 | **Triggers**: Pull requests to `main` branch
47 |
48 | **Jobs**:
49 |
50 | - **validate**: Checks changeset status
51 | - **security-audit**: Runs security audit
52 | - **type-checking**: TypeScript type checking
53 | - **linting**: ESLint checks
54 | - **format-checking**: Prettier formatting checks
55 | - **test**: Runs test suite with coverage
56 | - **codeql**: Security analysis
57 | - **osv-scan**: Vulnerability scanning
58 |
59 | ### 2. Main Workflow (`main.yml`)
60 |
61 | **Purpose**: Handles versioning and release creation when changes are merged to main.
62 |
63 | **Triggers**: Push to `main` branch
64 |
65 | **Jobs**:
66 |
67 | - **build**:
68 | - Checks for changesets (fails if releasable commits lack changesets)
69 | - Versions packages using changesets
70 | - Builds artifacts and generates SBOM
71 | - Creates GitHub release with artifacts
72 | - Generates build attestations
73 |
74 | **Important**: Requires `RELEASE_TOKEN` secret to trigger the publish workflow.
75 |
76 | ### 3. Publish Workflow (`publish.yml`)
77 |
78 | **Purpose**: Publishes packages to registries after a release.
79 |
80 | **Triggers**:
81 |
82 | - Release published event
83 | - Manual workflow dispatch
84 |
85 | **Jobs**:
86 |
87 | - **npm**: Publishes to NPM (requires `ENABLE_NPM_RELEASE` variable and `NPM_TOKEN` secret)
88 | - **docker**: Builds and publishes Docker images (requires `ENABLE_DOCKER_RELEASE` variable)
89 |
90 | ## Configuration
91 |
92 | ### Required Secrets
93 |
94 | | Secret | Purpose | Required For |
95 | | -------------------- | --------------------------------------------- | --------------- |
96 | | `RELEASE_TOKEN` | PAT to trigger publish workflow from releases | Auto-publish |
97 | | `NPM_TOKEN` | NPM authentication | NPM publishing |
98 | | `DOCKERHUB_USERNAME` | Docker Hub username | Docker Hub push |
99 | | `DOCKERHUB_TOKEN` | Docker Hub access token | Docker Hub push |
100 |
101 | ### Repository Variables
102 |
103 | | Variable | Purpose | Default |
104 | | ----------------------- | -------------------------- | ------- |
105 | | `ENABLE_NPM_RELEASE` | Enable NPM publishing | `false` |
106 | | `ENABLE_DOCKER_RELEASE` | Enable Docker distribution | `false` |
107 |
108 | ## Setup Instructions
109 |
110 | ### 1. Create a Personal Access Token for Releases
111 |
112 | Create a fine-grained PAT with:
113 |
114 | - **Repository access**: Your repository
115 | - **Permissions**: Contents (Write), Actions (Read)
116 |
117 | Add as `RELEASE_TOKEN` secret.
118 |
119 | ### 2. Configure NPM Publishing
120 |
121 | 1. Get NPM token from npmjs.com
122 | 2. Add as `NPM_TOKEN` secret
123 | 3. Set `ENABLE_NPM_RELEASE=true` variable
124 |
125 | ### 3. Enable Docker Distribution (Optional)
126 |
127 | 1. Set `ENABLE_DOCKER_RELEASE=true` variable
128 | 2. Add Docker Hub credentials if using Docker Hub
129 |
130 | ## How It Works
131 |
132 | 1. **Developer creates PR** with changes and changeset
133 | 2. **CI validates** the PR (tests pass, changeset present)
134 | 3. **PR is merged** to main branch
135 | 4. **Main workflow runs**:
136 | - Checks for changesets
137 | - Versions packages
138 | - Builds artifacts
139 | - Creates GitHub release
140 | 5. **Publish workflow triggers** (if `RELEASE_TOKEN` configured):
141 | - Publishes to NPM
142 | - Builds Docker images
143 | - Deploys to configured targets
144 |
145 | ## Troubleshooting
146 |
147 | ### Publish Workflow Not Triggering
148 |
149 | - Verify `RELEASE_TOKEN` secret is configured
150 | - Ensure the PAT has `contents:write` and `actions:read` permissions
151 | - Check that the release was created (not draft)
152 |
153 | ### NPM Publish Failing
154 |
155 | - Verify `NPM_TOKEN` is valid
156 | - Check `ENABLE_NPM_RELEASE` variable is set to `true`
157 | - Ensure package.json version doesn't already exist
158 |
159 | ### Missing Changesets
160 |
161 | - Main workflow will fail if releasable commits lack changesets
162 | - Add changesets with `pnpm changeset`
163 |
164 | ## Best Practices
165 |
166 | 1. **Always include changesets** in PRs with changes
167 | 2. **Configure `RELEASE_TOKEN`** for automatic publishing
168 | 3. **Keep secrets secure** and rotate regularly
169 | 4. **Monitor workflow runs** for failures
170 | 5. **Use `pnpm verify`** before pushing code
171 |
```
--------------------------------------------------------------------------------
/src/__tests__/error-handling.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { describe, it, expect, beforeEach, afterEach, beforeAll, vi } from 'vitest';
2 | import nock from 'nock';
3 | // Mock environment variables
4 | process.env.SONARQUBE_TOKEN = 'test-token';
5 | process.env.SONARQUBE_URL = 'http://localhost:9000';
6 | // Save environment variables
7 | const originalEnv = process.env;
8 | beforeAll(() => {
9 | nock.cleanAll();
10 | });
11 | afterAll(() => {
12 | nock.cleanAll();
13 | });
14 | let nullToUndefined: any;
15 | // No need to mock axios anymore since we're using sonarqube-web-api-client
16 | describe('Error Handling', () => {
17 | beforeAll(async () => {
18 | const module = await import('../index.js');
19 | nullToUndefined = module.nullToUndefined;
20 | });
21 | beforeEach(() => {
22 | vi.resetModules();
23 | process.env = { ...originalEnv };
24 | nock.cleanAll();
25 | });
26 | afterEach(() => {
27 | process.env = originalEnv;
28 | vi.restoreAllMocks();
29 | nock.cleanAll();
30 | });
31 | describe('nullToUndefined function', () => {
32 | it('should handle various input types correctly', () => {
33 | // Test nulls
34 | expect(nullToUndefined(null)).toBeUndefined();
35 | // Test undefined
36 | expect(nullToUndefined(undefined)).toBeUndefined();
37 | // Test various other types
38 | expect(nullToUndefined(0)).toBe(0);
39 | expect(nullToUndefined('')).toBe('');
40 | expect(nullToUndefined('test')).toBe('test');
41 | expect(nullToUndefined(false)).toBe(false);
42 | expect(nullToUndefined(true)).toBe(true);
43 | // Test objects and arrays
44 | const obj = { test: 1 };
45 | const arr = [1, 2, 3];
46 | expect(nullToUndefined(obj)).toBe(obj);
47 | expect(nullToUndefined(arr)).toBe(arr);
48 | });
49 | });
50 | describe('mapToSonarQubeParams', () => {
51 | it('should handle all parameters', async () => {
52 | const module = await import('../index.js');
53 | const mapToSonarQubeParams = module.mapToSonarQubeParams;
54 | const params = mapToSonarQubeParams({
55 | project_key: 'test-project',
56 | severity: 'MAJOR',
57 | page: 1,
58 | page_size: 10,
59 | statuses: ['OPEN', 'CONFIRMED'],
60 | resolutions: ['FALSE-POSITIVE', 'FIXED'],
61 | resolved: true,
62 | types: ['BUG', 'VULNERABILITY'],
63 | rules: ['rule1', 'rule2'],
64 | tags: ['tag1', 'tag2'],
65 | created_after: '2023-01-01',
66 | created_before: '2023-12-31',
67 | created_at: '2023-06-15',
68 | created_in_last: '7d',
69 | assignees: ['user1', 'user2'],
70 | authors: ['author1', 'author2'],
71 | cwe: ['cwe1', 'cwe2'],
72 | languages: ['java', 'typescript'],
73 | owasp_top10: ['a1', 'a2'],
74 | sans_top25: ['sans1', 'sans2'],
75 | sonarsource_security: ['sec1', 'sec2'],
76 | on_component_only: true,
77 | facets: ['facet1', 'facet2'],
78 | since_leak_period: true,
79 | in_new_code_period: true,
80 | });
81 | expect(params.projectKey).toBe('test-project');
82 | expect(params.severity).toBe('MAJOR');
83 | expect(params.page).toBe(1);
84 | expect(params.pageSize).toBe(10);
85 | expect(params.statuses).toEqual(['OPEN', 'CONFIRMED']);
86 | expect(params.resolutions).toEqual(['FALSE-POSITIVE', 'FIXED']);
87 | expect(params.resolved).toBe(true);
88 | expect(params.types).toEqual(['BUG', 'VULNERABILITY']);
89 | expect(params.rules).toEqual(['rule1', 'rule2']);
90 | expect(params.tags).toEqual(['tag1', 'tag2']);
91 | expect(params.createdAfter).toBe('2023-01-01');
92 | expect(params.createdBefore).toBe('2023-12-31');
93 | expect(params.createdAt).toBe('2023-06-15');
94 | expect(params.createdInLast).toBe('7d');
95 | expect(params.assignees).toEqual(['user1', 'user2']);
96 | expect(params.authors).toEqual(['author1', 'author2']);
97 | expect(params.cwe).toEqual(['cwe1', 'cwe2']);
98 | expect(params.languages).toEqual(['java', 'typescript']);
99 | expect(params.owaspTop10).toEqual(['a1', 'a2']);
100 | expect(params.sansTop25).toEqual(['sans1', 'sans2']);
101 | expect(params.sonarsourceSecurity).toEqual(['sec1', 'sec2']);
102 | expect(params.onComponentOnly).toBe(true);
103 | expect(params.facets).toEqual(['facet1', 'facet2']);
104 | expect(params.sinceLeakPeriod).toBe(true);
105 | expect(params.inNewCodePeriod).toBe(true);
106 | });
107 | it('should handle empty parameters', async () => {
108 | const module = await import('../index.js');
109 | const mapToSonarQubeParams = module.mapToSonarQubeParams;
110 | const params = mapToSonarQubeParams({ project_key: 'test-project' });
111 | expect(params.projectKey).toBe('test-project');
112 | expect(params.severity).toBeUndefined();
113 | expect(params.statuses).toBeUndefined();
114 | expect(params.resolutions).toBeUndefined();
115 | expect(params.resolved).toBeUndefined();
116 | expect(params.types).toBeUndefined();
117 | expect(params.rules).toBeUndefined();
118 | });
119 | });
120 | describe('Error handling utility functions', () => {
121 | it('should properly handle null parameters', () => {
122 | expect(nullToUndefined(null)).toBeUndefined();
123 | });
124 | it('should pass through non-null values', () => {
125 | expect(nullToUndefined('value')).toBe('value');
126 | expect(nullToUndefined(123)).toBe(123);
127 | expect(nullToUndefined(true)).toBe(true);
128 | expect(nullToUndefined(false)).toBe(false);
129 | expect(nullToUndefined([])).toEqual([]);
130 | expect(nullToUndefined({})).toEqual({});
131 | });
132 | it('should handle undefined parameters', () => {
133 | expect(nullToUndefined(undefined)).toBeUndefined();
134 | });
135 | });
136 | });
137 |
```
--------------------------------------------------------------------------------
/src/monitoring/health.ts:
--------------------------------------------------------------------------------
```typescript
1 | /* istanbul ignore file */
2 | import { createLogger } from '../utils/logger.js';
3 | import { createSonarQubeClient } from '../sonarqube.js';
4 | import { getServiceAccountConfig } from '../config/service-accounts.js';
5 | import { SERVER_VERSION } from '../config/versions.js';
6 |
7 | const logger = createLogger('HealthService');
8 |
9 | export type HealthStatus = 'healthy' | 'unhealthy' | 'degraded';
10 |
11 | export interface DependencyHealth {
12 | name: string;
13 | status: HealthStatus;
14 | message?: string;
15 | latency?: number;
16 | lastCheck?: Date;
17 | }
18 |
19 | export interface HealthCheckResult {
20 | status: HealthStatus;
21 | version: string;
22 | uptime: number;
23 | timestamp: Date;
24 | dependencies: Record<string, DependencyHealth>;
25 | features: Record<string, boolean>;
26 | metrics?: {
27 | requests: number;
28 | errors: number;
29 | activeSession: number;
30 | };
31 | }
32 |
33 | export class HealthService {
34 | private static instance: HealthService;
35 | private readonly startTime: Date;
36 | private cachedHealth?: {
37 | result: HealthCheckResult;
38 | timestamp: number;
39 | };
40 | private readonly cacheTimeout = 5000; // 5 seconds cache
41 |
42 | private constructor() {
43 | this.startTime = new Date();
44 | }
45 |
46 | static getInstance(): HealthService {
47 | if (!this.instance) {
48 | this.instance = new HealthService();
49 | }
50 | return this.instance;
51 | }
52 |
53 | /**
54 | * Perform comprehensive health check
55 | */
56 | async checkHealth(): Promise<HealthCheckResult> {
57 | // Check cache
58 | if (this.cachedHealth && Date.now() - this.cachedHealth.timestamp < this.cacheTimeout) {
59 | return this.cachedHealth.result;
60 | }
61 |
62 | const dependencies: Record<string, DependencyHealth> = {};
63 |
64 | // Check SonarQube connectivity
65 | dependencies.sonarqube = await this.checkSonarQube();
66 |
67 | // Determine overall status
68 | const statuses = Object.values(dependencies);
69 | let overallStatus: HealthStatus = 'healthy';
70 |
71 | if (statuses.some((d) => d.status === 'unhealthy')) {
72 | overallStatus = 'unhealthy';
73 | } else if (statuses.some((d) => d.status === 'degraded')) {
74 | overallStatus = 'degraded';
75 | }
76 |
77 | // Get metrics summary
78 | const metrics = this.getMetricsSummary();
79 |
80 | const result: HealthCheckResult = {
81 | status: overallStatus,
82 | version: SERVER_VERSION,
83 | uptime: Date.now() - this.startTime.getTime(),
84 | timestamp: new Date(),
85 | dependencies,
86 | features: {
87 | metrics: true,
88 | },
89 | };
90 |
91 | // Only add metrics if they exist
92 | if (metrics !== undefined) {
93 | result.metrics = metrics;
94 | }
95 |
96 | // Cache result
97 | this.cachedHealth = {
98 | result,
99 | timestamp: Date.now(),
100 | };
101 |
102 | return result;
103 | }
104 |
105 | /**
106 | * Check SonarQube connectivity
107 | */
108 | private async checkSonarQube(): Promise<DependencyHealth> {
109 | const startTime = Date.now();
110 |
111 | try {
112 | const config = getServiceAccountConfig('default');
113 | if (!config?.token) {
114 | return {
115 | name: 'SonarQube',
116 | status: 'unhealthy',
117 | message: 'No default service account configured',
118 | lastCheck: new Date(),
119 | };
120 | }
121 |
122 | const client = createSonarQubeClient(
123 | config.token,
124 | config.url ?? process.env.SONARQUBE_URL ?? 'https://sonarcloud.io',
125 | config.organization ?? process.env.SONARQUBE_ORGANIZATION
126 | );
127 |
128 | // Try to ping SonarQube
129 | await client.ping();
130 |
131 | return {
132 | name: 'SonarQube',
133 | status: 'healthy',
134 | latency: Date.now() - startTime,
135 | lastCheck: new Date(),
136 | };
137 | } catch (error) {
138 | logger.error('SonarQube health check failed', error);
139 | return {
140 | name: 'SonarQube',
141 | status: 'unhealthy',
142 | message: error instanceof Error ? error.message : 'Unknown error',
143 | latency: Date.now() - startTime,
144 | lastCheck: new Date(),
145 | };
146 | }
147 | }
148 |
149 | /**
150 | * Get metrics summary
151 | */
152 | private getMetricsSummary(): HealthCheckResult['metrics'] {
153 | // Get current metric values (this is a simplified version)
154 | // In a real implementation, you'd query the actual metric values
155 | // For now, we'll return static values
156 | return {
157 | requests: 0, // Would query mcpRequestsTotal
158 | errors: 0, // Would query sonarqubeErrorsTotal + authFailuresTotal
159 | activeSession: 0, // Would query activeSessions
160 | };
161 | }
162 |
163 | /**
164 | * Get readiness status (for Kubernetes)
165 | */
166 | async checkReadiness(): Promise<{
167 | ready: boolean;
168 | checks: Record<string, { ready: boolean; message?: string }>;
169 | }> {
170 | const checks: Record<string, { ready: boolean; message?: string }> = {};
171 |
172 | // Check if server is initialized
173 | checks.server = { ready: true };
174 |
175 | // Check SonarQube connectivity (async)
176 | const sonarqubeHealth = await this.checkSonarQube();
177 | const sonarqubeCheck: { ready: boolean; message?: string } = {
178 | ready: sonarqubeHealth.status !== 'unhealthy',
179 | };
180 |
181 | if (sonarqubeHealth.message !== undefined) {
182 | sonarqubeCheck.message = sonarqubeHealth.message;
183 | }
184 |
185 | checks.sonarqube = sonarqubeCheck;
186 |
187 | // Overall readiness
188 | const ready = Object.values(checks).every((check) => check.ready);
189 |
190 | return { ready, checks };
191 | }
192 |
193 | /**
194 | * Reset the singleton instance (for testing)
195 | */
196 | static resetInstance(): void {
197 | // Reset the singleton instance for testing purposes
198 | // @ts-expect-error - Intentionally setting to undefined for testing
199 | this.instance = undefined;
200 | }
201 | }
202 |
```
--------------------------------------------------------------------------------
/src/monitoring/circuit-breaker.ts:
--------------------------------------------------------------------------------
```typescript
1 | import CircuitBreaker from 'opossum';
2 | import { createLogger } from '../utils/logger.js';
3 | import { updateCircuitBreakerMetrics, trackCircuitBreakerFailure } from './metrics.js';
4 |
5 | const logger = createLogger('CircuitBreaker');
6 |
7 | export interface CircuitBreakerOptions {
8 | timeout?: number;
9 | errorThresholdPercentage?: number;
10 | resetTimeout?: number;
11 | rollingCountTimeout?: number;
12 | rollingCountBuckets?: number;
13 | name?: string;
14 | volumeThreshold?: number;
15 | errorFilter?: (error: Error) => boolean;
16 | }
17 |
18 | /**
19 | * Circuit breaker factory for external service calls
20 | */
21 | export class CircuitBreakerFactory {
22 | private static readonly breakers = new Map<string, CircuitBreaker>();
23 |
24 | /**
25 | * Create or get a circuit breaker for a service
26 | */
27 | static getBreaker<T extends unknown[], R>(
28 | name: string,
29 | fn: (...args: T) => Promise<R>,
30 | options: CircuitBreakerOptions = {}
31 | ): CircuitBreaker<T, R> {
32 | // Check if breaker already exists
33 | const existing = this.breakers.get(name);
34 | if (existing) {
35 | return existing as CircuitBreaker<T, R>;
36 | }
37 |
38 | // Create new circuit breaker with defaults
39 | const breakerOptions: CircuitBreaker.Options = {
40 | timeout: options.timeout ?? 10000, // 10 seconds
41 | errorThresholdPercentage: options.errorThresholdPercentage ?? 50,
42 | resetTimeout: options.resetTimeout ?? 30000, // 30 seconds
43 | rollingCountTimeout: options.rollingCountTimeout ?? 10000, // 10 seconds
44 | rollingCountBuckets: options.rollingCountBuckets ?? 10,
45 | name: options.name ?? name,
46 | volumeThreshold: options.volumeThreshold ?? 5,
47 | errorFilter: options.errorFilter,
48 | };
49 |
50 | const breaker = new CircuitBreaker(fn, breakerOptions);
51 |
52 | // Add event listeners for metrics
53 | this.attachEventListeners(name, breaker);
54 |
55 | // Store breaker
56 | this.breakers.set(name, breaker);
57 |
58 | logger.info('Circuit breaker created', {
59 | name,
60 | options: breakerOptions,
61 | });
62 |
63 | return breaker;
64 | }
65 |
66 | /**
67 | * Attach event listeners for metrics and logging
68 | */
69 | private static attachEventListeners(name: string, breaker: CircuitBreaker): void {
70 | // Circuit opened (failure threshold reached)
71 | breaker.on('open', () => {
72 | logger.warn('Circuit breaker opened', { name });
73 | updateCircuitBreakerMetrics(name, 'open');
74 | });
75 |
76 | // Circuit closed (recovered)
77 | breaker.on('close', () => {
78 | logger.info('Circuit breaker closed', { name });
79 | updateCircuitBreakerMetrics(name, 'closed');
80 | });
81 |
82 | // Circuit half-open (testing if service recovered)
83 | breaker.on('halfOpen', () => {
84 | logger.info('Circuit breaker half-open', { name });
85 | updateCircuitBreakerMetrics(name, 'half-open');
86 | });
87 |
88 | // Request rejected due to open circuit
89 | breaker.on('reject', () => {
90 | trackCircuitBreakerFailure(name);
91 | logger.debug('Request rejected by circuit breaker', { name });
92 | });
93 |
94 | // Request failed
95 | breaker.on('failure', (error: Error) => {
96 | trackCircuitBreakerFailure(name);
97 | logger.debug('Request failed in circuit breaker', {
98 | name,
99 | error: error.message,
100 | });
101 | });
102 |
103 | // Request succeeded
104 | breaker.on('success', (result: unknown) => {
105 | logger.debug('Request succeeded in circuit breaker', {
106 | name,
107 | hasResult: !!result,
108 | });
109 | });
110 |
111 | // Timeout
112 | breaker.on('timeout', () => {
113 | trackCircuitBreakerFailure(name);
114 | logger.warn('Request timed out in circuit breaker', { name });
115 | });
116 | }
117 |
118 | /**
119 | * Get circuit breaker statistics
120 | */
121 | static getStats(name: string): CircuitBreaker.Stats | undefined {
122 | const breaker = this.breakers.get(name);
123 | return breaker?.stats;
124 | }
125 |
126 | /**
127 | * Get all circuit breakers
128 | */
129 | static getAllBreakers(): Map<string, CircuitBreaker> {
130 | return new Map(this.breakers);
131 | }
132 |
133 | /**
134 | * Shutdown all circuit breakers
135 | */
136 | static shutdown(): void {
137 | logger.info('Shutting down all circuit breakers', {
138 | count: this.breakers.size,
139 | });
140 |
141 | for (const [name, breaker] of Array.from(this.breakers.entries())) {
142 | breaker.shutdown();
143 | logger.debug('Circuit breaker shut down', { name });
144 | }
145 |
146 | this.breakers.clear();
147 | }
148 |
149 | /**
150 | * Reset all circuit breakers (for testing)
151 | */
152 | static reset(): void {
153 | this.breakers.clear();
154 | }
155 | }
156 |
157 | /**
158 | * Decorator to apply circuit breaker to a method
159 | */
160 | export function withCircuitBreaker(name: string, options?: CircuitBreakerOptions): MethodDecorator {
161 | return function (target: unknown, propertyKey: string | symbol, descriptor: PropertyDescriptor) {
162 | const originalMethod = descriptor.value as (...args: unknown[]) => Promise<unknown>;
163 |
164 | descriptor.value = async function (...args: unknown[]) {
165 | const breaker = CircuitBreakerFactory.getBreaker(
166 | `${name}.${String(propertyKey)}`,
167 | originalMethod.bind(this) as (...args: unknown[]) => Promise<unknown>,
168 | options
169 | );
170 |
171 | return breaker.fire(...args);
172 | };
173 |
174 | return descriptor;
175 | };
176 | }
177 |
178 | /**
179 | * Wrap a function with a circuit breaker
180 | */
181 | export function wrapWithCircuitBreaker<T extends unknown[], R>(
182 | name: string,
183 | fn: (...args: T) => Promise<R>,
184 | options?: CircuitBreakerOptions
185 | ): (...args: T) => Promise<R> {
186 | const breaker = CircuitBreakerFactory.getBreaker(name, fn, options);
187 |
188 | return async (...args: T) => {
189 | return breaker.fire(...args);
190 | };
191 | }
192 |
```
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
```json
1 | {
2 | "name": "sonarqube-mcp-server",
3 | "version": "1.10.21",
4 | "description": "Model Context Protocol server for SonarQube",
5 | "keywords": [
6 | "sonarqube",
7 | "mcp",
8 | "model-context-protocol"
9 | ],
10 | "author": "Marc Tremblay <[email protected]>",
11 | "license": "MIT",
12 | "repository": {
13 | "type": "git",
14 | "url": "git+https://github.com/sapientpants/sonarqube-mcp-server.git"
15 | },
16 | "bugs": {
17 | "url": "https://github.com/sapientpants/sonarqube-mcp-server/issues"
18 | },
19 | "homepage": "https://github.com/sapientpants/sonarqube-mcp-server",
20 | "type": "module",
21 | "main": "dist/index.js",
22 | "types": "dist/index.d.ts",
23 | "exports": {
24 | ".": {
25 | "import": "./dist/index.js",
26 | "types": "./dist/index.d.ts"
27 | }
28 | },
29 | "bin": {
30 | "sonarqube-mcp-server": "./dist/index.js"
31 | },
32 | "packageManager": "[email protected]",
33 | "scripts": {
34 | "dev": "tsc --watch --preserveWatchOutput",
35 | "start": "node dist/index.js",
36 | "build": "tsc -p tsconfig.build.json",
37 | "build:watch": "tsc -p tsconfig.build.json --watch",
38 | "typecheck": "tsc -p tsconfig.json --noEmit",
39 | "typecheck:watch": "tsc -p tsconfig.json --noEmit --watch",
40 | "lint": "eslint .",
41 | "lint:fix": "eslint . --fix",
42 | "lint:workflows": "./scripts/actionlint.sh",
43 | "lint:markdown": "markdownlint-cli2 \"**/*.md\" \"#node_modules\"",
44 | "lint:markdown:fix": "markdownlint-cli2 --fix \"**/*.md\" \"#node_modules\"",
45 | "lint:yaml": "yamllint '**/*.{yml,yaml}' --ignore='node_modules/**' --ignore='.github/workflows/**' --ignore='pnpm-lock.yaml'",
46 | "format": "prettier --check .",
47 | "format:fix": "prettier --write .",
48 | "test": "vitest run --reporter=verbose",
49 | "test:watch": "vitest",
50 | "test:ui": "vitest --ui",
51 | "test:coverage": "vitest run --reporter=verbose --coverage",
52 | "test:coverage:watch": "vitest --coverage",
53 | "coverage:report": "vitest run --coverage --reporter=verbose",
54 | "coverage:open": "open coverage/index.html",
55 | "clean": "rimraf dist coverage",
56 | "reset": "pnpm clean && pnpm install",
57 | "quick-check": "pnpm typecheck && pnpm lint && pnpm test",
58 | "precommit": "pnpm audit --audit-level critical && pnpm typecheck && pnpm lint && pnpm lint:workflows && pnpm lint:markdown && pnpm lint:yaml && pnpm format && pnpm test",
59 | "verify": "pnpm precommit",
60 | "setup": "./scripts/setup.sh",
61 | "lint-staged": "lint-staged",
62 | "sbom": "pnpm dlx @cyclonedx/cdxgen -o sbom.cdx.json",
63 | "scan:container": "./scripts/scan-container.sh",
64 | "scan:container:sarif": "./scripts/scan-container.sh --format sarif --output container-scan.sarif",
65 | "prepare": "husky",
66 | "release": "changeset version && pnpm build",
67 | "release:publish": "pnpm build && changeset publish",
68 | "release:tag": "git add -A && git commit -m \"chore(release): version packages\" && git tag -a v$(node -p \"require('./package.json').version\") -m \"Release\"",
69 | "changeset": "changeset",
70 | "changeset:status": "changeset status --since=main",
71 | "ci:local": "./scripts/ci-local.sh",
72 | "ci:local:fast": "./scripts/ci-local.sh --fast",
73 | "doctor": "node -e \"console.log('Node:', process.version); console.log('npm scripts:', Object.keys(require('./package.json').scripts).length, 'available'); console.log('Run: pnpm run help')\"",
74 | "help": "node -e \"const s=require('./package.json').scripts; console.log('Available commands:'); Object.keys(s).sort().forEach(k => console.log(' pnpm run ' + k.padEnd(20) + ' # ' + s[k].split(' ')[0]))\""
75 | },
76 | "lint-staged": {
77 | "*.{ts,tsx,js,json,md,yml,yaml}": [
78 | "prettier --write"
79 | ],
80 | "*.{ts,tsx,js,json,jsonc,json5}": [
81 | "eslint --fix"
82 | ],
83 | "*.md": [
84 | "markdownlint-cli2 --fix"
85 | ],
86 | "*.{yml,yaml}": [
87 | "yamllint"
88 | ]
89 | },
90 | "dependencies": {
91 | "@modelcontextprotocol/sdk": "^1.20.1",
92 | "cors": "2.8.5",
93 | "express": "5.1.0",
94 | "lodash": "^4.17.21",
95 | "opossum": "^9.0.0",
96 | "pino": "10.1.0",
97 | "pino-roll": "4.0.0",
98 | "pino-syslog": "3.2.0",
99 | "sonarqube-web-api-client": "1.0.1",
100 | "uuid": "13.0.0",
101 | "zod": "^3.25.76",
102 | "zod-to-json-schema": "^3.24.6"
103 | },
104 | "devDependencies": {
105 | "@changesets/cli": "^2.29.7",
106 | "@commitlint/cli": "20.1.0",
107 | "@commitlint/config-conventional": "20.0.0",
108 | "@cyclonedx/cdxgen": "11.10.0",
109 | "@eslint/js": "^9.38.0",
110 | "@fast-check/vitest": "^0.2.2",
111 | "@jest/globals": "^30.2.0",
112 | "@types/cors": "2.8.19",
113 | "@types/express": "5.0.3",
114 | "@types/jest": "^30.0.0",
115 | "@types/lodash": "^4.17.20",
116 | "@types/node": "^24.8.1",
117 | "@types/opossum": "^8.1.9",
118 | "@typescript-eslint/eslint-plugin": "^8.46.1",
119 | "@typescript-eslint/parser": "^8.46.1",
120 | "@vitest/coverage-v8": "^3.2.4",
121 | "changelog-github-custom": "1.2.7",
122 | "eslint": "^9.38.0",
123 | "eslint-config-prettier": "^10.1.8",
124 | "eslint-plugin-jsonc": "2.21.0",
125 | "eslint-plugin-prettier": "^5.5.4",
126 | "fast-check": "^4.3.0",
127 | "husky": "^9.1.7",
128 | "jest": "^30.2.0",
129 | "jsonc-eslint-parser": "2.4.1",
130 | "lint-staged": "^16.2.4",
131 | "markdownlint-cli2": "0.18.1",
132 | "nock": "^14.0.10",
133 | "pino-pretty": "13.1.2",
134 | "prettier": "^3.6.2",
135 | "rimraf": "^6.0.1",
136 | "supertest": "^7.1.4",
137 | "ts-jest": "^29.4.5",
138 | "ts-node": "^10.9.2",
139 | "ts-node-dev": "^2.0.0",
140 | "typescript": "^5.9.3",
141 | "vite": "7.1.11",
142 | "vitest": "^3.2.4",
143 | "yaml-lint": "1.7.0"
144 | },
145 | "pnpm": {
146 | "overrides": {
147 | "@eslint/plugin-kit": ">=0.3.3"
148 | }
149 | }
150 | }
151 |
```
--------------------------------------------------------------------------------
/src/__tests__/schemas/hotspots-tools-schema.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { z } from 'zod';
2 | import {
3 | hotspotsToolSchema,
4 | hotspotToolSchema,
5 | updateHotspotStatusToolSchema,
6 | } from '../../schemas/hotspots-tools.js';
7 |
8 | describe('hotspotsToolSchema', () => {
9 | it('should validate minimal hotspots search parameters', () => {
10 | const input = {};
11 | const result = z.object(hotspotsToolSchema).parse(input);
12 | expect(result).toEqual({});
13 | });
14 |
15 | it('should validate hotspots search with all parameters', () => {
16 | const input = {
17 | project_key: 'my-project',
18 | branch: 'main',
19 | pull_request: 'PR-123',
20 | status: 'TO_REVIEW',
21 | resolution: 'FIXED',
22 | files: ['file1.java', 'file2.java'],
23 | assigned_to_me: true,
24 | since_leak_period: false,
25 | in_new_code_period: true,
26 | page: '2',
27 | page_size: '50',
28 | };
29 | const result = z.object(hotspotsToolSchema).parse(input);
30 | expect(result.project_key).toBe('my-project');
31 | expect(result.status).toBe('TO_REVIEW');
32 | expect(result.resolution).toBe('FIXED');
33 | expect(result.files).toEqual(['file1.java', 'file2.java']);
34 | expect(result.assigned_to_me).toBe(true);
35 | expect(result.page).toBe(2);
36 | });
37 |
38 | it('should handle boolean string conversions', () => {
39 | const input = {
40 | assigned_to_me: 'true',
41 | since_leak_period: 'false',
42 | in_new_code_period: 'true',
43 | };
44 | const result = z.object(hotspotsToolSchema).parse(input);
45 | expect(result.assigned_to_me).toBe(true);
46 | expect(result.since_leak_period).toBe(false);
47 | expect(result.in_new_code_period).toBe(true);
48 | });
49 |
50 | it('should handle page number string conversions', () => {
51 | const input = {
52 | page: '3',
53 | page_size: '25',
54 | };
55 | const result = z.object(hotspotsToolSchema).parse(input);
56 | expect(result.page).toBe(3);
57 | expect(result.page_size).toBe(25);
58 | });
59 |
60 | it('should handle null values', () => {
61 | const input = {
62 | branch: null,
63 | pull_request: null,
64 | status: null,
65 | resolution: null,
66 | files: null,
67 | assigned_to_me: null,
68 | since_leak_period: null,
69 | in_new_code_period: null,
70 | };
71 | const result = z.object(hotspotsToolSchema).parse(input);
72 | expect(result.branch).toBeNull();
73 | expect(result.pull_request).toBeNull();
74 | expect(result.status).toBeNull();
75 | expect(result.resolution).toBeNull();
76 | expect(result.files).toBeNull();
77 | expect(result.assigned_to_me).toBeNull();
78 | });
79 |
80 | it('should reject invalid status values', () => {
81 | const input = {
82 | status: 'INVALID_STATUS',
83 | };
84 | expect(() => z.object(hotspotsToolSchema).parse(input)).toThrow();
85 | });
86 |
87 | it('should reject invalid resolution values', () => {
88 | const input = {
89 | resolution: 'INVALID_RESOLUTION',
90 | };
91 | expect(() => z.object(hotspotsToolSchema).parse(input)).toThrow();
92 | });
93 | });
94 |
95 | describe('hotspotToolSchema', () => {
96 | it('should validate hotspot key parameter', () => {
97 | const input = {
98 | hotspot_key: 'AYg1234567890',
99 | };
100 | const result = z.object(hotspotToolSchema).parse(input);
101 | expect(result.hotspot_key).toBe('AYg1234567890');
102 | });
103 |
104 | it('should require hotspot_key', () => {
105 | const input = {};
106 | expect(() => z.object(hotspotToolSchema).parse(input)).toThrow();
107 | });
108 | });
109 |
110 | describe('updateHotspotStatusToolSchema', () => {
111 | it('should validate minimal update parameters', () => {
112 | const input = {
113 | hotspot_key: 'AYg1234567890',
114 | status: 'REVIEWED',
115 | };
116 | const result = z.object(updateHotspotStatusToolSchema).parse(input);
117 | expect(result.hotspot_key).toBe('AYg1234567890');
118 | expect(result.status).toBe('REVIEWED');
119 | expect(result.resolution).toBeUndefined();
120 | expect(result.comment).toBeUndefined();
121 | });
122 |
123 | it('should validate update with all parameters', () => {
124 | const input = {
125 | hotspot_key: 'AYg1234567890',
126 | status: 'REVIEWED',
127 | resolution: 'SAFE',
128 | comment: 'This is safe after review',
129 | };
130 | const result = z.object(updateHotspotStatusToolSchema).parse(input);
131 | expect(result.hotspot_key).toBe('AYg1234567890');
132 | expect(result.status).toBe('REVIEWED');
133 | expect(result.resolution).toBe('SAFE');
134 | expect(result.comment).toBe('This is safe after review');
135 | });
136 |
137 | it('should handle null values for optional parameters', () => {
138 | const input = {
139 | hotspot_key: 'AYg1234567890',
140 | status: 'TO_REVIEW',
141 | resolution: null,
142 | comment: null,
143 | };
144 | const result = z.object(updateHotspotStatusToolSchema).parse(input);
145 | expect(result.hotspot_key).toBe('AYg1234567890');
146 | expect(result.status).toBe('TO_REVIEW');
147 | expect(result.resolution).toBeNull();
148 | expect(result.comment).toBeNull();
149 | });
150 |
151 | it('should require hotspot_key and status', () => {
152 | const input1 = { status: 'REVIEWED' };
153 | expect(() => z.object(updateHotspotStatusToolSchema).parse(input1)).toThrow();
154 |
155 | const input2 = { hotspot_key: 'AYg1234567890' };
156 | expect(() => z.object(updateHotspotStatusToolSchema).parse(input2)).toThrow();
157 | });
158 |
159 | it('should reject invalid status values', () => {
160 | const input = {
161 | hotspot_key: 'AYg1234567890',
162 | status: 'INVALID_STATUS',
163 | };
164 | expect(() => z.object(updateHotspotStatusToolSchema).parse(input)).toThrow();
165 | });
166 |
167 | it('should reject invalid resolution values', () => {
168 | const input = {
169 | hotspot_key: 'AYg1234567890',
170 | status: 'REVIEWED',
171 | resolution: 'INVALID_RESOLUTION',
172 | };
173 | expect(() => z.object(updateHotspotStatusToolSchema).parse(input)).toThrow();
174 | });
175 | });
176 |
```
--------------------------------------------------------------------------------
/src/__tests__/advanced-index.test.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { describe, it, expect, beforeEach, afterEach, beforeAll, vi } from 'vitest';
2 | import nock from 'nock';
3 | import { z } from 'zod';
4 | // Mock environment variables
5 | process.env.SONARQUBE_TOKEN = 'test-token';
6 | process.env.SONARQUBE_URL = 'http://localhost:9000';
7 | process.env.SONARQUBE_ORGANIZATION = 'test-org';
8 | // Save environment variables
9 | const originalEnv = process.env;
10 | beforeAll(() => {
11 | nock.cleanAll();
12 | // Common mocks for all tests
13 | nock('http://localhost:9000')
14 | .persist()
15 | .get('/api/projects/search')
16 | .query(true)
17 | .reply(200, {
18 | components: [
19 | {
20 | key: 'test-project',
21 | name: 'Test Project',
22 | qualifier: 'TRK',
23 | visibility: 'public',
24 | },
25 | ],
26 | paging: {
27 | pageIndex: 1,
28 | pageSize: 10,
29 | total: 1,
30 | },
31 | });
32 | });
33 | afterAll(() => {
34 | nock.cleanAll();
35 | });
36 | let nullToUndefined: any;
37 | let mapToSonarQubeParams: any;
38 | // No need to mock axios anymore since we're using sonarqube-web-api-client
39 | describe('Advanced MCP Server Tests', () => {
40 | beforeAll(async () => {
41 | // Import functions we need to test
42 | const module = await import('../index.js');
43 | nullToUndefined = module.nullToUndefined;
44 | mapToSonarQubeParams = module.mapToSonarQubeParams;
45 | });
46 | beforeEach(() => {
47 | vi.resetModules();
48 | process.env = { ...originalEnv };
49 | });
50 | afterEach(() => {
51 | process.env = originalEnv;
52 | vi.clearAllMocks();
53 | nock.cleanAll();
54 | });
55 | describe('Schema Transformation Tests', () => {
56 | it('should transform page parameters correctly', () => {
57 | // Create a schema that matches the one in the tool registration
58 | const pageSchema = z
59 | .string()
60 | .optional()
61 | .transform((val: any) => (val ? parseInt(val, 10) || null : null));
62 | // Test valid inputs
63 | expect(pageSchema.parse('10')).toBe(10);
64 | expect(pageSchema.parse('100')).toBe(100);
65 | // Test invalid or empty inputs
66 | expect(pageSchema.parse('')).toBe(null);
67 | expect(pageSchema.parse('abc')).toBe(null);
68 | expect(pageSchema.parse(undefined)).toBe(null);
69 | });
70 | it('should transform boolean parameters correctly', () => {
71 | const booleanSchema = z
72 | .union([z.boolean(), z.string().transform((val: any) => val === 'true')])
73 | .nullable()
74 | .optional();
75 | // Test string values
76 | expect(booleanSchema.parse('true')).toBe(true);
77 | expect(booleanSchema.parse('false')).toBe(false);
78 | // Test boolean values
79 | expect(booleanSchema.parse(true)).toBe(true);
80 | expect(booleanSchema.parse(false)).toBe(false);
81 | // Test null/undefined values
82 | expect(booleanSchema.parse(null)).toBe(null);
83 | expect(booleanSchema.parse(undefined)).toBe(undefined);
84 | });
85 | });
86 | describe('nullToUndefined Tests', () => {
87 | it('should convert null to undefined', () => {
88 | expect(nullToUndefined(null)).toBeUndefined();
89 | });
90 | it('should pass through other values', () => {
91 | expect(nullToUndefined(123)).toBe(123);
92 | expect(nullToUndefined('string')).toBe('string');
93 | expect(nullToUndefined(false)).toBe(false);
94 | expect(nullToUndefined({})).toEqual({});
95 | expect(nullToUndefined([])).toEqual([]);
96 | expect(nullToUndefined(undefined)).toBeUndefined();
97 | });
98 | });
99 | describe('mapToSonarQubeParams Tests', () => {
100 | it('should map MCP parameters to SonarQube parameters', () => {
101 | const mcpParams = {
102 | project_key: 'test-project',
103 | severity: 'MAJOR',
104 | page: 1,
105 | page_size: 10,
106 | };
107 | const sonarQubeParams = mapToSonarQubeParams(mcpParams);
108 | expect(sonarQubeParams.projectKey).toBe('test-project');
109 | expect(sonarQubeParams.severity).toBe('MAJOR');
110 | expect(sonarQubeParams.page).toBe(1);
111 | expect(sonarQubeParams.pageSize).toBe(10);
112 | });
113 | it('should handle empty optional parameters', () => {
114 | const mcpParams = {
115 | project_key: 'test-project',
116 | };
117 | const sonarQubeParams = mapToSonarQubeParams(mcpParams);
118 | expect(sonarQubeParams.projectKey).toBe('test-project');
119 | expect(sonarQubeParams.severity).toBeUndefined();
120 | expect(sonarQubeParams.page).toBeUndefined();
121 | expect(sonarQubeParams.pageSize).toBeUndefined();
122 | });
123 | it('should handle array parameters', () => {
124 | const mcpParams = {
125 | project_key: 'test-project',
126 | statuses: ['OPEN', 'CONFIRMED'],
127 | types: ['BUG', 'VULNERABILITY'],
128 | };
129 | const sonarQubeParams = mapToSonarQubeParams(mcpParams);
130 | expect(sonarQubeParams.projectKey).toBe('test-project');
131 | expect(sonarQubeParams.statuses).toEqual(['OPEN', 'CONFIRMED']);
132 | expect(sonarQubeParams.types).toEqual(['BUG', 'VULNERABILITY']);
133 | });
134 | it('should handle boolean parameters', () => {
135 | const mcpParams = {
136 | project_key: 'test-project',
137 | resolved: true,
138 | on_component_only: false,
139 | };
140 | const sonarQubeParams = mapToSonarQubeParams(mcpParams);
141 | expect(sonarQubeParams.projectKey).toBe('test-project');
142 | expect(sonarQubeParams.resolved).toBe(true);
143 | expect(sonarQubeParams.onComponentOnly).toBe(false);
144 | });
145 | });
146 | describe('Environment Handling', () => {
147 | it('should correctly retrieve environment variables', () => {
148 | expect(process.env.SONARQUBE_TOKEN).toBe('test-token');
149 | expect(process.env.SONARQUBE_URL).toBe('http://localhost:9000');
150 | expect(process.env.SONARQUBE_ORGANIZATION).toBe('test-org');
151 | });
152 | });
153 | });
154 |
```
--------------------------------------------------------------------------------
/.github/workflows/reusable-validate.yml:
--------------------------------------------------------------------------------
```yaml
1 | # =============================================================================
2 | # REUSABLE WORKFLOW: Code Validation Suite
3 | # PURPOSE: Run all quality checks (audit, typecheck, lint, format, test)
4 | # USAGE: Called by PR and main workflows for consistent validation
5 | # OUTPUTS: Coverage reports (always uploaded as a build artifact)
6 | # =============================================================================
7 |
8 | name: Reusable Validate
9 |
10 | on:
11 | workflow_call:
12 | inputs:
13 | node-version:
14 | description: 'Node.js version (should match package.json engines.node)'
15 | type: string
16 | default: '22' # UPDATE: When upgrading Node.js
17 | pnpm-version:
18 | description: 'pnpm version (should match package.json packageManager)'
19 | type: string
20 | default: '10.17.0' # UPDATE: When upgrading pnpm
21 | validate-changesets:
22 | description: 'validate that a changeset exists on the branch'
23 | type: boolean
24 | default: false
25 | secrets:
26 | SONAR_TOKEN:
27 | description: 'SonarCloud authentication token'
28 | required: true
29 |
30 | # EXAMPLE USAGE:
31 | # jobs:
32 | # validate:
33 | # uses: ./.github/workflows/reusable-validate.yml
34 | # with:
35 | #       validate-changesets: true  # For PRs, require a changeset on the branch
36 |
37 | jobs:
38 | test:
39 | runs-on: ubuntu-latest
40 | steps:
41 | - name: Checkout code
42 | uses: actions/checkout@v4
43 | with:
44 | fetch-depth: 0 # Full history for accurate analysis
45 |
46 | - name: Install pnpm
47 | uses: pnpm/action-setup@v4
48 | with:
49 | version: ${{ inputs.pnpm-version }}
50 | run_install: false
51 | standalone: true
52 |
53 | - name: Setup Node.js
54 | uses: actions/setup-node@v4
55 | with:
56 | node-version: ${{ inputs.node-version }}
57 | cache: pnpm # Cache dependencies for speed
58 |
59 | - name: Install dependencies
60 | # Ensures exact versions from lock file
61 | # FAILS IF: Lock file out of sync with package.json
62 | run: pnpm install --frozen-lockfile
63 |
64 | - name: Tests with coverage
65 | # Run test suite with coverage
66 | # Coverage enforces 80% minimum threshold for all metrics
67 | # FAILS IF: Tests fail or coverage below 80% (when coverage enabled)
68 | # To debug: Check test output and coverage/index.html
69 | run: pnpm test:coverage
70 |
71 | - name: SonarQube Scan
72 | uses: SonarSource/sonarqube-scan-action@v6
73 | env:
74 | SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
75 |
76 | - name: Upload coverage
77 | # Make coverage reports available for review
78 | # Download from Actions tab to view detailed HTML report
79 | uses: actions/upload-artifact@v4
80 | with:
81 | name: coverage-${{ github.sha }}
82 | path: coverage/
83 | retention-days: 7 # Keep for a week for PR reviews
84 |
85 | lint:
86 | runs-on: ubuntu-latest
87 | steps:
88 | - name: Checkout code
89 | uses: actions/checkout@v4
90 | with:
91 | fetch-depth: 0 # Full history for accurate analysis
92 |
93 | - name: Install pnpm
94 | uses: pnpm/action-setup@v4
95 | with:
96 | version: ${{ inputs.pnpm-version }}
97 | run_install: false
98 | standalone: true
99 |
100 | - name: Setup Node.js
101 | uses: actions/setup-node@v4
102 | with:
103 | node-version: ${{ inputs.node-version }}
104 | cache: pnpm # Cache dependencies for speed
105 |
106 | - name: Install dependencies
107 | # Ensures exact versions from lock file
108 | # FAILS IF: Lock file out of sync with package.json
109 | run: pnpm install --frozen-lockfile
110 |
111 | # =============================================================================
112 | # VALIDATION CHECKS
113 | # All checks run in sequence to provide clear failure messages
114 | # =============================================================================
115 |
116 | - name: Type checking
117 | # Validate TypeScript types without emitting files
118 | # FAILS IF: Type errors in any .ts file
119 | # To debug: Run 'pnpm typecheck' locally for detailed errors
120 | run: pnpm typecheck
121 |
122 | - name: Linting
123 | # Run ESLint with type-aware rules
124 | # FAILS IF: Linting errors (not warnings)
125 | # To fix: Run 'pnpm lint:fix' for auto-fixable issues
126 | run: pnpm lint
127 |
128 | - name: Format checking
129 | # Verify code follows Prettier formatting
130 | # FAILS IF: Any file not formatted
131 | # To fix: Run 'pnpm format:fix' to auto-format
132 | run: pnpm format
133 |
134 | - name: Install actionlint
135 | # Install actionlint for workflow validation
136 | # Uses the official installer script from rhysd/actionlint
137 | run: |
138 | echo "Installing actionlint..."
139 | bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
140 | echo "${PWD}" >> $GITHUB_PATH
141 |
142 | - name: Workflow linting
143 | # Validate GitHub Actions workflow files
144 | # FAILS IF: Workflow syntax errors or issues found
145 | # To debug: Run 'pnpm lint:workflows' locally
146 | run: pnpm lint:workflows
147 |
148 | - name: Fetch main branch for changesets
149 | if: inputs.validate-changesets
150 | # Need main branch to compare changesets
151 | run: git fetch origin main:main
152 |
153 | - name: Changeset status
154 | if: inputs.validate-changesets
155 | # Validates that changesets exist for features/fixes
156 | # FAILS IF: feat/fix commits exist without changesets
157 | # To fix: Run 'pnpm changeset' and commit the generated file
158 | # For non-code changes: Run 'pnpm changeset --empty'
159 | run: pnpm changeset:status
160 |
```
--------------------------------------------------------------------------------
/src/utils/parameter-mappers.ts:
--------------------------------------------------------------------------------
```typescript
1 | import type { IssuesParams } from '../types/index.js';
2 | import { nullToUndefined } from './transforms.js';
3 |
4 | /**
5 | * Maps MCP tool parameters to SonarQube client parameters
6 | * @param params Parameters from the MCP tool
7 | * @returns Parameters for the SonarQube client
8 | */
9 | export function mapToSonarQubeParams(params: Record<string, unknown>): IssuesParams {
10 | const result: IssuesParams = {
11 | page: undefined,
12 | pageSize: undefined,
13 | };
14 |
15 | // Helper function to add property only if not undefined
16 | const addIfDefined = <K extends keyof IssuesParams>(
17 | key: K,
18 | value: IssuesParams[K] | undefined
19 | ): void => {
20 | if (value !== undefined) {
21 | result[key] = value;
22 | }
23 | };
24 |
25 | // Component filters (support both single project_key and multiple projects)
26 | addIfDefined('projectKey', nullToUndefined(params.project_key) as string | undefined);
27 | addIfDefined('projects', nullToUndefined(params.projects) as string[] | undefined);
28 | addIfDefined('componentKeys', nullToUndefined(params.component_keys) as string[] | undefined);
29 | addIfDefined('components', nullToUndefined(params.components) as string[] | undefined);
30 | addIfDefined('onComponentOnly', nullToUndefined(params.on_component_only) as boolean | undefined);
31 | addIfDefined('directories', nullToUndefined(params.directories) as string[] | undefined);
32 | addIfDefined('files', nullToUndefined(params.files) as string[] | undefined);
33 | addIfDefined('scopes', nullToUndefined(params.scopes) as IssuesParams['scopes']);
34 |
35 | // Branch and PR support
36 | addIfDefined('branch', nullToUndefined(params.branch) as string | undefined);
37 | addIfDefined('pullRequest', nullToUndefined(params.pull_request) as string | undefined);
38 |
39 | // Issue filters
40 | addIfDefined('issues', nullToUndefined(params.issues) as string[] | undefined);
41 | addIfDefined('severity', nullToUndefined(params.severity) as IssuesParams['severity']); // Deprecated
42 | addIfDefined('severities', nullToUndefined(params.severities) as IssuesParams['severities']);
43 | addIfDefined('statuses', nullToUndefined(params.statuses) as IssuesParams['statuses']);
44 | addIfDefined('resolutions', nullToUndefined(params.resolutions) as IssuesParams['resolutions']);
45 | addIfDefined('resolved', nullToUndefined(params.resolved) as boolean | undefined);
46 | addIfDefined('types', nullToUndefined(params.types) as IssuesParams['types']);
47 |
48 | // Clean Code taxonomy
49 | addIfDefined(
50 | 'cleanCodeAttributeCategories',
51 | nullToUndefined(
52 | params.clean_code_attribute_categories
53 | ) as IssuesParams['cleanCodeAttributeCategories']
54 | );
55 | addIfDefined(
56 | 'impactSeverities',
57 | nullToUndefined(params.impact_severities) as IssuesParams['impactSeverities']
58 | );
59 | addIfDefined(
60 | 'impactSoftwareQualities',
61 | nullToUndefined(params.impact_software_qualities) as IssuesParams['impactSoftwareQualities']
62 | );
63 | addIfDefined(
64 | 'issueStatuses',
65 | nullToUndefined(params.issue_statuses) as IssuesParams['issueStatuses']
66 | );
67 |
68 | // Rules and tags
69 | addIfDefined('rules', nullToUndefined(params.rules) as string[] | undefined);
70 | addIfDefined('tags', nullToUndefined(params.tags) as string[] | undefined);
71 |
72 | // Date filters
73 | addIfDefined('createdAfter', nullToUndefined(params.created_after) as string | undefined);
74 | addIfDefined('createdBefore', nullToUndefined(params.created_before) as string | undefined);
75 | addIfDefined('createdAt', nullToUndefined(params.created_at) as string | undefined);
76 | addIfDefined('createdInLast', nullToUndefined(params.created_in_last) as string | undefined);
77 |
78 | // Assignment
79 | addIfDefined('assigned', nullToUndefined(params.assigned) as boolean | undefined);
80 | addIfDefined('assignees', nullToUndefined(params.assignees) as string[] | undefined);
81 | addIfDefined('author', nullToUndefined(params.author) as string | undefined);
82 | addIfDefined('authors', nullToUndefined(params.authors) as string[] | undefined);
83 |
84 | // Security standards
85 | addIfDefined('cwe', nullToUndefined(params.cwe) as string[] | undefined);
86 | addIfDefined('owaspTop10', nullToUndefined(params.owasp_top10) as string[] | undefined);
87 | addIfDefined(
88 | 'owaspTop10v2021',
89 | nullToUndefined(params.owasp_top10_v2021) as string[] | undefined
90 | );
91 | addIfDefined('sansTop25', nullToUndefined(params.sans_top25) as string[] | undefined);
92 | addIfDefined(
93 | 'sonarsourceSecurity',
94 | nullToUndefined(params.sonarsource_security) as string[] | undefined
95 | );
96 | addIfDefined(
97 | 'sonarsourceSecurityCategory',
98 | nullToUndefined(params.sonarsource_security_category) as string[] | undefined
99 | );
100 |
101 | // Languages
102 | addIfDefined('languages', nullToUndefined(params.languages) as string[] | undefined);
103 |
104 | // Facets
105 | addIfDefined('facets', nullToUndefined(params.facets) as string[] | undefined);
106 | addIfDefined('facetMode', nullToUndefined(params.facet_mode) as IssuesParams['facetMode']);
107 |
108 | // New code
109 | addIfDefined('sinceLeakPeriod', nullToUndefined(params.since_leak_period) as boolean | undefined);
110 | addIfDefined(
111 | 'inNewCodePeriod',
112 | nullToUndefined(params.in_new_code_period) as boolean | undefined
113 | );
114 |
115 | // Sorting
116 | addIfDefined('s', nullToUndefined(params.s) as string | undefined);
117 | addIfDefined('asc', nullToUndefined(params.asc) as boolean | undefined);
118 |
119 | // Response optimization
120 | addIfDefined(
121 | 'additionalFields',
122 | nullToUndefined(params.additional_fields) as string[] | undefined
123 | );
124 |
125 | // Pagination
126 | addIfDefined('page', nullToUndefined(params.page) as number | undefined);
127 | addIfDefined('pageSize', nullToUndefined(params.page_size) as number | undefined);
128 |
129 | return result;
130 | }
131 |
```
--------------------------------------------------------------------------------
/examples/http-client.ts:
--------------------------------------------------------------------------------
```typescript
1 | /**
2 | * Example HTTP client for the SonarQube MCP Server with HTTP transport.
3 | * This demonstrates how to interact with the MCP server over HTTP.
4 | *
5 | * To run this example:
6 | * 1. Start the server with HTTP transport:
7 | * MCP_TRANSPORT_TYPE=http MCP_HTTP_PORT=3000 pnpm start
8 | * 2. Run this client:
9 | * npx tsx examples/http-client.ts
10 | */
11 |
12 | interface McpHttpRequest {
13 | sessionId?: string;
14 | method: string;
15 | params?: unknown;
16 | }
17 |
18 | interface McpHttpResponse {
19 | sessionId?: string;
20 | result?: unknown;
21 | error?: {
22 | code: number;
23 | message: string;
24 | data?: unknown;
25 | };
26 | }
27 |
28 | class McpHttpClient {
29 | private sessionId?: string;
30 | private baseUrl: string;
31 |
32 | constructor(baseUrl = 'http://localhost:3000') {
33 | this.baseUrl = baseUrl;
34 | }
35 |
36 | /**
37 | * Check server health.
38 | */
39 | async health(): Promise<unknown> {
40 | const response = await fetch(`${this.baseUrl}/health`);
41 | if (!response.ok) {
42 | throw new Error(`Health check failed: ${response.statusText}`);
43 | }
44 | return response.json();
45 | }
46 |
47 | /**
48 | * Initialize a new session.
49 | */
50 | async connect(): Promise<void> {
51 | const response = await fetch(`${this.baseUrl}/session`, {
52 | method: 'POST',
53 | headers: {
54 | 'Content-Type': 'application/json',
55 | },
56 | });
57 |
58 | if (!response.ok) {
59 | throw new Error(`Failed to create session: ${response.statusText}`);
60 | }
61 |
62 | const data = await response.json();
63 | this.sessionId = data.sessionId;
64 | // Session created successfully
65 | }
66 |
67 | /**
68 | * Call an MCP method.
69 | */
70 | async call(method: string, params?: unknown): Promise<unknown> {
71 | if (!this.sessionId) {
72 | throw new Error('Not connected. Call connect() first.');
73 | }
74 |
75 | const request: McpHttpRequest = {
76 | sessionId: this.sessionId,
77 | method,
78 | params,
79 | };
80 |
81 | const response = await fetch(`${this.baseUrl}/mcp`, {
82 | method: 'POST',
83 | headers: {
84 | 'Content-Type': 'application/json',
85 | },
86 | body: JSON.stringify(request),
87 | });
88 |
89 | if (!response.ok) {
90 | throw new Error(`MCP call failed: ${response.statusText}`);
91 | }
92 |
93 | const data = (await response.json()) as McpHttpResponse;
94 |
95 | if (data.error) {
96 | throw new Error(`MCP error: ${data.error.message}`);
97 | }
98 |
99 | return data.result;
100 | }
101 |
102 | /**
103 | * Connect to server-sent events for notifications.
104 | */
105 | connectToEvents(onMessage: (data: unknown) => void): EventSource {
106 | if (!this.sessionId) {
107 | throw new Error('Not connected. Call connect() first.');
108 | }
109 |
110 | // Note: EventSource is not available in Node.js by default
111 | // You'd need to use a library like 'eventsource' for Node.js
112 | if (typeof EventSource === 'undefined') {
113 | throw new Error('EventSource not available. Install "eventsource" package for Node.js.');
114 | }
115 |
116 | const eventSource = new EventSource(`${this.baseUrl}/events/${this.sessionId}`);
117 |
118 | eventSource.onmessage = (event) => {
119 | const data = JSON.parse(event.data);
120 | onMessage(data);
121 | };
122 |
123 | eventSource.onerror = (error) => {
124 | throw new Error(`SSE error: ${error}`);
125 | };
126 |
127 | return eventSource;
128 | }
129 |
130 | /**
131 | * Disconnect and cleanup session.
132 | */
133 | async disconnect(): Promise<void> {
134 | if (!this.sessionId) {
135 | return;
136 | }
137 |
138 | try {
139 | const response = await fetch(`${this.baseUrl}/session/${this.sessionId}`, {
140 | method: 'DELETE',
141 | });
142 |
143 | if (!response.ok) {
144 | throw new Error(`Failed to close session: ${response.statusText}`);
145 | }
146 | } catch {
147 | // Ignore errors during cleanup
148 | } finally {
149 | this.sessionId = undefined;
150 | }
151 | }
152 | }
153 |
154 | // Example usage
155 | async function main() {
156 | /* eslint-disable no-console */
157 | const client = new McpHttpClient();
158 |
159 | try {
160 | // Check server health
161 | console.log('Checking server health...');
162 | const health = await client.health();
163 | console.log('Server health:', health);
164 |
165 | // Connect to the server
166 | console.log('\nConnecting to server...');
167 | await client.connect();
168 |
169 | // Example MCP calls (these would need to be implemented in the server)
170 | console.log('\nMaking example MCP calls...');
171 |
172 | // List available tools
173 | const tools = await client.call('tools/list');
174 | console.log('Available tools:', tools);
175 |
176 | // Call a specific tool (example with SonarQube projects)
177 | const projects = await client.call('tools/execute', {
178 | name: 'projects',
179 | params: {
180 | page: 1,
181 | page_size: 10,
182 | },
183 | });
184 | console.log('Projects:', projects);
185 |
186 | // Connect to events for real-time notifications
187 | console.log('\nConnecting to server events...');
188 | const eventSource = client.connectToEvents((data) => {
189 | console.log('Event received:', data);
190 | });
191 |
192 | // Keep the connection open for a bit to receive events
193 | if (eventSource) {
194 | await new Promise((resolve) => setTimeout(resolve, 5000));
195 | eventSource.close();
196 | }
197 | } catch (error) {
198 | console.error('Error:', error);
199 | } finally {
200 | // Always disconnect when done
201 | console.log('\nDisconnecting...');
202 | await client.disconnect();
203 | }
204 | /* eslint-enable no-console */
205 | }
206 |
// Run the example if this file is executed directly.
// Note: For ES modules, use import.meta.url comparison;
// for CommonJS compatibility, we first check require.main.
if (typeof require !== 'undefined' && require.main === module) {
  // CommonJS path: this file is the process entry module.
  // eslint-disable-next-line no-console
  main().catch(console.error);
} else if (typeof import.meta !== 'undefined' && import.meta.url === `file://${process.argv[1]}`) {
  // ES module execution detection.
  // NOTE(review): the naive `file://${process.argv[1]}` comparison can miss
  // on Windows paths or percent-encoded characters — confirm with
  // url.pathToFileURL if that matters for this example.
  // eslint-disable-next-line no-console
  main().catch(console.error);
}
218 |
219 | export { McpHttpClient };
220 |
```